diff --git a/.env.demo.bodc b/.env.demo.bodc new file mode 100644 index 0000000..6c0dcda --- /dev/null +++ b/.env.demo.bodc @@ -0,0 +1,14 @@ +# file checker image +FILECHECKER_IMAGE=ghcr.io/oneargo/argoformatchecker/app +FILECHECKER_IMAGE_TAG=develop +# Available tags at https://github.com/OneArgo/ArgoFormatChecker/tags + develop + latest + +# External directories to mount to the container +FILECHECKER_SPEC_VOLUME=./file_checker_spec +FILECHECKER_INPUT_VOLUME=./demo/inputs/3901945 +FILECHECKER_OUTPUT_VOLUME=./demo/outputs/3901945 + +# Variables specific to the floats to check +DAC_NAME=bodc +FILECHECKER_OPTIONS= +FILES_NAMES= diff --git a/.env.demo.coriolis b/.env.demo.coriolis new file mode 100644 index 0000000..78a0a1f --- /dev/null +++ b/.env.demo.coriolis @@ -0,0 +1,14 @@ +# file checker image +FILECHECKER_IMAGE=ghcr.io/oneargo/argoformatchecker/app +FILECHECKER_IMAGE_TAG=develop +# Available tags at https://github.com/OneArgo/ArgoFormatChecker/tags + develop + latest + +# External directories to mount to the container +FILECHECKER_SPEC_VOLUME=./file_checker_spec +FILECHECKER_INPUT_VOLUME=./demo/inputs/2903996_coriolis +FILECHECKER_OUTPUT_VOLUME=./demo/outputs/2903996_coriolis + +# Variables specific to the floats to check +DAC_NAME=coriolis +FILECHECKER_OPTIONS= +FILES_NAMES= diff --git a/.env.docs b/.env.docs new file mode 100644 index 0000000..1c793b6 --- /dev/null +++ b/.env.docs @@ -0,0 +1,18 @@ +# file checker image +FILECHECKER_IMAGE=ghcr.io/oneargo/argoformatchecker/app +FILECHECKER_IMAGE_TAG=develop +# Available tags at https://github.com/OneArgo/ArgoFormatChecker/tags + develop + latest + +# External directories to mount to the container +FILECHECKER_SPEC_VOLUME= +FILECHECKER_INPUT_VOLUME= +FILECHECKER_OUTPUT_VOLUME= + +# Variables specific to the floats to check +DAC_NAME= +FILECHECKER_OPTIONS= +FILES_NAMES= + +# Variables specific to Docker +DOCKER_UID= +DOCKER_GID= diff --git a/.github/workflows/component-container-image-security.yml b/.github/workflows/component-container-image-security.yml new file mode 100644 index 0000000..5c5f5ce --- /dev/null +++ b/.github/workflows/component-container-image-security.yml @@ -0,0 +1,64 @@ +name: Generic security scan + +on: + workflow_call: + inputs: + context: + description: "Path to the Dockerfile directory to analyse" + required: true + default: "."
+ type: string + image-path: + description: "Path of the docker image to analyse" + default: "ghcr.io/${{ github.repository }}/app" + type: string + +permissions: + contents: read # for actions/checkout to fetch code + security-events: write # for github/codeql-action/upload-sarif to upload SARIF results + actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status + +jobs: + security-dependency-trivy: + name: Trivy dependency scan + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v3 + # formatting is needed because github.repository contains uppercase characters + # and the ref name in pull request workflows contains slashes + - id: format + name: Format proper image path and tag + env: + IMAGE_PATH: ${{ inputs.image-path }} + run: | + echo "image-tag=${GITHUB_REF_NAME/\//-}" >> $GITHUB_OUTPUT + echo "image-path=${IMAGE_PATH@L}" >> $GITHUB_OUTPUT + - name: Build Docker image + uses: docker/build-push-action@v6 + if: ${{ github.event_name != 'release' }} + with: + context: "{{defaultContext}}:${{ inputs.context }}" + tags: "${{ steps.format.outputs.image-path }}:${{ steps.format.outputs.image-tag }}" + push: false + - name: Run Trivy vulnerability scanner + uses: aquasecurity/trivy-action@0.28.0 + env: + TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db,aquasec/trivy-db,ghcr.io/aquasecurity/trivy-db + TRIVY_JAVA_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-java-db,aquasec/trivy-java-db,ghcr.io/aquasecurity/trivy-java-db + with: + image-ref: "${{ steps.format.outputs.image-path }}:${{ steps.format.outputs.image-tag }}" + format: 'sarif' + output: 'trivy-results.sarif' + vuln-type: "os,library" + severity: "CRITICAL,HIGH" + exit-code: "1" + ignore-unfixed: true + - name: Upload Trivy scan results to GitHub Security tab + uses: github/codeql-action/upload-sarif@v3 + if: always() + with: + sarif_file: 'trivy-results.sarif' + - name: Inspect Trivy SARIF report + if: always() + run: cat trivy-results.sarif diff --git a/.github/workflows/component-container-image.yml b/.github/workflows/component-container-image.yml new file mode 100644 index 0000000..68b5542 --- /dev/null +++ b/.github/workflows/component-container-image.yml @@ -0,0 +1,63 @@ +name: Build and push Docker image + +on: + workflow_call: + inputs: + context: + description: "Path to the Dockerfile directory to build" + default: "."
+ type: string + image-path: + description: "Path of the docker image Tag" + default: "ghcr.io/${{ github.repository }}/app" + type: string + +jobs: + container-image-build: + name: Docker + runs-on: ubuntu-latest + steps: + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - id: format + name: Lowercase repository path + uses: ASzc/change-string-case-action@v1 + with: + string: ${{ inputs.image-path }} + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + if: ${{ github.event_name == 'release' || github.event_name == 'push' }} + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Build (pull request) + uses: docker/build-push-action@v6 + if: ${{ github.event_name == 'pull_request'}} + with: + context: "{{defaultContext}}:${{ inputs.context }}" + tags: "${{ steps.format.outputs.lowercase }}:develop" + push: false + - name: Build and push (develop) + uses: docker/build-push-action@v6 + if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }} + with: + context: "{{defaultContext}}:${{ inputs.context }}" + tags: "${{ steps.format.outputs.lowercase }}:develop" + push: true + - name: Build and push (main) + uses: docker/build-push-action@v6 + if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} + with: + context: "{{defaultContext}}:${{ inputs.context }}" + tags: "${{ steps.format.outputs.lowercase }}:latest" + push: true + - name: Build and push (release) + uses: docker/build-push-action@v6 + if: ${{ github.event_name == 'release' }} + with: + context: "{{defaultContext}}:${{ inputs.context }}" + tags: "${{ steps.format.outputs.lowercase }}:${{ github.ref_name }}" + push: true diff --git a/.github/workflows/workflow-java.yml b/.github/workflows/workflow-java.yml new file mode 100644 index 0000000..7b1cbe9 --- /dev/null +++ b/.github/workflows/workflow-java.yml @@ -0,0 +1,30 @@ +name: Image build + +on: + push: + branches: + # Run on our main branch + - main + - develop + paths: + - file_checker_exec/** + pull_request: + # Run for any pull requests + paths: + - file_checker_exec/** + release: + types: [created] + + +jobs: + container-image-build: + uses: ./.github/workflows/component-container-image.yml + with: + context: file_checker_exec + image-path: ghcr.io/OneArgo/ArgoFormatChecker/app + container-image-security: + needs: [container-image-build] + uses: ./.github/workflows/component-container-image-security.yml + with: + context: file_checker_exec + image-path: ghcr.io/OneArgo/ArgoFormatChecker/app diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..5319791 --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +demo/outputs/* +.env + +!.gitkeep \ No newline at end of file diff --git a/README.md b/README.md index 110f950..0403056 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,10 @@ # Argo NetCDF file format checker + The Argo NetCDF file format checker performs format and content checks on Argo NetCDF files. 
-The Argo NetCDF format is described in "Argo user's manual" http://dx.doi.org/10.13155/29825 -More information on https://www.argodatamgt.org/Documentation +The Argo NetCDF format is described in "Argo user's manual" <http://dx.doi.org/10.13155/29825> +More information on <https://www.argodatamgt.org/Documentation> The format checker has two directories: + - file_checker_exec : the java code - file_checker_spec : the rules applied on the NetCDF file by the java code @@ -10,16 +12,106 @@ The rules files implement the Argo vocabularies managed on [NVS vocabulary serve Vocabularies and format changes are managed on [Argo Vocabs Task Team - AVTT GitHub](https://github.com/orgs/OneArgo/projects/4/views/1). **With each release (from 2.9.2) you will find :** + - **file_checker_exec-[version].jar** which consolidates both application's compiled code and all its dependencies into a single executable file. -- source code +- source code ## Run Argo NetCDF file format checker -- Using file_checker_exec-[version].jar : + +### Using file_checker_exec-{version}.jar + +```bash +java -jar file_checker_exec-{version}.jar $OPTION $DAC_NAME $SPEC $OUTPUT_DIR $INPUT_DIR [$FILES_NAMES] +``` + +$FILES_NAMES is a list of file names from INPUT_DIR. It is optional: without it, all files from INPUT_DIR will be checked. + +### Run the application using Docker + +```bash +docker run --rm -v [ABSOLUTE_PATH_TO_SPEC]:/app/file_checker_spec -v [ABSOLUTE_PATH_TO_DATA_FOLDER]:/app/data -v [ABSOLUTE_PATH_TO_OUTPUT_DIR]:/app/results ghcr.io/oneargo/argoformatchecker/app:{TAG} [$OPTIONS] $DAC_NAME ./file_checker_spec ./results ./data [$FILES_NAMES] +``` + +You need to mount external directories into the container: + +[ABSOLUTE_PATH_TO_SPEC]: the file_checker_spec directory path. + +[ABSOLUTE_PATH_TO_DATA_FOLDER]: path to the directory containing the Argo NetCDF files to be checked. The file checker will not look for files in subfolders. + +[ABSOLUTE_PATH_TO_OUTPUT_DIR]: the directory where the XML result files (\*.filecheck) will be created. + +Example: + +```bash +docker run --rm -v D:\test_file_checker\file_checker_spec:/app/file_checker_spec -v D:\test_file_checker\datatest:/app/data -v D:\test_file_checker\results:/app/results ghcr.io/oneargo/argoformatchecker/app:develop -no-name-check coriolis ./file_checker_spec ./results ./data +``` + +### Run the application using Docker Compose + +To facilitate the use of the Argo file checker, a compose.yaml file and .env files are provided: + +- Prepare your data. +- Copy `.env.docs` to a `.env` file, and customize the variables to configure the file checker for your environment. +- Download compose.yaml +- Run the service using Docker Compose: + +```bash +docker compose -f compose.yaml up +``` + +or in the background: + +```bash +docker compose -f compose.yaml up -d +``` + +Example of a .env file: + +```text +# file checker image +FILECHECKER_IMAGE=ghcr.io/oneargo/argoformatchecker/app +FILECHECKER_IMAGE_TAG=develop + +# External directories to mount to the container +FILECHECKER_SPEC_VOLUME='D:\test_compose\file_checker_spec' +FILECHECKER_INPUT_VOLUME='D:\test_compose\data' +FILECHECKER_OUTPUT_VOLUME='D:\test_compose\results' + +# Variables specific to the floats to check +DAC_NAME=bodc +FILECHECKER_OPTIONS= +FILES_NAMES= +``` + +### Run the application on demonstration files + +Demonstration data are available so you can easily run the application locally.
+ +- Clone the repository: + +```bash +git clone https://github.com/OneArgo/ArgoFormatChecker.git +``` + +- Run the dedicated script: + ```bash -java -jar file_checker_exec-[version].jar $OPTION $DAC_NAME $SPEC $OUTPUT_DIR $INPUT_DIR $FILE_NAME +./run-file-checker-linux.sh ``` -Only this JAR file is needed. You can delete legacy log4j2 & netcdf libraries jar files. +or for Windows: + +```bash +./run-file-checker-windows.bat +``` + +Output files will be generated in `./demo/outputs`. + +### Test data + +To further test the Argo File Checker, you will find Argo data here: https://www.argodatamgt.org/DataAccess.html + +The Argo File Checker is not yet designed to check *prof.nc and *Sprof.nc files. It only checks TRAJ, META, TECH and PROFILE files. ## TOOLS @@ -39,7 +131,6 @@ git clone https://github.com/OneArgo/ArgoFormatChecker.git - Build the application with maven (will requiert Java jdk installed), in file_checker_exec folder : - ```bash cd file_checker_exec ./mvnw clean install @@ -47,23 +138,16 @@ cd file_checker_exec In target folder you will find both original-file_checker_exec and file_checker_exec-[version]. It is this last one to use. - -### Using docker - +### Build Docker image - Build the application with Docker : -``` +```bash docker build -t filechecker_2.8.14 . ``` -- Run the application using Docker - -``` -docker run --rm -v [ABSOLUTE_PATH_TO_file_checker_spec]:/app/file_checker_spec -v [ABSOLUTE_PATH_TO_DATA_FOLDER]:/app/data -v [ABSOLUTE_PATH_TO_OUTPUT_DIR]:/app/results filechecker_2.8.14:latest $DAC_NAME ./file_checker_spec ./results ./data $FILE_NAME -``` - ### Run integration tests + The source code comes with some netcdf test files. You can run the integration tests with this following command : ```bash diff --git a/compose.yaml b/compose.yaml new file mode 100644 index 0000000..4a71e93 --- /dev/null +++ b/compose.yaml @@ -0,0 +1,12 @@ +services: + argo-netcdf-checker: + container_name: ArgoFormatChecker + image: ${FILECHECKER_IMAGE}:${FILECHECKER_IMAGE_TAG} + user: "${DOCKER_UID:-0}:${DOCKER_GID:-0}" + group_add: + - 1001 + volumes: + - ${FILECHECKER_SPEC_VOLUME}:/app/file_checker_spec:ro + - ${FILECHECKER_INPUT_VOLUME}:/app/data:ro + - ${FILECHECKER_OUTPUT_VOLUME}:/app/results:rw + command: ${FILECHECKER_OPTIONS} ${DAC_NAME} ./file_checker_spec ./results ./data ${FILES_NAMES} \ No newline at end of file diff --git a/demo/inputs/2903996_coriolis/2903996_Rtraj.nc b/demo/inputs/2903996_coriolis/2903996_Rtraj.nc new file mode 100644 index 0000000..673446b Binary files /dev/null and b/demo/inputs/2903996_coriolis/2903996_Rtraj.nc differ diff --git a/demo/inputs/2903996_coriolis/2903996_meta.nc b/demo/inputs/2903996_coriolis/2903996_meta.nc new file mode 100644 index 0000000..0fe655a Binary files /dev/null and b/demo/inputs/2903996_coriolis/2903996_meta.nc differ diff --git a/demo/inputs/2903996_coriolis/2903996_tech.nc b/demo/inputs/2903996_coriolis/2903996_tech.nc new file mode 100644 index 0000000..f33a6d9 Binary files /dev/null and b/demo/inputs/2903996_coriolis/2903996_tech.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_001.nc b/demo/inputs/2903996_coriolis/R2903996_001.nc new file mode 100644 index 0000000..cf921fc Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_001.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_001D.nc b/demo/inputs/2903996_coriolis/R2903996_001D.nc new file mode 100644 index 0000000..5257bfb Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_001D.nc differ diff --git
a/demo/inputs/2903996_coriolis/R2903996_002.nc b/demo/inputs/2903996_coriolis/R2903996_002.nc new file mode 100644 index 0000000..5e58b03 Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_002.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_003.nc b/demo/inputs/2903996_coriolis/R2903996_003.nc new file mode 100644 index 0000000..d32daf0 Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_003.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_009.nc b/demo/inputs/2903996_coriolis/R2903996_009.nc new file mode 100644 index 0000000..15f22d4 Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_009.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_010.nc b/demo/inputs/2903996_coriolis/R2903996_010.nc new file mode 100644 index 0000000..7942148 Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_010.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_011.nc b/demo/inputs/2903996_coriolis/R2903996_011.nc new file mode 100644 index 0000000..b93c874 Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_011.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_012.nc b/demo/inputs/2903996_coriolis/R2903996_012.nc new file mode 100644 index 0000000..3998b8d Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_012.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_013.nc b/demo/inputs/2903996_coriolis/R2903996_013.nc new file mode 100644 index 0000000..cd77af8 Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_013.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_014.nc b/demo/inputs/2903996_coriolis/R2903996_014.nc new file mode 100644 index 0000000..0a2c116 Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_014.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_015.nc b/demo/inputs/2903996_coriolis/R2903996_015.nc new file mode 100644 index 0000000..fa85f55 Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_015.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_016.nc b/demo/inputs/2903996_coriolis/R2903996_016.nc new file mode 100644 index 0000000..791b82e Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_016.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_017.nc b/demo/inputs/2903996_coriolis/R2903996_017.nc new file mode 100644 index 0000000..111c59b Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_017.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_018.nc b/demo/inputs/2903996_coriolis/R2903996_018.nc new file mode 100644 index 0000000..5445bfa Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_018.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_019.nc b/demo/inputs/2903996_coriolis/R2903996_019.nc new file mode 100644 index 0000000..543dd8e Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_019.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_020.nc b/demo/inputs/2903996_coriolis/R2903996_020.nc new file mode 100644 index 0000000..cdc8e68 Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_020.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_023.nc b/demo/inputs/2903996_coriolis/R2903996_023.nc new file mode 100644 index 0000000..d4ebeb8 Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_023.nc differ diff --git a/demo/inputs/2903996_coriolis/R2903996_024.nc b/demo/inputs/2903996_coriolis/R2903996_024.nc new file 
mode 100644 index 0000000..f05471f Binary files /dev/null and b/demo/inputs/2903996_coriolis/R2903996_024.nc differ diff --git a/demo/inputs/3901945/3901945_Rtraj.nc b/demo/inputs/3901945/3901945_Rtraj.nc new file mode 100644 index 0000000..e7bca07 Binary files /dev/null and b/demo/inputs/3901945/3901945_Rtraj.nc differ diff --git a/demo/inputs/3901945/3901945_meta.nc b/demo/inputs/3901945/3901945_meta.nc new file mode 100644 index 0000000..f165578 Binary files /dev/null and b/demo/inputs/3901945/3901945_meta.nc differ diff --git a/demo/inputs/3901945/3901945_tech.nc b/demo/inputs/3901945/3901945_tech.nc new file mode 100644 index 0000000..c9006e7 Binary files /dev/null and b/demo/inputs/3901945/3901945_tech.nc differ diff --git a/demo/inputs/3901945/BD3901945_004.nc b/demo/inputs/3901945/BD3901945_004.nc new file mode 100644 index 0000000..a91f8e2 Binary files /dev/null and b/demo/inputs/3901945/BD3901945_004.nc differ diff --git a/demo/inputs/3901945/D3901945_001D.nc b/demo/inputs/3901945/D3901945_001D.nc new file mode 100644 index 0000000..83a84e7 Binary files /dev/null and b/demo/inputs/3901945/D3901945_001D.nc differ diff --git a/demo/inputs/3901945/D3901945_002.nc b/demo/inputs/3901945/D3901945_002.nc new file mode 100644 index 0000000..92e9bb9 Binary files /dev/null and b/demo/inputs/3901945/D3901945_002.nc differ diff --git a/demo/inputs/3901945/D3901945_003.nc b/demo/inputs/3901945/D3901945_003.nc new file mode 100644 index 0000000..bd3dad2 Binary files /dev/null and b/demo/inputs/3901945/D3901945_003.nc differ diff --git a/demo/inputs/3901945/D3901945_004.nc b/demo/inputs/3901945/D3901945_004.nc new file mode 100644 index 0000000..a806522 Binary files /dev/null and b/demo/inputs/3901945/D3901945_004.nc differ diff --git a/demo/inputs/3901945/D3901945_005.nc b/demo/inputs/3901945/D3901945_005.nc new file mode 100644 index 0000000..2a37b6b Binary files /dev/null and b/demo/inputs/3901945/D3901945_005.nc differ diff --git a/demo/inputs/3901945/D3901945_006.nc b/demo/inputs/3901945/D3901945_006.nc new file mode 100644 index 0000000..b5178d5 Binary files /dev/null and b/demo/inputs/3901945/D3901945_006.nc differ diff --git a/demo/inputs/3901945/D3901945_007.nc b/demo/inputs/3901945/D3901945_007.nc new file mode 100644 index 0000000..ca13c85 Binary files /dev/null and b/demo/inputs/3901945/D3901945_007.nc differ diff --git a/demo/inputs/3901945/D3901945_008.nc b/demo/inputs/3901945/D3901945_008.nc new file mode 100644 index 0000000..3c90927 Binary files /dev/null and b/demo/inputs/3901945/D3901945_008.nc differ diff --git a/demo/inputs/3901945/D3901945_009.nc b/demo/inputs/3901945/D3901945_009.nc new file mode 100644 index 0000000..bb6b819 Binary files /dev/null and b/demo/inputs/3901945/D3901945_009.nc differ diff --git a/demo/inputs/3901945/D3901945_010.nc b/demo/inputs/3901945/D3901945_010.nc new file mode 100644 index 0000000..613795f Binary files /dev/null and b/demo/inputs/3901945/D3901945_010.nc differ diff --git a/demo/inputs/3901945/D3901945_011.nc b/demo/inputs/3901945/D3901945_011.nc new file mode 100644 index 0000000..2120d97 Binary files /dev/null and b/demo/inputs/3901945/D3901945_011.nc differ diff --git a/demo/inputs/3901945/D3901945_012.nc b/demo/inputs/3901945/D3901945_012.nc new file mode 100644 index 0000000..af41520 Binary files /dev/null and b/demo/inputs/3901945/D3901945_012.nc differ diff --git a/demo/inputs/3901945/D3901945_013.nc b/demo/inputs/3901945/D3901945_013.nc new file mode 100644 index 0000000..97e4472 Binary files /dev/null and 
b/demo/inputs/3901945/D3901945_013.nc differ diff --git a/demo/inputs/3901945/D3901945_014.nc b/demo/inputs/3901945/D3901945_014.nc new file mode 100644 index 0000000..6eddf0a Binary files /dev/null and b/demo/inputs/3901945/D3901945_014.nc differ diff --git a/demo/inputs/3901945/D3901945_015.nc b/demo/inputs/3901945/D3901945_015.nc new file mode 100644 index 0000000..2213cb8 Binary files /dev/null and b/demo/inputs/3901945/D3901945_015.nc differ diff --git a/demo/inputs/3901945/D3901945_016.nc b/demo/inputs/3901945/D3901945_016.nc new file mode 100644 index 0000000..d118579 Binary files /dev/null and b/demo/inputs/3901945/D3901945_016.nc differ diff --git a/demo/inputs/3901945/D3901945_017.nc b/demo/inputs/3901945/D3901945_017.nc new file mode 100644 index 0000000..3216249 Binary files /dev/null and b/demo/inputs/3901945/D3901945_017.nc differ diff --git a/demo/inputs/3901945/D3901945_018.nc b/demo/inputs/3901945/D3901945_018.nc new file mode 100644 index 0000000..8836d13 Binary files /dev/null and b/demo/inputs/3901945/D3901945_018.nc differ diff --git a/demo/inputs/3901945/D3901945_019.nc b/demo/inputs/3901945/D3901945_019.nc new file mode 100644 index 0000000..a4dd875 Binary files /dev/null and b/demo/inputs/3901945/D3901945_019.nc differ diff --git a/demo/inputs/3901945/D3901945_020.nc b/demo/inputs/3901945/D3901945_020.nc new file mode 100644 index 0000000..554c052 Binary files /dev/null and b/demo/inputs/3901945/D3901945_020.nc differ diff --git a/demo/inputs/3901945/D3901945_021.nc b/demo/inputs/3901945/D3901945_021.nc new file mode 100644 index 0000000..bd9813a Binary files /dev/null and b/demo/inputs/3901945/D3901945_021.nc differ diff --git a/demo/inputs/3901945/D3901945_022.nc b/demo/inputs/3901945/D3901945_022.nc new file mode 100644 index 0000000..c731c49 Binary files /dev/null and b/demo/inputs/3901945/D3901945_022.nc differ diff --git a/demo/inputs/3901945/D3901945_023.nc b/demo/inputs/3901945/D3901945_023.nc new file mode 100644 index 0000000..3f174ca Binary files /dev/null and b/demo/inputs/3901945/D3901945_023.nc differ diff --git a/file_checker_exec/Dockerfile b/file_checker_exec/Dockerfile index 4f1955c..c459e60 100644 --- a/file_checker_exec/Dockerfile +++ b/file_checker_exec/Dockerfile @@ -1,5 +1,14 @@ FROM eclipse-temurin:21-jdk-alpine AS builder +LABEL org.opencontainers.image.authors="yvan.lubac@euro-argo.eu" +LABEL org.opencontainers.image.description="Docker Image for Argo NetCDF File Checker" +LABEL org.opencontainers.image.url="https://github.com/OneArgo/ArgoFormatChecker" +LABEL org.opencontainers.image.source="https://github.com/OneArgo/ArgoFormatChecker" +LABEL org.opencontainers.image.documentation="https://github.com/OneArgo/ArgoFormatChecker" +LABEL org.opencontainers.image.licenses="TBD" +LABEL org.opencontainers.image.vendor="OneArgo" + + WORKDIR /build COPY ./.mvn ./.mvn @@ -19,6 +28,12 @@ RUN set -e \ && addgroup --system --gid 1001 gcontainer \ && adduser --system --uid 1001 -G gcontainer fileCheckerRunner +RUN set -e \ +&& mkdir -p /app/results /app/data /app/file_checker_spec \ +&& chown root:gcontainer /app/results /app/data /app/file_checker_spec \ +&& chmod 2775 /app/results \ +&& chmod 2755 /app/data /app/file_checker_spec + # Copy the built jar file to the runtime stage COPY --from=builder --chown=root:gcontainer --chmod=750 /build/target/file_checker*.jar app.jar # Specify the default command for running the app diff --git a/file_checker_exec/pom.xml b/file_checker_exec/pom.xml index c74b720..7e8a97f 100644 --- a/file_checker_exec/pom.xml
+++ b/file_checker_exec/pom.xml @@ -4,12 +4,15 @@ 4.0.0 fr.ifremer file_checker_exec - 2.9.2 + 2.9.3 + Argo NetCDF file format checker UTF-8 5.11.4 + 1.8 + 1.8 @@ -133,10 +136,6 @@ - - maven-surefire-plugin - 3.5.2 - com.coderplus.maven.plugins diff --git a/file_checker_exec/src/main/java/fr/coriolis/checker/filetypes/ArgoDataFile.java b/file_checker_exec/src/main/java/fr/coriolis/checker/core/ArgoDataFile.java similarity index 57% rename from file_checker_exec/src/main/java/fr/coriolis/checker/filetypes/ArgoDataFile.java rename to file_checker_exec/src/main/java/fr/coriolis/checker/core/ArgoDataFile.java index d5570ed..d4540ce 100644 --- a/file_checker_exec/src/main/java/fr/coriolis/checker/filetypes/ArgoDataFile.java +++ b/file_checker_exec/src/main/java/fr/coriolis/checker/core/ArgoDataFile.java @@ -1,33 +1,23 @@ -package fr.coriolis.checker.filetypes; +package fr.coriolis.checker.core; import java.io.File; import java.io.IOException; import java.io.PrintStream; -import java.text.DecimalFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; -import java.util.Date; import java.util.HashMap; -import java.util.HashSet; import java.util.List; -import java.util.Set; -import java.util.regex.Pattern; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import fr.coriolis.checker.specs.ArgoAttribute; -import fr.coriolis.checker.specs.ArgoDate; -import fr.coriolis.checker.specs.ArgoDimension; import fr.coriolis.checker.specs.ArgoFileSpecification; -import fr.coriolis.checker.specs.ArgoVariable; +import fr.coriolis.checker.specs.ArgoReferenceTable; import ucar.ma2.ArrayChar; import ucar.ma2.ArrayDouble; import ucar.ma2.ArrayFloat; import ucar.ma2.ArrayInt; import ucar.ma2.ArrayShort; -import ucar.ma2.DataType; -import ucar.ma2.Index; import ucar.nc2.Attribute; import ucar.nc2.Dimension; import ucar.nc2.NetcdfFile; @@ -125,8 +115,8 @@ public class ArgoDataFile { //................................................ // ..class variables private static final String BLANK_MESSAGE = new String(""); - protected final static Date earliestDate = ArgoDate.get("19970101000000"); - private final static DecimalFormat cycleFmt = new DecimalFormat("000"); + +// private final static DecimalFormat cycleFmt = new DecimalFormat("000"); // ..NOTE: The string values map to spec file names @@ -141,18 +131,18 @@ public static enum FileType { } }; - protected static String message = BLANK_MESSAGE; +// protected static String message = BLANK_MESSAGE; protected static HashMap fullSpecCache = new HashMap(); protected static HashMap tmpltSpecCache = new HashMap(); protected static SimpleDateFormat stringDateFormat = new SimpleDateFormat("yyyyMMddHHmmss"); - static Pattern pDataMode; // ..CDL "variables:" tag - static { - // ..check for legal data_mode - pDataMode = Pattern.compile("[RDA]+"); - } +// static Pattern pDataMode; // ..CDL "variables:" tag +// static { +// // ..check for legal data_mode +// pDataMode = Pattern.compile("[RDA]+"); +// } // ..standard i/o shortcuts private static PrintStream stdout = new PrintStream(System.out); @@ -161,19 +151,24 @@ public static enum FileType { // ........object variables........ 
- protected String dacName = null; - protected File file = null; - protected FileType fileType = FileType.UNKNOWN; - protected String format_version = null; - protected NetcdfFile ncReader = null; - protected String ncFileName = new String(""); - protected ArgoFileSpecification spec = null; - protected boolean verified = false; - - protected List varList; - - protected ArrayList formatErrors; - protected ArrayList formatWarnings; + private String dacName = null; + private File file = null; + private FileType fileType = FileType.UNKNOWN; + private String format_version = null; + private NetcdfFile ncReader = null; + private String ncFileName = new String(""); + private ArgoFileSpecification spec = null; + private ArgoReferenceTable.DACS validatedDAC; + + // dates properties + private boolean haveCreationDate = false; + private boolean haveUpdateDate = false; + private long creationSec = 0; + private long updateSec = 0; + String creationDate; + String updateDate; + + private List varList; // ......................................... // CONSTRUCTORS @@ -182,7 +177,7 @@ public static enum FileType { /** * No public constructors are provided. See ( * - * @see fr.coriolis.checker.filetypes.ArgoDataFile#open ) + * @see fr.coriolis.checker.core.ArgoDataFile#open ) */ protected ArgoDataFile() { } @@ -191,23 +186,79 @@ protected ArgoDataFile() { // ACCESSORS //......................................... - /** Clear the errors */ - public void clearFormatErrors() { - if (formatErrors != null) { - formatErrors.clear(); - } + public void setValidatedDac(ArgoReferenceTable.DACS validatedDAC) { + this.validatedDAC = validatedDAC; } - /** Clear the warnings */ - public void clearFormatWarnings() { - if (formatWarnings != null) { - formatWarnings.clear(); - } + public long getUpdateSec() { + return updateSec; + } + + public void setUpdateSec(long updateSec) { + this.updateSec = updateSec; + } + + public boolean isHaveUpdateDate() { + return haveUpdateDate; + } + + public void setHaveUpdateDate(boolean haveUpdateDate) { + this.haveUpdateDate = haveUpdateDate; + } + + public String getUpdateDate() { + return updateDate; + } + + public void setUpdateDate(String update) { + this.updateDate = update; + } + + public String getCreationDate() { + return creationDate; + } + + public void setCreationDate(String creation) { + this.creationDate = creation; + } + + public boolean isHaveCreationDate() { + return haveCreationDate; + } + + public void setHaveCreationDate(boolean haveCreationDate) { + this.haveCreationDate = haveCreationDate; + } + + public long getCreationSec() { + return creationSec; + } + + public void setCreationSec(long creationSec) { + this.creationSec = creationSec; + } + + public File getFile() { + return file; + } + + public ArgoReferenceTable.DACS getValidatedDac() { + return this.validatedDAC; } /** Retrieve the NetcdfFile reference */ public String getFileName() { - return ncFileName; + return file.getName(); + } + + /** Retrieve varList */ + public List getVarList() { + return this.varList; + } + + /** Access the NetcdfFile */ + public NetcdfFile getNcReader() { + return this.ncReader; } /** Retrieve file type **/ @@ -230,42 +281,17 @@ public Variable findVariable(String name) { return ncReader.findVariable(name); } - /** Retrieve the error descriptions for errors found during file checking */ - public ArrayList formatErrors() { - return new ArrayList(formatErrors); - } - - /** Retrieve the error descriptions for errors found during file checking */ - public ArrayList formatWarnings() { - return 
new ArrayList(formatWarnings); - } - - /** Retrieve the most recent error message generated by a method */ - public static String getMessage() { - return new String(message); - } - - /** Retrieve the NetcdfFile reference */ - public NetcdfFile getNetcdfFile() { - return ncReader; - } - - /** - * Retrieve the number of format errors found during file checking - * - * @returns number of file errors (0 = no errors) - */ - public int nFormatErrors() { - return formatErrors.size(); + /** Retrieve a global Attribute from an Argo data file */ + public Attribute findGlobalAttribute(String name) { + return ncReader.findGlobalAttribute(name); } /** - * Retrieve the number of format warnings found during file checking + * Access dac name * - * @returns number of file warnings (0 = no warnings) */ - public int nFormatWarnings() { - return formatWarnings.size(); + public String getDacName() { + return this.dacName; } /** @@ -281,82 +307,6 @@ public ArgoFileSpecification getFileSpec() { // METHODS //......................................... - // ............................................. - // convenience "check" methods - // ............................................. - - /** - * Determines if a float value is the specified FillValue Performs a "real - * value" comparison to a resolution of 1.e-5 (not an exact equality). - * - * @param _FillValue for the parameter - * @param data value - * @return true = value is FillValue; false = value is not FillValue - */ - public static boolean is_FillValue(float fillValue, float value) { - float diff = Math.abs(1.f - (value / fillValue)); - return diff < 1.0e-8f; - } - - /** - * Determines if a double value is the specified FillValue Performs a "real - * value" comparison to a resolution of 1.e-5 (not an exact equality). - * - * @param _FillValue for the parameter - * @param data value - * @return true = value is FillValue; false = value is not FillValue - */ - public static boolean is_FillValue(double fillValue, double value) { - double diff = Math.abs(1.d - (value / fillValue)); - return diff < 1.0e-8d; - } - - /** - * Determines if a float value is a "99,999.0" missing value. Performs a "real - * value" comparison to a resolution of 1.e-5 (not an exact equality).
- *
- * NOTE: This is required because we've run into examples where "bad values" of - * a parameter have been larger than the FillValue so simple - * greater-than/less-than tests fails - * - * @param data value - * @return true = value is FillValue; false = value is not FillValue - */ - public static boolean is_99_999_FillValue(float value) { - float diff = Math.abs(value - 99999.f); - return diff < 0.00001f; - } - - /** - * Determines if a double value is a "99,999.0" missing value. Performs a "real - * value" comparison to a resolution of 1.e-5 (not an exact equality).
- *
- * NOTE: This is required because we've run into examples where "bad values" of - * a parameter have been larger than the FillValue so simple - * greater-than/less-than tests fails - * - * @param data value - * @return true = value is FillValue; false = value is not FillValue - */ - public static boolean is_99_999_FillValue(double value) { - double diff = Math.abs(value - 99999.d); - return diff < 0.00001d; - } - - /** - * Determines if a double value is a "999,999.0" missing value. Performs a "real - * value" comparison to a resolution of 1.e-5 (not an exact equality). - * - * @param data value - * @return true = value is FillValue; false = value is not FillValue - */ - public static boolean is_999_999_FillValue(double value) { - double diff = Math.abs(value - 999999.d); - return diff < 0.00001d; - } - - // ............ end "check" methods .............. - /** * Determines if a file can be opened * @@ -430,7 +380,7 @@ public static ArgoDataFile open(String inFile, boolean overrideBadTYPE, String.. log.info("data type = '" + dt + "'"); if (dt == null) { log.error("DATA_TYPE not in file '" + inFile + "'"); - message = new String("DATA_TYPE not in file"); + ValidationResult.lastMessage = new String("DATA_TYPE not in file"); return null; } dt = dt.trim(); @@ -516,7 +466,9 @@ public static ArgoDataFile open(String inFile, boolean overrideBadTYPE, String.. } else { log.info("Invalid DATA_TYPE: '" + dt + "'"); ft = FileType.UNKNOWN; - message = new String("Invalid DATA_TYPE: '" + dt + "'"); + stderr.println("\n\n******\n" + "****** PROGRAM ERROR: Unexpected file type. TERMINATING.\n" + "******"); + System.exit(1); + // ValidationResult.lastMessage = new String("Invalid DATA_TYPE: '" + dt + "'"); return null; } @@ -526,62 +478,61 @@ public static ArgoDataFile open(String inFile, boolean overrideBadTYPE, String.. log.info("version = '" + fv + "'"); if (fv == null) { log.info("FORMAT_VERSION not in file"); - message = new String("FORMAT_VERSION not in file"); + ValidationResult.lastMessage = new String("FORMAT_VERSION not in file"); return null; } // ..create the correct type of File - ArgoDataFile arFile = null; - if (ft == FileType.METADATA) { - log.debug("creating ArgoMetadataFile"); - arFile = new ArgoMetadataFile(); - - } else if (ft == FileType.PROFILE) { - log.debug("creating ArgoProfileFile"); - arFile = new ArgoProfileFile(); - - } else if (ft == FileType.TECHNICAL) { - log.debug("creating ArgoTechnicalFile"); - arFile = new ArgoTechnicalFile(); - - } else if (ft == FileType.TRAJECTORY) { - log.debug("creating ArgoTrajectoryFile"); - arFile = new ArgoTrajectoryFile(); - - } else if (ft == FileType.BIO_PROFILE) { - log.debug("creating ArgoProfileFile"); - arFile = new ArgoProfileFile(); - - } else if (ft == FileType.BIO_TRAJECTORY) { - log.debug("creating ArgoTrajectoryFile"); - arFile = new ArgoTrajectoryFile(); - - } else { - stderr.println("\n\n******\n" + "****** PROGRAM ERROR: Unexpected file type. 
TERMINATING.\n" + "******"); - System.exit(1); - } // ..end open + ArgoDataFile arFile = new ArgoDataFile(); +// if (ft == FileType.METADATA) { +// log.debug("creating ArgoMetadataFile"); +// arFile = new ArgoMetadataValidator(); +// +// } else if (ft == FileType.PROFILE) { +// log.debug("creating ArgoProfileFile"); +// arFile = new ArgoProfileFileValidator(); +// +// } else if (ft == FileType.TECHNICAL) { +// log.debug("creating ArgoTechnicalFile"); +// arFile = new ArgoTechnicalFile(); +// +// } else if (ft == FileType.TRAJECTORY) { +// log.debug("creating ArgoTrajectoryFile"); +// arFile = new ArgoTrajectoryFile(); +// +// } else if (ft == FileType.BIO_PROFILE) { +// log.debug("creating ArgoProfileFile"); +// arFile = new ArgoProfileFileValidator(); +// +// } else if (ft == FileType.BIO_TRAJECTORY) { +// log.debug("creating ArgoTrajectoryFile"); +// arFile = new ArgoTrajectoryFile(); +// +// } else { +// stderr.println("\n\n******\n" + "****** PROGRAM ERROR: Unexpected file type. TERMINATING.\n" + "******"); +// System.exit(1); +// } // ..end open // ..set object variables - + arFile.dacName = dac; arFile.file = file; arFile.ncReader = nc; arFile.ncFileName = inFile; arFile.fileType = ft; - arFile.formatErrors = new ArrayList(); - arFile.formatWarnings = new ArrayList(); arFile.format_version = fv; + arFile.varList = nc.getVariables(); if (!overrideBadTYPE) { if (badtype != null && fv.trim().equals("3.1")) { log.info("Invalid DATA_TYPE: '" + dt + "'"); ft = FileType.UNKNOWN; - message = new String("Invalid DATA_TYPE: '" + dt + "'"); + ValidationResult.lastMessage = new String("Invalid DATA_TYPE: '" + dt + "'"); return null; } } - message = BLANK_MESSAGE; + ValidationResult.lastMessage = BLANK_MESSAGE; return arFile; } // ..end open(inFile) @@ -611,8 +562,8 @@ public static ArgoDataFile open(String inFile, String specDir, boolean fullSpec, arFile.spec = openSpecification(fullSpec, specDir, arFile.fileType, arFile.format_version); } catch (IOException e) { if (e.getMessage().matches("cdlFileName.*does not exist")) { - message = "File type / version not valid in the FileChecker: " + arFile.fileType + " / " - + arFile.format_version; + ValidationResult.lastMessage = "File type / version not valid in the FileChecker: " + arFile.fileType + + " / " + arFile.format_version; return (null); } else { throw e; @@ -635,10 +586,9 @@ public void close() throws IOException { file = null; ncFileName = null; fileType = null; - clearFormatErrors(); - clearFormatWarnings(); + dacName = null; - message = BLANK_MESSAGE; + ValidationResult.lastMessage = BLANK_MESSAGE; } // ..end close() // ...............openSpecification................... @@ -646,8 +596,7 @@ public void close() throws IOException { * Opens the Argo specification associated with this Argo file. An * Argo specification is a description of the official Argo file format. 
* - * @see fr.coriolis.checker.specs.ArgoFileSpecification - * ArgoFileSpecification + * @see fr.coriolis.checker.specs.ArgoFileSpecification ArgoFileSpecification * * @param fullSpec true = open full specification; false = open file template * @param specDir The string name of the directory containing the specification @@ -696,7 +645,7 @@ public static ArgoFileSpecification openSpecification(boolean fullSpec, String s s = new ArgoFileSpecification(fullSpec, specDir, ft, version); } catch (IOException e) { - message = "Failed in ArgoFileSpecification"; + ValidationResult.lastMessage = "Failed in ArgoFileSpecification"; throw e; } @@ -713,1111 +662,6 @@ public static ArgoFileSpecification openSpecification(boolean fullSpec, String s return s; } // ..end openSpecification - // ......................ckVarAttr................... - /** - * Compares the attributes of a Netcdf Variable and an ArgoVariable - * - * @param dataVar the Netcdf variable object - * @param specVar the ArgoVariable object - * @return the boolean status. True - they are the same. False - they differ. - */ - private boolean ckVarAttr(Variable dataVar, ArgoVariable specVar) { - // ..compare attributes - boolean returnVal = true; - - String varName = dataVar.getShortName(); - - DATA_ATTR_LOOP: for (Attribute dataAttr : dataVar.getAttributes()) { - String attrName = dataAttr.getShortName(); - ArgoAttribute specAttr = specVar.getAttribute(attrName); - // log.debug ("ckVarAttr: {}", attrName); - - if (specAttr == null) { - // ..data file attribute is not in the specification - - /* - * 2015-11: ADMT-16 decided that "extra attributes" are allowed - * - * The following code would cause it to be an error: - * - * returnVal = false; formatErrors.add("attribute: '"+varName+":"+ - * attrName+"' not defined in specification '"+ spec.getSpecName()+"'"); - * - * For now... they will be ignored by just doing nothing about it - */ - - log.info("extra attribute (allowed): {}:{} not in spec", varName, attrName); - continue; - } - - // ..check if the spec attribute is label "NOT_ALLOWED" error - ArgoAttribute.AttrHandling specialHandling = specAttr.getHandling(); - if (!checkAttributNotAllowed(varName, attrName, specialHandling)) { - returnVal = false; - continue; - } - ; - - // ..check the data attribute type - // ..the 3 types supported by ArgoAttribute are: Number, String, Object - // ..netCDF supports: Number, String, List of (number or string) - // .. - // ..Number and String are the only 2 currently in use. - // ..Hard crash if it is anything else. - - if (!checkAttributeType(varName, dataAttr, attrName, specAttr)) { - returnVal = false; - continue; - } - // ..check the attribute value - if (!checkVarAttributeValue(dataVar, varName, dataAttr, attrName, specAttr, specialHandling)) { - returnVal = false; - continue; - } - } // ..end for (dataAttr) - return returnVal; - } // ..end ckVarAttr - - /** - * // ..check the attribute value. 
if the spec attribute is "IGNORE_COMPLETELY" - * or "IGNORE_VALUE", we just don't care about the value so skip the check - * - * @param dataVar - * @param varName - * @param dataAttr - * @param attrName - * @param specAttr - * @param specialHandling - * @return - */ - private boolean checkVarAttributeValue(Variable dataVar, String varName, Attribute dataAttr, String attrName, - ArgoAttribute specAttr, ArgoAttribute.AttrHandling specialHandling) { - if (!(specialHandling == ArgoAttribute.AttrHandling.IGNORE_COMPLETELY - || specialHandling == ArgoAttribute.AttrHandling.IGNORE_VALUE)) { - String dataAttrValue = null; - String specAttrValue = specAttr.getValue().toString(); - - if (dataAttr.isString()) { - try { - dataAttrValue = dataAttr.getStringValue(); - } catch (Exception e) { - formatErrors.add("attribute: " + varName + ":" + attrName + ": Bad value. Not a string."); - return false; - - } - - } else { - try { - dataAttrValue = dataAttr.getNumericValue().toString(); - } catch (Exception e) { - formatErrors.add("attribute: " + varName + ":" + attrName + ": Bad value. Not a numeric value."); - return false; - } - } - - if (!dataAttrValue.equals(specAttrValue)) { - // ..data file attribute is not the same as the spec file attribute - ArgoFileSpecification.AttrRegex regex = spec.getAttrRegex(varName, attrName); - if (regex == null) { - // ..no regex .. this is a format error - formatErrors.add( - "attribute: " + varName + ":" + attrName + ": Definitions differ " + "\n\tSpecification = '" - + specAttrValue + "'" + "\n\tData File = '" + dataAttrValue + "'"); - log.info("format error: {}:{} " + "attribute mismatch (no regex): spec, data = {}, {}", varName, - attrName, specAttrValue, dataAttrValue); - - return false; - - } else { - // ..regex defined ... does it match? - if (!regex.pattern.matcher(dataAttrValue).matches()) { - formatErrors.add("attribute: " + dataVar.getShortName() + ":" + dataAttr.getShortName() - + ": Definitions differ " + "\n\tSpecification = '" + regex.pattern + "' (regex)" - + "\n\tData File = '" + dataAttrValue + "'"); - log.info("format error: " + attrName + " attribute regex mismatch '" + regex.pattern + "'"); - return false; - - } else { - if (regex.warn) { - formatWarnings.add("attribute: " + dataVar.getShortName() + ":" + dataAttr.getShortName() - + ": Accepted; not standard value" + "\n\tSpecification = '" + specAttrValue - + "'" + "\n\tException allowed = '" + regex.pattern + "' (regex)" - + "\n\tData File = '" + dataAttrValue + "'"); - log.warn("regex match (WARN): attribute '{}:{} = '{}' matches '{}'", varName, attrName, - dataAttrValue, regex.pattern); - } else { - log.warn("regex match (NO WARN): attribute '{}:{} = '{}' matches '{}'", varName, attrName, - dataAttrValue, regex.pattern); - } - } - } // ..end if regex - } // ..end attr.equals - } else { - log.debug("ckVarAttr: '{}': marked as IGNORE", attrName); - } // ..end if (marked IGNORE) - return true; - } - - /** - * // ..check the data attribute type. The 3 types supported by ArgoAttribute - * are: Number, String, Object netCDF supports: Number, String, List of (number - * or string) Number and String are the only 2 currently in use. Hard crash if - * it is anything else. 
- * - * @param varName - * @param dataAttr - * @param attrName - * @param specAttr - * @return - */ - private boolean checkAttributeType(String varName, Attribute dataAttr, String attrName, ArgoAttribute specAttr) { - DataType dataAttrType = dataAttr.getDataType(); - - if (specAttr.isString()) { - if (!dataAttr.isString()) { - String err = String.format("attribute: %s:%s: Incorrect attribute value type. Must be string", varName, - attrName); - // formatErrors.add(err) - - // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("{}: {}: {}", dacName, file.getName(), err); - - return false; - - } - // log.debug("ckVarAttr: '{}': spec/data both String", attrName); - - } else if (specAttr.isNumeric()) { - if (!dataAttrType.isNumeric()) { - String err = String.format("attribute: %s:%s: Incorrect attribute value type. Must be numeric", varName, - attrName); - - // formatErrors.add(err); - // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - - log.warn("{}: {}: {}", dacName, file.getName(), err); - - return false; - - } - // log.debug("ckVarAttr: '{}': spec/data both Number", attrName); - - } else { - // ..what the hell were you thinking? - stderr.println("\n\n******\n" + "****** PROGRAM ERROR: ArgoDataFile(ckvarattr) " + varName + ":" + attrName - + ": unknown specAttr type. TERMINATING.\n" + "******"); - System.exit(1); - } - return true; - } - - private boolean checkAttributNotAllowed(String varName, String attrName, - ArgoAttribute.AttrHandling specialHandling) { - if (specialHandling == ArgoAttribute.AttrHandling.NOT_ALLOWED) { - String err = String.format("attribute: %s:%s: Attribute is not allowed.", varName, attrName); - // formatErrors.add(err) - - // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("{}: {}: {}", dacName, file.getName(), err); - - return false; - - } - return true; - } - - // ......................ckVarDims................... - /** - * Compares the dimensions of a Netcdf Variable and an ArgoVariable - * - * @param dataVar the Netcdf variable object - * @param specVar the ArgoVariable object - * @return the boolean status. True - they are the same. False - they differ. - */ - private boolean ckVarDims(Variable dataVar, ArgoVariable specVar) { - String specDims = specVar.getDimensionsString(); - String dataDims = dataVar.getDimensionsString(); - - int specRank = specVar.getRank(); // ..rank of the variable W/O extra dims - int dataRank = dataVar.getRank(); - - boolean altDimVar = specVar.canHaveAlternateDimensions(); - boolean exDimVar = specVar.canHaveExtraDimensions(); - - boolean pass = true; - if (dataRank < specRank) { - // ..always an error - pass = false; - - } else if (specDims.equals(dataDims)) { - // ..always a success - pass = true; - - } else if (!(altDimVar || exDimVar)) { - // ..dimension strings don't match - // ..not a "special variable" - pass = false; - - } else { - // ..where are we? - // .. dataRank >= specRank - // .. data dimension names != spec dimension names - // .. 
it is a "special variable" - - // ..need to disect the individual dimensions - - for (int n = 0; n < dataRank; n++) { - String dDimName = dataVar.getDimension(n).getShortName(); - - if (n < specRank) { - // ..one of the regular dimensions - - ArgoDimension sDim = specVar.getDimension(n); - String sDimName = sDim.getName(); - - if (!sDimName.equals(dDimName)) { - // ..isn't the expected one - - if (!sDim.isAllowedAlternateDimensionName(dDimName)) { - // ..this captures to possibilities - // ..1) this dimension is not an alt-dim - // ..or - // ..2) this dimension is not an allowed dim within an alt-dim - - pass = false; - break; - - } else { - log.debug("ckVarDims: allowed alternate dim = '{}'", dDimName); - } - - } - - } else { - // ..an "extra dimension" - // ..check that it is valid - if (!spec.getDimension(dDimName).isExtraDimension()) { - pass = false; - break; - } - } - } // ..end for (dataRank) - } // ..end if (specRank ... dataRank)..end if evaluating dimensions - - log.debug("ckVarDims: pass = {}: specRank, dataRank, altDimVar, exDimVar = {}, {}, {}, {}", pass, specRank, - dataRank, altDimVar, exDimVar); - log.debug(" specDims, dataDims = '{}', '{}'", specDims, dataDims); - - if (!pass) { - if (exDimVar) { - formatErrors.add("variable: " + dataVar.getShortName() + ": Definitions differ" - + "\n\tSpecification dimensions = '" + specDims + " (+ extra-dimensions)'" - + "\n\tData File dimensions = '" + dataDims + "'"); - - log.info("format error: '{}' dimensions mismatch (extra dimension)", dataVar.getShortName()); - - } else { - formatErrors.add("variable: " + dataVar.getShortName() + ": Definitions differ" - + "\n\tSpecification dimensions = '" + specDims + "'" + "\n\tData File dimensions = '" - + dataDims + "'"); - - log.info("format error: '{}' dimensions mismatch", dataVar.getShortName()); - } - } - - return pass; - } // ..end ckVarDims - - // ......................ckVarTypes................... - /** - * Compares the data types of a Netcdf Variable and an ArgoVariable - * - * @param dataVar the Netcdf variable object - * @param specVar the ArgoVariable object - * @return the boolean status. True - they are the same. False - they differ. - */ - - private boolean ckVarTypes(Variable dataVar, ArgoVariable specVar) { - // ..compare data type - DataType specType = specVar.getType(); - DataType dataType = dataVar.getDataType(); - - if (specType.equals(dataType)) { - return true; - - } else { - // ..there is an odd exception where a type may be either float or double - // ..in the argo spec, that is encoded as DataType.OPAQUE - // ..check for that exception - - if (specVar.getType() == DataType.OPAQUE && (dataType == DataType.FLOAT || dataType == DataType.DOUBLE)) { - return true; - } - - formatErrors.add("variable: " + dataVar.getShortName() + ": Definitions differ" - + "\n\tSpecification type = '" + specVar.getType().toString() + "'" + "\n\tData File type = '" - + dataVar.getDataType().toString() + "'"); - log.info("format error: '{}' data type mismatch", dataVar.getShortName()); - - return false; - } - } // ..end ckVarTypes - - // .......................verifyFormat..................... - /** - * Verifies whether this file conforms to the Argo Netcdf specification. The - * specification for this file type and version must be open. (See - * - * @see fr.coriolis.checker.specs.ArgoFileSpecification ) - *

- *

    - *
  • nFormatErrors will retrieve the number of errors. - *
  • formatErrors will retrieve the error descriptions. - *
  • nFormatWarnings will retrieve the number of warnings - *
  • formatWarnings will retrieve the warning descriptions. - *
- * - * @param dacName Name of the dac for diagnostic purposes - * @return boolean status indicator: True - file checked (status unspecified); - * False - failed to check format - */ - public boolean verifyFormat(String dacName) { - if (spec == null) { - log.info("File specification not opened"); - message = "ERROR: File specification not opened for this file"; - formatErrors.add("ERROR: File specification not opened for this file"); - return false; - } - - /* - * METHOD: 1) Iterate through the data file dimensions and compare to the spec - - * if an optional "extra" dimension is encountered, add it to the spec 2) - * Iterate through the data file variables and compare to the spec 2a) - for - * each variable, iterate through the attributes and compare - attribute - * regexs?????? 3) Iterate through the spec dimensions and compare to the data - * file 4) Iterate through the spec variables and compare to the data file - - * postpone processing of missing optional variables 4a) - for each variable, - * iterate through the attributes and compare 5) For missing optional variables, - * check that all variables for the group are missing. 6) Iterate through the - * spec global attributes and compare to the data file 7) Clear any "extra" - * dimensions that were added - */ - - HashSet dataElement = new HashSet(); // ..reported elements - HashSet dataGroup = new HashSet(); // ..groups with reported elements - - // ......Step 1: Compare the data file dimensions to the specification....... - log.debug(".....verifyFormat: compare data dimensions to spec....."); - verifyFileDimensions(dataElement, dataGroup); - - // .......Step 2: Compare the data file variables to the spec....... - - log.debug(".....verifyFormat: compare data variables to spec....."); - verifyFileVariables(dataElement, dataGroup); - - // ......Step 3: Check the spec dimensions against data file......... - // .. - only need to check existence - definitions already checked above - log.debug(".....verifyFormat: compare spec dimensions to data....."); - verifySpecDimensionsPresenceInData(); - - // ......Step 4: Check the spec variables against the data......... - // .. - only need to check existence - definitions already checked above - log.debug(".....verifyFormat: compare spec variables to data....."); - verifySpecVariablePresenceInData(); - - // ..............Step 5: Finish group variables......................... - // .. - for each group with a reported variable -- make sure all of them are - // reported - log.debug(".....verifyFormat: check reported groups....."); - checkGroupsCompleteness(dataElement, dataGroup); - - // ......Step 6: Compare the spec global attributes to the file....... 
- log.debug(".....verifyFormat: compare spec global attr to data file....."); - verifyGlobalAttributes(dacName); - - spec.clearExtraDimensions(); - - if (formatErrors.size() == 0) { - verified = true; - } - - log.debug(".....verifyFormat: completed....."); - - return true; - } // ..end verifyFormat - - private void verifyGlobalAttributes(String dacName) { - for (String name : spec.getGlobalAttributeNames()) { - ArgoAttribute specAttr = spec.getGlobalAttribute(name); - Attribute dataAttr = ncReader.findGlobalAttribute(name); - if (log.isDebugEnabled()) { - log.debug("spec attribute: " + name); - } - - if (dataAttr == null) { - // ..attribute in spec file is not in the data file - - String err = String.format("global attribute: %s: not defined in data file", name); - // formatErrors.add(err); - - // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("{}: {}: {}", dacName, file.getName(), err); - - } else { - // ..spec attr is in data file -- check values - checkGlobalAttributeValue(dacName, name, specAttr, dataAttr); - } - } - } - - private void checkGlobalAttributeValue(String dacName, String name, ArgoAttribute specAttr, Attribute dataAttr) { - if (dataAttr.isString()) { - String specValue = specAttr.getValue().toString(); - String dataValue = dataAttr.getStringValue(); - - if (!(specValue.startsWith(ArgoFileSpecification.ATTR_IGNORE) - || specValue.startsWith(ArgoFileSpecification.ATTR_IGNORE_VALUE))) { - // ..specAttr is not set for ignore - - if (!dataValue.equals(specValue)) { - // ..data file attribute is not the same as the spec file attribute - - // Pattern regex = spec.getAttrRegex("", name); - ArgoFileSpecification.AttrRegex regex = spec.getAttrRegex("", name); - - if (regex == null) { - // ..no regex .. this is a format error - - String err = String.format("global attribute: %s: Definitions differ" - + "\n\tSpecification = '%s'" + "\n\tData File = '%s'", name, specValue, dataValue); - - // formatErrors.add(err); - - // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); - - } else { - // ..regex defined ... does it match? 
- - if (!regex.pattern.matcher(dataValue).matches()) { - String err = String.format("global attribute: %s: Definitions differ" - + "\n\tSpecification = '%s' (regex)" + "\n\tData File = '%s'", name, - regex.pattern, dataValue); - // formatErrors.add("global attribute: "+ - - // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); - - } else { - if (regex.warn) { - formatWarnings.add("global attribute: " + name + ": Accepted; not standard value" - + "\n\tSpecification = '" + specValue + "'" + "\n\tException allowed = '" - + regex.pattern + "' (regex)" + "\n\tData File = '" + dataValue + "'"); - log.warn("regex match (WARN): global attribute ':{} = '{}' matches '{}'", name, - dataValue, regex.pattern); - } else { - log.warn("regex match (NO WARN): global attribute ':{} = '{}' matches '{}'", name, - dataValue, regex.pattern); - } - } - } // ..end if regex - - } // ..end attr.equals - } // ..end if (ignore) - - } else { - String err = String.format("global attribute: %s: not a \"string valued\" attribute", name); - // formatErrors.add("global attribute: "+name+ - - // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); - - } // ..end if(dataAttr.isString) - } - - private void checkGroupsCompleteness(HashSet dataElement, HashSet dataGroup) { - for (String group : dataGroup) { - log.debug("group with reported variable: '{}'", group); - - // ..at least one member of "group" was reported - // ..check that all group memebers are reported - - Set vReq = spec.groupMembers(group); - HashSet vMiss = new HashSet(); - - for (String member : vReq) { - if (!dataElement.contains(member)) { - // ..this member of the group was reported - vMiss.add(member); - } - } - - if (vMiss.size() > 0) { - // ..some variables in this group were missing but not all - // ..format the list of required and reported variables - StringBuilder req = new StringBuilder(); - for (String str : vReq) { - req.append("'" + str + "' "); - } - - // ..format the list of missing variables - StringBuilder miss = new StringBuilder(); - for (String str : vMiss) { - miss.append("'" + str + "' "); - } - - // ..add to formatErrors - formatErrors.add("Parameter group " + group + ": Variables are missing for this group" - + "\n\tRequired variables: " + req + "\n\tMissing variables: " + miss); - - log.info("format error: option group '{}' variables missing from data file", group); - } - } - } - - private void verifySpecVariablePresenceInData() { - - for (String name : spec.getSpecVariableNames()) { - ArgoVariable specVar = spec.getVariable(name); - Variable dataVar = ncReader.findVariable(name); - log.debug("spec var: {}", name); - - if (dataVar == null) { - // ..variable in spec file is not in the data file - - if (spec.isOptional(name)) { - // ..the variable is optional - log.debug("optional variable not defined in data file: '{}'", name); - - } else { - formatErrors.add("variable: " + name + ": not defined in data file"); - - log.info("format error: variable not in data file: '{}'", name); - } - - // ..........Step 4a: Check the spec attributes against the data file....... 
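/*
 * Editor's note -- a minimal sketch of the Step 4a presence rule used in the
 * else-branch below: spec attributes whose handling is IGNORE_COMPLETELY or
 * NOT_ALLOWED are skipped, every other spec attribute must exist on the data
 * variable. The enum and method names are simplified stand-ins for
 * ArgoAttribute.AttrHandling and the real check.
 */
enum SketchHandling { REQUIRED, IGNORE_COMPLETELY, NOT_ALLOWED }

static String sketchAttributePresence(SketchHandling handling, String varName,
        String attrName, boolean presentInData) {
    if (handling == SketchHandling.IGNORE_COMPLETELY || handling == SketchHandling.NOT_ALLOWED) {
        return null; // nothing to verify for these attributes
    }
    if (!presentInData) {
        // same wording as the format error reported by the checker
        return "attribute: '" + varName + ":" + attrName + "' not defined in data file";
    }
    return null; // attribute is present, as required
}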
- } else { - // ..we are looking for attributes the spec says must exist - // ..and checking the data variable to see if they do - // ..- don't have to check values because the data var attr values were checked - // above - for (String attrName : specVar.getAttributeNames()) { - checkSpevVarAttributePresenceInData(name, specVar, dataVar, attrName); - } - } - } // end for (spec-Var-name) - } - - private void checkSpevVarAttributePresenceInData(String name, ArgoVariable specVar, Variable dataVar, - String attrName) { - ArgoAttribute attr = specVar.getAttribute(attrName); - ArgoAttribute.AttrHandling handling = attr.getHandling(); - - if (handling == ArgoAttribute.AttrHandling.IGNORE_COMPLETELY - || handling == ArgoAttribute.AttrHandling.NOT_ALLOWED) { - - // ..attribute is allowed to be missing OR - // ..attribute must NOT exist - // ..- don't bother checking - log.debug("optional attr: '" + name + ":" + attrName + "' - ignored"); - - } else { - Attribute dataAttr = dataVar.findAttribute(attrName); - - if (dataAttr == null) { - // ..attribute in spec file is not in the data file - - formatErrors.add("attribute: '" + name + ":" + attrName + "' not defined in data file"); - - log.info("format error: attribute not in data file: '{}:{}'", name, attrName); - } - } - } - - private void verifySpecDimensionsPresenceInData() { - - for (ArgoDimension dim : spec.getDimensions()) { - String name = dim.getName(); - Dimension dataDim = ncReader.findDimension(name); - log.debug("spec dim: {}", name); - - if (dataDim == null) { - if (spec.isOptional(name)) { - // ..dimension is optional - log.debug("optional dimension '{}': not defined in data file - allowed", name); - - } else if (dim.isAlternateDimension()) { - // ..this is an alt-dim --- it will never appear in the data file by name - log.debug("alt-dim '{}': not defined in data file - expected", name); - - } else { - // ..dimension in spec file is not in the data - formatErrors.add("dimension: " + name + ": not defined in data file"); - - log.info("format error: dimension not in data file: '{}'", name); - } - } - } // end for (spec-Dim-name) - } - - private void verifyFileVariables(HashSet dataElement, HashSet dataGroup) { - - varList = ncReader.getVariables(); - - for (Variable dataVar : varList) { - String name = dataVar.getShortName(); - - dataElement.add(name); - - checkVariableAgainstSpec(name, dataVar); - - addElementToGroupIfDefinedInSpec(dataGroup, name); - - } - } - - private void checkVariableAgainstSpec(String name, Variable dataVar) { - - if (log.isDebugEnabled()) { - log.debug("data var: '{}'", name); - } - - ArgoVariable specVar = spec.getVariable(name); - - if (specVar == null) { - // ..data file variable is not in the specification - formatErrors.add("variable: " + name + ": not defined in specification '" + spec.getSpecName() + "'"); - - log.info("format error: variable not in spec: '{}'", name); - - } else if (!ckVarTypes(dataVar, specVar)) { - // ..data types don't match - return; - - } else if (!ckVarDims(dataVar, specVar)) { - // ..variable dimensions don't match - return; - // .....Step 2a: Compare the attributes for this variable...... - } else if (!ckVarAttr(dataVar, specVar)) { - // ..variable attributes don't match - return; - } - - } - -// ......................................................... -// ........... verifyFileDimensions .............. -// ......................................................... 
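/*
 * Editor's note -- an illustrative sketch of the length rule the dimension checks
 * below apply: a specification length of zero or less means the dimension is
 * unspecified or UNLIMITED and any data length is accepted, while a positive
 * specification length must match the data file exactly. Simplified stand-in,
 * not the checker's own method.
 */
static boolean sketchDimensionLengthOk(int specLength, int dataLength) {
    // specLength <= 0: dimension is unspecified or UNLIMITED in the specification
    return specLength <= 0 || specLength == dataLength;
}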
- private void verifyFileDimensions(HashSet dataElement, HashSet dataGroup) { - - List dimList = ncReader.getDimensions(); - - if (dimList == null || dimList.isEmpty()) { - log.debug("no dimensions in this file"); - return; - } - - for (Dimension dataDim : dimList) { - String dimName = dataDim.getShortName(); - ArgoDimension specDim = spec.getDimension(dimName); - dataElement.add(dimName); - if (log.isDebugEnabled()) { - log.debug("data dim: {} -- {}", dataDim, specDim); - } - - if (specDim == null) { - // ..dimension in data file is not in the spec - // ..is it an "extra dimension"? - handleExtraDimension(dataDim, dimName); - - } else { - // ..dimension is in the spec, check its value - validateDimensionLength(dataDim, specDim); - // ..if a data dimension is in a group, - // .. we have to check the rest of the group later - addElementToGroupIfDefinedInSpec(dataGroup, dimName); - } - } - } - - private void addElementToGroupIfDefinedInSpec(HashSet dataGroup, String elementName) { - String group = spec.inGroup(elementName); - if (group != null) { - dataGroup.add(group); - } - } - - private void validateDimensionLength(Dimension dataDim, ArgoDimension specDim) { - int specValue = specDim.getValue(); - int dataValue = dataDim.getLength(); - if (specValue > 0 && specValue != dataValue) { - // ..tValue > 0: dimension is not _unspecified_ or UNLIMITED - // .. AND it doesn't have the same value -> error - formatErrors.add("dimension: " + dataDim.getShortName() + ": Definitions differ" + "\n\tSpecification = '" - + specValue + "'" + "\n\tData File = '" + dataValue + "'"); - - log.info("format error: '{}' dimension value mismatch", dataDim.getShortName()); - } - } - - private void handleExtraDimension(Dimension dataDim, String dimName) { - ArgoDimension specDim; - specDim = spec.addExtraDimension(dimName, dataDim.getLength()); - - if (specDim == null) { - // ..nope, not an allowed "extra dimension" -> error - formatErrors.add( - String.format("dimension: %s: not defined in specification '%s'", dimName, spec.getSpecName())); - - log.info("format error: '{}' not in spec", dimName); - - } else { - log.debug("extra dimension: '{}'. value = {}", dimName, dataDim.getLength()); - } - } - - // ......................................................... - // ........... getGdacFileName .............. - // ......................................................... 
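/*
 * Editor's note -- a minimal sketch of the core-profile naming rule implemented by
 * getGdacFileName() below: 'R' for real-time/adjusted data mode, 'D' for delayed
 * mode, then the platform number, "_", the formatted cycle number, an optional
 * trailing 'D' for descending profiles, and ".nc". The three-digit cycle padding
 * is an assumption for illustration; the real code uses its own cycleFmt formatter.
 */
static String sketchCoreProfileName(String platform, int cycle, char direction, char dataMode) {
    String prefix = (dataMode == 'D') ? "D" : "R";   // data modes 'R' and 'A' both give R-files
    StringBuilder name = new StringBuilder(prefix);
    name.append(platform.trim()).append("_").append(String.format("%03d", cycle));
    if (direction == 'D') {
        name.append('D');                            // descending-profile marker
    }
    return name.append(".nc").toString();            // e.g. "R1901234_042.nc"
}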
- /** - * Validates that the file name conforms to GDAC standards - * - */ - public String getGdacFileName() { - log.debug("......getGdacFileName: start....."); - - StringBuilder gdacName = new StringBuilder(20); - - if (fileType == FileType.METADATA) { - String platform = readString("PLATFORM_NUMBER"); - - gdacName.append(platform.trim()); - gdacName.append("_meta.nc"); - - log.debug("meta-data file: platform, gdacName = '{}', '{}'", platform, gdacName); - - } else if (fileType == FileType.PROFILE || fileType == FileType.BIO_PROFILE) { - String platform = readString("PLATFORM_NUMBER", 0); - int cycle = readInt("CYCLE_NUMBER", 0); - char direction = readString("DIRECTION", true).charAt(0); - - if (fileType == FileType.BIO_PROFILE) { - gdacName.append('B'); - } - - if (fileType == FileType.PROFILE) { - char data_mode = readString("DATA_MODE").charAt(0); - - log.debug("{} file: platform, cycle, direction, data_mode = " + "'{}', '{}', '{}', '{}'", fileType, - platform, cycle, direction, data_mode); - - if (data_mode == 'R' || data_mode == 'A') { - gdacName.append('R'); - } else if (data_mode == 'D') { - gdacName.append('D'); - } else { // ..pre-v3.1 files are not data-checked - // ..do some rudimentary data checks here - formatErrors.add("Could not determine file name: invalid DATA_MODE ='" + data_mode + "'"); - return null; - } - - } else { // ..this is a BIO file - String data_mode = readString("DATA_MODE"); - - log.debug("{} file: platform, cycle, direction, data_mode = " + "'{}', '{}', '{}', '{}'", fileType, - platform, cycle, direction, data_mode); - - // ..a 'D' for any n_prof makes the whole file D-mode - - if (pDataMode.matcher(data_mode).matches()) { - // ..pre-v3.1 files are not data-checked - // ..do a rudimentary data checks here - - if (data_mode.indexOf('D') >= 0) { - // ..any single 'D' means D-mode for file - gdacName.append("D"); - - } else { - gdacName.append("R"); - } - - } else { - formatErrors.add("Could not determine file name: invalid DATA_MODE = '" + data_mode + "'"); - return null; - } - } - - gdacName.append(platform.trim()); - gdacName.append("_"); - gdacName.append(cycleFmt.format(cycle)); - - if (direction == 'D') { - gdacName.append(direction); - - } else if (direction != 'A') { // ..pre-v3.1 files are not data-checked - // ..do some rudimentary data checks here - formatErrors.add("Could not determine file name: invalid DIRECTION ='" + direction + "'"); - return null; - } - - gdacName.append(".nc"); - - log.debug("{} file: gdacName = '{}'", fileType, gdacName); - - } else if (fileType == FileType.TECHNICAL) { - String platform = readString("PLATFORM_NUMBER"); - - gdacName.append(platform.trim()); - gdacName.append("_tech.nc"); - - log.debug("tech-data file: platform, gdacName = '{}', '{}'", platform, gdacName); - - } else if (fileType == FileType.TRAJECTORY || fileType == FileType.BIO_TRAJECTORY) { - String platform = readString("PLATFORM_NUMBER"); - String data_mode = readString("DATA_MODE", true);// ..true->incl NULLs - - log.debug("{} file: platform, data_mode = " + "'{}', '{}'", fileType, platform, data_mode); - - gdacName.append(platform.trim()); - gdacName.append("_"); - - if (fileType == FileType.BIO_TRAJECTORY) { - gdacName.append("B"); - } - - if (pDataMode.matcher(data_mode).matches()) { - // ..pre-v3.1 files are not data-checked - // ..do a rudimentary data checks here - - if (data_mode.indexOf('D') >= 0) { - // ..any single 'D' means D-mode for file - gdacName.append("Dtraj.nc"); - - } else { - gdacName.append("Rtraj.nc"); - } - - } else { - 
formatErrors.add("Could not determine file name: invalid DATA_MODE = '" + data_mode + "'"); - return null; - } - - log.debug("trajectory file: gdacName = '{}'", gdacName); - - } else { - formatErrors.add("Could not determine file name: unexpected file type = '" + fileType + "'"); - log.debug("unknown file type: '{}'", fileType); - return null; - } - - log.debug("......getGdacFileName: end....."); - return gdacName.toString(); - } - - // .............................................................. - // ........... validateGdacFileName .............. - // .............................................................. - /** - * Validates that the input file name conforms to GDAC standards - * - * @return true = file name conforms; false = file name non-conforming - */ - public boolean validateGdacFileName() { - - String expected = getGdacFileName(); - - if (expected == null) { - // ..error occured ... formatErrors set by getGdacFileName - return false; - } - - String name = file.getName(); - - if (!name.equals(expected)) { - // ..actual file name and expected file name don't match - - if (name.endsWith("_traj.nc") && expected.endsWith("_Rtraj.nc")) { - // ..have to deal with the special case of pre-v3.1 *_traj files - if (format_version.startsWith("2.") || format_version.startsWith("3.0")) { - String ex = expected.replaceFirst("_Rtraj", "_traj"); - - if (name.equals(ex)) { - log.debug("validateGdacFileName: PASSED using special *_traj rules: " - + "expected, name = '{}', '{}'", expected, name); - return true; - } else { - log.debug("validateGdacFileName: FAILED using special *_traj rules: " - + "expected, name = '{}', '{}'", expected, name); - } - } - } - - formatErrors.add("Incorrect file name\n\tDAC file name: '" + name + "'\n\tFile name from data: '" - + expected.toString() + "'"); - log.debug("validateGdacFileName: FAILED: expected, name = '{}', '{}'", expected, name); - - return false; - } - - log.debug("validateGdacFileName: PASSED: expected, name = '{}', '{}'", expected, name); - return true; - } - - // .......................................................... - // ........... validateStringNulls .............. - // .......................................................... - /** - * Checks for nulls in the "char" variables in an Argo file. - * - *

- * Upon completion obj.nFormatErrors(), obj.nFormatWarnings, - * obj.formatErrors(), and obj.formatWarnings will return results. - * - * @throws IOException If an I/O error occurs - */ - public void validateStringNulls() throws IOException { - char nullChar = (char) 0; - - // .....Check all Strings -- no nulls allowed..... - if (log.isDebugEnabled()) { - log.debug(".....validateStrings....."); - } - - for (Variable var : varList) { - if (var.getDataType() == DataType.CHAR) { - ArrayChar ch; - ch = (ArrayChar) var.read(); - - Index ndx = ch.getIndex(); - int shape[] = var.getShape(); - int rank = shape.length; - - // log.debug(var.getShortName()+": shape:"+shape.length+" rank:"+rank); - - if (rank == 1) { - // log.debug("rank 1: '"+ch.getString()+"'"); - for (int i = 0; i < shape[0]; i++) { - ndx.set(i); - if (ch.getChar(ndx) == nullChar) { - formatWarnings.add(var.getShortName() + ": NULL character at [" + (i + 1) + "]"); - log.warn("warning: {}[{}]: null character", var.getShortName(), i); - break; - } - } - } else if (rank == 2) { - for (int i = 0; i < shape[0]; i++) { - // log.debug("rank 2: '"+ch.getString(ndx.set(i,0))+"'"); - for (int j = 0; j < shape[1]; j++) { - ndx.set(i, j); - if (ch.getChar(ndx) == nullChar) { - formatWarnings.add( - var.getShortName() + ": NULL character at [" + (i + 1) + "," + (j + 1) + "]"); - log.warn("warning: {}[{},{}]: null character", var.getShortName(), i, j); - break; - } - } - } - } else if (rank == 3) { - for (int i = 0; i < shape[0]; i++) { - for (int j = 0; j < shape[1]; j++) { - // log.debug("rank 3: '"+ch.getString(ndx.set(i,j,0))+"'"); - for (int k = 0; k < shape[2]; k++) { - ndx.set(i, j, k); - if (ch.getChar(ndx) == nullChar) { - formatWarnings.add(var.getShortName() + ": NULL character at [" + (i + 1) + "," - + (j + 1) + "," + (k + 1) + "]"); - log.warn("warning: {}[{},{},{}]: null character", var.getShortName(), i, j, k); - break; - } - } - } - } - } else if (rank == 4) { - for (int i = 0; i < shape[0]; i++) { - for (int j = 0; j < shape[1]; j++) { - for (int k = 0; k < shape[2]; k++) { - // log.debug("rank 4: '"+ch.getString(ndx.set(i,j,k,0))+"'"); - for (int l = 0; l < shape[3]; l++) { - ndx.set(i, j, k, l); - if (ch.getChar(ndx) == nullChar) { - formatWarnings.add(var.getShortName() + ": NULL character at (" + (i + 1) + "," - + (j + 1) + "," + (k + 1) + "," + (l + 1) + "]"); - log.warn("warning: {}[{},{},{},{}]: null character", var.getShortName(), i, j, - k, l); - break; - } - } - } - } - } - } else { - throw new IOException("validateString cannot handle rank " + rank + " arrays"); - } // ..end if (rank) - } - } - }// ..end validateStringNulls - - // ..................................................... - // ......... rudimentaryDateChecks ............. - // ..................................................... - /** - * Performs rudimentary date checks on a file. Essentially, just checks that - * DATE_CREATE and DATE_UPDATE are valid date strings. - * - *

- * Upon completion obj.nFormatErrors(), obj.nFormatWarnings, - * obj.formatErrors(), and obj.formatWarnings will return results. - * - */ - public void rudimentaryDateChecks() { - log.debug(".....rudimentaryDateChecks: start....."); - - // ..creation date check - // ..just check for valid date - - String creation = readString("DATE_CREATION"); - - if (creation.trim().length() <= 0) { - formatErrors.add("DATE_CREATION: Not set"); - log.info("format error: DATE_CREATION not set"); - - } else { - Date dateCreation = ArgoDate.get(creation); - - if (dateCreation == null) { - formatErrors.add("DATE_CREATION: '" + creation + "': Invalid date"); - log.info("format error: bad DATE_CREATION: '{}'", creation); - } - } - - // ..update date check - // ..just check for valid date - - String update = readString("DATE_UPDATE"); - - if (update.trim().length() <= 0) { - formatErrors.add("DATE_UPDATE: Not set"); - log.info("format error: DATE_UPDATE not set"); - - } else { - Date dateUpdate = ArgoDate.get(update); - - if (dateUpdate == null) { - formatErrors.add("DATE_UPDATE: '" + update + "': Invalid date"); - log.info("format error: bad DATE_UPDATE: '{}'", update); - } - } - log.debug(".....rudimentaryDateChecks: end....."); - } - // ........................................................ // ........... convenience "reader" functions ............ // ........................................................ @@ -1840,7 +684,7 @@ public int getDimensionLength(String dimName) { private int getDimensionLength(NetcdfFile ncReader, String dimName) { Dimension dim = ncReader.findDimension(dimName); if (dim == null) { - message = "Dimension '" + dimName + "' not in Argo data file."; + ValidationResult.lastMessage = "Dimension '" + dimName + "' not in Argo data file."; return -1; } @@ -1850,7 +694,23 @@ private int getDimensionLength(NetcdfFile ncReader, String dimName) { // ******************************************************************* // *************** CONVENIENCE READER METHODS ************************ // ******************************************************************* + /** + * Retrieve a list of variables in the associated file + */ + public ArrayList getVariableNames() { + ArrayList varNames = new ArrayList(); + if (ncReader != null) { + for (Variable var : ncReader.getVariables()) { + varNames.add(var.getShortName()); + } + + } else { + varNames = null; + } + + return varNames; + } // ................ STRING READER METHODS ..................... /** @@ -1881,7 +741,7 @@ public String readString(String varName, boolean... returnNulls) { private static String readString(NetcdfFile ncReader, String varName, boolean... returnNulls) { Variable ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -1891,7 +751,7 @@ private static String readString(NetcdfFile ncReader, String varName, boolean... } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -1962,7 +822,7 @@ public String readString(String varName, int n, boolean... returnNulls) { private static String readString(NetcdfFile ncReader, String varName, int n, boolean... 
returnNulls) { Variable ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -1972,7 +832,7 @@ private static String readString(NetcdfFile ncReader, String varName, int n, boo } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -2043,7 +903,7 @@ public String readString(String varName, int n, int m, boolean... returnNulls) { private static String readString(NetcdfFile ncReader, String varName, int n, int m, boolean... returnNulls) { Variable ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -2053,7 +913,7 @@ private static String readString(NetcdfFile ncReader, String varName, int n, int } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -2124,7 +984,7 @@ public String readString(String varName, int n, int m, int k, boolean... returnN private static String readString(NetcdfFile ncReader, String varName, int n, int m, int k, boolean... returnNulls) { Variable ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -2134,7 +994,7 @@ private static String readString(NetcdfFile ncReader, String varName, int n, int } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -2207,7 +1067,7 @@ public String[] readStringArr(String varName, boolean... returnNulls) { private static String[] readStringArr(NetcdfFile ncReader, String varName, boolean... returnNulls) { Variable ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -2217,7 +1077,7 @@ private static String[] readStringArr(NetcdfFile ncReader, String varName, boole } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -2293,7 +1153,7 @@ public String[] readStringArr(String varName, int n, boolean... returnNulls) { private static String[] readStringArr(NetcdfFile ncReader, String varName, int n, boolean... 
returnNulls) { Variable ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -2303,7 +1163,7 @@ private static String[] readStringArr(NetcdfFile ncReader, String varName, int n } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -2379,7 +1239,7 @@ public String[] readStringArr(String varName, int n, int m, boolean... returnNul private static String[] readStringArr(NetcdfFile ncReader, String varName, int n, int m, boolean... returnNulls) { Variable ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -2389,7 +1249,7 @@ private static String[] readStringArr(NetcdfFile ncReader, String varName, int n } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -2464,7 +1324,7 @@ private static int readInt(NetcdfFile ncReader, String varName) { ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return Integer.MAX_VALUE; } @@ -2474,7 +1334,7 @@ private static int readInt(NetcdfFile ncReader, String varName) { } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return Integer.MAX_VALUE; } @@ -2507,7 +1367,7 @@ public int readInt(String varName, int n) { private static int readInt(NetcdfFile ncReader, String varName, int n) { Variable ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return Integer.MAX_VALUE; } @@ -2517,7 +1377,7 @@ private static int readInt(NetcdfFile ncReader, String varName, int n) { } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return Integer.MAX_VALUE; } @@ -2551,7 +1411,7 @@ private static int[] readIntArr(NetcdfFile ncReader, String varName) { ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -2561,7 +1421,7 @@ private static int[] readIntArr(NetcdfFile ncReader, String varName) { } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -2593,7 +1453,7 @@ public int[] readIntArr(String varName, int n) { private static int[] readIntArr(NetcdfFile ncReader, String varName, int n) { Variable ncVar = 
ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -2609,7 +1469,7 @@ private static int[] readIntArr(NetcdfFile ncReader, String varName, int n) { } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -2646,7 +1506,7 @@ public int[] readIntArr(String varName, int n, int m) { private static int[] readIntArr(NetcdfFile ncReader, String varName, int n, int m) { Variable ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -2663,7 +1523,7 @@ private static int[] readIntArr(NetcdfFile ncReader, String varName, int n, int } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -2699,7 +1559,7 @@ private static double readDouble(NetcdfFile ncReader, String varName) { ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return Double.NaN; } @@ -2709,7 +1569,7 @@ private static double readDouble(NetcdfFile ncReader, String varName) { } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return Double.NaN; } @@ -2745,7 +1605,7 @@ private static double readDouble(NetcdfFile ncReader, String varName, int n) { ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return Double.NaN; } @@ -2755,7 +1615,7 @@ private static double readDouble(NetcdfFile ncReader, String varName, int n) { } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return Double.NaN; } @@ -2789,7 +1649,7 @@ private static double[] readDoubleArr(NetcdfFile ncReader, String varName) { ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -2799,7 +1659,7 @@ private static double[] readDoubleArr(NetcdfFile ncReader, String varName) { } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -2832,7 +1692,7 @@ public double[] readDoubleArr(String varName, int n) { private static double[] readDoubleArr(NetcdfFile ncReader, String varName, int n) { Variable ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + 
ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -2848,7 +1708,7 @@ private static double[] readDoubleArr(NetcdfFile ncReader, String varName, int n } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -2886,7 +1746,7 @@ public double[] readDoubleArr(String varName, int n, int m) { private static double[] readDoubleArr(NetcdfFile ncReader, String varName, int n, int m) { Variable ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -2903,7 +1763,7 @@ private static double[] readDoubleArr(NetcdfFile ncReader, String varName, int n } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -2939,7 +1799,7 @@ private static float readFloat(NetcdfFile ncReader, String varName) { ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return Float.NaN; } @@ -2949,7 +1809,7 @@ private static float readFloat(NetcdfFile ncReader, String varName) { } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return Float.NaN; } @@ -2985,7 +1845,7 @@ private static float readFloat(NetcdfFile ncReader, String varName, int n) { ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return Float.NaN; } @@ -2995,7 +1855,7 @@ private static float readFloat(NetcdfFile ncReader, String varName, int n) { } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return Float.NaN; } @@ -3029,7 +1889,7 @@ private static float[] readFloatArr(NetcdfFile ncReader, String varName) { ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -3039,7 +1899,7 @@ private static float[] readFloatArr(NetcdfFile ncReader, String varName) { } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -3071,7 +1931,7 @@ public float[] readFloatArr(String varName, int n) { private static float[] readFloatArr(NetcdfFile ncReader, String varName, int n) { Variable ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -3090,7 +1950,7 @@ private static float[] 
readFloatArr(NetcdfFile ncReader, String varName, int n) } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.println(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -3128,7 +1988,7 @@ public float[] readFloatArr(String varName, int n, int m) { private static float[] readFloatArr(NetcdfFile ncReader, String varName, int n, int m) { Variable ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -3145,7 +2005,7 @@ private static float[] readFloatArr(NetcdfFile ncReader, String varName, int n, } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -3181,7 +2041,7 @@ private static short readShort(NetcdfFile ncReader, String varName) { ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return Short.MAX_VALUE; } @@ -3191,7 +2051,7 @@ private static short readShort(NetcdfFile ncReader, String varName) { } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return Short.MAX_VALUE; } @@ -3224,7 +2084,7 @@ public short readShort(String varName, int n) { private static short readShort(NetcdfFile ncReader, String varName, int n) { Variable ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return Short.MAX_VALUE; } @@ -3234,7 +2094,7 @@ private static short readShort(NetcdfFile ncReader, String varName, int n) { } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return Short.MAX_VALUE; } @@ -3268,7 +2128,7 @@ private static short[] readShortArr(NetcdfFile ncReader, String varName) { ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -3278,7 +2138,7 @@ private static short[] readShortArr(NetcdfFile ncReader, String varName) { } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -3310,7 +2170,7 @@ public short[] readShortArr(String varName, int n) { private static short[] readShortArr(NetcdfFile ncReader, String varName, int n) { Variable ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -3329,7 +2189,7 @@ private static short[] readShortArr(NetcdfFile ncReader, String varName, int n) } catch 
(Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.println(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -3367,7 +2227,7 @@ public short[] readShortArr(String varName, int n, int m) { private static short[] readShortArr(NetcdfFile ncReader, String varName, int n, int m) { Variable ncVar = ncReader.findVariable(varName); if (ncVar == null) { - message = "Variable '" + varName + "' not in Argo data file."; + ValidationResult.lastMessage = "Variable '" + varName + "' not in Argo data file."; return null; } @@ -3384,7 +2244,7 @@ private static short[] readShortArr(NetcdfFile ncReader, String varName, int n, } catch (Exception e) { stderr.println("ERROR: Reading '" + varName + "': " + e); // stderr.print(e); - message = "Netcdf read exception: " + e; + ValidationResult.lastMessage = "Netcdf read exception: " + e; return null; } @@ -3394,4 +2254,157 @@ private static short[] readShortArr(NetcdfFile ncReader, String varName, int n, // ******************************************************** // ******* end convenience "reader" functions ************ // ******************************************************** + + /** + * Computes the "index-file psal adjustment" statistics defined as:
+ *
+ * • Mean of (psal_adjusted - psal) on the deepest 500 meters with good
+ *   psal_adjusted_qc (equal to 1)
+ * • Standard deviation of (psal_adjusted - psal) on the deepest 500 meters
+ *   with good psal_adjusted_qc (equal to 1)
+ *
+ * NOTES: Only performed for a core-file for the first profile. + * + * @param nProf number of profiles in the file + * @param nParam number of parameters in the file + * @param nLevel number of levels in the file + * @throws IOException If an I/O error occurs + */ + public double[] computePsalAdjStats() { + log.debug(".....computePsalAdjStats....."); + + double[] stats = { 99999., 99999. }; + + if (this.fileType() != FileType.PROFILE) { + log.debug("not a core-file. fileType = {}", this.fileType()); + return (stats); + } + + float[] p_adj = this.readFloatArr("PRES_ADJUSTED", 0); + String p_adj_qc = this.readString("PRES_ADJUSTED_QC", 0, true); + + if (p_adj == null) { + log.debug("failed: no PRES_ADJUSTED variable"); + return (stats); + } + + // ..find the deepest good pres (must be > 500db) + float pDeep = 99999.f; + int nDeep = -1; + + for (int n = p_adj.length - 1; n > 0; n--) { + // if (p_adj[n] >= 500.f) { + if (p_adj_qc.charAt(n) == '1') { + pDeep = p_adj[n]; + nDeep = n; + break; + } + // } + } + + if (nDeep < 0) { + log.debug("failed: no good PRES_ADJUSTED > 500db"); + return (stats); + } + + log.debug("nDeep, pDeep = {}, {}", nDeep, pDeep); + + // ..find the starting point for the stats + float pShallow = 99999.f; + int nShallow = -1; + + float shallowest = pDeep - 500.f; + + for (int n = 0; n < p_adj.length; n++) { + if (p_adj[n] >= shallowest) { + pShallow = p_adj[n]; + nShallow = n; + break; + } + } + + if (nShallow < 0) { + log.debug("failed: could not find a starting PRES_ADJUSTED index"); + return (stats); + } + + log.debug("nShallow, pShallow = {}, {}", nShallow, pShallow); + + // ..is this mode = A or D (otherwise, the psal_adj) + + char m = this.readString("DATA_MODE", true).charAt(0); + log.debug("mode = {}", m); + + if (m == 'R') { + // ..no adjustment + stats[0] = 0.; + stats[1] = 0.; + log.debug("r-mode. no adjustment. 
stats = 0"); + return (stats); + } + + // ..get psal and psal_adj + + float[] s = this.readFloatArr("PSAL", 0); + float[] sadj = this.readFloatArr("PSAL_ADJUSTED", 0); + + String s_qc = this.readString("PSAL_QC", 0, true); + String sadj_qc = this.readString("PSAL_ADJUSTED_QC", 0, true); + + if (s == null) { + log.debug("failed: no PSAL data"); + return (stats); + } + + // ..compute mean + + double[] diff = new double[s.length]; + double sum = 0.; + int n_data = 0; + boolean[] include = new boolean[s.length]; + + for (int n = nShallow; n <= nDeep; n++) { + if (s_qc.charAt(n) == '1' && sadj_qc.charAt(n) == '1') { + include[n] = true; + n_data++; + diff[n] = sadj[n] - s[n]; + sum += diff[n]; + } else { + include[n] = false; + diff[n] = 1.e10; + } + } + + if (n_data < 2) { + log.debug("failed: fewer than 2 good data"); + return (stats); + } + + double mean = sum / n_data; + + log.debug("sum, n_data, mean = {}, {}, {}", sum, n_data, mean); + + // ..compute std dev + + sum = 0.; + + for (int n = nShallow; n <= nDeep; n++) { + if (include[n]) { + double d = diff[n] - mean; + sum += d * d; + } + } + + double sdev = Math.sqrt(sum / ((double) n_data - 1)); + + log.debug("sum, n_data, sdev = {}, {}, {}", sum, n_data, sdev); + + // ..done + + stats[0] = mean; + stats[1] = sdev; + + return (stats); + } } // ..end class diff --git a/file_checker_exec/src/main/java/fr/coriolis/checker/core/ValidateSubmit.java b/file_checker_exec/src/main/java/fr/coriolis/checker/core/ValidateSubmit.java index 060aa17..526f519 100644 --- a/file_checker_exec/src/main/java/fr/coriolis/checker/core/ValidateSubmit.java +++ b/file_checker_exec/src/main/java/fr/coriolis/checker/core/ValidateSubmit.java @@ -18,16 +18,16 @@ import org.apache.logging.log4j.Logger; import fr.coriolis.checker.config.Options; +import fr.coriolis.checker.core.ArgoDataFile.FileType; import fr.coriolis.checker.exceptions.NotAnArgoFileException; import fr.coriolis.checker.exceptions.ValidateFileDataFailedException; import fr.coriolis.checker.exceptions.VerifyFileFormatFailedException; -import fr.coriolis.checker.filetypes.ArgoDataFile; -import fr.coriolis.checker.filetypes.ArgoDataFile.FileType; -import fr.coriolis.checker.filetypes.ArgoMetadataFile; -import fr.coriolis.checker.filetypes.ArgoProfileFile; -import fr.coriolis.checker.filetypes.ArgoTechnicalFile; -import fr.coriolis.checker.filetypes.ArgoTrajectoryFile; import fr.coriolis.checker.output.ResultsFile; +import fr.coriolis.checker.validators.ArgoFileValidator; +import fr.coriolis.checker.validators.ArgoMetadataFileValidator; +import fr.coriolis.checker.validators.ArgoProfileFileValidator; +import fr.coriolis.checker.validators.ArgoTechnicalFileValidator; +import fr.coriolis.checker.validators.ArgoTrajectoryFileValidator; /** * Implements the Argo FileChecker data file validation checking. @@ -47,6 +47,7 @@ public class ValidateSubmit { // ......................Variable Declarations................ 
private static ArgoDataFile argo; + private static ArgoFileValidator argoFileValidator; private static boolean doXml = true; @@ -184,7 +185,7 @@ private static ArgoDataFile openArgoFile(String inFileName, String specDirName, if (argo == null) { // ..null file means it did not meet the min criteria to be an argo file - throw new NotAnArgoFileException("ArgoDataFile.open failed: " + ArgoDataFile.getMessage()); + throw new NotAnArgoFileException("ArgoDataFile.open failed: " + ValidationResult.getMessage()); } return argo; @@ -214,6 +215,9 @@ private static void validateFiles(Options options, String dacName, List // ..............open Argo file .................... argo = openArgoFile(inFileName, options.getSpecDirName(), dacName); + // ..............instanciate File validator .................... + argoFileValidator = new ArgoFileValidator(argo); + // .................check the format................ String phase = "FORMAT-VERIFICATION"; boolean[] checkFormatResults = checkArgoFileFormat(dacName); @@ -237,7 +241,7 @@ private static void validateFiles(Options options, String dacName, List if (options.isDoNameCheck() && formatPassed) { // .."name check" requested and no other errors phase = "FILE-NAME-CHECK"; - argo.validateGdacFileName(); + argoFileValidator.validateGdacFileName(); } // ...............report status and meta-data results............... // ..status is that open was successful @@ -250,9 +254,9 @@ private static void validateFiles(Options options, String dacName, List if (!specialPreV31FormatCheckPassed) { out.oldDModeFile(dacName, argo.fileVersion()); } else { - out.statusAndPhase((argo.nFormatErrors() == 0), phase); + out.statusAndPhase((argoFileValidator.getValidationResult().nFormatErrors() == 0), phase); out.metaData(dacName, argo, formatPassed, options.isDoPsalStats()); - out.errorsAndWarnings(argo); + out.errorsAndWarnings(argoFileValidator); } // .............................close Argo file...................... 
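/*
 * Editor's note -- a simplified sketch of the flow this change introduces: the
 * opened ArgoDataFile is wrapped in an ArgoFileValidator, format checks populate
 * a ValidationResult, and callers read error counts from that result instead of
 * from the data-file object. Class and method names mirror the diff; the
 * surrounding wiring (imports, specific exception types) is omitted for brevity.
 */
static boolean sketchFormatCheck(ArgoDataFile argo, String dacName) throws Exception {
    ArgoFileValidator validator = new ArgoFileValidator(argo); // the validator owns the results
    boolean completed = validator.validateFormat(dacName);     // false = the check itself failed
    if (!completed) {
        throw new Exception("verifyFormat check failed: " + ValidationResult.getMessage());
    }
    return validator.getValidationResult().nFormatErrors() == 0; // accepted when no errors recorded
}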
@@ -296,15 +300,15 @@ private static boolean[] checkArgoFileFormat(String dacName) throws VerifyFileFo private static boolean regularCheckArgoFileFormat(String dacName) throws VerifyFileFormatFailedException { // check the format and return true if all process could be done - boolean isVerifyFormatCompleted = argo.verifyFormat(dacName); + boolean isVerifyFormatCompleted = argoFileValidator.validateFormat(dacName); if (!isVerifyFormatCompleted) { // ..verifyFormat *failed* -- not format errors - an actual failure - throw new VerifyFileFormatFailedException("verifyFormat check failed: " + ArgoDataFile.getMessage()); + throw new VerifyFileFormatFailedException("verifyFormat check failed: " + ValidationResult.getMessage()); } else { // ..verifyFormat completed -- chech error/warning counts to determine status - if (argo.nFormatErrors() == 0) { + if (argoFileValidator.getValidationResult().nFormatErrors() == 0) { log.debug("format ACCEPTED"); return true; @@ -352,7 +356,7 @@ private static boolean rudimentaryDateCheck(Options options, boolean formatPasse if (doRudimentaryDateCheck) { // ..passed format checks, format accepted, full data checks not performed // because "early version" so do a couple of rudimentary date checks - argo.rudimentaryDateChecks(); + argoFileValidator.rudimentaryDateChecks(); } return doRudimentaryDateCheck; } @@ -369,45 +373,73 @@ private static boolean rudimentaryDateCheck(Options options, boolean formatPasse */ private static void checkArgoFileData(String dacName, boolean doNulls, boolean doBatteryChecks) throws IOException, ValidateFileDataFailedException { + + // argoFileValidator must be in the right specialized validation class: + instanciateSpecializedValidator(); + if (argo.fileType() == FileType.METADATA) { + // Do metadata file validate data - boolean isValidateArgoMetadaFileDataCompleted = ((ArgoMetadataFile) argo).validate(dacName, doNulls, - doBatteryChecks); + boolean isValidateArgoMetadaFileDataCompleted = ((ArgoMetadataFileValidator) argoFileValidator) + .validateData(doNulls, doBatteryChecks); if (!isValidateArgoMetadaFileDataCompleted) { // ..the validate process failed (not errors within the data) - log.error("ArgoMetadataFile.validate failed: " + ArgoDataFile.getMessage()); + log.error("ArgoMetadataFile.validate failed: " + ValidationResult.getMessage()); throw new ValidateFileDataFailedException("Meta-data"); } } else if (argo.fileType() == FileType.PROFILE || argo.fileType() == FileType.BIO_PROFILE) { // Do profile file validate data - boolean isValidateArgoProfileFileDataCompleted = ((ArgoProfileFile) argo).validate(false, dacName, doNulls); + boolean isValidateArgoProfileFileDataCompleted = ((ArgoProfileFileValidator) argoFileValidator) + .validateData(false, dacName, doNulls); if (!isValidateArgoProfileFileDataCompleted) { // ..the validate process failed (not errors within the data) - log.error("ArgoProfileFile.validate failed: " + ArgoDataFile.getMessage()); + log.error("ArgoProfileFile.validate failed: " + ValidationResult.getMessage()); throw new ValidateFileDataFailedException("Profile"); } } else if (argo.fileType() == FileType.TECHNICAL) { // Do Technical file validate data - boolean isValidateArgoTechnicalFileDataCompleted = ((ArgoTechnicalFile) argo).validate(dacName, doNulls); + boolean isValidateArgoTechnicalFileDataCompleted = ((ArgoTechnicalFileValidator) argoFileValidator) + .validateData(dacName, doNulls); if (!isValidateArgoTechnicalFileDataCompleted) { // ..the validate process failed (not errors within the data) - 
log.error("ArgoTechnicalFile.validate failed: " + ArgoDataFile.getMessage()); + log.error("ArgoTechnicalFile.validate failed: " + ValidationResult.getMessage()); throw new ValidateFileDataFailedException("Technical"); } } else if (argo.fileType() == FileType.TRAJECTORY || argo.fileType() == FileType.BIO_TRAJECTORY) { // Do Trajectory file validate data - boolean isValidateArgoTrajectoryFileDataCompleted = ((ArgoTrajectoryFile) argo).validate(dacName, doNulls); + boolean isValidateArgoTrajectoryFileDataCompleted = ((ArgoTrajectoryFileValidator) argoFileValidator) + .validateData(dacName, doNulls); if (!isValidateArgoTrajectoryFileDataCompleted) { // ..the validate process failed (not errors within the data) - log.error("ArgoTrajectoryFile.validate failed: " + ArgoDataFile.getMessage()); + log.error("ArgoTrajectoryFile.validate failed: " + ValidationResult.getMessage()); throw new ValidateFileDataFailedException("Trajectory"); } } } + private static void instanciateSpecializedValidator() throws IOException { + ValidationResult formatResult = argoFileValidator.getValidationResult(); + + if (argo.fileType() == FileType.METADATA) { + argoFileValidator = new ArgoMetadataFileValidator(argo); + } else if (argo.fileType() == FileType.PROFILE || argo.fileType() == FileType.BIO_PROFILE) { + // Do profile file validate data + argoFileValidator = new ArgoProfileFileValidator(argo); + } else if (argo.fileType() == FileType.TECHNICAL) { + // Do Technical file validate data + argoFileValidator = new ArgoTechnicalFileValidator(argo); + + } else if (argo.fileType() == FileType.TRAJECTORY || argo.fileType() == FileType.BIO_TRAJECTORY) { + argoFileValidator = new ArgoTrajectoryFileValidator(argo); + } + // copy the previous ValidationResult from format verification + argoFileValidator.setValidationResult(formatResult); + + } + /** * Evaluate if data check has to be done. 
* diff --git a/file_checker_exec/src/main/java/fr/coriolis/checker/core/ValidationResult.java b/file_checker_exec/src/main/java/fr/coriolis/checker/core/ValidationResult.java new file mode 100644 index 0000000..79bdffa --- /dev/null +++ b/file_checker_exec/src/main/java/fr/coriolis/checker/core/ValidationResult.java @@ -0,0 +1,80 @@ +package fr.coriolis.checker.core; + +import java.util.ArrayList; + +public class ValidationResult { + private static final String BLANK_MESSAGE = new String(""); + + private ArrayList warnings; + private ArrayList errors; + public static String lastMessage = BLANK_MESSAGE; + + public ValidationResult() { + errors = new ArrayList(); + warnings = new ArrayList(); + } + + /** Clear the errors */ + public void clearFormatErrors() { + if (errors != null) { + errors.clear(); + } + } + + /** Clear the warnings */ + public void clearFormatWarnings() { + if (warnings != null) { + warnings.clear(); + } + } + + /** Retrieve the error descriptions for errors found during file checking */ + public ArrayList getErrors() { + return new ArrayList(errors); + } + + /** Retrieve the error descriptions for errors found during file checking */ + public ArrayList getWarnings() { + return new ArrayList(warnings); + } + + public void addError(String errorStr) { + errors.add(errorStr); + } + + public void addWarning(String warningStr) { + warnings.add(warningStr); + } + + /** + * Retrieve the number of format errors found during file checking + * + * @returns number of file errors (0 = no errors) + */ + public int nFormatErrors() { + return errors.size(); + } + + /** + * Retrieve the number of format warnings found during file checking + * + * @returns number of file warnings (0 = no warnings) + */ + public int nFormatWarnings() { + return warnings.size(); + } + + /** Retrieve the most recent error message generated by a method */ + public static String getMessage() { + return new String(lastMessage); + } + + public boolean isValid() { + if (errors.size() == 0) { + return true; + } else { + return false; + } + } + +} \ No newline at end of file diff --git a/file_checker_exec/src/main/java/fr/coriolis/checker/output/ResultsFile.java b/file_checker_exec/src/main/java/fr/coriolis/checker/output/ResultsFile.java index 750b7d5..35b85b1 100644 --- a/file_checker_exec/src/main/java/fr/coriolis/checker/output/ResultsFile.java +++ b/file_checker_exec/src/main/java/fr/coriolis/checker/output/ResultsFile.java @@ -1,4 +1,5 @@ package fr.coriolis.checker.output; + import java.io.BufferedWriter; import java.io.IOException; import java.io.PrintWriter; @@ -24,10 +25,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import fr.coriolis.checker.filetypes.ArgoDataFile; -import fr.coriolis.checker.filetypes.ArgoDataFile.FileType; -import fr.coriolis.checker.filetypes.ArgoProfileFile; +import fr.coriolis.checker.core.ArgoDataFile; +import fr.coriolis.checker.core.ArgoDataFile.FileType; +import fr.coriolis.checker.core.ValidationResult; import fr.coriolis.checker.specs.ArgoDate; +import fr.coriolis.checker.validators.ArgoFileValidator; public class ResultsFile { @@ -256,7 +258,7 @@ public void notArgoFile(String dacName) throws XMLStreamException { xml.writeStartElement("errors"); xml.writeAttribute("number", "1"); xml.writeStartElement("error"); - xml.writeCharacters(ArgoDataFile.getMessage()); + xml.writeCharacters(ValidationResult.getMessage()); xml.writeEndElement(); xml.writeEndElement(); @@ -267,7 +269,7 @@ public void notArgoFile(String dacName) throws 
XMLStreamException { out.println("DAC: " + dacName); out.println("META-DATA: end"); out.println("FORMAT-ERRORS: start"); - out.println(ArgoDataFile.getMessage()); + out.println(ValidationResult.getMessage()); out.println("FORMAT-ERRORS: end"); out.println("FORMAT-WARNINGS: start"); out.println("FORMAT-WARNINGS: end"); @@ -287,12 +289,12 @@ public void formatErrorMessage(String phase) throws XMLStreamException { xml.writeStartElement("errors"); xml.writeAttribute("number", "1"); xml.writeStartElement("error"); - xml.writeCharacters("Format check failed. " + ArgoDataFile.getMessage()); + xml.writeCharacters("Format check failed. " + ValidationResult.getMessage()); xml.writeEndElement(); xml.writeEndElement(); } else { - out.println("ERROR: Format check failed." + ArgoDataFile.getMessage()); + out.println("ERROR: Format check failed." + ValidationResult.getMessage()); out.println("PHASE: " + phase); } } @@ -312,12 +314,12 @@ public void dataErrorMessage(String type) throws XMLStreamException { xml.writeStartElement("errors"); xml.writeAttribute("number", "1"); xml.writeStartElement("error"); - xml.writeCharacters(type + " validation failed. " + ArgoDataFile.getMessage()); + xml.writeCharacters(type + " validation failed. " + ValidationResult.getMessage()); xml.writeEndElement(); xml.writeEndElement(); } else { - out.println("ERROR: " + type + " validation failed: " + ArgoDataFile.getMessage()); + out.println("ERROR: " + type + " validation failed: " + ValidationResult.getMessage()); } } @@ -455,7 +457,7 @@ public void metaData(String dacName, ArgoDataFile argo, boolean formatPassed, bo metaStationParameters(argo); if (formatPassed && argo.fileType() == FileType.PROFILE) { - addMetaPsalStats((ArgoProfileFile) argo); + addMetaPsalStats(argo); } } else if (argo.fileType() == FileType.TRAJECTORY || argo.fileType() == FileType.BIO_TRAJECTORY) { @@ -562,7 +564,7 @@ public void metaData(String dacName, ArgoDataFile argo, boolean formatPassed, bo } } // ..end metaData - public void addMetaPsalStats(ArgoProfileFile argo) throws XMLStreamException { + public void addMetaPsalStats(ArgoDataFile argo) throws XMLStreamException { // ...............report PSAL adjustment statistics............... // ..assumes: // ..- Argo-open was successful @@ -581,16 +583,16 @@ public void addMetaPsalStats(ArgoProfileFile argo) throws XMLStreamException { // ************************** errorsAndWarnings ************************ - public void errorsAndWarnings(ArgoDataFile argo) throws XMLStreamException { + public void errorsAndWarnings(ArgoFileValidator argoFileValidator) throws XMLStreamException { if (doXml) { xml.writeStartElement("errors"); - xml.writeAttribute("number", Integer.toString(argo.nFormatErrors())); + xml.writeAttribute("number", Integer.toString(argoFileValidator.getValidationResult().nFormatErrors())); } else { out.println("FORMAT-ERRORS: start"); } - log.debug("format errors:" + argo.nFormatErrors()); + log.debug("format errors:" + argoFileValidator.getValidationResult().nFormatErrors()); - for (String err : argo.formatErrors()) { + for (String err : argoFileValidator.getValidationResult().getErrors()) { if (doXml) { xml.writeStartElement("error"); xml.writeCharacters(err); @@ -611,13 +613,13 @@ public void errorsAndWarnings(ArgoDataFile argo) throws XMLStreamException { // ...............report warnings................ 
if (doXml) { xml.writeStartElement("warnings"); - xml.writeAttribute("number", Integer.toString(argo.nFormatWarnings())); + xml.writeAttribute("number", Integer.toString(argoFileValidator.getValidationResult().nFormatWarnings())); } else { out.println("FORMAT-WARNINGS: start"); } - log.debug("format warnings: " + argo.nFormatWarnings()); + log.debug("format warnings: " + argoFileValidator.getValidationResult().nFormatWarnings()); - for (String err : argo.formatWarnings()) { + for (String err : argoFileValidator.getValidationResult().getWarnings()) { if (doXml) { xml.writeStartElement("warning"); xml.writeCharacters(err); diff --git a/file_checker_exec/src/main/java/fr/coriolis/checker/specs/ArgoConfigTechParam.java b/file_checker_exec/src/main/java/fr/coriolis/checker/specs/ArgoConfigTechParam.java index d3019b1..2fec52c 100644 --- a/file_checker_exec/src/main/java/fr/coriolis/checker/specs/ArgoConfigTechParam.java +++ b/file_checker_exec/src/main/java/fr/coriolis/checker/specs/ArgoConfigTechParam.java @@ -1,1047 +1,1361 @@ package fr.coriolis.checker.specs; -import java.io.*; -import java.util.*; -import java.util.regex.*; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; /** - * Implements features to check the Meta-data CONFIG_PARAMETER_NAME (including LAUNCH_...) and - * TECHNICAL_PARAMETER_NAME. This includes the units allowed for both. + * Implements features to check the Meta-data CONFIG_PARAMETER_NAME (including + * LAUNCH_...) and TECHNICAL_PARAMETER_NAME. This includes the units + * allowed for both. * * @author Mark Ignaszewski - * @version $HeadURL: https://inversion.nrlmry.navy.mil/svn/godae/trunk/argo/bin/java/usgdac/ArgoConfigTechParam.java $ - * @version $Id: ArgoConfigTechParam.java 1257 2021-04-26 14:09:25Z ignaszewski $ + * @version $HeadURL: + * https://inversion.nrlmry.navy.mil/svn/godae/trunk/argo/bin/java/usgdac/ArgoConfigTechParam.java + * $ + * @version $Id: ArgoConfigTechParam.java 1257 2021-04-26 14:09:25Z ignaszewski + * $ */ -public class ArgoConfigTechParam -{ - +public class ArgoConfigTechParam { //............................................ // VARIABLES //............................................ - //..........Class variables.............. - - public static enum ConfigTechValueType { - DATE_TIME ("date/time"), - FLOAT ("float"), - HEX ("hex"), - INTEGER ("integer"), - LOGICAL ("logical"), - STRING ("string"), - UNKNOWN ("unknown"); - - public final String name; - - ConfigTechValueType (String d) { name = d; } - - public static ConfigTechValueType getType (String name) - { - for (ConfigTechValueType c : ConfigTechValueType.values()) { - if (name.equals(c.name)) { - return c; - } - } - return ConfigTechValueType.UNKNOWN; - } - - public String toString() { return name; } - }; - - //......define and initialize the template replacement patterns...... 
- - static final String defaultTemplateReplacement; - static final Map templateReplacement; - static final String[] knownTemplates; - - static { - defaultTemplateReplacement = "(?\\w+)"; - - Map temp = new HashMap(10); - //..config templates (alphabetical order) - temp.put("D", "(?\\d+?)"); - temp.put("cycle_phase_name", "(?[A-Z][a-z]+(?:[A-Z][a-z]+)*?Phase)"); - temp.put("I", "(?\\d+?)"); - temp.put("N", "(?\\d+?)"); - temp.put("N+1", "(?\\d+?)"); - temp.put("param", "(?[A-Z][a-z]+(?:[A-Z][a-z]+)??)"); - temp.put("PARAM", "(?[A-Z]+?)"); - temp.put("S", "(?\\d+?)"); - temp.put("SubS", "(?\\d+?)"); - temp.put("short_sensor_name", "(?[A-Z][a-z]+?|CTD)"); - //..tech templates (alphabetical order) - temp.put("digit", "(?\\d)"); - temp.put("int", "(?\\d+?)"); - //..already defined above - temp.put("Z", "(?\\d+?)"); - - templateReplacement = Collections.unmodifiableMap(temp); - - knownTemplates = new String[]{ - "D", - "horizontalphasename", - "I", - "N", - "N1", - "param", - "PARAM", - "S", - "SubS", - "shortsensorname", - "verticalphasename", - "digit", - "int", - "Z" - }; - } - - - //......define and initialize the pattern matcher objects...... - static Pattern pBlankOrComment; //..match a blank line or a comment line - static Pattern pComment; //..comment - - static - { - //..match a blank line (or a line with just comments) - pBlankOrComment = Pattern.compile("^\\s*(?://.*)*"); - - //..match a comment (any where on the line) - recognize the "//@" comments - //..group 1: the word following "@"; group 2: the setting - pComment = Pattern.compile("//(?:@\\s*(\\w+)\\s*=\\s*\"(.+)\")?.*$"); - } - - //.....object variables...... - - private String bioCfgParmFileName; - private String coreCfgParmFileName; - private String tecParmFileName; - private String unitFileName; - private String version; - - private LinkedHashSet configParamList; //..config param fixed names - private LinkedHashSet configParamList_DEP; //..config param fixed names (deprctd) - // private LinkedHashSet configParamRegex; //..config param variable names - // private LinkedHashSet configParamRegex_DEP; //..config param variable names - private LinkedHashMap>> configParamRegex; //..config param variable names - private LinkedHashMap>> configParamRegex_DEP; //..config param variable names - - private LinkedHashSet techParamList; //..tech param fixed names - private LinkedHashSet techParamList_DEP; //..tech param fixed names (deprctd) - - // private LinkedHashSet techParamRegex; //..tech param regex names - // private LinkedHashSet techParamRegex_DEP; //..tech param regex names (deprctd) - private LinkedHashMap>> techParamRegex; //..config param variable names - private LinkedHashMap>> techParamRegex_DEP; //..config param variable names - - private LinkedHashMap unitList; //..config/tech units and type - private LinkedHashMap unitList_DEP; //..config/tech units - deprecated - - - //..logger - private static final Logger log = LogManager.getLogger("ArgoConfigTechParam"); + // ..........Class variables.............. 
+ + public static enum ConfigTechValueType { + DATE_TIME("date/time"), FLOAT("float"), HEX("hex"), INTEGER("integer"), LOGICAL("logical"), STRING("string"), + UNKNOWN("unknown"); + + public final String name; + + ConfigTechValueType(String d) { + name = d; + } + + public static ConfigTechValueType getType(String name) { + for (ConfigTechValueType c : ConfigTechValueType.values()) { + if (name.equals(c.name)) { + return c; + } + } + return ConfigTechValueType.UNKNOWN; + } + + @Override + public String toString() { + return name; + } + }; + + // ......define and initialize the template replacement patterns...... + + static final String defaultTemplateReplacement; + static final Map templateReplacement; + static final String[] knownTemplates; + static final Map> possibleValuesByKey; + + static { + defaultTemplateReplacement = "(?\\w+)"; + + Map temp = new HashMap(10); + // ..config templates (alphabetical order) + temp.put("D", "(?\\d+?)"); + temp.put("cycle_phase_name", "(?[A-Z][a-z]+(?:[A-Z][a-z]+)*?Phase)"); + temp.put("I", "(?\\d+?)"); + temp.put("N", "(?\\d+?)"); + temp.put("N+1", "(?\\d+?)"); + temp.put("param", "(?[A-Z][a-z]+(?:[A-Z][a-z]+)??)"); + temp.put("PARAM", "(?[A-Z]+?)"); + temp.put("S", "(?\\d+?)"); + temp.put("SubS", "(?\\d+?)"); + temp.put("short_sensor_name", "(?[A-Z][a-z]+?|CTD)"); + // ..tech templates (alphabetical order) + temp.put("digit", "(?\\d)"); + temp.put("int", "(?\\d+?)"); + // ..already defined above + temp.put("Z", "(?\\d+?)"); + templateReplacement = Collections.unmodifiableMap(temp); + + knownTemplates = new String[] { "D", "horizontalphasename", "I", "N", "N1", "param", "PARAM", "S", "SubS", + "shortsensorname", "verticalphasename", "digit", "int", "Z" }; + + Map> temp2 = new HashMap<>(); + temp2.put("short_sensor_name", ArgoReferenceTable.GENERIC_TEMPLATE_short_sensor_name); + temp2.put("Z", new HashSet<>(Arrays.asList("1", "2", "3", "4", "5"))); + temp2.put("cycle_phase_name", ArgoReferenceTable.GENERIC_TEMPLATE_cycle_phase_name); + temp2.put("param", ArgoReferenceTable.GENERIC_TEMPLATE_param); + possibleValuesByKey = Collections.unmodifiableMap(temp2); + } + + // ......define and initialize the pattern matcher objects...... + static Pattern pBlankOrComment; // ..match a blank line or a comment line + static Pattern pComment; // ..comment + + static { + // ..match a blank line (or a line with just comments) + pBlankOrComment = Pattern.compile("^\\s*(?://.*)*"); + + // ..match a comment (any where on the line) - recognize the "//@" comments + // ..group 1: the word following "@"; group 2: the setting + pComment = Pattern.compile("//(?:@\\s*(\\w+)\\s*=\\s*\"(.+)\")?.*$"); + } + + // .....object variables...... 
+ + private String bioCfgParmFileName; + private String coreCfgParmFileName; + private String tecParmFileName; + private String unitFileName; + private String version; + + private LinkedHashSet configParamList; // ..config param fixed names + private LinkedHashSet configParamList_DEP; // ..config param fixed names (deprctd) + // private LinkedHashSet configParamRegex; //..config param variable + // names + // private LinkedHashSet configParamRegex_DEP; //..config param + // variable names + private LinkedHashMap>> configParamRegex; // ..config param variable names + private LinkedHashMap>> configParamRegex_DEP; // ..config param variable + // names + + private LinkedHashSet techParamList; // ..tech param fixed names + private LinkedHashSet techParamCodeList; // ..full tech param names -with unit- : reference table 14a + + private LinkedHashSet techParamList_DEP; // ..tech param fixed names (deprctd) + private LinkedHashSet techParamCodeList_DEP; + + // The following map is needed to check TECH time series variable (same variable + // name but different units and long_name posssible) : + private Map> paramAuthorizedUnits = new HashMap<>(); + private Map> paramAuthorizedLongName = new HashMap<>(); + + // private LinkedHashSet techParamRegex; //..tech param regex names + // private LinkedHashSet techParamRegex_DEP; //..tech param regex names + // (deprctd) + private LinkedHashMap>> techParamRegex; // ..config param variable names + private LinkedHashMap>> techParamRegex_DEP; // ..config param variable + // names + + private LinkedHashMap unitList; // ..config/tech units and type + private LinkedHashMap unitList_DEP; // ..config/tech units - deprecated + + // ..logger + private static final Logger log = LogManager.getLogger("ArgoConfigTechParam"); //............................................ // CONSTRUCTORS //............................................ - /** - * Reads the CONFIGURATION_PARAMETER and TECHNICAL_PARAMETER specification files. - *
The files must be in specDir and are named argo-config_names-spec-v<version> - * and "argo-tech_names-spec-v<version> - * - * @param specDir Path to the specification file directory - * @param version Argo netCDF file version - * @param initConfig true: Read CONFIGURATION_PARAMETER specification file - * @param initTech true: Read TECHNICAL_PARAMETER specification file - * @throws IOException File I/O errors - */ - - public ArgoConfigTechParam (String specDir, String version, boolean initConfig, boolean initTech) - throws IOException - { - log.debug("...ArgoConfigTechParam: start..."); - - this.version = version.trim(); - - String prefix = specDir.trim() + "/argo-"; - coreCfgParmFileName = prefix + "core_config_names-spec-v" + this.version; - bioCfgParmFileName = prefix + "bio_config_names-spec-v" + this.version; - tecParmFileName = prefix + "tech_names-spec-v" + this.version; - unitFileName = prefix + "tech_units-spec-v" + this.version; - - if (initConfig) parseConfigParamFiles(); - if (initTech) parseTechParamFile(); - - parseUnitFile(); - - if (log.isDebugEnabled()) { - if (configParamList == null) log.debug("configParamList size = null"); - else log.debug("configParamList size = {}", configParamList.size()); - if (configParamList_DEP == null) log.debug("configParamList_DEP size = null"); - else log.debug("configParamList_DEP size = {}", configParamList_DEP.size()); - if (techParamList == null) log.debug("techParamList size = null"); - else log.debug("techParamList size = {}", techParamList.size()); - if (techParamList_DEP == null) log.debug("techParamList_DEP size = null"); - else log.debug("techParamList_DEP size = {}", techParamList_DEP.size()); - log.debug("...ArgoConfigTechParam: end..."); - } - } + /** + * Reads the CONFIGURATION_PARAMETER and TECHNICAL_PARAMETER specification + * files.
+ * The files must be in specDir and are named + * argo-config_names-spec-v<version> and + * "argo-tech_names-spec-v<version> + * + * @param specDir Path to the specification file directory + * @param version Argo netCDF file version + * @param initConfig true: Read CONFIGURATION_PARAMETER specification file + * @param initTech true: Read TECHNICAL_PARAMETER specification file + * @throws IOException File I/O errors + */ + + public ArgoConfigTechParam(String specDir, String version, boolean initConfig, boolean initTech) + throws IOException { + log.debug("...ArgoConfigTechParam: start..."); + + this.version = version.trim(); + + String prefix = specDir.trim() + "/argo-"; + coreCfgParmFileName = prefix + "core_config_names-spec-v" + this.version; + bioCfgParmFileName = prefix + "bio_config_names-spec-v" + this.version; + tecParmFileName = prefix + "tech_names-spec-v" + this.version; + unitFileName = prefix + "tech_units-spec-v" + this.version; + + if (initConfig) { + parseConfigParamFiles(); + } + if (initTech) { + parseTechParamFile(); + } + + parseUnitFile(); + + if (log.isDebugEnabled()) { + if (configParamList == null) { + log.debug("configParamList size = null"); + } else { + log.debug("configParamList size = {}", configParamList.size()); + } + if (configParamList_DEP == null) { + log.debug("configParamList_DEP size = null"); + } else { + log.debug("configParamList_DEP size = {}", configParamList_DEP.size()); + } + if (techParamList == null) { + log.debug("techParamList size = null"); + } else { + log.debug("techParamList size = {}", techParamList.size()); + } + if (techParamList_DEP == null) { + log.debug("techParamList_DEP size = null"); + } else { + log.debug("techParamList_DEP size = {}", techParamList_DEP.size()); + } + log.debug("...ArgoConfigTechParam: end..."); + } + } //............................................ // METHODS //............................................ - /** - * Determines if the name is matched by a REGEX in the set - * @param name The parameter name in question. - * @param regexSet The set of REGEXPs to check - * @return An ArgoConfigTechParamMatch containing full information about the match - * (null = NO MATCH) - */ - private ArgoConfigTechParamMatch checkRegex - (String name, - HashMap>> regexSet) - { - ArgoConfigTechParamMatch match = null; //..return object - HashMap unMatchedTemplates = null; //..return templates - HashMap failedMatchedTemplates = null; //..return templates - - //..NOTE..NOTE..NOTE.. - //..Should be able to truncate the loop below on the first match - //..ie, find a match -> return - //..However, during development I *really* want to know if there are - //..multiple matches - which indicates ambiguous regex patterns - //..So, FOR NOW, complete the looping and report multiple matches - //..Returns the LAST one matched - - for (Map.Entry>> entry : regexSet.entrySet()) { - Pattern pattern = entry.getKey(); - HashMap> value = entry.getValue(); - - Matcher m = pattern.matcher(name); - - if (m.matches()) { - unMatchedTemplates = new HashMap(m.groupCount()); - failedMatchedTemplates = new HashMap(m.groupCount()); - - for (String key : knownTemplates) { - try { - String str = m.group(key); - - //..is there a "match-list" for this (regex param and template) - - if (value != null) { - HashSet matchSet = value.get(key); - - if (matchSet == null) { - //..there was no match-list to compare to - unMatchedTemplates.put(key, str); - - } else { - //..there is a match-list - compare it - - if (! 
matchSet.contains(str)) { - failedMatchedTemplates.put(key, str); - log.debug("checkRegex: match-list success: key '{}', matched '{}'", - key, str); - - } else if (log.isDebugEnabled()) { - log.debug("checkRegex: match-list failed: key '{}', matched '{}'", - key, str); - } - } - } else { - unMatchedTemplates.put(key, str); - } - - } catch (IllegalArgumentException e) { - } - } - - match = new ArgoConfigTechParamMatch - (pattern.toString(), false, - unMatchedTemplates.size(), unMatchedTemplates, - failedMatchedTemplates.size(), failedMatchedTemplates); - - log.debug("checkRegex: '{}' regex match #{}: '{}', unMatchedTemplates {}"+ - "failedTemplates {}", - unMatchedTemplates.size(), failedMatchedTemplates.size()); - - //****temporary**** return match; - } - } - - return match; - } //..checkRegex - - /** - * Determines if the name is a CONFIG parameter name - * @param name the parameter name in question. - * @return An ArgoConfigTechParamMatch containing full information about the match - * (null = NO MATCH) - * - */ - public ArgoConfigTechParamMatch findConfigParam (String name) - { - ArgoConfigTechParamMatch match = findParam(name, - configParamList, configParamRegex, - configParamList_DEP, configParamRegex_DEP); - return match; - } - - /** - * Determines if the name is a Tech parameter name - * @param name the parameter name in question. - * @return An ArgoConfigTechParamMatch containing full information about the match - * (null = NO MATCH) - * - */ - public ArgoConfigTechParamMatch findTechParam (String name) - { - ArgoConfigTechParamMatch match = findParam(name, - techParamList, techParamRegex, - techParamList_DEP, techParamRegex_DEP); - return match; - } - - /** - * Determines if the name is a valid parameter name within the supplied lists - * @param name the parameter name in question. 
- * @return An ArgoConfigTechParamMatch containing full information about the match - * (null = NO MATCH) - * - */ - private ArgoConfigTechParamMatch findParam - (String name, - HashSet activeList, - HashMap>> activeRegex, - HashSet deprecatedList, - HashMap>> deprecatedRegex) - { - boolean literal = false; - HashMap templates = null; - ArgoConfigTechParamMatch match = null; - - if (activeList != null) { - if (activeList.contains(name)) { - match = new ArgoConfigTechParamMatch (name, false); - - log.debug("findParam: '{}': active literal match", name); - return match; - } - } - - if (deprecatedList != null) { - if (deprecatedList.contains(name)) { - match = new ArgoConfigTechParamMatch (name, true); - - log.debug("findParam: '{}': deprecated literal match", name); - return match; - } - } - - //..did NOT match one of the literal strings - //..check for a regex match - - if (activeRegex != null) { - //log.debug("findParam: checking active regex"); - - match = checkRegex(name, activeRegex); - - if (match != null) { - match.isDeprecated = false; - log.debug("findParam: '{}': active regex match '{}'", name, match.match); - return match; - } - } - - if (deprecatedRegex != null) { - //log.debug("findParam: checking active regex"); - - match = checkRegex(name, deprecatedRegex); - - if (match != null) { - match.isDeprecated = true; - log.debug("findParam: '{}': deprecated regex match '{}'", name, match.match); - return match; - } - } - - log.debug("findParam: '{}': failed match", name); - return null; - } - - /** - * Determines if the value is valid for the unit (mapped to data-type) - * @param unit the unit name - * @param value the config value - * @return True - value is a valid setting for the given unit (or unit is unknown) - * False - parameter name is NOT valid (or tech spec not opened) - * - */ - public boolean isConfigTechValidValue(String unit, String value) - { - //..turn the unit name into a data type - ConfigTechValueType dt = unitList.get(unit); - - if (dt == null || dt == ConfigTechValueType.UNKNOWN) { - //..unit name is unknown OR the data type for this unit is unkown - //..can't check the value setting - //..so blindly return "it's good" (???) - log.debug("isConfigTechValidValue: default to true: unit, data-type, value = '{}', '{}', '{}'", - unit, dt, value); - - return true; - } - - boolean valid = false; - switch (dt) { - case DATE_TIME: - valid = ArgoDate.checkArgoDatePattern(unit, value); - break; - - case FLOAT: - try { - double n = Double.parseDouble(value); - valid = true; - - } catch (NumberFormatException e) { - valid = false; - } - break; - - case HEX: - valid = value.matches("(?i)(0x)?[0-9a-f]+"); - break; - - case INTEGER: - try { - int n = Integer.parseInt(value); - valid = true; - - } catch (NumberFormatException e) { - valid = false; - } - break; - - case LOGICAL: - valid = value.matches("(?i)true|false|yes|no|1|0"); - break; - - case STRING: - valid = true; - break; - - default: - valid = true; - } - - log.debug("isConfigTechValidValue: valid = {}: unit, data-type, value = '{}', '{}', '{}',", - valid, unit, dt, value); - - return valid; - } - - /** - * Determines the data-type of a parameter unit - * @param unit the unit name - * @return data-type name - */ - public String getConfigTechDataType(String name) - { - ConfigTechValueType dt = unitList.get(name); - if (dt == null) { - return null; - } else { - return dt.toString(); - } - } - - /** - * Determines if the name is a configuration/technical parameter unit - * @param name the unit name in question. 
- * @return True - unit is a valid technical parameter unit; - * False - parameter name is NOT valid (or tech spec not opened) - * - */ - public boolean isConfigTechUnit(String name) - { - if (unitList == null) return false; - return unitList.containsKey(name); - } - - /** - * Determines if the name is a DEPRECATED configuration/technical parameter unit - * @param name the unit name in question. - * @return True - unit is a valid technical parameter unit; - * False - parameter name is NOT valid (or tech spec not opened) - * - */ - public boolean isDeprecatedConfigTechUnit(String name) - { - if (unitList_DEP == null) return false; - return unitList_DEP.containsKey(name); - } - - //............................................................... - //.....................parseConfigParamFiles..................... - //...........................................,................... - /** - * Parses the list of configurations parameters of the specification - * @return True - file parsed; False - failed to parse file - * @throws IOException indicates file read or permission error - */ - public void parseConfigParamFiles () throws IOException - { - log.debug(".....parseConfigParamFiles: start....."); - - String[] fileNames = {coreCfgParmFileName, bioCfgParmFileName, - coreCfgParmFileName+".deprecated", bioCfgParmFileName+".deprecated"}; - LinkedHashSet paramList = null; - LinkedHashMap>> paramRegex = null; - - //....loop over the active and deprecated files..... - - for (int nFile = 0; nFile < fileNames.length; nFile++) { - String fileName = fileNames[nFile]; - - log.debug("parsing '{}'", fileName); - - //.......parse the config param name file....... - //..open the file - File f = new File(fileName); - if (! f.isFile()) { - - if (nFile <= 1) { - //..both primary files MUST exist - log.error("Config-Param-file '{}' does not exist", fileName); - throw new IOException("Config-Parm-File '" + fileName + - "' does not exist"); - } else { - //..deprecated file MAY NOT exist - log.debug("Deprecated-Config-Param-file '{}' does not exist (optional)", - fileName); - continue; - } - - } else if (! f.canRead()) { - //..file exists but cannot be read --> error - log.error("Config-Param-File '{}' cannot be read", fileName); - throw new IOException("Config-Parm-File '" + fileName + - "' cannot be read"); - } - - //..create list variables - //..open file - - if (nFile == 0) { - configParamList = new LinkedHashSet(250); - configParamRegex = new LinkedHashMap>>(250); - - paramList = configParamList; - paramRegex = configParamRegex; - - } else if (nFile == 2) { - //..do this only for the first deprecated file - configParamList_DEP = new LinkedHashSet(25); - configParamRegex_DEP = new LinkedHashMap>>(25); - - paramList = configParamList_DEP; - paramRegex = configParamRegex_DEP; - } - - BufferedReader file = new BufferedReader(new FileReader(fileName)); - - //.....read through the files.... 
- - //..pattern to recognize/replace templates - Pattern pTemplate = Pattern.compile("<([^>]+?)>"); - log.debug("template regex: '{}'", pTemplate); - - String line; - while ((line = file.readLine()) != null) { - if (pBlankOrComment.matcher(line).matches()) { - log.debug ("blank/comment: '{}'", line); - continue; - } - - //..break the line into individual entries - String column[] = line.split("\\|"); - for (int n = 0 ; n < column.length ; n++) { - String s = column[n].trim(); - column[n] = s; - } - - //..column[0] is the parameter name and includes an example unit - //..need to strip off the unit - - int index = column[0].lastIndexOf('_'); - - if (index <= 0) { - //..poorly formed name - file.close(); - throw new IOException("Config-Parm-File '" + fileName + - "': Badly formed name '"+column[0]+"'"); - } - - //..well formed named, break it apart - - String param = column[0].substring(0, index); - - //..process parameter - - Matcher matcher = pTemplate.matcher(param); - - if (! matcher.find()) { - //..no <*> structures -- just a straight fixed name - - paramList.add(param); - log.debug ("add param: '{}'", param); - - } else { - //..contains <*> structures -- convert to a regex - //..convert CONFIG_CpAscentPhaseDepthZoneSampleRate_hertz - //..to CONFIG_(?\w*)CpAscentPhaseDepthZone(?\w*)SampleRate_hertz - //.. similar for the other templates - - //........parse all templates off line........ - //..capture up to first template - int start = matcher.start(); - StringBuilder regex = new StringBuilder(); - - if (start > 0) { - regex.append(param.substring(0, matcher.start())); - } - log.debug ("regex line: param = '{}'", param); - //log.debug ("...start regex: start, regex = '{}', '{}'", start, regex); - - //..add first group (already matched) - String repl = templateReplacement.get(matcher.group(1)); - if (repl == null) { - repl = defaultTemplateReplacement; - log.warn("*** DEFAULT TEMPLATE ***"); - } - - regex.append(repl); - //log.debug ("...add template: regex = '{}'", regex); - - //..loop over remaining templates - - int end_after = matcher.end(); - - while (matcher.find()) { - start = matcher.start(); - //log.debug("...end_after, start = '{}', '{}'", end_after, start); - - if (end_after < start) { - regex.append(param.substring(end_after, start)); - //log.debug ("...add literal: '{}'", regex); - } - - repl = templateReplacement.get(matcher.group(1)); - if (repl == null) { - repl = defaultTemplateReplacement; - log.warn("*** DEFAULT TEMPLATE ***"); - } - - regex.append(repl); - //log.debug ("...add template: '{}'", regex); - - end_after = matcher.end(); - } - - //..patch last bit - - if (end_after < param.length()) { - regex.append(param.substring(end_after)); - //log.debug ("...add ending literal: '{}'", regex); - } - - log.debug ("add regex: '{}'", regex); - Pattern pRegex = Pattern.compile(regex.toString()); - - //..........finished parsing templates.......... 
- - //..decide if there are "match lists" to compare to - //..- core_config_name spec does NOT have any match lines - //..- bio_config_name spec can have "match lists" - - HashMap> matchList = null; - - if (nFile == 1 || nFile == 3) { - //..bio-config file has matching list in these columns - String[] templates = - {"shortsensorname", "param", "cyclephasename"}; - int[] nColumn = {2, 3, 4, 5}; //.."0-based" - - matchList = new HashMap>(templates.length); - - for (int n = 0 ; n < templates.length ; n++) { - if (column.length > nColumn[n]) { - String[] list = column[nColumn[n]].split("[, /]"); - - HashSet set = new HashSet(list.length); - - for (int m = 0; m < list.length; m++) { - String s = list[m].trim(); - if (s.length() > 0) { - set.add(list[m].trim()); - } - } - - if (set.size() > 0) { - //..have to handle the "CTD" exception .. I hate exceptions - if (templates[n].equals("shortsensorname")) { - set.add("CTD"); - } - - matchList.put(templates[n], set); - log.debug("matchList: add '{}' = '{}'", templates[n], set); - } - } - } - } - - if (matchList != null && matchList.size() > 0) { - paramRegex.put(pRegex, matchList); - } else { - paramRegex.put(pRegex, null); - log.debug("matchList: null"); - } - } - } //..end while (line) - - file.close(); - } //..end for (fileNames) - - log.debug("configParamList: {}", configParamList); - - log.debug(".....parseConfigParamFiles: end....."); - } //..end parseConfigParamFiles - - - //............................................................ - //.....................parseTechParamFile..................... - //............................................................ - /** - * Parses the list of technical parameter names of the specification - * There is a provision to detect deprecated units and produce warnings. - * @return True - file parsed; False - failed to parse file - * @throws IOException indicates file read or permission error - */ - public void parseTechParamFile () throws IOException - { - log.debug(".....parseTechParamFile: start....."); - - String[] fileNames = {tecParmFileName, tecParmFileName+".deprecated"}; - LinkedHashSet paramList; - LinkedHashMap>> paramRegex; - - //....loop over the active and deprecated files..... - - for (int nFile = 0; nFile < fileNames.length; nFile++) { - String fileName = fileNames[nFile]; - - log.debug("parsing '{}'", fileName); - - //.......parse the tech param name file....... - //..open the file - File f = new File(fileName); - if (! f.isFile()) { - - if (nFile == 0) { - //..primary file MUST exist - log.error("Tech-Param-file '{}' does not exist", fileName); - throw new IOException("Tech-Parm-File '" + fileName + - "' does not exist"); - } else { - //..deprecated file MAY NOT exist - log.debug("Deprecated-Tech-Param-file '{}' does not exist (optional)", - fileName); - continue; - } - - } else if (! f.canRead()) { - //..file exists but cannot be read --> error - log.error("Tech-Param-File '{}' cannot be read", fileName); - throw new IOException("Tech-Parm-File '" + fileName + - "' cannot be read"); - } - - //..create list variables - //..open file - - if (nFile == 0) { - techParamList = new LinkedHashSet(250); - techParamRegex = new LinkedHashMap>>(250); - - paramList = techParamList; - paramRegex = techParamRegex; - - } else { - techParamList_DEP = new LinkedHashSet(25); - techParamRegex_DEP = new LinkedHashMap>>(25); - - paramList = techParamList_DEP; - paramRegex = techParamRegex_DEP; - } - - BufferedReader file = new BufferedReader(new FileReader(fileName)); - - //.....read through the file.... 
- - //..pattern to recognize/replace templates - Pattern pTemplate = Pattern.compile("<([^>]+?)>"); - log.debug("template regex: '{}'", pTemplate); - - String line; - while ((line = file.readLine()) != null) { - if (pBlankOrComment.matcher(line).matches()) { - log.debug ("blank/comment: '{}'", line); - continue; - } - - //..break the line into individual entries - String column[] = line.split("\\|"); - for (int n = 0 ; n < column.length ; n++) { - String s = column[n].trim(); - column[n] = s; - } - - //..column[0] is the parameter name and includes an example unit - //..need to strip off the unit - - int index = column[0].lastIndexOf('_'); - - if (index <= 0) { - //..poorly formed name - file.close(); - throw new IOException("Technical-Parm-File '" + fileName + - "': Badly formed name '"+column[0]+"'"); - } - - //..well formed named, break it apart - - String param = column[0].substring(0, index); - - //..process the parameter - - Matcher matcher = pTemplate.matcher(param); - - if (! matcher.find()) { - //..no <*> structures -- just a straight fixed name - - paramList.add(param); - log.debug ("add param: '{}'", param); - - } else { - //..contains <*> structures -- convert to a regex - //..convert CONFIG_CpAscentPhaseDepthZoneSampleRate_hertz - //..to CONFIG_(?\w*)CpAscentPhaseDepthZone(?\w*)SampleRate_hertz - - //..capture up to first template - int start = matcher.start(); - StringBuilder regex = new StringBuilder(); - - if (start > 0) { - regex.append(param.substring(0, matcher.start())); - } - //log.debug ("...regex line: param = '{}'", param); - //log.debug ("...start regex: start, regex = '{}', '{}'", start, regex); - - //..add first group (already matched) - String repl = templateReplacement.get(matcher.group(1)); - - if (repl == null) { - repl = defaultTemplateReplacement; - //log.warn("*** DEFAULT TEMPLATE ***"); - } - - regex.append(repl); - //log.debug ("...add template: regex = '{}'", regex); - - //..loop over remaining templates - - int end_after = matcher.end(); - - while (matcher.find()) { - start = matcher.start(); - //log.debug("...end_after, start = '{}', '{}'", end_after, start); - - if (end_after < start) { - regex.append(param.substring(end_after, start)); - //log.debug ("...add literal: '{}'", regex); - } - - repl = templateReplacement.get(matcher.group(1)); - if (repl == null) { - repl = defaultTemplateReplacement; - //log.warn("*** DEFAULT TEMPLATE ***"); - } - - regex.append(repl); - //log.debug ("...add template: '{}'", regex); - - end_after = matcher.end(); - } - - //..patch last bit - - if (end_after < param.length()) { - regex.append(param.substring(end_after)); - //log.debug ("...add ending literal: '{}'", regex); - } - - log.debug ("add regex: '{}'", regex); - Pattern pRegex = Pattern.compile(regex.toString()); - paramRegex.put(pRegex, null); - } - } //..end while (line) - - file.close(); - } //..end for (fileNames) - - log.debug(".....parseTechParamFile: end....."); - } //..end parseTechParamFile + /** + * Determines if the name is matched by a REGEX in the set + * + * @param name The parameter name in question. + * @param regexSet The set of REGEXPs to check + * @return An ArgoConfigTechParamMatch containing full information about the + * match (null = NO MATCH) + */ + private ArgoConfigTechParamMatch checkRegex(String name, + HashMap>> regexSet) { + ArgoConfigTechParamMatch match = null; // ..return object + HashMap unMatchedTemplates = null; // ..return templates + HashMap failedMatchedTemplates = null; // ..return templates + + // ..NOTE..NOTE..NOTE.. 
+ // ..Should be able to truncate the loop below on the first match + // ..ie, find a match -> return + // ..However, during development I *really* want to know if there are + // ..multiple matches - which indicates ambiguous regex patterns + // ..So, FOR NOW, complete the looping and report multiple matches + // ..Returns the LAST one matched + + for (Map.Entry>> entry : regexSet.entrySet()) { + Pattern pattern = entry.getKey(); + HashMap> value = entry.getValue(); + + Matcher m = pattern.matcher(name); + + if (m.matches()) { + unMatchedTemplates = new HashMap(m.groupCount()); + failedMatchedTemplates = new HashMap(m.groupCount()); + + for (String key : knownTemplates) { + try { + String str = m.group(key); + + // ..is there a "match-list" for this (regex param and template) + + if (value != null) { + HashSet matchSet = value.get(key); + + if (matchSet == null) { + // ..there was no match-list to compare to + unMatchedTemplates.put(key, str); + + } else { + // ..there is a match-list - compare it + + if (!matchSet.contains(str)) { + failedMatchedTemplates.put(key, str); + log.debug("checkRegex: match-list success: key '{}', matched '{}'", key, str); + + } else if (log.isDebugEnabled()) { + log.debug("checkRegex: match-list failed: key '{}', matched '{}'", key, str); + } + } + } else { + unMatchedTemplates.put(key, str); + } + + } catch (IllegalArgumentException e) { + } + } + + match = new ArgoConfigTechParamMatch(pattern.toString(), false, unMatchedTemplates.size(), + unMatchedTemplates, failedMatchedTemplates.size(), failedMatchedTemplates); + + log.debug("checkRegex: '{}' regex match #{}: '{}', unMatchedTemplates {}" + "failedTemplates {}", + unMatchedTemplates.size(), failedMatchedTemplates.size()); + + // ****temporary**** return match; + } + } + + return match; + } // ..checkRegex + + /** + * Determines if the name is a CONFIG parameter name + * + * @param name the parameter name in question. + * @return An ArgoConfigTechParamMatch containing full information about the + * match (null = NO MATCH) + * + */ + public ArgoConfigTechParamMatch findConfigParam(String name) { + ArgoConfigTechParamMatch match = findParam(name, configParamList, configParamRegex, configParamList_DEP, + configParamRegex_DEP); + return match; + } + + /** + * Determines if the name is a Tech parameter name + * + * @param name the parameter name in question. + * @return An ArgoConfigTechParamMatch containing full information about the + * match (null = NO MATCH) + * + */ + public ArgoConfigTechParamMatch findTechParam(String name) { + ArgoConfigTechParamMatch match = findParam(name, techParamList, techParamRegex, techParamList_DEP, + techParamRegex_DEP); + return match; + } + + /** + * Determines if the name is a valid parameter name within the supplied lists + * + * @param name the parameter name in question. 
+ * @return An ArgoConfigTechParamMatch containing full information about the + * match (null = NO MATCH) + * + */ + private ArgoConfigTechParamMatch findParam(String name, HashSet activeList, + HashMap>> activeRegex, HashSet deprecatedList, + HashMap>> deprecatedRegex) { + boolean literal = false; + HashMap templates = null; + ArgoConfigTechParamMatch match = null; + + if (activeList != null) { + if (activeList.contains(name)) { + match = new ArgoConfigTechParamMatch(name, false); + + log.debug("findParam: '{}': active literal match", name); + return match; + } + } + + if (deprecatedList != null) { + if (deprecatedList.contains(name)) { + match = new ArgoConfigTechParamMatch(name, true); + + log.debug("findParam: '{}': deprecated literal match", name); + return match; + } + } + + // ..did NOT match one of the literal strings + // ..check for a regex match + + if (activeRegex != null) { + // log.debug("findParam: checking active regex"); + + match = checkRegex(name, activeRegex); + + if (match != null) { + match.isDeprecated = false; + log.debug("findParam: '{}': active regex match '{}'", name, match.match); + return match; + } + } + + if (deprecatedRegex != null) { + // log.debug("findParam: checking active regex"); + + match = checkRegex(name, deprecatedRegex); + + if (match != null) { + match.isDeprecated = true; + log.debug("findParam: '{}': deprecated regex match '{}'", name, match.match); + return match; + } + } + + log.debug("findParam: '{}': failed match", name); + return null; + } + + /** + * Return the TechParam list defined in spec files (argo-tech_names-spec-v*) + * + * @return + */ + public LinkedHashSet getTechParamList() { + return techParamList; + } + + public LinkedHashSet getTechCodeList() { + return techParamCodeList; + } + + public Map> getParamAuthorizedUnits() { + return paramAuthorizedUnits; + } + + public Map> getParamAuthorizedLongName() { + return paramAuthorizedLongName; + } + + /** + * Return the TechParam list containing a regex defined in spec files + * (argo-tech_names-spec-v*) + * + * @return + */ + public LinkedHashMap>> getTechParamRegex() { + return techParamRegex; + } + + /** + * Determines if the value is valid for the unit (mapped to data-type) + * + * @param unit the unit name + * @param value the config value + * @return True - value is a valid setting for the given unit (or unit is + * unknown) False - parameter name is NOT valid (or tech spec not + * opened) + * + */ + public boolean isConfigTechValidValue(String unit, String value) { + // ..turn the unit name into a data type + ConfigTechValueType dt = unitList.get(unit); + + if (dt == null || dt == ConfigTechValueType.UNKNOWN) { + // ..unit name is unknown OR the data type for this unit is unkown + // ..can't check the value setting + // ..so blindly return "it's good" (???) 
+ log.debug("isConfigTechValidValue: default to true: unit, data-type, value = '{}', '{}', '{}'", unit, dt, + value); + + return true; + } + + boolean valid = false; + switch (dt) { + case DATE_TIME: + valid = ArgoDate.checkArgoDatePattern(unit, value); + break; + + case FLOAT: + try { + double n = Double.parseDouble(value); + valid = true; + + } catch (NumberFormatException e) { + valid = false; + } + break; + + case HEX: + valid = value.matches("(?i)(0x)?[0-9a-f]+"); + break; + + case INTEGER: + try { + int n = Integer.parseInt(value); + valid = true; + + } catch (NumberFormatException e) { + valid = false; + } + break; + + case LOGICAL: + valid = value.matches("(?i)true|false|yes|no|1|0"); + break; + + case STRING: + valid = true; + break; + + default: + valid = true; + } + + log.debug("isConfigTechValidValue: valid = {}: unit, data-type, value = '{}', '{}', '{}',", valid, unit, dt, + value); + + return valid; + } + + /** + * Determines the data-type of a parameter unit + * + * @param unit the unit name + * @return data-type name + */ + public String getConfigTechDataType(String name) { + ConfigTechValueType dt = unitList.get(name); + if (dt == null) { + return null; + } else { + return dt.toString(); + } + } + + /** + * Determines if the name is a configuration/technical parameter unit + * + * @param name the unit name in question. + * @return True - unit is a valid technical parameter unit; False - parameter + * name is NOT valid (or tech spec not opened) + * + */ + public boolean isConfigTechUnit(String name) { + if (unitList == null) { + return false; + } + return unitList.containsKey(name); + } + + /** + * Determines if the name is a DEPRECATED configuration/technical parameter unit + * + * @param name the unit name in question. + * @return True - unit is a valid technical parameter unit; False - parameter + * name is NOT valid (or tech spec not opened) + * + */ + public boolean isDeprecatedConfigTechUnit(String name) { + if (unitList_DEP == null) { + return false; + } + return unitList_DEP.containsKey(name); + } + + // ............................................................... + // .....................parseConfigParamFiles..................... + // ...........................................,................... + /** + * Parses the list of configurations parameters of the specification + * + * @return True - file parsed; False - failed to parse file + * @throws IOException indicates file read or permission error + */ + public void parseConfigParamFiles() throws IOException { + log.debug(".....parseConfigParamFiles: start....."); + + String[] fileNames = { coreCfgParmFileName, bioCfgParmFileName, coreCfgParmFileName + ".deprecated", + bioCfgParmFileName + ".deprecated" }; + LinkedHashSet paramList = null; + LinkedHashMap>> paramRegex = null; + + // ....loop over the active and deprecated files..... + + for (int nFile = 0; nFile < fileNames.length; nFile++) { + String fileName = fileNames[nFile]; + + log.debug("parsing '{}'", fileName); + + // .......parse the config param name file....... 
+ // ..open the file + File f = new File(fileName); + if (!f.isFile()) { + + if (nFile <= 1) { + // ..both primary files MUST exist + log.error("Config-Param-file '{}' does not exist", fileName); + throw new IOException("Config-Parm-File '" + fileName + "' does not exist"); + } else { + // ..deprecated file MAY NOT exist + log.debug("Deprecated-Config-Param-file '{}' does not exist (optional)", fileName); + continue; + } + + } else if (!f.canRead()) { + // ..file exists but cannot be read --> error + log.error("Config-Param-File '{}' cannot be read", fileName); + throw new IOException("Config-Parm-File '" + fileName + "' cannot be read"); + } + + // ..create list variables + // ..open file + + if (nFile == 0) { + configParamList = new LinkedHashSet(250); + configParamRegex = new LinkedHashMap>>(250); + + paramList = configParamList; + paramRegex = configParamRegex; + + } else if (nFile == 2) { + // ..do this only for the first deprecated file + configParamList_DEP = new LinkedHashSet(25); + configParamRegex_DEP = new LinkedHashMap>>(25); + + paramList = configParamList_DEP; + paramRegex = configParamRegex_DEP; + } + + BufferedReader file = new BufferedReader(new FileReader(fileName)); + + // .....read through the files.... + + // ..pattern to recognize/replace templates + Pattern pTemplate = Pattern.compile("<([^>]+?)>"); + log.debug("template regex: '{}'", pTemplate); + + String line; + while ((line = file.readLine()) != null) { + if (pBlankOrComment.matcher(line).matches()) { + log.debug("blank/comment: '{}'", line); + continue; + } + + // ..break the line into individual entries + String column[] = line.split("\\|"); + for (int n = 0; n < column.length; n++) { + String s = column[n].trim(); + column[n] = s; + } + + // ..column[0] is the parameter name and includes an example unit + // ..need to strip off the unit + + int index = column[0].lastIndexOf('_'); + + if (index <= 0) { + // ..poorly formed name + file.close(); + throw new IOException("Config-Parm-File '" + fileName + "': Badly formed name '" + column[0] + "'"); + } + + // ..well formed named, break it apart + + String param = column[0].substring(0, index); + + // ..process parameter + + Matcher matcher = pTemplate.matcher(param); + + if (!matcher.find()) { + // ..no <*> structures -- just a straight fixed name + + paramList.add(param); + log.debug("add param: '{}'", param); + + } else { + // ..contains <*> structures -- convert to a regex + // ..convert CONFIG_CpAscentPhaseDepthZoneSampleRate_hertz + // ..to CONFIG_(?\w*)CpAscentPhaseDepthZone(?\w*)SampleRate_hertz + // .. similar for the other templates + + // ........parse all templates off line........ 
+ // ..capture up to first template + int start = matcher.start(); + StringBuilder regex = new StringBuilder(); + + if (start > 0) { + regex.append(param.substring(0, matcher.start())); + } + log.debug("regex line: param = '{}'", param); + // log.debug ("...start regex: start, regex = '{}', '{}'", start, regex); + + // ..add first group (already matched) + String repl = templateReplacement.get(matcher.group(1)); + if (repl == null) { + repl = defaultTemplateReplacement; + log.warn("*** DEFAULT TEMPLATE ***"); + } + + regex.append(repl); + // log.debug ("...add template: regex = '{}'", regex); + // ..loop over remaining templates + + int end_after = matcher.end(); + + while (matcher.find()) { + start = matcher.start(); + // log.debug("...end_after, start = '{}', '{}'", end_after, start); + + if (end_after < start) { + regex.append(param.substring(end_after, start)); + // log.debug ("...add literal: '{}'", regex); + } - //............................................................ - //.....................parseUnitFile..................... - //............................................................ - /** - * Parses the list of configuration/technical parameter units for the specification. - * There is a provision to detect deprecated units and produce warnings. - * @return True - file parsed; False - failed to parse file - * @throws IOException indicates file read or permission error - */ - public void parseUnitFile () throws IOException - { - log.debug(".....parseConfigTechUnitFile: start....."); + repl = templateReplacement.get(matcher.group(1)); + if (repl == null) { + repl = defaultTemplateReplacement; + log.warn("*** DEFAULT TEMPLATE ***"); + } - String[] fileNames = {unitFileName, unitFileName+".deprecated"}; - LinkedHashMap list; + regex.append(repl); + // log.debug ("...add template: '{}'", regex); - //....loop over the active and deprecated files..... - - for (int n = 0; n < fileNames.length; n++) { - String fileName = fileNames[n]; - - //.......parse the param unit file....... - //..open the file - File f = new File(fileName); - if (! f.isFile()) { - - if (n == 0) { - //..primary file MUST exist - log.error("Config-Tech-Unit-file '{}' does not exist", fileName); - throw new IOException("Config-Tech-Unit-File '" + fileName + - "' does not exist"); - } else { - //..deprecated file MAY NOT exist - log.debug("Deprecated-Config-Tech-Unit-file '{}' does not exist (optional)", - fileName); - continue; - } - - } else if (! f.canRead()) { - //..file exists but cannot be read --> error - log.error("Config-Tech-Unit-File '{}' cannot be read", fileName); - throw new IOException("Config-Tech-Unit-File '" + fileName + - "' cannot be read"); - } - - //..create list variables - //..open file - - if (n == 0) { - unitList = new LinkedHashMap(100); - list = unitList; - - } else { - unitList_DEP = new LinkedHashMap(25); - list = unitList_DEP; - } - - BufferedReader file = new BufferedReader(new FileReader(fileName)); - - //.....read through the file.... - log.debug("parsing config/tech unit file '" + fileName + "'"); - - String line; - while ((line = file.readLine()) != null) { - if (pBlankOrComment.matcher(line).matches()) { - log.debug ("blank/comment: '{}'", line); - continue; - } - - //.....split the line: col 1 = unit name; col 2 = data type..... 
- String st[] = line.split("\\|"); - - if (st[0].length() > 0) { - String unit_name = st[0].trim(); - - ConfigTechValueType dt; - - if (st.length > 1) { - dt = ConfigTechValueType.getType(st[1].trim()); - } else { - dt = ConfigTechValueType.UNKNOWN; - } - - log.debug("add unit: '{}' / '{}'", unit_name, dt); - - list.put(st[0].trim(), dt); - } - } - - file.close(); - } + end_after = matcher.end(); + } - } //..end parseUnitFile + // ..patch last bit + + if (end_after < param.length()) { + regex.append(param.substring(end_after)); + // log.debug ("...add ending literal: '{}'", regex); + } + + log.debug("add regex: '{}'", regex); + Pattern pRegex = Pattern.compile(regex.toString()); + + // ..........finished parsing templates.......... + + // ..decide if there are "match lists" to compare to + // ..- core_config_name spec does NOT have any match lines + // ..- bio_config_name spec can have "match lists" + + HashMap> matchList = null; + + if (nFile == 1 || nFile == 3) { + // ..bio-config file has matching list in these columns + String[] templates = { "shortsensorname", "param", "cyclephasename" }; + int[] nColumn = { 2, 3, 4, 5 }; // .."0-based" + + matchList = new HashMap>(templates.length); + + for (int n = 0; n < templates.length; n++) { + if (column.length > nColumn[n]) { + String[] list = column[nColumn[n]].split("[, /]"); + + HashSet set = new HashSet(list.length); + + for (int m = 0; m < list.length; m++) { + String s = list[m].trim(); + if (s.length() > 0) { + set.add(list[m].trim()); + } + } + + if (set.size() > 0) { + // ..have to handle the "CTD" exception .. I hate exceptions + if (templates[n].equals("shortsensorname")) { + set.add("CTD"); + } + + matchList.put(templates[n], set); + log.debug("matchList: add '{}' = '{}'", templates[n], set); + } + } + } + } + + if (matchList != null && matchList.size() > 0) { + paramRegex.put(pRegex, matchList); + } else { + paramRegex.put(pRegex, null); + log.debug("matchList: null"); + } + } + } // ..end while (line) + + file.close(); + } // ..end for (fileNames) + + log.debug("configParamList: {}", configParamList); + + log.debug(".....parseConfigParamFiles: end....."); + } // ..end parseConfigParamFiles + + // ............................................................ + // .....................parseTechParamFile..................... + // ............................................................ + /** + * Parses the list of technical parameter names of the specification There is a + * provision to detect deprecated units and produce warnings. + * + * @return True - file parsed; False - failed to parse file + * @throws IOException indicates file read or permission error + */ + public void parseTechParamFile() throws IOException { + log.debug(".....parseTechParamFile: start....."); + + String[] fileNames = { tecParmFileName, tecParmFileName + ".deprecated" }; + LinkedHashSet paramList; + LinkedHashSet paramCodeList; + LinkedHashMap>> paramRegex; + + // ....loop over the active and deprecated files..... + + for (int nFile = 0; nFile < fileNames.length; nFile++) { + String fileName = fileNames[nFile]; + + log.debug("parsing '{}'", fileName); + + // .......parse the tech param name file....... 
+ // ..open the file + // TO DO : TRY CATCH exception handling with closing file in finally + File f = new File(fileName); + if (!f.isFile()) { + + if (nFile == 0) { + // ..primary file MUST exist + log.error("Tech-Param-file '{}' does not exist", fileName); + throw new IOException("Tech-Parm-File '" + fileName + "' does not exist"); + } else { + // ..deprecated file MAY NOT exist + log.debug("Deprecated-Tech-Param-file '{}' does not exist (optional)", fileName); + continue; + } + + } else if (!f.canRead()) { + // ..file exists but cannot be read --> error + log.error("Tech-Param-File '{}' cannot be read", fileName); + throw new IOException("Tech-Parm-File '" + fileName + "' cannot be read"); + } + + // ..create list variables + // ..open file + + if (nFile == 0) { + techParamList = new LinkedHashSet(250); + techParamRegex = new LinkedHashMap>>(250); + techParamCodeList = new LinkedHashSet(250); + + paramList = techParamList; + paramCodeList = techParamCodeList; + paramRegex = techParamRegex; + + } else { + techParamList_DEP = new LinkedHashSet(25); + techParamRegex_DEP = new LinkedHashMap>>(25); + techParamCodeList_DEP = new LinkedHashSet(25); + + paramList = techParamList_DEP; + paramCodeList = techParamCodeList_DEP; + paramRegex = techParamRegex_DEP; + } + + BufferedReader file = new BufferedReader(new FileReader(fileName)); + + // .....read through the file.... + + // ..pattern to recognize/replace templates + Pattern pTemplate = Pattern.compile("<([^>]+?)>"); + log.debug("template regex: '{}'", pTemplate); + + String line; + while ((line = file.readLine()) != null) { + if (pBlankOrComment.matcher(line).matches()) { + log.debug("blank/comment: '{}'", line); + continue; + } + + // ..break the line into individual entries + String column[] = line.split("\\|"); + for (int n = 0; n < column.length; n++) { + String s = column[n].trim(); + column[n] = s; + } + + parseTechParamName(paramList, paramCodeList, paramRegex, fileName, file, pTemplate, column); + } // ..end while (line) + + file.close(); + + } // ..end for (fileNames) + log.debug(".....parseTechParamFile: end....."); + } // ..end parseTechParamFile + + /** + * Parse a argo-tech_names-spec file line. The first column is + * _unit, the second column is definition which are used for + * long_name. Need to exctract and link the 3 informations : param name, unit + * and long_name. It may have multiple units and long_name available for the + * same parameter name. + * + * @param paramList + * @param paramCodeList + * @param paramRegex + * @param fileName + * @param file + * @param pTemplate + * @param column + * @throws IllegalArgumentException + */ + private void parseTechParamName(LinkedHashSet paramList, LinkedHashSet paramCodeList, + LinkedHashMap>> paramRegex, String fileName, BufferedReader file, + Pattern pTemplate, String[] column) throws IllegalArgumentException { + + String parameterCode = column[0]; + String parameterLongName = column[1]; + // ..column[0] is the parameter name and includes an example unit + // ..need to strip off the unit + String[] paramNameAndUnit = extractParamNameAndUnitFromParamCode(fileName, parameterCode); + String paramName = paramNameAndUnit[0]; + String unit = paramNameAndUnit[1]; + + // add list to authorized unit list for this parameter. TO USE LATER. 
FOR NOW + // USE AUTHORIZE ALL UNITS FROM UNITS TABLE +// if (paramAuthorizedUnits.containsKey(paramName)) { +// paramAuthorizedUnits.get(paramName).add(unit); +// } else { +// paramAuthorizedUnits.put(paramName, new ArrayList<>(Arrays.asList(unit))); +// } + + // add list to authorized long_name list for this parameter + processParamAndLongNameFields(paramName, parameterLongName, pTemplate); + + // ..process the parameter name (can contain regex) and add to right list + // (paramList or paramRegex) + processParameterField(paramList, paramRegex, pTemplate, paramName); + + } + + /** + * tech paramater's variable's name and long_name may contain + * . This function compute the list of different possibility + * for a given tech parameter name. + * + * @param field + */ + private void processParamAndLongNameFields(String paramName, String longName, Pattern pTemplate) { + Matcher matcherParamName = pTemplate.matcher(paramName); + Matcher matcherLongName = pTemplate.matcher(longName); + + if (!matcherParamName.find()) { + + List longNameListFromRegex; + // no in tech param's name. Check if there is one in + // long_name. In this case all possibilities are authorized : + if (matcherLongName.find()) { + // ..contains <*> structures in long_name, build list of possibilities + String regexString = convertParamStringToRegex(longName, matcherLongName); + + Pattern pRegex = Pattern.compile(regexString.toString()); + + longNameListFromRegex = generateParamListFromPattern(pRegex.pattern()); + + } else { + // no in paramName nor in longName. + longNameListFromRegex = new ArrayList<>(Arrays.asList(longName)); + } + + // add list to authorized long_name list for this parameter + if (paramAuthorizedLongName.containsKey(paramName)) { + paramAuthorizedLongName.get(paramName).addAll(longNameListFromRegex); + } else { + paramAuthorizedLongName.put(paramName, longNameListFromRegex); + } + + } else { + // ..contains <*> structures -- convert to a regex and build list of param name + String regexString = convertParamStringToRegex(paramName, matcherParamName); + + Pattern pRegex = Pattern.compile(regexString.toString()); + List paramNameListFromRegex = generateParamListFromPattern(pRegex.pattern()); + + // for each paramName, apply this function (recursivity) : + for (String paramNameWithNoRegex : paramNameListFromRegex) { + // need to repace the short_sensor_name and Z in longName by the same found in + // paramName : + String shortSensorNamefound = extractSpecificValueFromPattern(paramName, paramNameWithNoRegex, pRegex, + "short_sensor_name"); + String zValue = extractSpecificValueFromPattern(paramName, paramNameWithNoRegex, pRegex, "Z"); + + // onlys sort_sensor_name and Z are found in paramName. Replace value if found + // in corresponding longName + String completedLongName = longName; + if (shortSensorNamefound != null) { + completedLongName = completedLongName.replace("", shortSensorNamefound); + } + + if (zValue != null) { + completedLongName = completedLongName.replace("", zValue); + } + + processParamAndLongNameFields(paramNameWithNoRegex, completedLongName, pTemplate); + + } + } + + } + + public String extractSpecificValueFromPattern(String originalPattern, String actualString, Pattern pRegex, + String placeholderName) { + Map values = extractValuesFromPattern(originalPattern, actualString, pRegex); + return values.get(placeholderName); + } + + /** + * Exctract the value which has been used to replace placeholder <*> in a + * string. 
return a Map with placeholder name as key and exctrated value as + * value. Example : originalPattern : + * "NUMBER_AscentSamplesDepthZone" actual string : + * "NUMBER_CroverAscentSamplesDepthZone1" => result = ["short_sensor_name" + * :"Crover", "Z" : "1"] + * + * @param template + * @param concreteValue + * @param pRegex + * @return + */ + private Map extractValuesFromPattern(String originalPattern, String concreteValue, Pattern pRegex) { + + Map extractedValues = new HashMap<>(); + + List placeholderNames = extractPlaceholderNames(originalPattern); + + // presence of <*> + Matcher matcher = pRegex.matcher(concreteValue); + + if (matcher.matches()) { + // Associate each captured group to his placeholder name + for (int i = 0; i < placeholderNames.size() && i < matcher.groupCount(); i++) { + String placeholderName = placeholderNames.get(i); + String value = matcher.group(i + 1); // group start at 1 + extractedValues.put(placeholderName, value); + } + } + + return extractedValues; + + } + + private List extractPlaceholderNames(String pattern) { + List names = new ArrayList<>(); + Pattern placeholderPattern = Pattern.compile("<([^>]+)>"); + Matcher matcher = placeholderPattern.matcher(pattern); + + while (matcher.find()) { + names.add(matcher.group(1)); + } + + return names; + } + + /** + * paramName can contain <*>. If so convert the param name to regex and add it + * to the list. If not, add the paramName to paramList. + * + * @param paramList + * @param paramRegex + * @param pTemplate + * @param param + */ + private void processParameterField(LinkedHashSet paramList, + LinkedHashMap>> paramRegex, Pattern pTemplate, String param) { + + Matcher matcher = pTemplate.matcher(param); + + if (!matcher.find()) { + // ..no <*> structures -- just a straight fixed name + paramList.add(param); + log.debug("add param: '{}'", param); + + } else { + // ..contains <*> structures -- convert to a regex + // ..convert CONFIG_CpAscentPhaseDepthZoneSampleRate_hertz + // ..to CONFIG_(?\w*)CpAscentPhaseDepthZone(?\w*)SampleRate_hertz + + String regexString = convertParamStringToRegex(param, matcher); + + log.debug("add regex: '{}'", regexString); + Pattern pRegex = Pattern.compile(regexString.toString()); + paramRegex.put(pRegex, null); + + // Build additional paramList using regex and referenceTable and add it to the + // paramList + List paramListFromRegex = generateParamListFromPattern(pRegex.pattern()); + + paramList.addAll(paramListFromRegex); + + } + } + + private List generateParamListFromPattern(String pRegex) { + List regexList = new ArrayList<>(Arrays.asList(pRegex)); + boolean replacedAtLeastOne = false; + + for (Map.Entry entry : templateReplacement.entrySet()) { + String key = entry.getKey(); + String groupPattern = entry.getValue(); + + if (!possibleValuesByKey.containsKey(key)) { + continue; + } // no values to inject for this regex pattern + + boolean containRegex = regexList.stream().anyMatch(regex -> regex.contains(groupPattern)); + + if (!containRegex) { + continue; + } + + replacedAtLeastOne = true; + // replace the regex pattern by values (create a liste) from possibleValuesByKey + regexList = regexList.stream().flatMap(pRegexvalue -> { + return generateStringsFromPattern(pRegexvalue, possibleValuesByKey.get(key), + generalizeNamedGroupPattern(groupPattern)).stream(); + + }).collect(Collectors.toList()); + + } + return replacedAtLeastOne ? 
regexList : new ArrayList<>(); + + } + + private List generateStringsFromPattern(String pRegex, Set values, String regexToReplace) { + if (!Pattern.compile(regexToReplace).matcher(pRegex).find()) { + // Pattern not processed (pattern to replace absent) + return Collections.singletonList(pRegex); // return the original pRegex + + } + return values.stream().map(value -> pRegex.toString().replaceAll(regexToReplace, value)) + .collect(Collectors.toList()); + + } + + private String generalizeNamedGroupPattern(String groupRegex) { + Pattern groupPattern = Pattern.compile("\\(\\?<([a-zA-Z][a-zA-Z0-9_]*)>.*?\\)"); + Matcher matcher = groupPattern.matcher(groupRegex); + + if (matcher.matches()) { + String groupName = matcher.group(1); + return "\\(\\?<" + groupName + ">.*?\\)"; + } else { + throw new IllegalArgumentException("Input regex does not contain a valid named group: " + groupRegex); + } + } + + /** + * contains <*> structures -- convert to a regex convert + * CONFIG_CpAscentPhaseDepthZoneSampleRate_hertz to + * CONFIG_(?\w*)CpAscentPhaseDepthZone(?\w*)SampleRate_hertz + * + * @param param + * @param matcher + * @return + */ + private String convertParamStringToRegex(String param, Matcher matcher) { + StringBuilder regex = new StringBuilder(); + + // ..capture up to first template + int start = matcher.start(); + if (start > 0) { + regex.append(param.substring(0, matcher.start())); + } + + // ..add first group (already matched) + String repl = templateReplacement.get(matcher.group(1)); + + if (repl == null) { + repl = defaultTemplateReplacement; + } + + regex.append(repl); + + // ..loop over remaining templates + + int end_after = matcher.end(); + + while (matcher.find()) { + start = matcher.start(); + // log.debug("...end_after, start = '{}', '{}'", end_after, start); + + if (end_after < start) { + regex.append(param.substring(end_after, start)); + // log.debug ("...add literal: '{}'", regex); + } + + repl = templateReplacement.get(matcher.group(1)); + if (repl == null) { + repl = defaultTemplateReplacement; + // log.warn("*** DEFAULT TEMPLATE ***"); + } + + regex.append(repl); + // log.debug ("...add template: '{}'", regex); + + end_after = matcher.end(); + } + + // ..patch last bit + + if (end_after < param.length()) { + regex.append(param.substring(end_after)); + // log.debug ("...add ending literal: '{}'", regex); + } + return regex.toString(); + } + + /** + * Parameter code in Table 14a contain an example unit at the end of string. + * Example : "PRES_SurfaceOffsetTruncatedPlus5dbar_dbar". This method extract + * the tech param name by stripping off the unit. Example : + * "PRES_SurfaceOffsetTruncatedPlus5dbar" + * + * @param fileName + * @param file + * @param parameterCode + * @return parameterName + * @throws IOException + */ + private String[] extractParamNameAndUnitFromParamCode(String fileName, String parameterCode) + throws IllegalArgumentException { + // ..column[0] is the parameter name and includes an example unit + // ..need to strip off the unit + + int index = parameterCode.lastIndexOf('_'); + + if (index <= 0) { + throw new IllegalArgumentException( + "Technical-Parm-File '" + fileName + "': Badly formed name '" + parameterCode + "'"); + } + + // ..well formed named, break it apart + + String parameterName = parameterCode.substring(0, index); + String parameterUnit = parameterCode.substring(index + 1, parameterCode.length()); + String[] results = { parameterName, parameterUnit }; + return results; + } + + // ............................................................ 
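
// Illustrative, self-contained sketch (not part of the patch) of the template handling described
// above: a spec name such as "NUMBER_<short_sensor_name>AscentSamplesDepthZone<Z>" is turned into
// a regex with one capturing group per <placeholder>, concrete names are matched against it, and
// the value that filled each placeholder is recovered positionally, as extractValuesFromPattern
// does. The uniform "\w*" group, the Pattern.quote of literal pieces, and the class/method names
// are simplifications for illustration; the checker itself uses its templateReplacement table.
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class TechParamTemplateSketch {

    private static final Pattern TEMPLATE = Pattern.compile("<([^>]+?)>");

    // Collect the placeholder names, in order of appearance.
    static List<String> placeholderNames(String template) {
        List<String> names = new ArrayList<>();
        Matcher m = TEMPLATE.matcher(template);
        while (m.find()) {
            names.add(m.group(1));
        }
        return names;
    }

    // Replace every <placeholder> with a capturing group, quoting the literal pieces.
    static Pattern templateToRegex(String template) {
        Matcher m = TEMPLATE.matcher(template);
        StringBuilder regex = new StringBuilder();
        int last = 0;
        while (m.find()) {
            regex.append(Pattern.quote(template.substring(last, m.start()))).append("(\\w*)");
            last = m.end();
        }
        regex.append(Pattern.quote(template.substring(last)));
        return Pattern.compile(regex.toString());
    }

    // Map each placeholder name to the value that filled it in the concrete parameter name.
    static Map<String, String> extractValues(String template, String concrete) {
        Map<String, String> values = new LinkedHashMap<>();
        Matcher m = templateToRegex(template).matcher(concrete);
        List<String> names = placeholderNames(template);
        if (m.matches()) {
            for (int i = 0; i < names.size() && i < m.groupCount(); i++) {
                values.put(names.get(i), m.group(i + 1)); // capturing groups are 1-based
            }
        }
        return values;
    }

    public static void main(String[] args) {
        // Prints {short_sensor_name=Crover, Z=1}, matching the example given in the Javadoc above.
        System.out.println(extractValues("NUMBER_<short_sensor_name>AscentSamplesDepthZone<Z>",
                "NUMBER_CroverAscentSamplesDepthZone1"));
    }
}
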
+ // .....................parseUnitFile..................... + // ............................................................ + /** + * Parses the list of configuration/technical parameter units for the + * specification. There is a provision to detect deprecated units and produce + * warnings. + * + * @return True - file parsed; False - failed to parse file + * @throws IOException indicates file read or permission error + */ + public void parseUnitFile() throws IOException { + log.debug(".....parseConfigTechUnitFile: start....."); + + String[] fileNames = { unitFileName, unitFileName + ".deprecated" }; + LinkedHashMap list; + + // ....loop over the active and deprecated files..... + + for (int n = 0; n < fileNames.length; n++) { + String fileName = fileNames[n]; + + // .......parse the param unit file....... + // ..open the file + File f = new File(fileName); + if (!f.isFile()) { + + if (n == 0) { + // ..primary file MUST exist + log.error("Config-Tech-Unit-file '{}' does not exist", fileName); + throw new IOException("Config-Tech-Unit-File '" + fileName + "' does not exist"); + } else { + // ..deprecated file MAY NOT exist + log.debug("Deprecated-Config-Tech-Unit-file '{}' does not exist (optional)", fileName); + continue; + } + + } else if (!f.canRead()) { + // ..file exists but cannot be read --> error + log.error("Config-Tech-Unit-File '{}' cannot be read", fileName); + throw new IOException("Config-Tech-Unit-File '" + fileName + "' cannot be read"); + } + + // ..create list variables + // ..open file + + if (n == 0) { + unitList = new LinkedHashMap(100); + list = unitList; + + } else { + unitList_DEP = new LinkedHashMap(25); + list = unitList_DEP; + } + + BufferedReader file = new BufferedReader(new FileReader(fileName)); + + // .....read through the file.... + log.debug("parsing config/tech unit file '" + fileName + "'"); + + String line; + while ((line = file.readLine()) != null) { + if (pBlankOrComment.matcher(line).matches()) { + log.debug("blank/comment: '{}'", line); + continue; + } + + // .....split the line: col 1 = unit name; col 2 = data type..... + String st[] = line.split("\\|"); + + if (st[0].length() > 0) { + String unit_name = st[0].trim(); + + ConfigTechValueType dt; + + if (st.length > 1) { + dt = ConfigTechValueType.getType(st[1].trim()); + } else { + dt = ConfigTechValueType.UNKNOWN; + } + + log.debug("add unit: '{}' / '{}'", unit_name, dt); + + list.put(st[0].trim(), dt); + } + } + + file.close(); + } + + } // ..end parseUnitFile //................................................................... // INNER CLASSES //................................................................... - public class ArgoConfigTechParamMatch - { - //......object variables........ - - public boolean isDeprecated; - public String match; - public int nUnMatchedTemplates; - public HashMap unMatchedTemplates; - public int nFailedMatchedTemplates; - public HashMap failedMatchedTemplates; - - //........constructors.......... 
- - public ArgoConfigTechParamMatch (String match, boolean isDeprecated) - { - this.match = new String(match); - this.isDeprecated = isDeprecated; - this.nUnMatchedTemplates = 0; - this.unMatchedTemplates = null; - this.nFailedMatchedTemplates = 0; - this.failedMatchedTemplates = null; - } - - public ArgoConfigTechParamMatch (String match, boolean isDeprecated, - int nUnMatchedTemplates, - HashMap unMatchedTemplates, - int nFailedMatchedTemplates, - HashMap failedMatchedTemplates) - { - this.match = new String(match); - this.isDeprecated = isDeprecated; - this.nUnMatchedTemplates = nUnMatchedTemplates; - this.unMatchedTemplates = unMatchedTemplates; - this.nFailedMatchedTemplates = nFailedMatchedTemplates; - this.failedMatchedTemplates = failedMatchedTemplates; - } - } - - -} //..end class + public class ArgoConfigTechParamMatch { + // ......object variables........ + + public boolean isDeprecated; + public String match; + public int nUnMatchedTemplates; + public HashMap unMatchedTemplates; + public int nFailedMatchedTemplates; + public HashMap failedMatchedTemplates; + + // ........constructors.......... + + public ArgoConfigTechParamMatch(String match, boolean isDeprecated) { + this.match = new String(match); + this.isDeprecated = isDeprecated; + this.nUnMatchedTemplates = 0; + this.unMatchedTemplates = null; + this.nFailedMatchedTemplates = 0; + this.failedMatchedTemplates = null; + } + + public ArgoConfigTechParamMatch(String match, boolean isDeprecated, int nUnMatchedTemplates, + HashMap unMatchedTemplates, int nFailedMatchedTemplates, + HashMap failedMatchedTemplates) { + this.match = new String(match); + this.isDeprecated = isDeprecated; + this.nUnMatchedTemplates = nUnMatchedTemplates; + this.unMatchedTemplates = unMatchedTemplates; + this.nFailedMatchedTemplates = nFailedMatchedTemplates; + this.failedMatchedTemplates = failedMatchedTemplates; + } + } + +} // ..end class diff --git a/file_checker_exec/src/main/java/fr/coriolis/checker/specs/ArgoFileSpecification.java b/file_checker_exec/src/main/java/fr/coriolis/checker/specs/ArgoFileSpecification.java index de1ceb6..35d85c3 100644 --- a/file_checker_exec/src/main/java/fr/coriolis/checker/specs/ArgoFileSpecification.java +++ b/file_checker_exec/src/main/java/fr/coriolis/checker/specs/ArgoFileSpecification.java @@ -17,7 +17,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import fr.coriolis.checker.filetypes.ArgoDataFile; +import fr.coriolis.checker.core.ArgoDataFile; import ucar.ma2.DataType; /** @@ -25,7 +25,7 @@ * *
* This class does NOT implement the "format verification" process. See - * {@link fr.coriolis.checker.filetypes.ArgoDataFile#verifyFormat() + * {@link fr.coriolis.checker.core.ArgoDataFile#verifyFormat() * ArgoDataFile.verifyFormat} for more information. *
* @@ -1111,6 +1111,10 @@ public void openSpecification(boolean fullSpec, String specDir, ArgoDataFile.Fil // ..technical parameter file if (fType == ArgoDataFile.FileType.TECHNICAL) { ConfigTech = new ArgoConfigTechParam(specDir, version, false, true); + + // ..for each entry, automatically make * variables too + addOptionnalTechParamVariables(); + } } } // ..end openFullSpecification @@ -1141,6 +1145,96 @@ private void addPersistentGroups() { } } + // .................................................. + // ........TECH TIME SERIES SPECIAL HANDLING......... + // .................................................. + /** + * Argo Technical file can have times series of TECH_PARAM. For each + * entry, automatically make * variables too. These + * variables are members of group : JULD, CYCLE_NUMBER_MEAS, MEASUREMENT_CODE, + * . SO for each , create a group named and + * containing variable and JULD, CYCLE_NUMBER_MEAS, + * MEASUREMENT_CODE and all potential variables defined under the + * "TECH_TIMESERIES" group name defined in the opt file. Summary : for each + * create a group named and add into this group all + * variables contained in the groupe(if exists) "TECH_TIMESERIES". + */ + private void addOptionnalTechParamVariables() { + // get tech param names (without unit) + for (String techParamName : ConfigTech.getTechParamList()) { + createAndAddToGroupOptionnalTechParamVariable(techParamName); + } + + } + + /** + * Build the variable. The long_name and unit are retrieved from + * + * @param techParamName : variable name (without the unit). + */ + private void createAndAddToGroupOptionnalTechParamVariable(String techParamName) { + // 1 - create the variable : + ArgoDimension dimTechParam[] = new ArgoDimension[1]; + dimTechParam[0] = dimHash.get("N_TECH_MEASUREMENT"); + ArgoVariable techParamVar = new ArgoVariable(techParamName, DataType.FLOAT, dimTechParam, techParamName); + + // may have multiple units or longç_name for a same parameter name. In those + // cases, we ignore units and/or long_name: IGNORE THIS +// if (ConfigTech.getParamAuthorizedLongName().get(techParamName) == null +// || ConfigTech.getParamAuthorizedLongName().get(techParamName).size() != 1) { +// addAttr(techParamVar, long_name, ATTR_IGNORE_VALUE + "%15.1f", DataType.STRING); +// } else { +// addAttr(techParamVar, long_name, ConfigTech.getParamAuthorizedLongName().get(techParamName).get(0), +// DataType.STRING); +// } +// if (ConfigTech.getParamAuthorizedUnits().get(techParamName) == null +// || ConfigTech.getParamAuthorizedUnits().get(techParamName).size() != 1) { +// addAttr(techParamVar, units, ATTR_IGNORE_VALUE + "%15.1f", DataType.STRING); +// } else { +// addAttr(techParamVar, units, ConfigTech.getParamAuthorizedUnits().get(techParamName).get(0), +// DataType.STRING); +// } +// + //2025-09 : as it may have multiple units (in fact all units from reference units table) and multiple long_name (defined in R14), we will have a special check for this. + // For the generic test, we ignore these attributes. 
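
// Note (illustrative, not part of the patch): the two addAttr calls below set long_name and units
// to ATTR_IGNORE_VALUE so the generic attribute comparison skips them for these <TECH_PARAM>
// time-series variables; their actual values are validated later by
// ArgoFileValidator.checkSpecialTechParamVarAttributeValue, against the tech units table and the
// per-parameter long_name list built while parsing the argo-tech_names-spec file.
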
+ addAttr(techParamVar, long_name, ATTR_IGNORE_VALUE + "%15.1f", DataType.STRING); + addAttr(techParamVar, units, ATTR_IGNORE_VALUE + "%15.1f", DataType.STRING); + + varHash.put(techParamName, techParamVar); + // 2 - add it to the optionnal variables : + optVar.add(techParamName); + // 3 - create the group for the variable + createGroup(groupMembers, techParamName); + // 4 - add into this group the variable member of group "TECH_TIMESERIES" (JULD + HashSet techTimeSeriesGroupMembers = groupMembers.get("TECH_TIMESERIES"); + addMembersToGroup(groupMembers, techParamName, techTimeSeriesGroupMembers); + // 5 - link varName to group name : + varGroup.put(techParamName, techParamName); + } + + /** + * add a new group to groupMembers instance variable. + * + * @param groupMembers + * @param groupName + */ + private void createGroup(HashMap> groupMembers, String groupName) { + if (!groupMembers.containsKey(groupName)) { // ..new group - init + groupMembers.put(groupName, new HashSet()); + log.debug("create group: '{}'", groupName); + } + + } + + private void addMembersToGroup(HashMap> groups, String groupName, + HashSet newMembers) { + if (groups.containsKey(groupName)) { + for (String member : newMembers) { + groups.get(groupName).add(member); + } + } + } + // ............................................. // .................parseCdlFile................ // ............................................. @@ -1556,7 +1650,8 @@ private boolean parseVarLine(String line) { // ...................................................... /** * Parses the "optional element" file of a specification. This file defines - * which elements in the CDL specification are optional. + * which elements in the CDL specification are optional. Element can me + * variables or dimensions. * * @return True - file parsed; False - failed to parse file * @throws IOException indicates file read or permission error @@ -1800,6 +1895,8 @@ public boolean parseAttrRegexFile() throws IOException { */ public boolean parseParamFile(ArgoDataFile.FileType fileType, String version, boolean listOnly) throws IOException { + // TO DO : HANDLE REGEX in PARAM NAME and ATTRIBUTE : see ArgoConfigTechParam + // Create a generic class ? 
log.debug(".....parseParamFile: start....."); log.debug("fileType, version: '{}' '{}' --- listOnly {}", fileType, version, listOnly); diff --git a/file_checker_exec/src/main/java/fr/coriolis/checker/validators/ArgoFileValidator.java b/file_checker_exec/src/main/java/fr/coriolis/checker/validators/ArgoFileValidator.java new file mode 100644 index 0000000..9c5a457 --- /dev/null +++ b/file_checker_exec/src/main/java/fr/coriolis/checker/validators/ArgoFileValidator.java @@ -0,0 +1,1521 @@ +package fr.coriolis.checker.validators; + +import java.io.IOException; +import java.io.PrintStream; +import java.text.DecimalFormat; +import java.util.Date; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.regex.Pattern; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import fr.coriolis.checker.core.ArgoDataFile; +import fr.coriolis.checker.core.ArgoDataFile.FileType; +import fr.coriolis.checker.core.ValidationResult; +import fr.coriolis.checker.specs.ArgoAttribute; +import fr.coriolis.checker.specs.ArgoDate; +import fr.coriolis.checker.specs.ArgoDimension; +import fr.coriolis.checker.specs.ArgoFileSpecification; +import fr.coriolis.checker.specs.ArgoReferenceTable; +import fr.coriolis.checker.specs.ArgoVariable; +import ucar.ma2.ArrayChar; +import ucar.ma2.DataType; +import ucar.ma2.Index; +import ucar.nc2.Attribute; +import ucar.nc2.Dimension; +import ucar.nc2.Variable; + +/** + * FORMAT VERIFICATION:
+ * The format verification process (verifyFormat) compares the data file + * to the "specification". Details of the specification are documented in + * {@link fr.coriolis.checker.specs.ArgoFileSpecification}. + *
+ * The verification process ensures that: + *
    + *
  • The global attributes match the specification + *
  • The dimensions in the data file match the specification (name and value) + *
  • The variables in the data file match the specification (name, type, and + * dimensions) + *
  • The attributes of the variables match (name and setting) + *
+ *
+ * The specification includes two additional features: optional parameters and + * highly-desirable parameters. + *
+ * Optional parameters: These are dimensions and variables that may be omitted + * from a data file. These are generally parameters that are not measured by all + * platforms (such as CNDC and DOXY). Additionally, the variables can be grouped + * into parameter groups such that all of the variables in the group must either + * be omitted or present. (For instance, either all DOXY-related variables must + * be present or all must be missing.) + * + * Highly-desirable parameters: The specification also includes + * highly-desirable parameters. These are variables that are expected to + * have values with certain settings. Failure to match these only generates a + * format-warning. + * + * The result of verifyFormat is a simple boolean: true - format passed; + * false - format failed. A failed format is either an ERROR, if the format + * is invalid, or a WARNING, if the highly-desirable parameters failed. + *
+ * Use the methods to retrieve information about the failures, including + * descriptive text messages. + */ +public class ArgoFileValidator { + // class variables + // ..standard i/o shortcuts + private static PrintStream stderr = new PrintStream(System.err); + private static final Logger log = LogManager.getLogger("ArgoFileValidator"); + + static Pattern pDataMode; // ..CDL "variables:" tag + static { + // ..check for legal data_mode + pDataMode = Pattern.compile("[RDA]+"); + } + private final static DecimalFormat cycleFmt = new DecimalFormat("000"); + protected final static Date earliestDate = ArgoDate.get("19970101000000"); + protected final static long oneDaySec = 1L * 24L * 60L * 60L * 1000L; + + // ..object variables + protected final ArgoDataFile arFile; + protected ValidationResult validationResult; + + public ArgoFileValidator(ArgoDataFile arFile) { + this.arFile = arFile; + this.validationResult = new ValidationResult(); + } + + // ================== + // Validation methods + // ================== + + /** + * Verifies whether this file conforms to the Argo Netcdf specification. The + * specification for this file type and version must be open. (See + * + * @see fr.coriolis.checker.specs.ArgoFileSpecification ) + *
+ *
    + *
  • nFormatErrors will retrieve the number of errors. + *
  • formatErrors will retrieve the error descriptions. + *
  • nFormatWarnings will retrieve the number of warnings + *
  • formatWarnings will retrieve the warning descriptions. + *
+ * + * @param dacName Name of the dac for diagnostic purposes + * @return boolean status indicator: True - file checked (status unspecified); + * False - failed to check format + */ + public boolean validateFormat(String dacName) { + if (arFile.getFileSpec() == null) { + log.info("File specification not opened"); + ValidationResult.lastMessage = "ERROR: File specification not opened for this file"; + validationResult.addError("ERROR: File specification not opened for this file"); + return false; + } + + /* + * METHOD: 1) Iterate through the data file dimensions and compare to the spec - + * if an optional "extra" dimension is encountered, add it to the spec 2) + * Iterate through the data file variables and compare to the spec 2a) - for + * each variable, iterate through the attributes and compare - attribute + * regexs?????? 3) Iterate through the spec dimensions and compare to the data + * file 4) Iterate through the spec variables and compare to the data file - + * postpone processing of missing optional variables 4a) - for each variable, + * iterate through the attributes and compare 5) For missing optional variables, + * check that all variables for the group are missing. 6) Iterate through the + * spec global attributes and compare to the data file 7) Clear any "extra" + * dimensions that were added + */ + + HashSet dataElement = new HashSet(); // ..reported elements + HashSet dataGroup = new HashSet(); // ..groups with reported elements + + // ......Step 1: Compare the data file dimensions to the specification....... + log.debug(".....verifyFormat: compare data dimensions to spec....."); + verifyFileDimensions(dataElement, dataGroup); + + // .......Step 2: Compare the data file variables to the spec....... + + log.debug(".....verifyFormat: compare data variables to spec....."); + verifyFileVariables(dataElement, dataGroup); + + // ......Step 3: Check the spec dimensions against data file......... + // .. - only need to check existence - definitions already checked above + log.debug(".....verifyFormat: compare spec dimensions to data....."); + verifySpecDimensionsPresenceInData(); + + // ......Step 4: Check the spec variables against the data......... + // .. - only need to check existence - definitions already checked above + log.debug(".....verifyFormat: compare spec variables to data....."); + verifySpecVariablePresenceInData(); + + // ..............Step 5: Finish group variables......................... + // .. - for each group with a reported variable -- make sure all of them are + // reported + log.debug(".....verifyFormat: check reported groups....."); + checkGroupsCompleteness(dataElement, dataGroup); + + // ......Step 6: Compare the spec global attributes to the file....... + log.debug(".....verifyFormat: compare spec global attr to data file....."); + verifyGlobalAttributes(dacName); + + arFile.getFileSpec().clearExtraDimensions(); + + log.debug(".....verifyFormat: completed....."); + + return true; + } // ..end validateFormat + + /** + * Before checking data, verify if the file had not failed the format + * validations and if a valid dac name hase been passed in command line. Nulls + * in the "char" variables are also checked. + * + * @param dacName name of the DAC for this file + * @param ckNulls true = check all strings for NULL values; false = skip + * @return success indicator. true - validation was performed. false - + * validation could not be performed (getMessage() will return the + * reason). 
+ * @throws IOException If an I/O error occurs + */ + public boolean basicDataValidation(boolean ckNulls) throws IOException { + // before checking data, verify if the file had not failed the format validation + // : + if (!validationResult.isValid()) { + ValidationResult.lastMessage = new String( + "File must be verified (verifyFormat) " + "successfully before validation"); + return false; + } + + // check dacName passed in argument line: + if (!checkDacNameArgument()) { + ValidationResult.lastMessage = new String("Unknown DAC name = '" + arFile.getDacName() + "'"); + return false; + } + + if (ckNulls) { + validateStringNulls(); + } + + return true; + } + + protected void validateCreationUpdateDates(Date fileTime) { + long fileSec = fileTime.getTime(); + + String creation = arFile.readString("DATE_CREATION").trim(); + String update = arFile.readString("DATE_UPDATE").trim(); + arFile.setCreationDate(creation); + arFile.setUpdateDate(update); + + log.debug("DATE_CREATION: '{}'", creation); + log.debug("DATE_UPDATE: '{}'", update); + + // ...........creation date checks:............. + // ..set, after earliestDate, and before file time + Date dateCreation = null; + boolean haveCreation = false; + long creationSec = 0; + + if (creation.trim().length() <= 0) { + validationResult.addError("DATE_CREATION: Not set"); + + } else { + dateCreation = ArgoDate.get(creation); + haveCreation = true; + + if (dateCreation == null) { + haveCreation = false; + validationResult.addError("DATE_CREATION: '" + creation + "': Invalid date"); + + } else { + creationSec = dateCreation.getTime(); + + if (dateCreation.before(earliestDate)) { + validationResult.addError("DATE_CREATION: '" + creation + "': Before earliest allowed date ('" + + ArgoDate.format(earliestDate) + "')"); + + } else if ((arFile.getCreationSec() - fileSec) > oneDaySec) { + validationResult.addError("DATE_CREATION: '" + creation + "': After GDAC receipt time ('" + + ArgoDate.format(fileTime) + "')"); + } + } + } + arFile.setHaveCreationDate(haveCreation); + arFile.setCreationSec(creationSec); + // ............update date checks:........... 
+ // ..set, not before creation time, before file time + Date dateUpdate = null; + boolean haveUpdate = false; + long updateSec = 0; + + if (update.trim().length() <= 0) { + validationResult.addError("DATE_UPDATE: Not set"); + } else { + dateUpdate = ArgoDate.get(update); + haveUpdate = true; + + if (dateUpdate == null) { + validationResult.addError("DATE_UPDATE: '" + update + "': Invalid date"); + haveUpdate = false; + + } else { + updateSec = dateUpdate.getTime(); + + if (arFile.isHaveCreationDate() && dateUpdate.before(dateCreation)) { + validationResult + .addError("DATE_UPDATE: '" + update + "': Before DATE_CREATION ('" + creation + "')"); + } + + if ((updateSec - fileSec) > oneDaySec) { + validationResult.addError("DATE_UPDATE: '" + update + "': After GDAC receipt time ('" + + ArgoDate.format(fileTime) + "')"); + } + } + } + arFile.setHaveUpdateDate(haveUpdate); + arFile.setUpdateSec(updateSec); + } + + protected boolean validatePlatfomNumber(String platformNumberStr) { + log.debug("{}: '{}'", "PLATFORM_NUMBER", platformNumberStr); + + return platformNumberStr.matches("[1-9][0-9]{4}|[1-9]9[0-9]{5}"); + } + + protected void validateDataCentre(ArgoReferenceTable.DACS dac) { + String name = "DATA_CENTRE"; // ..ref table 4 (and valid for DAC) + String str = arFile.readString(name).trim(); + log.debug("{}: '{}'", name, str); + if (dac != null) { + if (!ArgoReferenceTable.DacCenterCodes.get(dac).contains(str)) { + validationResult.addError("DATA_CENTRE: '" + str + "': Invalid for DAC " + dac); + } + + } else { // ..incoming DAC not set + if (!ArgoReferenceTable.DacCenterCodes.containsValue(str)) { + validationResult.addError("DATA_CENTRE: '" + str + "': Invalid (for all DACs)"); + } + } + } + + private void verifyGlobalAttributes(String dacName) { + for (String name : arFile.getFileSpec().getGlobalAttributeNames()) { + ArgoAttribute specAttr = arFile.getFileSpec().getGlobalAttribute(name); + Attribute dataAttr = arFile.findGlobalAttribute(name); + if (log.isDebugEnabled()) { + log.debug("spec attribute: " + name); + } + + if (dataAttr == null) { + // ..attribute in spec file is not in the data file + + String err = String.format("global attribute: %s: not defined in data file", name); + // formatErrors.add(err); + + // ################# TEMPORARY WARNING ################ + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("{}: {}: {}", dacName, arFile.getFileName(), err); + + } else { + // ..spec attr is in data file -- check values + checkGlobalAttributeValue(dacName, name, specAttr, dataAttr); + } + } + } + + // ......................ckVarAttr................... + /** + * Compares the attributes of a Netcdf Variable and an ArgoVariable + * + * @param dataVar the Netcdf variable object + * @param specVar the ArgoVariable object + * @return the boolean status. True - they are the same. False - they differ. 
+ */ + private boolean ckVarAttr(Variable dataVar, ArgoVariable specVar) { + // ..compare attributes + boolean returnVal = true; + + String varName = dataVar.getShortName(); + + DATA_ATTR_LOOP: for (Attribute dataAttr : dataVar.getAttributes()) { + String attrName = dataAttr.getShortName(); + ArgoAttribute specAttr = specVar.getAttribute(attrName); + // log.debug ("ckVarAttr: {}", attrName); + + if (specAttr == null) { + // ..data file attribute is not in the specification + + /* + * 2015-11: ADMT-16 decided that "extra attributes" are allowed + * + * The following code would cause it to be an error: + * + * returnVal = false; formatErrors.add("attribute: '"+varName+":"+ + * attrName+"' not defined in specification '"+ spec.getSpecName()+"'"); + * + * For now... they will be ignored by just doing nothing about it + */ + + log.info("extra attribute (allowed): {}:{} not in spec", varName, attrName); + continue; + } + + // ..check if the spec attribute is label "NOT_ALLOWED" error + ArgoAttribute.AttrHandling specialHandling = specAttr.getHandling(); + if (!checkAttributNotAllowed(varName, attrName, specialHandling)) { + returnVal = false; + continue; + } + ; + + // ..check the data attribute type + // ..the 3 types supported by ArgoAttribute are: Number, String, Object + // ..netCDF supports: Number, String, List of (number or string) + // .. + // ..Number and String are the only 2 currently in use. + // ..Hard crash if it is anything else. + + if (!checkAttributeType(varName, dataAttr, attrName, specAttr)) { + returnVal = false; + continue; + } + // ..check the attribute value + if (!checkVarAttributeValue(dataVar, varName, dataAttr, attrName, specAttr, specialHandling)) { + returnVal = false; + continue; + } + // 2025-09 : special check for TECH_PARAM variable that authorized multiple + // units and long_name for a same variable name: + // (not ideal and should be changed when tech_param names and units handling in + // NVS change. + if (arFile.fileType() == ArgoDataFile.FileType.TECHNICAL) { + if (!checkSpecialTechParamVarAttributeValue(dataVar, varName, dataAttr, attrName, specAttr, + specialHandling)) { + returnVal = false; + continue; + } + } + + } // ..end for (dataAttr) + return returnVal; + } // ..end ckVarAttr + + /** + * // ..check the attribute value. if the spec attribute is "IGNORE_COMPLETELY" + * or "IGNORE_VALUE", we just don't care about the value so skip the check + * + * @param dataVar + * @param varName + * @param dataAttr + * @param attrName + * @param specAttr + * @param specialHandling + * @return + */ + private boolean checkVarAttributeValue(Variable dataVar, String varName, Attribute dataAttr, String attrName, + ArgoAttribute specAttr, ArgoAttribute.AttrHandling specialHandling) { + if (!(specialHandling == ArgoAttribute.AttrHandling.IGNORE_COMPLETELY + || specialHandling == ArgoAttribute.AttrHandling.IGNORE_VALUE)) { + String dataAttrValue = null; + String specAttrValue = specAttr.getValue().toString(); + + if (dataAttr.isString()) { + try { + dataAttrValue = dataAttr.getStringValue(); + } catch (Exception e) { + validationResult.addError("attribute: " + varName + ":" + attrName + ": Bad value. Not a string."); + return false; + + } + + } else { + try { + dataAttrValue = dataAttr.getNumericValue().toString(); + } catch (Exception e) { + validationResult + .addError("attribute: " + varName + ":" + attrName + ": Bad value. 
Not a numeric value."); + return false; + } + } + + if (!dataAttrValue.equals(specAttrValue)) { + // ..data file attribute is not the same as the spec file attribute + ArgoFileSpecification.AttrRegex regex = arFile.getFileSpec().getAttrRegex(varName, attrName); + if (regex == null) { + // ..no regex .. this is a format error + validationResult.addError( + "attribute: " + varName + ":" + attrName + ": Definitions differ " + "\n\tSpecification = '" + + specAttrValue + "'" + "\n\tData File = '" + dataAttrValue + "'"); + log.info("format error: {}:{} " + "attribute mismatch (no regex): spec, data = {}, {}", varName, + attrName, specAttrValue, dataAttrValue); + + return false; + + } else { + // ..regex defined ... does it match? + if (!regex.pattern.matcher(dataAttrValue).matches()) { + validationResult.addError("attribute: " + dataVar.getShortName() + ":" + dataAttr.getShortName() + + ": Definitions differ " + "\n\tSpecification = '" + regex.pattern + "' (regex)" + + "\n\tData File = '" + dataAttrValue + "'"); + log.info("format error: " + attrName + " attribute regex mismatch '" + regex.pattern + "'"); + return false; + + } else { + if (regex.warn) { + validationResult.addError("attribute: " + dataVar.getShortName() + ":" + + dataAttr.getShortName() + ": Accepted; not standard value" + + "\n\tSpecification = '" + specAttrValue + "'" + "\n\tException allowed = '" + + regex.pattern + "' (regex)" + "\n\tData File = '" + dataAttrValue + "'"); + log.warn("regex match (WARN): attribute '{}:{} = '{}' matches '{}'", varName, attrName, + dataAttrValue, regex.pattern); + } else { + log.warn("regex match (NO WARN): attribute '{}:{} = '{}' matches '{}'", varName, attrName, + dataAttrValue, regex.pattern); + } + } + } // ..end if regex + } // ..end attr.equals + } else { + log.debug("ckVarAttr: '{}': marked as IGNORE", attrName); + } // ..end if (marked IGNORE) + return true; + } + + /** + * Special case with wich for a same variable name have list of + * units possble and may have different long_name. First, identify if the + * variable name is in the ConfigTech's param list. Then, if attribute to check + * is units or long_name, check the authorized list : all units from reference + * table are authorized for all parameters and list of long_name by parameter. + */ + private boolean checkSpecialTechParamVarAttributeValue(Variable dataVar, String varName, Attribute dataAttr, + String attrName, ArgoAttribute specAttr, ArgoAttribute.AttrHandling specialHandling) { + if (arFile.getFileSpec().ConfigTech != null && arFile.getFileSpec().ConfigTech.findTechParam(varName) != null) { + // this variable is TECH_PARAM variable, check its attribut with list of units + // and long_name. + // dataAtt type already check in checkVarAttributeValue + String dataAttrValue = dataAttr.getStringValue(); + if (attrName.equals("units") && !arFile.getFileSpec().ConfigTech.isConfigTechUnit(dataAttrValue) + && !arFile.getFileSpec().ConfigTech.isDeprecatedConfigTechUnit(dataAttrValue)) { + // dataAtt type already check in checkVarAttributeValue + // units not found in reference table ! 
+ validationResult.addError("attribute: " + varName + ":" + attrName + ": Definitions differ " + + "\n\tSpecification = See argo-tech_units-spec units list" + "\n\tData File = '" + + dataAttrValue + "'"); + + } + List authorizedLongName = arFile.getFileSpec().ConfigTech.getParamAuthorizedLongName().get(varName); + if (attrName.equals("long_name") && authorizedLongName != null + && !authorizedLongName.contains(dataAttrValue)) { + + validationResult.addError("attribute: " + varName + ":" + attrName + ": Definitions differ " + + "\n\tSpecification = See argo-tech_names-spec list, definition column" + + "\n\tData File = '" + dataAttrValue + "'"); + + } + + } + + return true; + } + + /** + * // ..check the data attribute type. The 3 types supported by ArgoAttribute + * are: Number, String, Object netCDF supports: Number, String, List of (number + * or string) Number and String are the only 2 currently in use. Hard crash if + * it is anything else. + * + * @param varName + * @param dataAttr + * @param attrName + * @param specAttr + * @return + */ + private boolean checkAttributeType(String varName, Attribute dataAttr, String attrName, ArgoAttribute specAttr) { + DataType dataAttrType = dataAttr.getDataType(); + + if (specAttr.isString()) { + if (!dataAttr.isString()) { + String err = String.format("attribute: %s:%s: Incorrect attribute value type. Must be string", varName, + attrName); + // formatErrors.add(err) + + // ################# TEMPORARY WARNING ################ + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("{}: {}: {}", arFile.getDacName(), arFile.getFileName(), err); + + return false; + + } + // log.debug("ckVarAttr: '{}': spec/data both String", attrName); + + } else if (specAttr.isNumeric()) { + if (!dataAttrType.isNumeric()) { + String err = String.format("attribute: %s:%s: Incorrect attribute value type. Must be numeric", varName, + attrName); + + // formatErrors.add(err); + // ################# TEMPORARY WARNING ################ + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + + log.warn("{}: {}: {}", arFile.getDacName(), arFile.getFileName(), err); + + return false; + + } + // log.debug("ckVarAttr: '{}': spec/data both Number", attrName); + + } else { + // ..what the hell were you thinking? + stderr.println("\n\n******\n" + "****** PROGRAM ERROR: ArgoDataFile(ckvarattr) " + varName + ":" + attrName + + ": unknown specAttr type. TERMINATING.\n" + "******"); + System.exit(1); + } + return true; + } + + private boolean checkAttributNotAllowed(String varName, String attrName, + ArgoAttribute.AttrHandling specialHandling) { + if (specialHandling == ArgoAttribute.AttrHandling.NOT_ALLOWED) { + String err = String.format("attribute: %s:%s: Attribute is not allowed.", varName, attrName); + // formatErrors.add(err) + + // ################# TEMPORARY WARNING ################ + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("{}: {}: {}", arFile.getDacName(), arFile.getFileName(), err); + + return false; + + } + return true; + } + + // ......................ckVarDims................... + /** + * Compares the dimensions of a Netcdf Variable and an ArgoVariable + * + * @param dataVar the Netcdf variable object + * @param specVar the ArgoVariable object + * @return the boolean status. True - they are the same. False - they differ. 
+ */ + private boolean ckVarDims(Variable dataVar, ArgoVariable specVar) { + String specDims = specVar.getDimensionsString(); + String dataDims = dataVar.getDimensionsString(); + + int specRank = specVar.getRank(); // ..rank of the variable W/O extra dims + int dataRank = dataVar.getRank(); + + boolean altDimVar = specVar.canHaveAlternateDimensions(); + boolean exDimVar = specVar.canHaveExtraDimensions(); + + boolean pass = true; + if (dataRank < specRank) { + // ..always an error + pass = false; + + } else if (specDims.equals(dataDims)) { + // ..always a success + pass = true; + + } else if (!(altDimVar || exDimVar)) { + // ..dimension strings don't match + // ..not a "special variable" + pass = false; + + } else { + // ..where are we? + // .. dataRank >= specRank + // .. data dimension names != spec dimension names + // .. it is a "special variable" + + // ..need to disect the individual dimensions + + for (int n = 0; n < dataRank; n++) { + String dDimName = dataVar.getDimension(n).getShortName(); + + if (n < specRank) { + // ..one of the regular dimensions + + ArgoDimension sDim = specVar.getDimension(n); + String sDimName = sDim.getName(); + + if (!sDimName.equals(dDimName)) { + // ..isn't the expected one + + if (!sDim.isAllowedAlternateDimensionName(dDimName)) { + // ..this captures to possibilities + // ..1) this dimension is not an alt-dim + // ..or + // ..2) this dimension is not an allowed dim within an alt-dim + + pass = false; + break; + + } else { + log.debug("ckVarDims: allowed alternate dim = '{}'", dDimName); + } + + } + + } else { + // ..an "extra dimension" + // ..check that it is valid + if (!arFile.getFileSpec().getDimension(dDimName).isExtraDimension()) { + pass = false; + break; + } + } + } // ..end for (dataRank) + } // ..end if (specRank ... dataRank)..end if evaluating dimensions + + log.debug("ckVarDims: pass = {}: specRank, dataRank, altDimVar, exDimVar = {}, {}, {}, {}", pass, specRank, + dataRank, altDimVar, exDimVar); + log.debug(" specDims, dataDims = '{}', '{}'", specDims, dataDims); + + if (!pass) { + if (exDimVar) { + validationResult.addError("variable: " + dataVar.getShortName() + ": Definitions differ" + + "\n\tSpecification dimensions = '" + specDims + " (+ extra-dimensions)'" + + "\n\tData File dimensions = '" + dataDims + "'"); + + log.info("format error: '{}' dimensions mismatch (extra dimension)", dataVar.getShortName()); + + } else { + validationResult.addError("variable: " + dataVar.getShortName() + ": Definitions differ" + + "\n\tSpecification dimensions = '" + specDims + "'" + "\n\tData File dimensions = '" + + dataDims + "'"); + + log.info("format error: '{}' dimensions mismatch", dataVar.getShortName()); + } + } + + return pass; + } // ..end ckVarDims + + // ......................ckVarTypes................... + /** + * Compares the data types of a Netcdf Variable and an ArgoVariable + * + * @param dataVar the Netcdf variable object + * @param specVar the ArgoVariable object + * @return the boolean status. True - they are the same. False - they differ. 
+ */ + + private boolean ckVarTypes(Variable dataVar, ArgoVariable specVar) { + // ..compare data type + DataType specType = specVar.getType(); + DataType dataType = dataVar.getDataType(); + + if (specType.equals(dataType)) { + return true; + + } else { + // ..there is an odd exception where a type may be either float or double + // ..in the argo spec, that is encoded as DataType.OPAQUE + // ..check for that exception + + if (specVar.getType() == DataType.OPAQUE && (dataType == DataType.FLOAT || dataType == DataType.DOUBLE)) { + return true; + } + + validationResult.addError("variable: " + dataVar.getShortName() + ": Definitions differ" + + "\n\tSpecification type = '" + specVar.getType().toString() + "'" + "\n\tData File type = '" + + dataVar.getDataType().toString() + "'"); + log.info("format error: '{}' data type mismatch", dataVar.getShortName()); + + return false; + } + } // ..end ckVarTypes + + private void checkGlobalAttributeValue(String dacName, String name, ArgoAttribute specAttr, Attribute dataAttr) { + if (dataAttr.isString()) { + String specValue = specAttr.getValue().toString(); + String dataValue = dataAttr.getStringValue(); + + if (!(specValue.startsWith(ArgoFileSpecification.ATTR_IGNORE) + || specValue.startsWith(ArgoFileSpecification.ATTR_IGNORE_VALUE))) { + // ..specAttr is not set for ignore + + if (!dataValue.equals(specValue)) { + // ..data file attribute is not the same as the spec file attribute + + // Pattern regex = spec.getAttrRegex("", name); + ArgoFileSpecification.AttrRegex regex = arFile.getFileSpec().getAttrRegex("", name); + + if (regex == null) { + // ..no regex .. this is a format error + + String err = String.format("global attribute: %s: Definitions differ" + + "\n\tSpecification = '%s'" + "\n\tData File = '%s'", name, specValue, dataValue); + + // formatErrors.add(err); + + // ################# TEMPORARY WARNING ################ + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", dacName, arFile.getFileName(), err); + + } else { + // ..regex defined ... does it match? 
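
// Note (illustrative, not part of the patch): the AttrRegex consulted below comes from the spec's
// attribute-regex file (see parseAttrRegexFile). It whitelists non-standard attribute values: a
// value that matches a regex flagged "warn" is accepted but reported as a format warning instead
// of an error.
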
+ + if (!regex.pattern.matcher(dataValue).matches()) { + String err = String.format("global attribute: %s: Definitions differ" + + "\n\tSpecification = '%s' (regex)" + "\n\tData File = '%s'", name, + regex.pattern, dataValue); + // formatErrors.add("global attribute: "+ + + // ################# TEMPORARY WARNING ################ + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + + log.warn("TEMP WARNING: {}: {}: {}", dacName, arFile.getFileName(), err); + + } else { + if (regex.warn) { + validationResult.addWarning("global attribute: " + name + + ": Accepted; not standard value" + "\n\tSpecification = '" + specValue + + "'" + "\n\tException allowed = '" + regex.pattern + "' (regex)" + + "\n\tData File = '" + dataValue + "'"); + log.warn("regex match (WARN): global attribute ':{} = '{}' matches '{}'", name, + dataValue, regex.pattern); + } else { + log.warn("regex match (NO WARN): global attribute ':{} = '{}' matches '{}'", name, + dataValue, regex.pattern); + } + } + } // ..end if regex + + } // ..end attr.equals + } // ..end if (ignore) + + } else { + String err = String.format("global attribute: %s: not a \"string valued\" attribute", name); + // formatErrors.add("global attribute: "+name+ + + // ################# TEMPORARY WARNING ################ + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", dacName, arFile.getFileName(), err); + + } // ..end if(dataAttr.isString) + } + + private void checkGroupsCompleteness(HashSet dataElement, HashSet dataGroup) { + for (String group : dataGroup) { + log.debug("group with reported variable: '{}'", group); + + // ..at least one member of "group" was reported + // ..check that all group memebers are reported + + Set vReq = arFile.getFileSpec().groupMembers(group); + HashSet vMiss = new HashSet(); + + for (String member : vReq) { + if (!dataElement.contains(member)) { + // ..this member of the group was reported + vMiss.add(member); + } + } + + if (vMiss.size() > 0) { + // ..some variables in this group were missing but not all + // ..format the list of required and reported variables + StringBuilder req = new StringBuilder(); + for (String str : vReq) { + req.append("'" + str + "' "); + } + + // ..format the list of missing variables + StringBuilder miss = new StringBuilder(); + for (String str : vMiss) { + miss.append("'" + str + "' "); + } + + // ..add to formatErrors + validationResult.addError("Parameter group " + group + ": Variables are missing for this group" + + "\n\tRequired variables: " + req + "\n\tMissing variables: " + miss); + + log.info("format error: option group '{}' variables missing from data file", group); + } + } + } + + private void verifySpecVariablePresenceInData() { + + for (String name : arFile.getFileSpec().getSpecVariableNames()) { + ArgoVariable specVar = arFile.getFileSpec().getVariable(name); + Variable dataVar = arFile.getNcReader().findVariable(name); + log.debug("spec var: {}", name); + + if (dataVar == null) { + // ..variable in spec file is not in the data file + + if (arFile.getFileSpec().isOptional(name)) { + // ..the variable is optional + log.debug("optional variable not defined in data file: '{}'", name); + + } else { + validationResult.addError("variable: " + name + ": not defined in data file"); + + log.info("format error: variable not in data file: '{}'", name); + } + + // ..........Step 4a: Check the spec attributes against the data file....... 
+ } else { + // ..we are looking for attributes the spec says must exist + // ..and checking the data variable to see if they do + // ..- don't have to check values because the data var attr values were checked + // above + for (String attrName : specVar.getAttributeNames()) { + checkSpecVarAttributePresenceInData(name, specVar, dataVar, attrName); + } + } + } // end for (spec-Var-name) + } + + private void checkSpecVarAttributePresenceInData(String name, ArgoVariable specVar, Variable dataVar, + String attrName) { + ArgoAttribute attr = specVar.getAttribute(attrName); + ArgoAttribute.AttrHandling handling = attr.getHandling(); + + if (handling == ArgoAttribute.AttrHandling.IGNORE_COMPLETELY + || handling == ArgoAttribute.AttrHandling.NOT_ALLOWED) { + + // ..attribute is allowed to be missing OR + // ..attribute must NOT exist + // ..- don't bother checking + log.debug("optional attr: '" + name + ":" + attrName + "' - ignored"); + + } else { + Attribute dataAttr = dataVar.findAttribute(attrName); + + if (dataAttr == null) { + // ..attribute in spec file is not in the data file + + validationResult.addError("attribute: '" + name + ":" + attrName + "' not defined in data file"); + + log.info("format error: attribute not in data file: '{}:{}'", name, attrName); + } + } + } + + private void verifySpecDimensionsPresenceInData() { + + for (ArgoDimension dim : arFile.getFileSpec().getDimensions()) { + String name = dim.getName(); + Dimension dataDim = arFile.getNcReader().findDimension(name); + log.debug("spec dim: {}", name); + + if (dataDim == null) { + if (arFile.getFileSpec().isOptional(name)) { + // ..dimension is optional + log.debug("optional dimension '{}': not defined in data file - allowed", name); + + } else if (dim.isAlternateDimension()) { + // ..this is an alt-dim --- it will never appear in the data file by name + log.debug("alt-dim '{}': not defined in data file - expected", name); + + } else { + // ..dimension in spec file is not in the data + validationResult.addError("dimension: " + name + ": not defined in data file"); + + log.info("format error: dimension not in data file: '{}'", name); + } + } + } // end for (spec-Dim-name) + } + + private void verifyFileVariables(HashSet dataElement, HashSet dataGroup) { + + for (Variable dataVar : arFile.getVarList()) { + String name = dataVar.getShortName(); + + dataElement.add(name); + + checkVariableAgainstSpec(name, dataVar); + + addElementToGroupIfDefinedInSpec(dataGroup, name); + + } + } + + private void checkVariableAgainstSpec(String name, Variable dataVar) { + + if (log.isDebugEnabled()) { + log.debug("data var: '{}'", name); + } + + ArgoVariable specVar = arFile.getFileSpec().getVariable(name); + + if (specVar == null) { + // ..data file variable is not in the specification + validationResult.addError("variable: " + name + ": not defined in specification '" + + arFile.getFileSpec().getSpecName() + "'"); + + log.info("format error: variable not in spec: '{}'", name); + + } else if (!ckVarTypes(dataVar, specVar)) { + // ..data types don't match + return; + + } else if (!ckVarDims(dataVar, specVar)) { + // ..variable dimensions don't match + return; + // .....Step 2a: Compare the attributes for this variable...... + } else if (!ckVarAttr(dataVar, specVar)) { + // ..variable attributes don't match + return; + } + + } + + // ......................................................... + // ........... verifyFileDimensions .............. + // ......................................................... 
+ private void verifyFileDimensions(HashSet dataElement, HashSet dataGroup) { + + List dimList = arFile.getNcReader().getDimensions(); + + if (dimList == null || dimList.isEmpty()) { + log.debug("no dimensions in this file"); + return; + } + + for (Dimension dataDim : dimList) { + String dimName = dataDim.getShortName(); + ArgoDimension specDim = arFile.getFileSpec().getDimension(dimName); + dataElement.add(dimName); + if (log.isDebugEnabled()) { + log.debug("data dim: {} -- {}", dataDim, specDim); + } + + if (specDim == null) { + // ..dimension in data file is not in the spec + // ..is it an "extra dimension"? + handleExtraDimension(dataDim, dimName); + + } else { + // ..dimension is in the spec, check its value + validateDimensionLength(dataDim, specDim); + // ..if a data dimension is in a group, + // .. we have to check the rest of the group later + addElementToGroupIfDefinedInSpec(dataGroup, dimName); + } + } + } + + private void addElementToGroupIfDefinedInSpec(HashSet dataGroup, String elementName) { + String group = arFile.getFileSpec().inGroup(elementName); + if (group != null) { + dataGroup.add(group); + } + } + + private void validateDimensionLength(Dimension dataDim, ArgoDimension specDim) { + int specValue = specDim.getValue(); + int dataValue = dataDim.getLength(); + if (specValue > 0 && specValue != dataValue) { + // ..tValue > 0: dimension is not _unspecified_ or UNLIMITED + // .. AND it doesn't have the same value -> error + validationResult.addError("dimension: " + dataDim.getShortName() + ": Definitions differ" + + "\n\tSpecification = '" + specValue + "'" + "\n\tData File = '" + dataValue + "'"); + + log.info("format error: '{}' dimension value mismatch", dataDim.getShortName()); + } + } + + private void handleExtraDimension(Dimension dataDim, String dimName) { + ArgoDimension specDim; + specDim = arFile.getFileSpec().addExtraDimension(dimName, dataDim.getLength()); + + if (specDim == null) { + // ..nope, not an allowed "extra dimension" -> error + validationResult.addError(String.format("dimension: %s: not defined in specification '%s'", dimName, + arFile.getFileSpec().getSpecName())); + + log.info("format error: '{}' not in spec", dimName); + + } else { + log.debug("extra dimension: '{}'. value = {}", dimName, dataDim.getLength()); + } + } + + // ......................................................... + // ............. validate DAC name argument................. + // ......................................................... + + private boolean checkDacNameArgument() { + ArgoReferenceTable.DACS dac = null; + // .......check arguments....... + if (arFile.getDacName().trim().length() > 0) { + for (ArgoReferenceTable.DACS d : ArgoReferenceTable.DACS.values()) { + if (d.name.equals(arFile.getDacName())) { + dac = d; + this.arFile.setValidatedDac(dac); + break; + } + } + if (dac == null) { + return false; + } + } + return true; + } + + // ......................................................... + // ........... getGdacFileName .............. + // ......................................................... 
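
// Note (illustrative, not part of the patch): expected GDAC names built below, shown for a
// hypothetical platform 6903004: "6903004_meta.nc" (metadata), "6903004_tech.nc" (technical),
// "R6903004_012.nc" or "D6903004_012.nc" for cycle 12 of a real-time/adjusted vs delayed-mode
// core profile (bio-profiles get a leading 'B'; descending profiles get a 'D' appended after the
// cycle number), and "6903004_Rtraj.nc" / "6903004_Dtraj.nc" for trajectory files.
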
+ /** + * Validates that the file name conforms to GDAC standards + * + */ + public String getGdacFileName() { + log.debug("......getGdacFileName: start....."); + + StringBuilder gdacName = new StringBuilder(20); + + if (arFile.fileType() == FileType.METADATA) { + String platform = arFile.readString("PLATFORM_NUMBER"); + + gdacName.append(platform.trim()); + gdacName.append("_meta.nc"); + + log.debug("meta-data file: platform, gdacName = '{}', '{}'", platform, gdacName); + + } else if (arFile.fileType() == FileType.PROFILE || arFile.fileType() == FileType.BIO_PROFILE) { + String platform = arFile.readString("PLATFORM_NUMBER", 0); + int cycle = arFile.readInt("CYCLE_NUMBER", 0); + char direction = arFile.readString("DIRECTION", true).charAt(0); + + if (arFile.fileType() == FileType.BIO_PROFILE) { + gdacName.append('B'); + } + + if (arFile.fileType() == FileType.PROFILE) { + char data_mode = arFile.readString("DATA_MODE").charAt(0); + + log.debug("{} file: platform, cycle, direction, data_mode = " + "'{}', '{}', '{}', '{}'", + arFile.fileType(), platform, cycle, direction, data_mode); + + if (data_mode == 'R' || data_mode == 'A') { + gdacName.append('R'); + } else if (data_mode == 'D') { + gdacName.append('D'); + } else { // ..pre-v3.1 files are not data-checked + // ..do some rudimentary data checks here + validationResult.addError("Could not determine file name: invalid DATA_MODE ='" + data_mode + "'"); + return null; + } + + } else { // ..this is a BIO file + String data_mode = arFile.readString("DATA_MODE"); + + log.debug("{} file: platform, cycle, direction, data_mode = " + "'{}', '{}', '{}', '{}'", + arFile.fileType(), platform, cycle, direction, data_mode); + + // ..a 'D' for any n_prof makes the whole file D-mode + + if (pDataMode.matcher(data_mode).matches()) { + // ..pre-v3.1 files are not data-checked + // ..do a rudimentary data checks here + + if (data_mode.indexOf('D') >= 0) { + // ..any single 'D' means D-mode for file + gdacName.append("D"); + + } else { + gdacName.append("R"); + } + + } else { + validationResult.addError("Could not determine file name: invalid DATA_MODE = '" + data_mode + "'"); + return null; + } + } + + gdacName.append(platform.trim()); + gdacName.append("_"); + gdacName.append(cycleFmt.format(cycle)); + + if (direction == 'D') { + gdacName.append(direction); + + } else if (direction != 'A') { // ..pre-v3.1 files are not data-checked + // ..do some rudimentary data checks here + validationResult.addError("Could not determine file name: invalid DIRECTION ='" + direction + "'"); + return null; + } + + gdacName.append(".nc"); + + log.debug("{} file: gdacName = '{}'", arFile.fileType(), gdacName); + + } else if (arFile.fileType() == FileType.TECHNICAL) { + String platform = arFile.readString("PLATFORM_NUMBER"); + + gdacName.append(platform.trim()); + gdacName.append("_tech.nc"); + + log.debug("tech-data file: platform, gdacName = '{}', '{}'", platform, gdacName); + + } else if (arFile.fileType() == FileType.TRAJECTORY || arFile.fileType() == FileType.BIO_TRAJECTORY) { + String platform = arFile.readString("PLATFORM_NUMBER"); + String data_mode = arFile.readString("DATA_MODE", true);// ..true->incl NULLs + + log.debug("{} file: platform, data_mode = " + "'{}', '{}'", arFile.fileType(), platform, data_mode); + + gdacName.append(platform.trim()); + gdacName.append("_"); + + if (arFile.fileType() == FileType.BIO_TRAJECTORY) { + gdacName.append("B"); + } + + if (pDataMode.matcher(data_mode).matches()) { + // ..pre-v3.1 files are not data-checked + // ..do a 
rudimentary data checks here + + if (data_mode.indexOf('D') >= 0) { + // ..any single 'D' means D-mode for file + gdacName.append("Dtraj.nc"); + + } else { + gdacName.append("Rtraj.nc"); + } + + } else { + validationResult.addError("Could not determine file name: invalid DATA_MODE = '" + data_mode + "'"); + return null; + } + + log.debug("trajectory file: gdacName = '{}'", gdacName); + + } else { + validationResult + .addError("Could not determine file name: unexpected file type = '" + arFile.fileType() + "'"); + log.debug("unknown file type: '{}'", arFile.fileType()); + return null; + } + + log.debug("......getGdacFileName: end....."); + return gdacName.toString(); + } + + // .............................................................. + // ........... validateGdacFileName .............. + // .............................................................. + /** + * Validates that the input file name conforms to GDAC standards + * + * @return true = file name conforms; false = file name non-conforming + */ + public boolean validateGdacFileName() { + + String expected = getGdacFileName(); + + if (expected == null) { + // ..error occured ... formatErrors set by getGdacFileName + return false; + } + + String name = arFile.getFileName(); + // String name = file.getName(); + + if (!name.equals(expected)) { + // ..actual file name and expected file name don't match + + if (name.endsWith("_traj.nc") && expected.endsWith("_Rtraj.nc")) { + // ..have to deal with the special case of pre-v3.1 *_traj files + if (arFile.fileVersion().startsWith("2.") || arFile.fileVersion().startsWith("3.0")) { + String ex = expected.replaceFirst("_Rtraj", "_traj"); + + if (name.equals(ex)) { + log.debug("validateGdacFileName: PASSED using special *_traj rules: " + + "expected, name = '{}', '{}'", expected, name); + return true; + } else { + log.debug("validateGdacFileName: FAILED using special *_traj rules: " + + "expected, name = '{}', '{}'", expected, name); + } + } + } + + validationResult.addError("Inconsistent file name\n\tDAC file name '" + name + + "'\n\tExpected file name according to file type, DIRECTION, DATA_MODE, CYCLE_NUMBER and PLATFORM_NUMBER : '" + + expected.toString() + "'"); + log.debug("validateGdacFileName: FAILED: expected, name = '{}', '{}'", expected, name); + + return false; + } + + log.debug("validateGdacFileName: PASSED: expected, name = '{}', '{}'", expected, name); + return true; + } + + // .......................................................... + // ........... validateStringNulls .............. + // .......................................................... + /** + * Checks for nulls in the "char" variables in an Argo file. + * + *

+ * Upon completion obj.nFormatErrors(), obj.nFormatWarnings, + * obj.formatErrors(), and obj.formatWarnings will return results. + * + * @throws IOException If an I/O error occurs + */ + private void validateStringNulls() throws IOException { + char nullChar = (char) 0; + + // .....Check all Strings -- no nulls allowed..... + if (log.isDebugEnabled()) { + log.debug(".....validateStrings....."); + } + + for (Variable var : arFile.getVarList()) { + if (var.getDataType() == DataType.CHAR) { + ArrayChar ch; + ch = (ArrayChar) var.read(); + + Index ndx = ch.getIndex(); + int shape[] = var.getShape(); + int rank = shape.length; + + // log.debug(var.getShortName()+": shape:"+shape.length+" rank:"+rank); + + if (rank == 1) { + // log.debug("rank 1: '"+ch.getString()+"'"); + for (int i = 0; i < shape[0]; i++) { + ndx.set(i); + if (ch.getChar(ndx) == nullChar) { + validationResult.addWarning(var.getShortName() + ": NULL character at [" + (i + 1) + "]"); + log.warn("warning: {}[{}]: null character", var.getShortName(), i); + break; + } + } + } else if (rank == 2) { + for (int i = 0; i < shape[0]; i++) { + // log.debug("rank 2: '"+ch.getString(ndx.set(i,0))+"'"); + for (int j = 0; j < shape[1]; j++) { + ndx.set(i, j); + if (ch.getChar(ndx) == nullChar) { + validationResult.addWarning( + var.getShortName() + ": NULL character at [" + (i + 1) + "," + (j + 1) + "]"); + log.warn("warning: {}[{},{}]: null character", var.getShortName(), i, j); + break; + } + } + } + } else if (rank == 3) { + for (int i = 0; i < shape[0]; i++) { + for (int j = 0; j < shape[1]; j++) { + // log.debug("rank 3: '"+ch.getString(ndx.set(i,j,0))+"'"); + for (int k = 0; k < shape[2]; k++) { + ndx.set(i, j, k); + if (ch.getChar(ndx) == nullChar) { + validationResult.addWarning(var.getShortName() + ": NULL character at [" + (i + 1) + + "," + (j + 1) + "," + (k + 1) + "]"); + log.warn("warning: {}[{},{},{}]: null character", var.getShortName(), i, j, k); + break; + } + } + } + } + } else if (rank == 4) { + for (int i = 0; i < shape[0]; i++) { + for (int j = 0; j < shape[1]; j++) { + for (int k = 0; k < shape[2]; k++) { + // log.debug("rank 4: '"+ch.getString(ndx.set(i,j,k,0))+"'"); + for (int l = 0; l < shape[3]; l++) { + ndx.set(i, j, k, l); + if (ch.getChar(ndx) == nullChar) { + validationResult.addWarning(var.getShortName() + ": NULL character at (" + + (i + 1) + "," + (j + 1) + "," + (k + 1) + "," + (l + 1) + "]"); + log.warn("warning: {}[{},{},{},{}]: null character", var.getShortName(), i, j, + k, l); + break; + } + } + } + } + } + } else { + throw new IOException("validateString cannot handle rank " + rank + " arrays"); + } // ..end if (rank) + } + } + }// ..end validateStringNulls + + // ..................................................... + // ......... rudimentaryDateChecks ............. + // ..................................................... + /** + * Performs rudimentary date checks on a file. Essentially, just checks that + * DATE_CREATE and DATE_UPDATE are valid date strings. + * + *

+ * Upon completion obj.nFormatErrors(), obj.nFormatWarnings, + * obj.formatErrors(), and obj.formatWarnings will return results. + * + */ + public void rudimentaryDateChecks() { + log.debug(".....rudimentaryDateChecks: start....."); + + // ..creation date check + // ..just check for valid date + + String creation = arFile.readString("DATE_CREATION"); + + if (creation.trim().length() <= 0) { + validationResult.addError("DATE_CREATION: Not set"); + log.info("format error: DATE_CREATION not set"); + + } else { + Date dateCreation = ArgoDate.get(creation); + + if (dateCreation == null) { + validationResult.addError("DATE_CREATION: '" + creation + "': Invalid date"); + log.info("format error: bad DATE_CREATION: '{}'", creation); + } + } + + // ..update date check + // ..just check for valid date + + String update = arFile.readString("DATE_UPDATE"); + + if (update.trim().length() <= 0) { + validationResult.addError("DATE_UPDATE: Not set"); + log.info("format error: DATE_UPDATE not set"); + + } else { + Date dateUpdate = ArgoDate.get(update); + + if (dateUpdate == null) { + validationResult.addError("DATE_UPDATE: '" + update + "': Invalid date"); + log.info("format error: bad DATE_UPDATE: '{}'", update); + } + } + log.debug(".....rudimentaryDateChecks: end....."); + } + + // ............................................. + // convenience "check" methods + // ............................................. + + /** + * Determines if a float value is the specified FillValue Performs a "real + * value" comparison to a resolution of 1.e-5 (not an exact equality). + * + * @param _FillValue for the parameter + * @param data value + * @return true = value is FillValue; false = value is not FillValue + */ + public static boolean is_FillValue(float fillValue, float value) { + float diff = Math.abs(1.f - (value / fillValue)); + return diff < 1.0e-8f; + } + + /** + * Determines if a double value is the specified FillValue Performs a "real + * value" comparison to a resolution of 1.e-5 (not an exact equality). + * + * @param _FillValue for the parameter + * @param data value + * @return true = value is FillValue; false = value is not FillValue + */ + public static boolean is_FillValue(double fillValue, double value) { + double diff = Math.abs(1.d - (value / fillValue)); + return diff < 1.0e-8d; + } + + /** + * Determines if a float value is a "99,999.0" missing value. Performs a "real + * value" comparison to a resolution of 1.e-5 (not an exact equality).
+ *
+ * NOTE: This is required because we've run into examples where "bad values" of + * a parameter have been larger than the FillValue, so simple + * greater-than/less-than tests fail + * + * @param value the data value to test + * @return true = value is the 99,999.0 missing value; false = it is not + */ + public static boolean is_99_999_FillValue(float value) { + float diff = Math.abs(value - 99999.f); + return diff < 0.00001f; + } + + /** + * Determines if a double value is a "99,999.0" missing value. Performs a "real + * value" comparison to a resolution of 1.e-5 (not an exact equality).
+ *
+ * NOTE: This is required because we've run into examples where "bad values" of + * a parameter have been larger than the FillValue, so simple + * greater-than/less-than tests fail + * + * @param value the data value to test + * @return true = value is the 99,999.0 missing value; false = it is not + */ + public static boolean is_99_999_FillValue(double value) { + double diff = Math.abs(value - 99999.d); + return diff < 0.00001d; + } + + /** + * Determines if a double value is a "999,999.0" missing value. Performs a "real + * value" comparison to a resolution of 1.e-5 (not an exact equality). + * + * @param value the data value to test + * @return true = value is the 999,999.0 missing value; false = it is not + */ + public static boolean is_999_999_FillValue(double value) { + double diff = Math.abs(value - 999999.d); + return diff < 0.00001d; + } + + // ............ end "check" methods .............. + + /** + * Convenience method to add to String list for "pretty printing". + * + * @param list the StringBuilder list + * @param add the String to add + */ + protected void addToList(StringBuilder list, String add) { + if (list.length() == 0) { + list.append("'" + add + "'"); + } else { + list.append(", '" + add + "'"); + } + } + + /** + * Convenience method to add to String list for "pretty printing". + * + * @param list the StringBuilder list + * @param add the String to add + */ + protected void addToList(StringBuilder list, int add) { + if (list.length() == 0) { + list.append(add); + } else { + list.append(", " + add); + } + } + +//......................................... +// ACCESSORS +//......................................... + + public ValidationResult getValidationResult() { + return validationResult; + } + + public void setValidationResult(ValidationResult validationResult) { + this.validationResult = validationResult; + } + +} diff --git a/file_checker_exec/src/main/java/fr/coriolis/checker/filetypes/ArgoMetadataFile.java b/file_checker_exec/src/main/java/fr/coriolis/checker/validators/ArgoMetadataFileValidator.java similarity index 67% rename from file_checker_exec/src/main/java/fr/coriolis/checker/filetypes/ArgoMetadataFile.java rename to file_checker_exec/src/main/java/fr/coriolis/checker/validators/ArgoMetadataFileValidator.java index 8251875..f32afbd 100644 --- a/file_checker_exec/src/main/java/fr/coriolis/checker/filetypes/ArgoMetadataFile.java +++ b/file_checker_exec/src/main/java/fr/coriolis/checker/validators/ArgoMetadataFileValidator.java @@ -1,4 +1,4 @@ -package fr.coriolis.checker.filetypes; +package fr.coriolis.checker.validators; import java.io.IOException; import java.util.Date; @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import fr.coriolis.checker.core.ArgoDataFile; import fr.coriolis.checker.specs.ArgoConfigTechParam; import fr.coriolis.checker.specs.ArgoDate; import fr.coriolis.checker.specs.ArgoReferenceTable; @@ -37,9 +38,9 @@ * @version $Id: ArgoMetadataFile.java 1314 2022-04-13 00:20:48Z ignaszewski $ */ -public class ArgoMetadataFile extends ArgoDataFile { +public class ArgoMetadataFileValidator extends ArgoFileValidator { - // ......................................... + // ......................................... // VARIABLES // .........................................
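// Hedged usage sketch of the refactored validator API as implied by this patch: the validator
// wraps an already-opened ArgoDataFile and accumulates findings in a ValidationResult.
// ArgoDataFile.open(...), validateData(...) and getValidationResult() appear elsewhere in this
// diff; the driver code around them, and the inFile / specDir arguments, are assumptions here.
ArgoDataFile arFile = ArgoDataFile.open(inFile, specDir, true);              // open the data file and its format specification
ArgoMetadataFileValidator validator = new ArgoMetadataFileValidator(arFile);
boolean checked = validator.validateData(true);                              // ckNulls = true; false means validation could not run
ValidationResult result = validator.getValidationResult();                   // accumulated errors and warnings for reporting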
@@ -47,8 +48,6 @@ public class ArgoMetadataFile extends ArgoDataFile { // ..standard i/o shortcuts private static final Logger log = LogManager.getLogger("ArgoMetadataFile"); - private final static long oneDaySec = 1L * 24L * 60L * 60L * 1000L; - private static Pattern pBatteryType; private static Pattern pBatteryPacks; @@ -72,68 +71,36 @@ public class ArgoMetadataFile extends ArgoDataFile { // CONSTRUCTORS // ....................................... - protected ArgoMetadataFile() throws IOException { - super(); - } - - public ArgoMetadataFile(String specDir, String version) { - // super(specDir, FileType.METADATA, version); + public ArgoMetadataFileValidator(ArgoDataFile arFile) { + super(arFile); } // .......................................... // METHODS // .......................................... - /** - * Convenience method to add to String list for "pretty printing". - * - * @param list the StringBuilder list - * @param add the String to add - */ - private void addToList(StringBuilder list, String add) { - if (list.length() == 0) { - list.append("'" + add + "'"); - } else { - list.append(", '" + add + "'"); - } - } - - /** - * Convenience method to add to String list for "pretty printing". - * - * @param list the StringBuilder list - * @param add the String to add - */ - private void addToList(StringBuilder list, int add) { - if (list.length() == 0) { - list.append(add); - } else { - list.append(", " + add); - } - } - - /** - * Opens an existing file and the assoicated Argo specification). - * - * @param inFile the string name of the file to open - * @param specDir the string name of the directory containing the format - * specification files - * @param fullSpec true = open the full specification; false = open the template - * specification - * @return the file object reference. Returns null if the file is not opened - * successfully. (ArgoMetadataFile.getMessage() will return the reason - * for the failure to open.) - * @throws IOException If an I/O error occurs - */ - public static ArgoMetadataFile open(String inFile, String specDir, boolean fullSpec) throws IOException { - ArgoDataFile arFile = ArgoDataFile.open(inFile, specDir, fullSpec); - if (!(arFile instanceof ArgoMetadataFile)) { - message = "ERROR: '" + inFile + "' not an Argo META-DATA file"; - return null; - } - - return (ArgoMetadataFile) arFile; - } +// /** +// * Opens an existing file and the assoicated Argo specification). +// * +// * @param inFile the string name of the file to open +// * @param specDir the string name of the directory containing the format +// * specification files +// * @param fullSpec true = open the full specification; false = open the template +// * specification +// * @return the file object reference. Returns null if the file is not opened +// * successfully. (ArgoMetadataFile.getMessage() will return the reason +// * for the failure to open.) +// * @throws IOException If an I/O error occurs +// */ +// public static ArgoMetadataFile open(String inFile, String specDir, boolean fullSpec) throws IOException { +// ArgoDataFile arFile = ArgoDataFile.open(inFile, specDir, fullSpec); +// if (!(arFile instanceof ArgoMetadataFile)) { +// ValidationResult.lastMessage = "ERROR: '" + inFile + "' not an Argo META-DATA file"; +// return null; +// } +// +// return (ArgoMetadataFile) arFile; +// } /** * Validates the data in the meta-data file. This is a driver routine that @@ -161,42 +128,19 @@ public static ArgoMetadataFile open(String inFile, String specDir, boolean fullS * reason). 
* @throws IOException If an I/O error occurs */ - public boolean validate(String dacName, boolean ckNulls, boolean... optionalChecks) throws IOException { - ArgoReferenceTable.DACS dac = null; - - if (!verified) { - message = new String("File must be verified (verifyFormat) " + "successfully before validation"); + public boolean validateData(boolean ckNulls, boolean... optionalChecks) throws IOException { + boolean basicsChecks = super.basicDataValidation(ckNulls); + if (!basicsChecks) { return false; } - - // .......check arguments....... - if (dacName.trim().length() > 0) { - for (ArgoReferenceTable.DACS d : ArgoReferenceTable.DACS.values()) { - if (d.name.equals(dacName)) { - dac = d; - break; - } - } - if (dac == null) { - message = new String("Unknown DAC name = '" + dacName + "'"); - return false; - } - } - - // .......do validations.......... - - this.dacName = dacName; - - if (ckNulls) { - validateStringNulls(); - } + // .......do meta-data file specific validations.......... validateDates(); - if (format_version.trim().compareTo("2.2") <= 0) { - validateHighlyDesirable_v2(dac); + if (this.arFile.fileVersion().trim().compareTo("2.2") <= 0) { + validateHighlyDesirable_v2(this.arFile.getValidatedDac()); } else { - validateMandatory_v3(dac); + validateMandatory_v3(this.arFile.getValidatedDac()); validateOptionalParams(); validateConfigMission(); validateConfigParams(); @@ -240,15 +184,13 @@ public void validateDates() throws IOException { // ..read times - String creation = readString("DATE_CREATION").trim(); - String update = readString("DATE_UPDATE").trim(); - String launch = readString("LAUNCH_DATE").trim(); - String start = readString("START_DATE").trim(); - String end = readString("END_MISSION_DATE").trim(); + String launch = arFile.readString("LAUNCH_DATE").trim(); + String start = arFile.readString("START_DATE").trim(); + String end = arFile.readString("END_MISSION_DATE").trim(); // ..version specific dates - Variable var = ncReader.findVariable("STARTUP_DATE"); + Variable var = arFile.getNcReader().findVariable("STARTUP_DATE"); String startup; if (var != null) { @@ -257,80 +199,19 @@ public void validateDates() throws IOException { startup = " "; } - Date fileTime = new Date(file.lastModified()); - long fileSec = fileTime.getTime(); + Date fileTime = new Date(arFile.getFile().lastModified()); if (log.isDebugEnabled()) { log.debug("earliestDate: '{}'", ArgoDate.format(earliestDate)); log.debug("fileTime: '{}'", ArgoDate.format(fileTime)); - log.debug("DATE_CREATION: '{}'", creation); - log.debug("DATE_UPDATE: '{}'", update); log.debug("LAUNCH_DATE: '{}'", launch); log.debug("START_DATE: '{}' length = {}", start, start.length()); log.debug("STARTUP_DATE: '{}'", startup); log.debug("END_MISSION_DATE: '{}'", end); } - // ...........creation date checks:............. 
- // ..set, after earliestDate, and before file time - Date dateCreation = null; - boolean haveCreation = false; - long creationSec = 0; - - if (creation.trim().length() <= 0) { - formatErrors.add("DATE_CREATION: Not set"); - - } else { - dateCreation = ArgoDate.get(creation); - haveCreation = true; - - if (dateCreation == null) { - haveCreation = false; - formatErrors.add("DATE_CREATION: '" + creation + "': Invalid date"); - - } else { - creationSec = dateCreation.getTime(); - - if (dateCreation.before(earliestDate)) { - formatErrors.add("DATE_CREATION: '" + creation + "': Before earliest allowed date ('" - + ArgoDate.format(earliestDate) + "')"); - - } else if ((creationSec - fileSec) > oneDaySec) { - formatErrors.add("DATE_CREATION: '" + creation + "': After GDAC receipt time ('" - + ArgoDate.format(fileTime) + "')"); - } - } - } - - // ............update date checks:........... - // ..set, not before creation time, before file time - Date dateUpdate = null; - boolean haveUpdate = false; - long updateSec = 0; - - if (update.trim().length() <= 0) { - formatErrors.add("DATE_UPDATE: Not set"); - } else { - dateUpdate = ArgoDate.get(update); - haveUpdate = true; - - if (dateUpdate == null) { - formatErrors.add("DATE_UPDATE: '" + update + "': Invalid date"); - haveUpdate = false; - - } else { - updateSec = dateUpdate.getTime(); - - if (haveCreation && dateUpdate.before(dateCreation)) { - formatErrors.add("DATE_UPDATE: '" + update + "': Before DATE_CREATION ('" + creation + "')"); - } - - if ((updateSec - fileSec) > oneDaySec) { - formatErrors.add("DATE_UPDATE: '" + update + "': After GDAC receipt time ('" - + ArgoDate.format(fileTime) + "')"); - } - } - } + // ...........creation and update dates checks:............. + super.validateCreationUpdateDates(fileTime); // ............launch date checks:........... // ..must be set ... not before earliest allowed date @@ -342,18 +223,18 @@ public void validateDates() throws IOException { haveLaunch = true; if (dateLaunch == null) { - formatErrors.add("LAUNCH_DATE: '" + update + "': Invalid date"); + validationResult.addError("LAUNCH_DATE: '" + launch + "': Invalid date"); haveLaunch = false; } else { if (dateLaunch.before(earliestDate)) { - formatErrors.add("LAUNCH_DATE: '" + launch + "': Before earliest allowed date ('" + validationResult.addError("LAUNCH_DATE: '" + launch + "': Before earliest allowed date ('" + ArgoDate.format(earliestDate) + "')"); } } } else { - formatErrors.add("LAUNCH_DATE: Not set"); + validationResult.addError("LAUNCH_DATE: Not set"); } // ............start date checks:........... 
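// The launch/start/startup/end-mission checks in this method all follow the same pattern:
// read the raw string, hand it to ArgoDate.get(...), and treat a null return as an invalid
// date. A small sketch, assuming the conventional 14-character Argo date layout
// "YYYYMMDDHHMISS" (the layout itself is not shown in this excerpt):
Date parsed  = ArgoDate.get("20240131120000");  // well-formed string -> a java.util.Date
Date invalid = ArgoDate.get("2024-01-31");      // unexpected layout -> expected to be null, reported as "Invalid date"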
@@ -364,7 +245,7 @@ public void validateDates() throws IOException { dateStart = ArgoDate.get(start); if (dateStart == null) { - formatErrors.add("START_DATE: '" + start + "': Invalid date"); + validationResult.addError("START_DATE: '" + start + "': Invalid date"); } } @@ -376,7 +257,7 @@ public void validateDates() throws IOException { dateStartup = ArgoDate.get(startup); if (dateStartup == null) { - formatErrors.add("STARTUP_DATE: '" + startup + "': Invalid date"); + validationResult.addError("STARTUP_DATE: '" + startup + "': Invalid date"); } } @@ -388,15 +269,16 @@ public void validateDates() throws IOException { dateEnd = ArgoDate.get(end); if (dateEnd == null) { - formatErrors.add("END_MISSION_DATE: '" + end + "': Invalid date"); + validationResult.addError("END_MISSION_DATE: '" + end + "': Invalid date"); } else { if (haveLaunch) { if (dateEnd.before(dateLaunch)) { - formatErrors.add("END_MISSION_DATE: '" + start + "': Before LAUNCH_DATE ('" + launch + "')"); + validationResult + .addError("END_MISSION_DATE: '" + start + "': Before LAUNCH_DATE ('" + launch + "')"); } } else { - formatWarnings.add("END_MISSION_DATE: Set. LAUNCH_DATE missing"); + validationResult.addWarning("END_MISSION_DATE: Set. LAUNCH_DATE missing"); } } } @@ -436,16 +318,16 @@ public void validateHighlyDesirable_v2(ArgoReferenceTable.DACS dac) throws IOExc // ..CYCLE_TIME ---> see "per-cycle" checks below name = "DATA_CENTRE"; // ..valid (and valid for DAC) - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (dac != null) { if (!ArgoReferenceTable.DacCenterCodes.get(dac).contains(str)) { - formatErrors.add("DATA_CENTRE: '" + str + "': Invalid for DAC " + dac); + validationResult.addError("DATA_CENTRE: '" + str + "': Invalid for DAC " + dac); } } else { // ..incoming DAC not set if (!ArgoReferenceTable.DacCenterCodes.containsValue(str)) { - formatErrors.add("DATA_CENTRE: '" + str + "': Invalid (for all DACs)"); + validationResult.addError("DATA_CENTRE: '" + str + "': Invalid (for all DACs)"); } } @@ -455,30 +337,30 @@ public void validateHighlyDesirable_v2(ArgoReferenceTable.DACS dac) throws IOExc ch = getChar(name); log.debug("{}: '{}'", name, ch); if (ch != 'A' && ch != 'D') { - formatWarnings.add(name + ": '" + ch + "': Not A or D"); + validationResult.addWarning(name + ": '" + ch + "': Not A or D"); } name = "LAUNCH_LATITUDE"; // ..on the earth - dVal = readDouble(name); + dVal = arFile.readDouble(name); log.debug("{}: {}", name, dVal); if (dVal < -90.d || dVal > 90.d) { - formatWarnings.add(name + ": " + dVal + ": Invalid"); + validationResult.addWarning(name + ": " + dVal + ": Invalid"); } name = "LAUNCH_DATE"; // ..not empty -- validity checked elsewhere - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() <= 0) { - formatWarnings.add(name + ": Empty"); + validationResult.addWarning(name + ": Empty"); } name = "LAUNCH_LONGITUDE"; // ..on the earth - dVal = readDouble(name); + dVal = arFile.readDouble(name); log.debug("{}: {}", name, dVal); if (dVal < -180.d || dVal > 180.d) { - formatWarnings.add(name + ": " + dVal + ": Invalid"); + validationResult.addWarning(name + ": " + dVal + ": Invalid"); } name = "LAUNCH_QC"; // ..valid ref table 2 value @@ -486,47 +368,45 @@ public void validateHighlyDesirable_v2(ArgoReferenceTable.DACS dac) throws IOExc log.debug("{}: '{}'", name, ch); if (!(info = ArgoReferenceTable.QC_FLAG.contains(ch)).isActive) { - formatWarnings.add(name + ": '" + ch 
+ "' Status: " + info.message); + validationResult.addWarning(name + ": '" + ch + "' Status: " + info.message); } // ..PARAMETER --> see below // ..PARKING_PRESSURE ---> see "per-cycle" checks below name = "PLATFORM_MODEL"; // ..not empty - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() <= 0) { - formatWarnings.add(name + ": Empty"); + validationResult.addWarning(name + ": Empty"); } - name = "PLATFORM_NUMBER"; // ..valid platform number - str = readString(name).trim(); - log.debug("{}: '{}'", name, str); - - if (!str.matches("[1-9][0-9]{4}|[1-9]9[0-9]{5}")) { - formatErrors.add(name + ": '" + str + "': Invalid"); + name = "PLATFORM_NUMBER"; // ..valid wmo id + str = arFile.readString(name).trim(); + if (!super.validatePlatfomNumber(str)) { + validationResult.addError("PLATFORM_NUMBER" + ": '" + str + "': Invalid"); } name = "POSITIONING_SYSTEM"; // ..ref table 9 - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (!(info = ArgoReferenceTable.POSITIONING_SYSTEM.contains(str)).isActive) { - formatWarnings.add(name + ": '" + str + "' Status: " + info.message); + validationResult.addWarning(name + ": '" + str + "' Status: " + info.message); } name = "PTT"; // ..not empty - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() <= 0) { - formatWarnings.add(name + ": Empty"); + validationResult.addWarning(name + ": Empty"); } name = "START_DATE"; // ..not empty -- validity checked elsewhere - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() <= 0) { - formatWarnings.add(name + ": Empty"); + validationResult.addWarning(name + ": Empty"); } name = "START_DATE_QC"; // ..valid ref table 2 value @@ -534,73 +414,73 @@ public void validateHighlyDesirable_v2(ArgoReferenceTable.DACS dac) throws IOExc log.debug("{}: '{}'", name, ch); if (!(info = ArgoReferenceTable.QC_FLAG.contains(ch)).isActive) { - formatWarnings.add(name + ": '" + str + "' Status: " + info.message); + validationResult.addWarning(name + ": '" + str + "' Status: " + info.message); } name = "TRANS_SYSTEM"; // ..ref table 10 - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (!(info = ArgoReferenceTable.TRANS_SYSTEM.contains(str)).isActive) { - formatWarnings.add(name + ": '" + str + "' Status: " + info.message); + validationResult.addWarning(name + ": '" + str + "' Status: " + info.message); } name = "TRANS_SYSTEM_ID"; // ..not empty - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() <= 0) { - formatWarnings.add(name + ": Empty"); + validationResult.addWarning(name + ": Empty"); } // ..............parameter names........... 
- int nParam = getDimensionLength("N_PARAM"); + int nParam = arFile.getDimensionLength("N_PARAM"); log.debug("n_param: '{}'", nParam); - String paramVar[] = readStringArr("PARAMETER"); + String paramVar[] = arFile.readStringArr("PARAMETER"); for (int n = 0; n < nParam; n++) { str = paramVar[n].trim(); log.debug("param[{}]: '{}'", n, str); - if (!spec.isPhysicalParamName(str)) { - formatWarnings.add("Physical parameter name: '" + str + "': Invalid"); + if (!arFile.getFileSpec().isPhysicalParamName(str)) { + validationResult.addWarning("Physical parameter name: '" + str + "': Invalid"); } } // .........per-cycle checks.......... - int nCycles = getDimensionLength("N_CYCLES"); + int nCycles = arFile.getDimensionLength("N_CYCLES"); log.debug("n_cycles: '{}'", nCycles); name = "CYCLE_TIME"; // ..not empty - fVar = readFloatArr(name); + fVar = arFile.readFloatArr(name); for (int n = 0; n < nCycles; n++) { fVal = fVar[n]; log.debug("{}[{}]: {}", name, n, fVal); if (fVal > 9999.f || fVal <= 0.f) { - formatWarnings.add(name + "[" + (n + 1) + "]: Not set"); + validationResult.addWarning(name + "[" + (n + 1) + "]: Not set"); } } name = "PARKING_PRESSURE"; // ..not empty - fVar = readFloatArr(name); + fVar = arFile.readFloatArr(name); for (int n = 0; n < nCycles; n++) { fVal = fVar[n]; log.debug("{}[{}]: {}", name, n, fVal); if (fVal > 9999.f || fVal <= 0.f) { - formatWarnings.add(name + "[" + (n + 1) + "]: Not set"); + validationResult.addWarning(name + "[" + (n + 1) + "]: Not set"); } } name = "DEEPEST_PRESSURE"; // ..not empty - fVar = readFloatArr(name); + fVar = arFile.readFloatArr(name); for (int n = 0; n < nCycles; n++) { fVal = fVar[n]; log.debug("{}[{}]: {}", name, n, fVal); if (fVal > 9999.f || fVal <= 0.f) { - formatWarnings.add(name + "[" + (n + 1) + "]: Not set"); + validationResult.addWarning(name + "[" + (n + 1) + "]: Not set"); } } @@ -698,70 +578,59 @@ public void validateMandatory_v3(ArgoReferenceTable.DACS dac) throws IOException // ...........single valued variables.............. 
name = "CONTROLLER_BOARD_SERIAL_NO_PRIMARY"; // ..not empty - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() <= 0) { - formatErrors.add(name + ": Empty"); + validationResult.addError(name + ": Empty"); } name = "CONTROLLER_BOARD_TYPE_PRIMARY"; // ..not empty - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() <= 0) { - formatErrors.add(name + ": Empty"); + validationResult.addError(name + ": Empty"); } name = "DAC_FORMAT_ID"; // ..not empty - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() <= 0) { - formatErrors.add(name + ": Empty"); + validationResult.addError(name + ": Empty"); } - name = "DATA_CENTRE"; // ..ref table 4 (and valid for DAC) - str = readString(name).trim(); - log.debug("{}: '{}'", name, str); - if (dac != null) { - if (!ArgoReferenceTable.DacCenterCodes.get(dac).contains(str)) { - formatErrors.add("DATA_CENTRE: '" + str + "': Invalid for DAC " + dac); - } - - } else { // ..incoming DAC not set - if (!ArgoReferenceTable.DacCenterCodes.containsValue(str)) { - formatErrors.add("DATA_CENTRE: '" + str + "': Invalid (for all DACs)"); - } - } + // DATA_CENTRE + super.validateDataCentre(dac); name = "FIRMWARE_VERSION"; // ..not empty - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() <= 0) { - formatErrors.add(name + ": Empty"); + validationResult.addError(name + ": Empty"); } name = "FLOAT_SERIAL_NO"; // ..not empty - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() <= 0) { - formatErrors.add(name + ": Empty"); + validationResult.addError(name + ": Empty"); } // ..LAUNCH_DATE --> checked elsewhere name = "LAUNCH_LATITUDE"; // ..on the earth - dVal = readDouble(name); + dVal = arFile.readDouble(name); log.debug("{}: {}", name, dVal); if (dVal < -90.d || dVal > 90.d) { - formatErrors.add(name + ": " + dVal + ": Invalid"); + validationResult.addError(name + ": " + dVal + ": Invalid"); } name = "LAUNCH_LONGITUDE"; // ..on the earth - dVal = readDouble(name); + dVal = arFile.readDouble(name); log.debug("{}: {}", name, dVal); if (dVal < -180.d || dVal > 180.d) { - formatErrors.add(name + ": " + dVal + ": Invalid"); + validationResult.addError(name + ": " + dVal + ": Invalid"); } name = "LAUNCH_QC"; // ..ref table 2 @@ -770,89 +639,87 @@ public void validateMandatory_v3(ArgoReferenceTable.DACS dac) throws IOException if ((info = ArgoReferenceTable.QC_FLAG.contains(ch)).isValid()) { if (info.isDeprecated) { - formatWarnings.add(name + ": '" + ch + "' Status: " + info.message); + validationResult.addWarning(name + ": '" + ch + "' Status: " + info.message); } } else { - formatErrors.add(name + ": '" + ch + "' Status: " + info.message); + validationResult.addError(name + ": '" + ch + "' Status: " + info.message); } name = "MANUAL_VERSION"; // ..not empty - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() <= 0) { - formatErrors.add(name + ": Empty"); + validationResult.addError(name + ": Empty"); } // ..PARAMETER --> see below name = "PI_NAME"; // ..not empty - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() <= 0) { - formatErrors.add(name + ": Empty"); + validationResult.addError(name 
+ ": Empty"); } name = "PLATFORM_FAMILY"; // ..ref table 22 - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if ((info = ArgoReferenceTable.PLATFORM_FAMILY.contains(str)).isValid()) { if (info.isDeprecated) { - formatWarnings.add(name + ": '" + str + "' Status: " + info.message); + validationResult.addWarning(name + ": '" + str + "' Status: " + info.message); } } else { - formatErrors.add(name + ": '" + str + "' Status: " + info.message); + validationResult.addError(name + ": '" + str + "' Status: " + info.message); } name = "PLATFORM_NUMBER"; // ..valid wmo id - str = readString(name).trim(); - log.debug("{}: '{}'", name, str); - - if (!str.matches("[1-9][0-9]{4}|[1-9]9[0-9]{5}")) { - formatErrors.add(name + ": '" + str + "': Invalid"); + str = arFile.readString(name).trim(); + if (!super.validatePlatfomNumber(str)) { + validationResult.addError("PLATFORM_NUMBER" + ": '" + str + "': Invalid"); } boolean pmkrValid = false; String plfmMakerName = "PLATFORM_MAKER"; // ..ref table 24 - String plfmMaker = readString(plfmMakerName).trim(); + String plfmMaker = arFile.readString(plfmMakerName).trim(); log.debug("{}: '{}'", plfmMakerName, plfmMaker); if ((info = ArgoReferenceTable.PLATFORM_MAKER.contains(plfmMaker)).isValid()) { pmkrValid = true; if (info.isDeprecated) { - formatWarnings.add(plfmMakerName + ": '" + plfmMaker + "' Status: " + info.message); + validationResult.addWarning(plfmMakerName + ": '" + plfmMaker + "' Status: " + info.message); } } else { - formatErrors.add(plfmMakerName + ": '" + plfmMaker + "' Status: " + info.message); + validationResult.addError(plfmMakerName + ": '" + plfmMaker + "' Status: " + info.message); } boolean typValid = false; String plfmTypeName = "PLATFORM_TYPE"; // ..ref table 23 - String plfmType = readString(plfmTypeName).trim(); + String plfmType = arFile.readString(plfmTypeName).trim(); log.debug("{}: '{}'", plfmTypeName, plfmType); if ((info = ArgoReferenceTable.PLATFORM_TYPE.contains(plfmType)).isValid()) { typValid = true; if (info.isDeprecated) { - formatWarnings.add(plfmTypeName + ": '" + plfmType + "' Status: " + info.message); + validationResult.addWarning(plfmTypeName + ": '" + plfmType + "' Status: " + info.message); } } else { - formatErrors.add(plfmTypeName + ": '" + plfmType + "' Status: " + info.message); + validationResult.addError(plfmTypeName + ": '" + plfmType + "' Status: " + info.message); } if (pmkrValid && typValid) { if (!plfmType.equals("FLOAT")) { if (!ArgoReferenceTable.PLATFORM_TYPExPLATFORM_MAKER.xrefContains(plfmType, plfmMaker)) { - formatErrors.add(plfmTypeName + "/" + plfmMakerName + ": Inconsistent: '" + plfmType + "'/'" - + plfmMaker + "'"); + validationResult.addError(plfmTypeName + "/" + plfmMakerName + ": Inconsistent: '" + plfmType + + "'/'" + plfmMaker + "'"); log.debug("{}/{} xref inconsistent: plfmType, plfmMaker = '{}', '{}'", plfmTypeName, plfmMakerName, plfmType, plfmMaker); } else { @@ -867,10 +734,10 @@ public void validateMandatory_v3(ArgoReferenceTable.DACS dac) throws IOException // ..PREDEPLOYMENT_CALIB_EQUATION --> see per-param below name = "PTT"; - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() <= 0) { - formatErrors.add(name + ": Empty"); + validationResult.addError(name + ": Empty"); } // ..SENSOR --> see per-sensor below @@ -884,18 +751,18 @@ public void validateMandatory_v3(ArgoReferenceTable.DACS dac) throws IOException if ((info = 
ArgoReferenceTable.QC_FLAG.contains(ch)).isValid()) { if (info.isDeprecated) { - formatWarnings.add(name + ": '" + ch + "' Status: " + info.message); + validationResult.addWarning(name + ": '" + ch + "' Status: " + info.message); } } else { - formatErrors.add(name + ": '" + ch + "' Status: " + info.message); + validationResult.addError(name + ": '" + ch + "' Status: " + info.message); } name = "STANDARD_FORMAT_ID"; // ..not empty - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() <= 0) { - formatErrors.add(name + ": Empty"); + validationResult.addError(name + ": Empty"); } // ..TRANS_FREQUENCY \ @@ -905,7 +772,7 @@ public void validateMandatory_v3(ArgoReferenceTable.DACS dac) throws IOException boolean wmoValid = false; name = "WMO_INST_TYPE"; // ..ref table 8 - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); try { int N = Integer.valueOf(str); @@ -914,21 +781,22 @@ public void validateMandatory_v3(ArgoReferenceTable.DACS dac) throws IOException wmoValid = true; if (info.isDeprecated) { - formatWarnings.add(name + ": '" + str + "' Status: " + info.message); + validationResult.addWarning(name + ": '" + str + "' Status: " + info.message); } } else { - formatErrors.add(name + ": '" + str + "' Status: " + info.message); + validationResult.addError(name + ": '" + str + "' Status: " + info.message); } } catch (Exception e) { - formatErrors.add(name + ": '" + str + "' Invalid. Must be integer."); + validationResult.addError(name + ": '" + str + "' Invalid. Must be integer."); } if (wmoValid && typValid) { if (!plfmType.equals("FLOAT")) { if (!ArgoReferenceTable.PLATFORM_TYPExWMO_INST.xrefContains(plfmType, str)) { - formatErrors.add(plfmTypeName + "/" + name + ": Inconsistent: '" + plfmType + "'/'" + str + "'"); + validationResult + .addError(plfmTypeName + "/" + name + ": Inconsistent: '" + plfmType + "'/'" + str + "'"); log.debug("{}/{} xref inconsistent: plfmType, wmo = '{}', '{}'", plfmTypeName, name, plfmType, str); } else { log.debug("{}/{} xref valid: mdl, wmo = '{}', '{}'", plfmTypeName, name, plfmType, str); @@ -939,78 +807,78 @@ public void validateMandatory_v3(ArgoReferenceTable.DACS dac) throws IOException // ...........per-parameter checks........... 
String[] paramVar; - int nParam = getDimensionLength("N_PARAM"); + int nParam = arFile.getDimensionLength("N_PARAM"); log.debug("N_PARAM: '{}'", nParam); name = "PARAMETER"; // ..in physical parameter list - paramVar = readStringArr(name); + paramVar = arFile.readStringArr(name); for (int n = 0; n < nParam; n++) { str = paramVar[n].trim(); log.debug(name + "[{}]: '{}'", n, str); - if (!spec.isPhysicalParamName(str)) { - formatErrors.add(name + "[" + (n + 1) + "]: '" + str + "': Invalid"); + if (!arFile.getFileSpec().isPhysicalParamName(str)) { + validationResult.addError(name + "[" + (n + 1) + "]: '" + str + "': Invalid"); } } name = "PARAMETER_UNITS"; // ..not empty - paramVar = readStringArr(name); + paramVar = arFile.readStringArr(name); for (int n = 0; n < nParam; n++) { str = paramVar[n].trim(); log.debug(name + "[{}]: '{}'", n, str); if (str.length() <= 0) { - formatErrors.add(name + "[" + (n + 1) + "]: Empty"); + validationResult.addError(name + "[" + (n + 1) + "]: Empty"); } } name = "PARAMETER_SENSOR"; // ..not empty - paramVar = readStringArr(name); + paramVar = arFile.readStringArr(name); for (int n = 0; n < nParam; n++) { str = paramVar[n]; log.debug(name + "[{}]: '{}'", n, str); if (str.length() <= 0) { - formatErrors.add(name + "[" + (n + 1) + "]: Empty"); + validationResult.addError(name + "[" + (n + 1) + "]: Empty"); } } name = "PREDEPLOYMENT_CALIB_COEFFICIENT"; // ..not empty - paramVar = readStringArr(name); + paramVar = arFile.readStringArr(name); for (int n = 0; n < nParam; n++) { str = paramVar[n].trim(); log.debug(name + "[{}]: '{}'", n, str); if (str.length() <= 0) { - formatErrors.add(name + "[" + (n + 1) + "]: Empty"); + validationResult.addError(name + "[" + (n + 1) + "]: Empty"); } } name = "PREDEPLOYMENT_CALIB_EQUATION"; // ..not empty - paramVar = readStringArr(name); + paramVar = arFile.readStringArr(name); for (int n = 0; n < nParam; n++) { str = paramVar[n].trim(); log.debug(name + "[{}]: '{}'", n, str); if (str.length() <= 0) { - formatErrors.add(name + "[" + (n + 1) + "]: Empty"); + validationResult.addError(name + "[" + (n + 1) + "]: Empty"); } } // .........per-sensor checks............ 
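// The per-sensor loop below checks each SENSOR / SENSOR_MAKER / SENSOR_MODEL entry against
// reference tables 25-27 and then cross-references the pairs. A hedged sketch of the
// cross-reference calls, using values believed to appear in the Argo tables (SBE41CP / SBE);
// the actual table contents are an assumption here:
boolean consistent   = ArgoReferenceTable.SENSOR_MODELxSENSOR_MAKER.xrefContains("SBE41CP", "SBE");     // expected true -> no error
boolean inconsistent = ArgoReferenceTable.SENSOR_MODELxSENSOR_MAKER.xrefContains("SBE41CP", "UNKNOWN"); // expected false -> "Inconsistent" error added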
- int nSensor = getDimensionLength("N_SENSOR"); + int nSensor = arFile.getDimensionLength("N_SENSOR"); log.debug("N_SENSOR: '{}'", nSensor); String sensorName = "SENSOR"; // ..ref table 25 - String[] sensor = readStringArr(sensorName); + String[] sensor = arFile.readStringArr(sensorName); String sensorMakerName = "SENSOR_MAKER"; // ..ref table 26 - String[] sensorMaker = readStringArr(sensorMakerName); + String[] sensorMaker = arFile.readStringArr(sensorMakerName); String sensorModelName = "SENSOR_MODEL"; // ..ref table 27 - String[] sensorModel = readStringArr(sensorModelName); + String[] sensorModel = arFile.readStringArr(sensorModelName); for (int n = 0; n < nSensor; n++) { ArgoReferenceTable.ArgoReferenceEntry mdlInfo; @@ -1025,11 +893,12 @@ public void validateMandatory_v3(ArgoReferenceTable.DACS dac) throws IOException if ((snsrInfo = ArgoReferenceTable.SENSOR.contains(snsr)).isValid()) { snsrValid = true; if (snsrInfo.isDeprecated) { - formatWarnings.add(sensorName + "[" + (n + 1) + "]: '" + snsr + "' Status: " + snsrInfo.message); + validationResult + .addWarning(sensorName + "[" + (n + 1) + "]: '" + snsr + "' Status: " + snsrInfo.message); } } else { - formatErrors.add(sensorName + "[" + (n + 1) + "]: '" + snsr + "' Status: " + snsrInfo.message); + validationResult.addError(sensorName + "[" + (n + 1) + "]: '" + snsr + "' Status: " + snsrInfo.message); } // ..check SENSOR_MAKER @@ -1040,12 +909,13 @@ public void validateMandatory_v3(ArgoReferenceTable.DACS dac) throws IOException if ((mkrInfo = ArgoReferenceTable.SENSOR_MAKER.contains(snsrMaker)).isValid()) { smkrValid = true; if (mkrInfo.isDeprecated) { - formatWarnings - .add(sensorMakerName + "[" + (n + 1) + "]: '" + snsrMaker + "' Status: " + mkrInfo.message); + validationResult.addWarning( + sensorMakerName + "[" + (n + 1) + "]: '" + snsrMaker + "' Status: " + mkrInfo.message); } } else { - formatErrors.add(sensorMakerName + "[" + (n + 1) + "]: '" + snsrMaker + "' Status: " + mkrInfo.message); + validationResult.addError( + sensorMakerName + "[" + (n + 1) + "]: '" + snsrMaker + "' Status: " + mkrInfo.message); } // ..check SENSOR_MODEL @@ -1056,11 +926,12 @@ public void validateMandatory_v3(ArgoReferenceTable.DACS dac) throws IOException if ((mdlInfo = ArgoReferenceTable.SENSOR_MODEL.contains(snsrModel)).isValid()) { mdlValid = true; if (mdlInfo.isDeprecated) { - formatWarnings - .add(sensorModelName + "[" + (n + 1) + "]: '" + snsrModel + "' Status: " + mdlInfo.message); + validationResult.addWarning( + sensorModelName + "[" + (n + 1) + "]: '" + snsrModel + "' Status: " + mdlInfo.message); } } else { - formatErrors.add(sensorModelName + "[" + (n + 1) + "]: '" + snsrModel + "' Status: " + mdlInfo.message); + validationResult.addError( + sensorModelName + "[" + (n + 1) + "]: '" + snsrModel + "' Status: " + mdlInfo.message); } // ..cross-reference SENSOR_MODEL / SENSOR_MAKER @@ -1070,7 +941,7 @@ public void validateMandatory_v3(ArgoReferenceTable.DACS dac) throws IOException String mdl = mdlInfo.getColumn(1); if (!ArgoReferenceTable.SENSOR_MODELxSENSOR_MAKER.xrefContains(mdl, mkr)) { - formatErrors.add(sensorModelName + "/" + sensorMakerName + "[" + (n + 1) + "]: " + validationResult.addError(sensorModelName + "/" + sensorMakerName + "[" + (n + 1) + "]: " + "Inconsistent: '" + snsrModel + "'/'" + snsrMaker + "'"); log.debug("SENSOR_MODEL/SENSOR_MAKER xref inconsistent: mdl, mkr = '{}', '{}'", mdl, mkr); } else { @@ -1086,8 +957,8 @@ public void validateMandatory_v3(ArgoReferenceTable.DACS dac) throws IOException String mdl = 
mdlInfo.getColumn(1); if (!ArgoReferenceTable.SENSOR_MODELxSENSOR.xrefContains(mdl, sn)) { - formatErrors.add(sensorModelName + "/" + sensorName + "[" + (n + 1) + "]: " + "Inconsistent: '" - + snsrModel + "'/'" + snsr + "'"); + validationResult.addError(sensorModelName + "/" + sensorName + "[" + (n + 1) + "]: " + + "Inconsistent: '" + snsrModel + "'/'" + snsr + "'"); log.debug("SENSOR_MODEL/SENSOR xref inconsistent: mdl, sn = '{}', '{}'", mdl, sn); } else { log.debug("SENSOR_MODEL/SENSOR xref valid: mdl, sn = '{}', '{}'", mdl, sn); @@ -1098,11 +969,11 @@ public void validateMandatory_v3(ArgoReferenceTable.DACS dac) throws IOException // ..........per-positioning_system checks String[] positVar; - int nPosit = getDimensionLength("N_POSITIONING_SYSTEM"); + int nPosit = arFile.getDimensionLength("N_POSITIONING_SYSTEM"); log.debug("N_POSITIONING_SYSTEM: '{}'", nPosit); name = "POSITIONING_SYSTEM"; // ..ref table 9 - positVar = readStringArr(name); + positVar = arFile.readStringArr(name); for (int n = 0; n < nPosit; n++) { str = positVar[n].trim(); @@ -1110,20 +981,20 @@ public void validateMandatory_v3(ArgoReferenceTable.DACS dac) throws IOException if ((info = ArgoReferenceTable.POSITIONING_SYSTEM.contains(str)).isValid()) { if (info.isDeprecated) { - formatWarnings.add(name + "[" + (n + 1) + "]: '" + str + "' Status: " + info.message); + validationResult.addWarning(name + "[" + (n + 1) + "]: '" + str + "' Status: " + info.message); } } else { - formatErrors.add(name + "[" + (n + 1) + "]: '" + str + "' Status: " + info.message); + validationResult.addError(name + "[" + (n + 1) + "]: '" + str + "' Status: " + info.message); } } // ..........per-trans_system checks String[] transVar; - int nTrans = getDimensionLength("N_TRANS_SYSTEM"); + int nTrans = arFile.getDimensionLength("N_TRANS_SYSTEM"); log.debug("N_TRANS_SYSTEM: '{}'", nTrans); name = "TRANS_SYSTEM"; // ..ref table 10 - transVar = readStringArr(name); + transVar = arFile.readStringArr(name); for (int n = 0; n < nTrans; n++) { str = transVar[n].trim(); @@ -1131,21 +1002,21 @@ public void validateMandatory_v3(ArgoReferenceTable.DACS dac) throws IOException if ((info = ArgoReferenceTable.TRANS_SYSTEM.contains(str)).isValid()) { if (info.isDeprecated) { - formatWarnings.add(name + "[" + (n + 1) + "]: '" + str + "' Status: " + info.message); + validationResult.addWarning(name + "[" + (n + 1) + "]: '" + str + "' Status: " + info.message); } } else { - formatErrors.add(name + "[" + (n + 1) + "]: '" + str + "' Status: " + info.message); + validationResult.addError(name + "[" + (n + 1) + "]: '" + str + "' Status: " + info.message); } } name = "TRANS_SYSTEM_ID"; // ..not empty - transVar = readStringArr(name); + transVar = arFile.readStringArr(name); for (int n = 0; n < nTrans; n++) { str = transVar[n].trim(); log.debug(name + "[{}]: '{}'", n, str); if (str.length() <= 0) { - formatErrors.add(name + "[" + (n + 1) + "]: Empty"); + validationResult.addError(name + "[" + (n + 1) + "]: Empty"); } } @@ -1154,22 +1025,22 @@ public void validateMandatory_v3(ArgoReferenceTable.DACS dac) throws IOException private void checkParameterValueAgainstRefTable(String parameterName, StringTable refTable) { ArgoReferenceTable.ArgoReferenceEntry info; - String parameterValue = readString(parameterName).trim(); + String parameterValue = arFile.readString(parameterName).trim(); log.debug("{}: '{}'", parameterName, parameterValue); - if ((info = ArgoReferenceTable.PROGRAM_NAME.contains(parameterValue)).isValid()) { + if ((info = 
refTable.contains(parameterValue)).isValid()) { if (info.isDeprecated) { - formatWarnings.add(parameterName + ": '" + parameterValue + "' Status: " + info.message); + validationResult.addWarning(parameterName + ": '" + parameterValue + "' Status: " + info.message); } } else { - formatErrors.add(parameterName + ": '" + parameterValue + "' Status: " + info.message); + validationResult.addError(parameterName + ": '" + parameterValue + "' Status: " + info.message); } } private void checkOptionalParameterValueAgainstRefTable(String parameterName, StringTable refTable) { - Variable dataVar = ncReader.findVariable("PROGRAM_NAME"); + Variable dataVar = arFile.getNcReader().findVariable(parameterName); if (dataVar != null) { checkParameterValueAgainstRefTable(parameterName, refTable); } @@ -1183,7 +1054,7 @@ private void checkOptionalParameterValueAgainstRefTable(String parameterName, St * @throws IOException If an I/O error occurs */ private Character getChar(String name) throws IOException { - ArrayChar.D0 value = (ArrayChar.D0) ncReader.findVariable(name).read(); + ArrayChar.D0 value = (ArrayChar.D0) arFile.getNcReader().findVariable(name).read(); return Character.valueOf(value.get()); } @@ -1202,11 +1073,11 @@ public void validateBattery() throws IOException { // .........battery_type............ int nTypes = 0; - String str = readString("BATTERY_TYPE"); + String str = arFile.readString("BATTERY_TYPE"); log.debug("BATTERY_TYPE: '{}'", str); if (str.length() <= 0) { - formatErrors.add("BATTERY_TYPE: Empty"); + validationResult.addError("BATTERY_TYPE: Empty"); } else { @@ -1228,11 +1099,11 @@ public void validateBattery() throws IOException { if (!ArgoReferenceTable.BATTERY_TYPE_manufacturer.contains(manu)) { String err = String.format("BATTERY_TYPE[%d]: Invalid manufacturer: '{%s}'", nTypes, manu); - // formatErrors.add(err); + // validationResult.addError(err); // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFileName(), err); log.debug("...invalid manufacturer"); @@ -1242,11 +1113,11 @@ public void validateBattery() throws IOException { if (!ArgoReferenceTable.BATTERY_TYPE_type.contains(type)) { String err = String.format("BATTERY_TYPE[%d]: Invalid type: '{%s}'", nTypes, type); - // formatErrors.add(err); + // validationResult.addError(err); // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFileName(), err); log.debug("invalid type"); @@ -1259,11 +1130,11 @@ public void validateBattery() throws IOException { String err = String.format( "BATTERY_TYPE[%d]: Does not match template 'manufacturer type volts V': '%s'", nTypes, substr.trim()); - // formatErrors.add(err); + // validationResult.addError(err); // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFileName(), err); 
log.debug("...does not match template"); } @@ -1273,7 +1144,7 @@ public void validateBattery() throws IOException { // .....battery_packs..... int nPacks = -1; - str = readString("BATTERY_PACKS"); + str = arFile.readString("BATTERY_PACKS"); log.debug("BATTERY_PACKS: '{}'", str); @@ -1310,11 +1181,11 @@ public void validateBattery() throws IOException { if (!ArgoReferenceTable.BATTERY_PACKS_style.contains(style)) { String err = String.format("BATTERY_PACKS[%d]: Invalid style of battery: '{%s}'", nPacks, style); - // formatErrors.add(err); + // validationResult.addError(err); // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFileName(), err); log.debug("invalid style"); @@ -1324,11 +1195,11 @@ public void validateBattery() throws IOException { if (!ArgoReferenceTable.BATTERY_PACKS_type.contains(type)) { String err = String.format("BATTERY_PACKS[%d]: Invalid type: '{%s}'", nPacks, type); - // formatErrors.add(err); + // validationResult.addError(err); // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFileName(), err); log.debug("invalid type"); @@ -1341,11 +1212,11 @@ public void validateBattery() throws IOException { String err = String.format( "BATTERY_PACKS[%d]: Does not match template 'xStyle type (or U): '%s'", nPacks, substr.trim()); - // formatErrors.add(err); + // validationResult.addError(err); // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFileName(), err); log.debug("...does not match template"); } @@ -1359,11 +1230,11 @@ public void validateBattery() throws IOException { if (nTypes != nPacks) { String err = String.format("Number of BATTERY_TYPES {} != number of BATTERY_PACKS {}", nTypes, nPacks); - // formatErrors.add(err); + // validationResult.addError(err); // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFileName(), err); log.debug("number of types != number of packs => {} != {}", nTypes, nPacks); } @@ -1386,8 +1257,8 @@ public void validateBattery() throws IOException { public void validateConfigMission() throws IOException { log.debug(".....validateConfigMission....."); - int nMissions = getDimensionLength("N_MISSIONS"); - int[] mission = readIntArr("CONFIG_MISSION_NUMBER"); + int nMissions = arFile.getDimensionLength("N_MISSIONS"); + int[] mission = arFile.readIntArr("CONFIG_MISSION_NUMBER"); log.debug("N_MISSIONS = {}", nMissions); @@ -1395,7 +1266,7 @@ public void validateConfigMission() throws IOException { log.debug("CONFIG_MISSION_NUMBER[{}] = {}", n, mission[n]); if (mission[n] 
== 99999) { - formatWarnings.add("CONFIG_MISSION_NUMBER: Missing at index: " + (n + 1)); + validationResult.addWarning("CONFIG_MISSION_NUMBER: Missing at index: " + (n + 1)); log.debug("config_mission_number == 0 at {}", n); break; } @@ -1424,12 +1295,12 @@ public void validateConfigParams() throws IOException { String dim = "N_" + v + "PARAM"; String varName = v + "PARAMETER_NAME"; - int nParam = getDimensionLength(dim); + int nParam = arFile.getDimensionLength(dim); log.debug("'{}' checking: number of parameters = {}", v, nParam); // ..read config names - String[] full_name = readStringArr(varName); + String[] full_name = arFile.readStringArr(varName); // ...........loop over names............. @@ -1441,7 +1312,8 @@ public void validateConfigParams() throws IOException { // ..poorly formed name - only report if not already reported if (!nameAlreadyChecked.contains(full)) { - formatErrors.add(varName + "[" + (n + 1) + "]: " + "Incorrectly formed name '" + full + "'"); + validationResult + .addError(varName + "[" + (n + 1) + "]: " + "Incorrectly formed name '" + full + "'"); nameAlreadyChecked.add(full); } @@ -1463,23 +1335,24 @@ public void validateConfigParams() throws IOException { if (!nameAlreadyChecked.contains(param)) { // ..this parameter name has not been checked - ArgoConfigTechParam.ArgoConfigTechParamMatch match = spec.ConfigTech.findConfigParam(param); + ArgoConfigTechParam.ArgoConfigTechParamMatch match = arFile.getFileSpec().ConfigTech + .findConfigParam(param); if (match == null) { // ..NOT an active name, NOT a deprecated name --> error String err = String.format("%s[%d]: Invalid name '%s'", varName, (n + 1), param); - // formatErrors.add(err); + // validationResult.addError(err); // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFileName(), err); log.debug("parameter is invalid"); } else { if (match.isDeprecated) { // ..IS a deprecated name --> warning - formatWarnings.add(varName + "[" + (n + 1) + "]: " + "Deprecated name '" + param); + validationResult.addWarning(varName + "[" + (n + 1) + "]: " + "Deprecated name '" + param); log.debug("parameter is deprecated: '{}'", param); } @@ -1493,11 +1366,11 @@ public void validateConfigParams() throws IOException { String err = String.format("%s[%d]: Invalid template/value '%s'/'%s' in '%s'", varName, (n + 1), tmplt, value, param); - // formatErrors.add(err); + // validationResult.addError(err); // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFileName(), err); log.debug("...invalid template/value '{}'/'{}'", tmplt, value); } @@ -1516,11 +1389,12 @@ public void validateConfigParams() throws IOException { if (!ArgoReferenceTable.GENERIC_TEMPLATE_short_sensor_name.contains(str)) { String err = String.format("%s[%d]: Invalid short_sensor_name '%s' in '%s'", varName, (n + 1), str); - // formatErrors.add(err); + // validationResult.addError(err); // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: 
{}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFileName(), + err); log.debug("...generic short_sensor_name lookup: INVALID = '{}'", str); } else { @@ -1533,11 +1407,12 @@ public void validateConfigParams() throws IOException { if (!ArgoReferenceTable.GENERIC_TEMPLATE_cycle_phase_name.contains(str)) { String err = String.format("%s[%d]: Invalid cycle_phase_name '%s' in '%s'", varName, (n + 1), str, param); - // formatErrors.add(err) + // validationResult.addError(err) // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFileName(), + err); log.debug("...generic cycle_phase_name lookup: INVALID = '{}'", str); @@ -1552,11 +1427,12 @@ public void validateConfigParams() throws IOException { String err = String.format("%s[%d]: Invalid param '%s' in '%s'", varName, (n + 1), str, param); - // formatErrors.add(err) + // validationResult.addError(err) // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFileName(), + err); log.debug("...generic param: generic name lookup: INVALID = '{}'", str); } else { @@ -1576,21 +1452,21 @@ public void validateConfigParams() throws IOException { if (!unitAlreadyChecked.containsKey(unit)) { // ..this unit name has not been checked - if (!spec.ConfigTech.isConfigTechUnit(unit)) { + if (!arFile.getFileSpec().ConfigTech.isConfigTechUnit(unit)) { // ..NOT an active unit name - if (spec.ConfigTech.isDeprecatedConfigTechUnit(unit)) { + if (arFile.getFileSpec().ConfigTech.isDeprecatedConfigTechUnit(unit)) { // ..IS a deprecated name --> warning validUnit = true; - formatWarnings.add(varName + "[" + (n + 1) + "]: " + "Deprecated unit '" + unit + "' in '" - + full + "'"); + validationResult.addWarning(varName + "[" + (n + 1) + "]: " + "Deprecated unit '" + unit + + "' in '" + full + "'"); log.debug("deprecated unit '{}'", unit); } else { // ..INVALID unit -- not active, not deprecated --> error validUnit = false; - formatErrors.add( + validationResult.addError( varName + "[" + (n + 1) + "]: " + "Invalid unit '" + unit + "' in '" + full + "'"); log.debug("name is valid, unit ({}) is not valid (new or old)", unit); } diff --git a/file_checker_exec/src/main/java/fr/coriolis/checker/filetypes/ArgoProfileFile.java b/file_checker_exec/src/main/java/fr/coriolis/checker/validators/ArgoProfileFileValidator.java similarity index 62% rename from file_checker_exec/src/main/java/fr/coriolis/checker/filetypes/ArgoProfileFile.java rename to file_checker_exec/src/main/java/fr/coriolis/checker/validators/ArgoProfileFileValidator.java index abfcdaf..5b352da 100644 --- a/file_checker_exec/src/main/java/fr/coriolis/checker/filetypes/ArgoProfileFile.java +++ b/file_checker_exec/src/main/java/fr/coriolis/checker/validators/ArgoProfileFileValidator.java @@ -1,31 +1,24 @@ -package fr.coriolis.checker.filetypes; +package fr.coriolis.checker.validators; import java.io.IOException; import java.io.PrintStream; -import 
java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; -import java.util.HashSet; -import java.util.Set; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import fr.coriolis.checker.specs.ArgoAttribute; +import fr.coriolis.checker.core.ArgoDataFile; +import fr.coriolis.checker.core.ArgoDataFile.FileType; import fr.coriolis.checker.specs.ArgoDate; -import fr.coriolis.checker.specs.ArgoDimension; -import fr.coriolis.checker.specs.ArgoFileSpecification; import fr.coriolis.checker.specs.ArgoReferenceTable; -import fr.coriolis.checker.specs.ArgoVariable; import ucar.ma2.Array; import ucar.ma2.ArrayChar; import ucar.ma2.DataType; import ucar.ma2.Index; import ucar.ma2.InvalidRangeException; -import ucar.nc2.Attribute; -import ucar.nc2.NetcdfFileWriter; import ucar.nc2.Variable; /** @@ -46,7 +39,7 @@ * $ * @version $Id: ArgoProfileFile.java 1269 2021-06-14 20:34:45Z ignaszewski $ */ -public class ArgoProfileFile extends ArgoDataFile { +public class ArgoProfileFileValidator extends ArgoFileValidator { // ......................................... // VARIABLES @@ -58,490 +51,420 @@ public class ArgoProfileFile extends ArgoDataFile { private static PrintStream stderr = new PrintStream(System.err); private static final Logger log = LogManager.getLogger("ArgoProfileFile"); - private final static long oneDaySec = 1L * 24L * 60L * 60L * 1000L; private final static String goodJuldQC = new String("01258"); - // ..object variables - private String data_mode; - - private NetcdfFileWriter ncWriter; - private ArrayList> profParam; // ....................................... // CONSTRUCTORS // ....................................... - protected ArgoProfileFile() throws IOException { - super(); + public ArgoProfileFileValidator(ArgoDataFile arFile) throws IOException { + super(arFile); } - protected ArgoProfileFile(String specDir, String version) { - // super(specDir, FileType.PROFILE, version); - } +// protected ArgoProfileFileValidator(String specDir, String version) { +// // super(specDir, FileType.PROFILE, version); +// } // .......................................... // METHODS // .......................................... - - /** Retrieve the NetcdfFileWriter reference */ - public NetcdfFileWriter getNetcdfFileWriter() { - return ncWriter; - } - - /** - * Retrieve a list of variables in the associated file - */ - public ArrayList getVariableNames() { - ArrayList varNames = new ArrayList(); - - if (ncReader != null) { - for (Variable var : ncReader.getVariables()) { - varNames.add(var.getShortName()); - } - - } else { - varNames = null; - } - - return varNames; - } - - /** - * Convenience method to add to String list for "pretty printing". - * - * @param list the StringBuilder list - * @param add the String to add - */ - private void addToList(StringBuilder list, String add) { - if (list.length() == 0) { - list.append("'" + add + "'"); - } else { - list.append(", '" + add + "'"); - } - } - - /** - * Convenience method to add to String list for "pretty printing". - * - * @param list the StringBuilder list - * @param add the String to add - */ - private void addToList(StringBuilder list, int add) { - if (list.length() == 0) { - list.append(add); - } else { - list.append(", " + add); - } - } - - /** - * Closes an existing file. 
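The rename above turns the old ArgoDataFile subclass into a validator that wraps an ArgoDataFile: the new constructor takes the data file and hands it to the ArgoFileValidator base class, and all reporting goes through a shared validationResult collector instead of the old formatErrors/formatWarnings lists. A minimal sketch of the shape this implies; the real ArgoFileValidator and ValidationResult classes live elsewhere in this change set and may differ in detail:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import fr.coriolis.checker.core.ArgoDataFile;

    // Sketch only: assumed shape of the new base class, inferred from the calls
    // made in this file (arFile accessors, validationResult.addError/addWarning).
    abstract class ArgoFileValidator {
        protected final ArgoDataFile arFile;                 // wrapped, not inherited
        protected final ValidationResult validationResult = new ValidationResult();

        protected ArgoFileValidator(ArgoDataFile arFile) throws IOException {
            this.arFile = arFile;
        }
    }

    // Assumed replacement for the old formatErrors / formatWarnings string lists.
    class ValidationResult {
        private final List<String> errors = new ArrayList<>();
        private final List<String> warnings = new ArrayList<>();

        void addError(String msg)   { errors.add(msg); }
        void addWarning(String msg) { warnings.add(msg); }
        List<String> getErrors()    { return errors; }
        List<String> getWarnings()  { return warnings; }
    }

Validator subclasses such as ArgoProfileFileValidator then read through arFile (arFile.readString, arFile.getDimensionLength, ...) and report through validationResult.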
- * - * @throws IOException If an I/O error occurs - */ - @Override - public void close() throws IOException { - if (ncWriter != null) { - ncWriter.close(); - } - super.close(); - } // ..end close() - - /** - * Creates a new Argo profile file. The "template" for the file is the indicated - * CDL specification file. The physical parameters ( variables) to be - * included must be specified -- see the "parameters" argument. - *

- * A few variables in the CDL spec can be float or double. These default to - * double. - * - * @param fileName The name of the output file - * @param specDir Path to the specification directory - * @param version The version string of the spec file to use as a template - * @param N_PROF N_PROF dimension of the new file - * @param N_PARAM N_PARAM dimension of the new file (max number of params in - * a single profile) - * @param N_LEVELS N_LEVELS dimension of the new file - * @param N_CALIB N_CALIB dimension of the new file - * @param parameters The parameter names that will be in the new file - * @throws IOException If problems creating the file are encountered - * @throws NumberFormatException If problems converting certain values encoded - * in the CDL attributes to number are - * encountered. - */ - public static ArgoProfileFile createNew(String fileName, String specDir, String version, - ArgoDataFile.FileType fileType, int N_PROF, int N_PARAM, int N_LEVELS, int N_CALIB, - Set inParameters) throws IOException, NumberFormatException { - log.debug(".....createNew: start....."); - - ArgoProfileFile arFile = new ArgoProfileFile(); - - // ..create the template specification - - arFile.spec = ArgoDataFile.openSpecification(false, specDir, fileType, version); - if (arFile.spec == null) { - return null; - } - - arFile.fileType = fileType; - - // ..remove any parameters that are not in this specification - - HashSet parameters = new HashSet(20); - - for (String p : inParameters) { - if (arFile.spec.isPhysicalParamName(p)) { - parameters.add(p); - } else { - log.debug("requested parameter '{}' not in spec: removed"); - } - } - - // ..create new file - - arFile.ncWriter = NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf3, fileName); - arFile.ncWriter.setFill(true); - - // ..fill in object variables - arFile.file = null; // ..don't know why I need this ..... yet - arFile.fileType = fileType; - arFile.format_version = version; - arFile.ncFileName = fileName; - // arFile.spec is filled in by openSpec... - - // .....add globabl attributes..... - - for (ArgoAttribute a : arFile.spec.getGlobalAttributes()) { - String name = a.getName(); - String value = (String) a.getValue(); - - if (value.matches(ArgoFileSpecification.ATTR_SPECIAL_REGEX)) { - // ..the definition starts with one of the special ATTR_IGNORE codes - - if (value.length() > ArgoFileSpecification.ATTR_SPECIAL_LENGTH) { - // ..there is more than just the code on the line - // ..defining the default value, use it - - value = value.substring(ArgoFileSpecification.ATTR_SPECIAL_LENGTH); - log.debug("global attribute with special code: '{}'", value); - - } else { - // ..nothing but the code on the line - // ..ignore the attribute - value = null; - } - } - - if (name.equals("history")) { - value = (new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'")).format(new Date()).toString(); - log.debug("history global attribute: '{}'", value); - } - - if (value != null) { - arFile.ncWriter.addGroupAttribute(null, new Attribute(name, value)); - log.debug("add global attribute: '{}' = '{}'", name, value); - } - //// 2014-11-24T10:31:58Z creation;2014-11-26T16:31:26Z update" ; - } - - // .........add Dimensions............... 
- // ..don't allow <= 0 dimensions - if (N_PROF <= 0) { - N_PROF = 1; - } - if (N_PARAM <= 0) { - N_PARAM = 1; - } - if (N_LEVELS <= 0) { - N_LEVELS = 1; - } - if (N_CALIB <= 0) { - N_CALIB = 1; - } - - for (ArgoDimension d : arFile.spec.getDimensions()) { - String name = d.getName(); - int value = d.getValue(); - - if (name.equals("N_PROF")) { - value = N_PROF; - } else if (name.equals("N_PARAM")) { - value = N_PARAM; - } else if (name.equals("N_LEVELS")) { - value = N_LEVELS; - } else if (name.equals("N_CALIB")) { - value = N_CALIB; - } - - if (!d.isAlternateDimension()) { - log.debug("add dimension: '{}' = '{}'", name, value); - - if (name.equals("N_HISTORY")) { - arFile.ncWriter.addUnlimitedDimension(name); - } else { - arFile.ncWriter.addDimension(null, name, value); - } - } else { - log.debug("skip alternate dimension: '{}'", name); - } - } - - // .........add Variables............... - - // ......ordered list..... - // ..this bit of code arranges the variables in the "expected order" - // ..this is, technically, completely unecessary - // ..the ordering of the variables in the file should not matter - // ..however, at least one user is complaining about the current files - // ..and I am guessing that variable ordering is the problem. - - // ..the way the "spec" files are parsed, the PARAM variables end up - // ..at the end of the variables list - // ..so I am trying to distribute the variables in the "expected order" - - // ..good coding by users would eliminate the need for this - - // ..the idea is to create an ordered list of variable names that are - // ..then used in the next section - - log.debug("...build ordered list of variables..."); - - ArrayList orderedList = new ArrayList(200); - - for (ArgoVariable v : arFile.spec.getVariables()) { - String name = v.getName(); - - if (v.isParamVar()) { - // ..when we get to the PARAM variables, we are done - // ..they are always at the end of the list and we handle - // ..them separately in the if-blocks below - break; - } - - orderedList.add(v); - log.debug("add {}", name); - - // ..insert the PROFILE__QC variables after POSITIONING_SYSTEM - - if (name.equals("POSITIONING_SYSTEM")) { - log.debug("insert PROFILE_ here"); - for (ArgoVariable w : arFile.spec.getVariables()) { - if (w.getName().startsWith("PROFILE_")) { - orderedList.add(w); - log.debug("add {}", w.getName()); - } - } - } - - // ..insert the variables after CONFIG_MISSION_NUMBER - - if (name.equals("CONFIG_MISSION_NUMBER")) { - log.debug("insert here"); - for (ArgoVariable w : arFile.spec.getVariables()) { - if (w.isParamVar()) { - if (!w.getName().startsWith("PROFILE_")) { - orderedList.add(w); - log.debug("add {}", w.getName()); - } - } - } - } - } - - log.debug("...ordered list complete..."); - - // ....end ordered list.... - - Boolean keep; - // ...if we didn't need the list to be ordered... 
- // for (ArgoVariable v : arFile.spec.getVariables()) { - - for (ArgoVariable v : orderedList) { - String name = v.getName(); - String prm = v.getParamName(); - - if (prm != null) { - // ..this is a physical parameter variable - // ..is it's parameter name in the parameter list - - if (parameters.contains(prm)) { - keep = true; - } else { - keep = false; - log.debug("skip variable: '{}'", name); - } - - } else { - // ..not a physical parameter, so keep it - keep = true; - } - - if (keep) { - DataType type = v.getType(); - String dims = v.getDimensionsString(); - - if (type == DataType.OPAQUE) { - // ..this is one of the float_or_double types - // ..default it to double - type = DataType.DOUBLE; - } - - if (v.canHaveAlternateDimensions()) { - // ..this variable has an alternate dimension - // ..dim string will have a "|" in it must remove it - String newDims = dims.replaceAll("\\|\\w+", ""); - log.debug("modify alternate dims: before = '{}' after = '{}'", dims, newDims); - dims = newDims; - } - - Variable var = arFile.ncWriter.addVariable(null, name, type, dims); - log.debug("add variable: '{}': '{}' '{}'", name, type, dims); - - // ..add attributes for this variable - for (ArgoAttribute a : v.getAttributes()) { - Attribute att = null; - String aname = a.getName(); - - if (a.isNumeric()) { - Number num = (Number) a.getValue(); - - if (num == null) { - // ..see if there is a default - String def = a.getDefaultValue(); - - if (def != null) { - try { - switch (type) { - case INT: - num = new Integer(def); - break; - case FLOAT: - num = new Float(def); - break; - case DOUBLE: - num = new Double(def); - break; - case LONG: - num = new Long(def); - break; - } - } catch (NumberFormatException e) { - throw new NumberFormatException( - "Attribute " + name + ":" + aname + ": Unable to convert to number"); - } - } - } - - if (num != null) { - var.addAttribute(new Attribute(aname, num)); - log.debug("add attribute: '{}:{}' = '{}'", name, aname, num); - } else { - log.debug("attribute ignored (no value): '{}:{}'", name, aname); - } - - } else if (a.isString()) { - Object value = a.getValue(); - String str = null; - - if (value == null) { - // ..value is not set, see if there is a default - str = a.getDefaultValue(); - } else { - str = value.toString(); - } - - if (str != null) { - var.addAttribute(new Attribute(aname, str)); - log.debug("add attribute: '{}:{}' = '{}'", name, aname, str); - } - - } else { - log.error("attribute not Number or String: '{}:{}'", name, aname); - continue; - } - - } - } - } - - // .....create the file -- end "define mode" - - arFile.ncWriter.create(); - arFile.ncWriter.close(); // ..having trouble exiting define mode so close/reopen - - arFile.ncWriter = NetcdfFileWriter.openExisting(fileName); - arFile.ncWriter.setFill(true); - - arFile.ncReader = arFile.ncWriter.getNetcdfFile(); - - log.debug(".....createNew: end....."); - return arFile; - } - - /** - * Opens an existing file without opening the specification - * - * @param inFile the string name of the file to open - * @return the file object reference. Returns null if the file is not opened - * successfully. (ArgoProfileFile.getMessage() will return the reason - * for the failure to open.) 
- * @throws IOException If an I/O error occurs - */ - public static ArgoProfileFile open(String inFile) throws IOException { - try { - return (ArgoProfileFile.open(inFile, false)); - - } catch (IOException e) { - throw e; - } - - } - - /** - * Opens an existing file without opening the specification and, - * optionally, ignores a bad DATA_TYPE --- will still detect accepted - * non-standard settings in old v3.1 files - * - * @param inFile the string name of the file to open - * @param overrideBadTYPE true = force "open" to ignore BadTYPE failure - * @return the file object reference. Returns null if the file is not opened - * successfully. (ArgoProfileFile.getMessage() will return the reason - * for the failure to open.) - * @throws IOException If an I/O error occurs - */ - public static ArgoProfileFile open(String inFile, boolean overrideBadTYPE) throws IOException { - ArgoDataFile arFile = ArgoDataFile.open(inFile, overrideBadTYPE); - if (!(arFile instanceof ArgoProfileFile)) { - message = "ERROR: '" + inFile + "' not an Argo PROFILE file"; - return null; - } - - return (ArgoProfileFile) arFile; - } - - /** - * Opens an existing file and the assoicated Argo specification). - * - * @param inFile the string name of the file to open - * @param specDir the string name of the directory containing the format - * specification files - * @param fullSpec true = open the full specification; false = open the template - * specification - * @return the file object reference. Returns null if the file is not opened - * successfully. (ArgoProfileFile.getMessage() will return the reason - * for the failure to open.) - * @throws IOException If an I/O error occurs - */ - public static ArgoProfileFile open(String inFile, String specDir, boolean fullSpec) throws IOException { - ArgoDataFile arFile = ArgoDataFile.open(inFile, specDir, fullSpec); - if (!(arFile instanceof ArgoProfileFile)) { - message = "ERROR: '" + inFile + "' not an Argo PROFILE file"; - return null; - } - - return (ArgoProfileFile) arFile; - } +// +// /** +// * Creates a new Argo profile file. The "template" for the file is the indicated +// * CDL specification file. The physical parameters ( variables) to be +// * included must be specified -- see the "parameters" argument. +// *

+// * A few variables in the CDL spec can be float or double. These default to +// * double. +// * +// * @param fileName The name of the output file +// * @param specDir Path to the specification directory +// * @param version The version string of the spec file to use as a template +// * @param N_PROF N_PROF dimension of the new file +// * @param N_PARAM N_PARAM dimension of the new file (max number of params in +// * a single profile) +// * @param N_LEVELS N_LEVELS dimension of the new file +// * @param N_CALIB N_CALIB dimension of the new file +// * @param parameters The parameter names that will be in the new file +// * @throws IOException If problems creating the file are encountered +// * @throws NumberFormatException If problems converting certain values encoded +// * in the CDL attributes to number are +// * encountered. +// */ +// public static ArgoProfileFileValidator createNew(String fileName, String specDir, String version, +// ArgoDataFile.FileType fileType, int N_PROF, int N_PARAM, int N_LEVELS, int N_CALIB, +// Set inParameters) throws IOException, NumberFormatException { +// log.debug(".....createNew: start....."); +// +// ArgoProfileFileValidator arFile = new ArgoProfileFileValidator(); +// +// // ..create the template specification +// +// arFile.spec = ArgoDataFile.openSpecification(false, specDir, fileType, version); +// if (arFile.spec == null) { +// return null; +// } +// +// arFile.fileType = fileType; +// +// // ..remove any parameters that are not in this specification +// +// HashSet parameters = new HashSet(20); +// +// for (String p : inParameters) { +// if (arFile.spec.isPhysicalParamName(p)) { +// parameters.add(p); +// } else { +// log.debug("requested parameter '{}' not in spec: removed"); +// } +// } +// +// // ..create new file +// +// arFile.ncWriter = NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf3, fileName); +// arFile.ncWriter.setFill(true); +// +// // ..fill in object variables +// arFile.file = null; // ..don't know why I need this ..... yet +// arFile.fileType = fileType; +// arFile.format_version = version; +// arFile.ncFileName = fileName; +// // arFile.spec is filled in by openSpec... +// +// // .....add globabl attributes..... +// +// for (ArgoAttribute a : arFile.spec.getGlobalAttributes()) { +// String name = a.getName(); +// String value = (String) a.getValue(); +// +// if (value.matches(ArgoFileSpecification.ATTR_SPECIAL_REGEX)) { +// // ..the definition starts with one of the special ATTR_IGNORE codes +// +// if (value.length() > ArgoFileSpecification.ATTR_SPECIAL_LENGTH) { +// // ..there is more than just the code on the line +// // ..defining the default value, use it +// +// value = value.substring(ArgoFileSpecification.ATTR_SPECIAL_LENGTH); +// log.debug("global attribute with special code: '{}'", value); +// +// } else { +// // ..nothing but the code on the line +// // ..ignore the attribute +// value = null; +// } +// } +// +// if (name.equals("history")) { +// value = (new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'")).format(new Date()).toString(); +// log.debug("history global attribute: '{}'", value); +// } +// +// if (value != null) { +// arFile.ncWriter.addGroupAttribute(null, new Attribute(name, value)); +// log.debug("add global attribute: '{}' = '{}'", name, value); +// } +// //// 2014-11-24T10:31:58Z creation;2014-11-26T16:31:26Z update" ; +// } +// +// // .........add Dimensions............... 
+// // ..don't allow <= 0 dimensions +// if (N_PROF <= 0) { +// N_PROF = 1; +// } +// if (N_PARAM <= 0) { +// N_PARAM = 1; +// } +// if (N_LEVELS <= 0) { +// N_LEVELS = 1; +// } +// if (N_CALIB <= 0) { +// N_CALIB = 1; +// } +// +// for (ArgoDimension d : arFile.spec.getDimensions()) { +// String name = d.getName(); +// int value = d.getValue(); +// +// if (name.equals("N_PROF")) { +// value = N_PROF; +// } else if (name.equals("N_PARAM")) { +// value = N_PARAM; +// } else if (name.equals("N_LEVELS")) { +// value = N_LEVELS; +// } else if (name.equals("N_CALIB")) { +// value = N_CALIB; +// } +// +// if (!d.isAlternateDimension()) { +// log.debug("add dimension: '{}' = '{}'", name, value); +// +// if (name.equals("N_HISTORY")) { +// arFile.ncWriter.addUnlimitedDimension(name); +// } else { +// arFile.ncWriter.addDimension(null, name, value); +// } +// } else { +// log.debug("skip alternate dimension: '{}'", name); +// } +// } +// +// // .........add Variables............... +// +// // ......ordered list..... +// // ..this bit of code arranges the variables in the "expected order" +// // ..this is, technically, completely unecessary +// // ..the ordering of the variables in the file should not matter +// // ..however, at least one user is complaining about the current files +// // ..and I am guessing that variable ordering is the problem. +// +// // ..the way the "spec" files are parsed, the PARAM variables end up +// // ..at the end of the variables list +// // ..so I am trying to distribute the variables in the "expected order" +// +// // ..good coding by users would eliminate the need for this +// +// // ..the idea is to create an ordered list of variable names that are +// // ..then used in the next section +// +// log.debug("...build ordered list of variables..."); +// +// ArrayList orderedList = new ArrayList(200); +// +// for (ArgoVariable v : arFile.spec.getVariables()) { +// String name = v.getName(); +// +// if (v.isParamVar()) { +// // ..when we get to the PARAM variables, we are done +// // ..they are always at the end of the list and we handle +// // ..them separately in the if-blocks below +// break; +// } +// +// orderedList.add(v); +// log.debug("add {}", name); +// +// // ..insert the PROFILE__QC variables after POSITIONING_SYSTEM +// +// if (name.equals("POSITIONING_SYSTEM")) { +// log.debug("insert PROFILE_ here"); +// for (ArgoVariable w : arFile.spec.getVariables()) { +// if (w.getName().startsWith("PROFILE_")) { +// orderedList.add(w); +// log.debug("add {}", w.getName()); +// } +// } +// } +// +// // ..insert the variables after CONFIG_MISSION_NUMBER +// +// if (name.equals("CONFIG_MISSION_NUMBER")) { +// log.debug("insert here"); +// for (ArgoVariable w : arFile.spec.getVariables()) { +// if (w.isParamVar()) { +// if (!w.getName().startsWith("PROFILE_")) { +// orderedList.add(w); +// log.debug("add {}", w.getName()); +// } +// } +// } +// } +// } +// +// log.debug("...ordered list complete..."); +// +// // ....end ordered list.... +// +// Boolean keep; +// // ...if we didn't need the list to be ordered... 
+// // for (ArgoVariable v : arFile.spec.getVariables()) { +// +// for (ArgoVariable v : orderedList) { +// String name = v.getName(); +// String prm = v.getParamName(); +// +// if (prm != null) { +// // ..this is a physical parameter variable +// // ..is it's parameter name in the parameter list +// +// if (parameters.contains(prm)) { +// keep = true; +// } else { +// keep = false; +// log.debug("skip variable: '{}'", name); +// } +// +// } else { +// // ..not a physical parameter, so keep it +// keep = true; +// } +// +// if (keep) { +// DataType type = v.getType(); +// String dims = v.getDimensionsString(); +// +// if (type == DataType.OPAQUE) { +// // ..this is one of the float_or_double types +// // ..default it to double +// type = DataType.DOUBLE; +// } +// +// if (v.canHaveAlternateDimensions()) { +// // ..this variable has an alternate dimension +// // ..dim string will have a "|" in it must remove it +// String newDims = dims.replaceAll("\\|\\w+", ""); +// log.debug("modify alternate dims: before = '{}' after = '{}'", dims, newDims); +// dims = newDims; +// } +// +// Variable var = arFile.ncWriter.addVariable(null, name, type, dims); +// log.debug("add variable: '{}': '{}' '{}'", name, type, dims); +// +// // ..add attributes for this variable +// for (ArgoAttribute a : v.getAttributes()) { +// Attribute att = null; +// String aname = a.getName(); +// +// if (a.isNumeric()) { +// Number num = (Number) a.getValue(); +// +// if (num == null) { +// // ..see if there is a default +// String def = a.getDefaultValue(); +// +// if (def != null) { +// try { +// switch (type) { +// case INT: +// num = new Integer(def); +// break; +// case FLOAT: +// num = new Float(def); +// break; +// case DOUBLE: +// num = new Double(def); +// break; +// case LONG: +// num = new Long(def); +// break; +// } +// } catch (NumberFormatException e) { +// throw new NumberFormatException( +// "Attribute " + name + ":" + aname + ": Unable to convert to number"); +// } +// } +// } +// +// if (num != null) { +// var.addAttribute(new Attribute(aname, num)); +// log.debug("add attribute: '{}:{}' = '{}'", name, aname, num); +// } else { +// log.debug("attribute ignored (no value): '{}:{}'", name, aname); +// } +// +// } else if (a.isString()) { +// Object value = a.getValue(); +// String str = null; +// +// if (value == null) { +// // ..value is not set, see if there is a default +// str = a.getDefaultValue(); +// } else { +// str = value.toString(); +// } +// +// if (str != null) { +// var.addAttribute(new Attribute(aname, str)); +// log.debug("add attribute: '{}:{}' = '{}'", name, aname, str); +// } +// +// } else { +// log.error("attribute not Number or String: '{}:{}'", name, aname); +// continue; +// } +// +// } +// } +// } +// +// // .....create the file -- end "define mode" +// +// arFile.ncWriter.create(); +// arFile.ncWriter.close(); // ..having trouble exiting define mode so close/reopen +// +// arFile.ncWriter = NetcdfFileWriter.openExisting(fileName); +// arFile.ncWriter.setFill(true); +// +// arFile.ncReader = arFile.ncWriter.getNetcdfFile(); +// +// log.debug(".....createNew: end....."); +// return arFile; +// } + +// /** +// * Opens an existing file without opening the specification +// * +// * @param inFile the string name of the file to open +// * @return the file object reference. Returns null if the file is not opened +// * successfully. (ArgoProfileFile.getMessage() will return the reason +// * for the failure to open.) 
+// * @throws IOException If an I/O error occurs +// */ +// public static ArgoProfileFileValidator open(String inFile) throws IOException { +// try { +// return (ArgoProfileFileValidator.open(inFile, false)); +// +// } catch (IOException e) { +// throw e; +// } +// +// } + +// /** +// * Opens an existing file without opening the specification and, +// * optionally, ignores a bad DATA_TYPE --- will still detect accepted +// * non-standard settings in old v3.1 files +// * +// * @param inFile the string name of the file to open +// * @param overrideBadTYPE true = force "open" to ignore BadTYPE failure +// * @return the file object reference. Returns null if the file is not opened +// * successfully. (ArgoProfileFile.getMessage() will return the reason +// * for the failure to open.) +// * @throws IOException If an I/O error occurs +// */ +// public static ArgoProfileFileValidator open(String inFile, boolean overrideBadTYPE) throws IOException { +// ArgoDataFile arFile = ArgoDataFile.open(inFile, overrideBadTYPE); +// if (!(arFile instanceof ArgoProfileFileValidator)) { +// ValidationResult.lastMessage = "ERROR: '" + inFile + "' not an Argo PROFILE file"; +// return null; +// } +// +// return (ArgoProfileFileValidator) arFile; +// } + +// /** +// * Opens an existing file and the assoicated Argo specification). +// * +// * @param inFile the string name of the file to open +// * @param specDir the string name of the directory containing the format +// * specification files +// * @param fullSpec true = open the full specification; false = open the template +// * specification +// * @return the file object reference. Returns null if the file is not opened +// * successfully. (ArgoProfileFile.getMessage() will return the reason +// * for the failure to open.) +// * @throws IOException If an I/O error occurs +// */ +// public static ArgoProfileFileValidator open(String inFile, String specDir, boolean fullSpec) throws IOException { +// ArgoDataFile arFile = ArgoDataFile.open(inFile, specDir, fullSpec); +// if (!(arFile instanceof ArgoProfileFileValidator)) { +// ValidationResult.lastMessage = "ERROR: '" + inFile + "' not an Argo PROFILE file"; +// return null; +// } +// +// return (ArgoProfileFileValidator) arFile; +// } /** * Validates the data in the profile file. This is a driver routine that @@ -563,44 +486,25 @@ public static ArgoProfileFile open(String inFile, String specDir, boolean fullSp * reason). * @throws IOException If an I/O error occurs */ - public boolean validate(boolean singleCycle, String dacName, boolean ckNulls) throws IOException { - ArgoReferenceTable.DACS dac = null; - - if (!verified) { - message = new String("File must be verified (verifyFormat) " + "successfully before validation"); + public boolean validateData(boolean singleCycle, String dacName, boolean ckNulls) throws IOException { + boolean basicsChecks = super.basicDataValidation(ckNulls); + if (!basicsChecks) { return false; } - // .......check arguments....... - if (dacName.trim().length() > 0) { - for (ArgoReferenceTable.DACS d : ArgoReferenceTable.DACS.values()) { - if (d.name.equals(dacName)) { - dac = d; - break; - } - } - if (dac == null) { - message = new String("Unknown DAC name = '" + dacName + "'"); - return false; - } - } - // ............Determine number of profiles............ 
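validate() becomes validateData(), and the old preamble (the verified-before-validate guard, the DAC name lookup against ArgoReferenceTable.DACS, and the optional NUL-character scan) is folded into a single super.basicDataValidation(ckNulls) call; the resolved DAC is later fetched with arFile.getValidatedDac(). A hedged sketch of what that shared method presumably covers, inferred only from the code it replaces; method and accessor names are assumptions, and the original reported these failures through a message string rather than the error list:

    // Assumed consolidation of the old per-file-type pre-checks; names are guesses,
    // only the logic mirrors the removed inline code.
    protected boolean basicDataValidation(boolean ckNulls) throws IOException {
        if (!arFile.isVerified()) {                        // old 'verified' flag
            validationResult.addError(
                "File must be verified (verifyFormat) successfully before validation");
            return false;
        }
        String dacName = arFile.getDacName();
        if (dacName != null && dacName.trim().length() > 0
                && arFile.getValidatedDac() == null) {     // name not in ArgoReferenceTable.DACS
            validationResult.addError("Unknown DAC name = '" + dacName + "'");
            return false;
        }
        if (ckNulls) {
            validateStringNulls();                         // presumably hoisted into the base class too
        }
        return true;
    }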
- int nProf = getDimensionLength("N_PROF"); + int nProf = arFile.getDimensionLength("N_PROF"); log.debug(".....validate: number of profiles {}.....", nProf); - int nCalib = getDimensionLength("N_CALIB"); - int nHistory = getDimensionLength("N_HISTORY"); - int nParam = getDimensionLength("N_PARAM"); - int nLevel = getDimensionLength("N_LEVELS"); + int nCalib = arFile.getDimensionLength("N_CALIB"); + int nHistory = arFile.getDimensionLength("N_HISTORY"); + int nParam = arFile.getDimensionLength("N_PARAM"); + int nLevel = arFile.getDimensionLength("N_LEVELS"); - if (ckNulls) { - validateStringNulls(); - } validateHighlyDesirable(nProf); - if (!validateMetaData(nProf, dac, singleCycle)) { + if (!validateMetaData(nProf, arFile.getValidatedDac(), singleCycle)) { return true; } @@ -608,7 +512,7 @@ public boolean validate(boolean singleCycle, String dacName, boolean ckNulls) th validateParams(nProf, nParam, nLevel); validateQC(nProf, nParam, nLevel); - if (fileType == FileType.PROFILE) { // ..implies a core-file + if (arFile.fileType() == FileType.PROFILE) { // ..implies a core-file validateDMode(nProf, nParam, nCalib, nHistory); } @@ -644,90 +548,33 @@ public void validateDates(int nProf, int nParam, int nCalib, int nHistory) throw // ..check Reference_Date String name = "REFERENCE_DATE_TIME"; - String ref = readString(name); + String ref = arFile.readString(name); log.debug(name + ": " + ref); - if (!ref.matches(spec.getMeta(name))) { - formatErrors.add(name + ": '" + ref + "': Does not match specification ('" + spec.getMeta(name) + "')"); + if (!ref.matches(arFile.getFileSpec().getMeta(name))) { + validationResult.addError(name + ": '" + ref + "': Does not match specification ('" + + arFile.getFileSpec().getMeta(name) + "')"); } // ..read other times - double[] juld = readDoubleArr("JULD"); - String juld_qc = readString("JULD_QC", true); // ..true -> return NULLs if present - double[] juld_loc = readDoubleArr("JULD_LOCATION"); - String creation = readString("DATE_CREATION"); - String update = readString("DATE_UPDATE"); - Date fileTime = new Date(file.lastModified()); + double[] juld = arFile.readDoubleArr("JULD"); + String juld_qc = arFile.readString("JULD_QC", true); // ..true -> return NULLs if present + double[] juld_loc = arFile.readDoubleArr("JULD_LOCATION"); +// String creation = arFile.readString("DATE_CREATION"); +// String update = arFile.readString("DATE_UPDATE"); + Date fileTime = new Date(arFile.getFile().lastModified()); long fileSec = fileTime.getTime(); if (log.isDebugEnabled()) { log.debug("earliestDate: " + ArgoDate.format(earliestDate)); log.debug("fileTime: " + ArgoDate.format(fileTime)); - log.debug("DATE_CREATION: " + creation); - log.debug("DATE_UPDATE: " + update); +// log.debug("DATE_CREATION: " + creation); +// log.debug("DATE_UPDATE: " + update); } - // ...........initial creation date checks:............. 
- // ..set, after earliestDate, and before file time - Date dateCreation = null; - boolean haveCreation = false; - long creationSec = 0; - - if (creation.trim().length() <= 0) { - formatErrors.add("DATE_CREATION: Not set"); - - } else { - dateCreation = ArgoDate.get(creation); - haveCreation = true; - - if (dateCreation == null) { - haveCreation = false; - formatErrors.add("DATE_CREATION: '" + creation + "': Invalid date"); - - } else { - creationSec = dateCreation.getTime(); - - if (dateCreation.before(earliestDate)) { - formatErrors.add("DATE_CREATION: '" + creation + "': Before allowed date ('" - + ArgoDate.format(earliestDate) + "')"); - - } else if ((creationSec - fileSec) > oneDaySec) { - formatErrors.add("DATE_CREATION: '" + creation + "': After GDAC receipt time ('" - + ArgoDate.format(fileTime) + "')"); - } - } - } - - // ............initial update date checks:........... - // ..set, not before creation time, before file time - Date dateUpdate = null; - boolean haveUpdate = false; - long updateSec = 0; - - if (update.trim().length() <= 0) { - formatErrors.add("DATE_UPDATE: Not set"); - } else { - dateUpdate = ArgoDate.get(update); - haveUpdate = true; - - if (dateUpdate == null) { - formatErrors.add("DATE_UPDATE: '" + update + "': Invalid date"); - haveUpdate = false; - - } else { - updateSec = dateUpdate.getTime(); - - if (haveCreation && dateUpdate.before(dateCreation)) { - formatErrors.add("DATE_UPDATE: '" + update + "': Before DATE_CREATION ('" + creation + "')"); - } - - if ((updateSec - fileSec) > oneDaySec) { - formatErrors.add("DATE_UPDATE: '" + update + "': After GDAC receipt time ('" - + ArgoDate.format(fileTime) + "')"); - } - } - } + // ...........initial creation & update dates checks:............. + super.validateCreationUpdateDates(fileTime); // ............check per-profile dates............. // String posQC = readString("POSITION_QC"); @@ -738,7 +585,7 @@ public void validateDates(int nProf, int nParam, int nCalib, int nHistory) throw if (qc_index >= 0) { if (juld[n] > 999990.) 
{ - formatErrors.add("JULD[" + (n + 1) + "]: Missing when QC = " + qc); + validationResult.addError("JULD[" + (n + 1) + "]: Missing when QC = " + qc); continue; } @@ -749,19 +596,19 @@ public void validateDates(int nProf, int nParam, int nCalib, int nHistory) throw log.debug("JULD[{}]: {} = {} (qc = {})", n, juld[n], juldDTG, qc); if (dateJuld.before(earliestDate)) { - formatErrors.add("JULD[" + (n + 1) + "]: " + juld[n] + " = '" + juldDTG + validationResult.addError("JULD[" + (n + 1) + "]: " + juld[n] + " = '" + juldDTG + "': Before earliest allowed date ('" + earliestDate + "')"); } // ..check that JULD is before DATE_CREATION and before file time long juldSec = dateJuld.getTime(); - if (haveCreation && (juldSec - creationSec) > oneDaySec) { - formatErrors.add("JULD[" + (n + 1) + "]: " + juld[n] + " = '" + juldDTG - + "': After DATE_CREATION ('" + creation + "')"); + if (arFile.isHaveCreationDate() && (juldSec - arFile.getCreationSec()) > oneDaySec) { + validationResult.addError("JULD[" + (n + 1) + "]: " + juld[n] + " = '" + juldDTG + + "': After DATE_CREATION ('" + arFile.getCreationDate() + "')"); } if ((juldSec - fileSec) > oneDaySec) { - formatErrors.add("JULD[" + (n + 1) + "]: " + juld[n] + " = '" + juldDTG + validationResult.addError("JULD[" + (n + 1) + "]: " + juld[n] + " = '" + juldDTG + "': After GDAC receipt time ('" + ArgoDate.format(fileTime) + "')"); } @@ -774,21 +621,21 @@ public void validateDates(int nProf, int nParam, int nCalib, int nHistory) throw if (juld_loc[n] < 99990.d) { double max = 2.d; if (Math.abs(juld_loc[n] - juld[n]) > max) { - formatWarnings.add("JULD_LOCATION[" + (n + 1) + "]: " + juld_loc[n] + ": Not within " + max - + " day of JULD (" + juld[n] + ")"); + validationResult.addWarning("JULD_LOCATION[" + (n + 1) + "]: " + juld_loc[n] + ": Not within " + + max + " day of JULD (" + juld[n] + ")"); } } else { // ..juld_location is missing // ..if this is missing, position better be missing // ..this is very rare -- spend the overhead to do it // .. 
here each time - double lat = readDouble("LATITUDE", n); - double lon = readDouble("LONGITUDE", n); + double lat = arFile.readDouble("LATITUDE", n); + double lon = arFile.readDouble("LONGITUDE", n); // if ((posQC.charAt(n) == '1' || posQC.charAt(n) == '2') && // (lat < 99990.d || lon < 99990.d)) { if (lat < 99990.d || lon < 99990.d) { - formatErrors.add("JULD_LOCATION[" + (n + 1) + "]: Missing when " + validationResult.addError("JULD_LOCATION[" + (n + 1) + "]: Missing when " + "LATITUDE and/or LONGITUDE are not missing."); } } @@ -802,7 +649,7 @@ public void validateDates(int nProf, int nParam, int nCalib, int nHistory) throw // ..if set, after DATE_CREATION, before DATE_UPDATE if (nHistory > 0) { - ArrayChar hDate = (ArrayChar) findVariable("HISTORY_DATE").read(); + ArrayChar hDate = (ArrayChar) arFile.findVariable("HISTORY_DATE").read(); Index ndx = hDate.getIndex(); for (int h = 0; h < nHistory; h++) { @@ -818,19 +665,19 @@ public void validateDates(int nProf, int nParam, int nCalib, int nHistory) throw Date date = ArgoDate.get(dateHist); if (date == null) { - formatErrors.add( + validationResult.addError( "HISTORY_DATE[" + (h + 1) + "," + (n + 1) + "]: '" + dateHist + "': Invalid date"); // } else if (haveCreation && date.before(dateCreation)) { - // formatErrors.add("HISTORY_DATE["+(h+1)+","+(n+1)+"]: '"+ + // validationResult.addError("HISTORY_DATE["+(h+1)+","+(n+1)+"]: '"+ // dateHist+ // "': Before DATE_CREATION ('"+creation+"')"); - } else if (haveUpdate) { + } else if (arFile.isHaveUpdateDate()) { long dateSec = date.getTime(); - if ((dateSec - updateSec) > oneDaySec) { - formatErrors.add("HISTORY_DATE[" + (h + 1) + "," + (n + 1) + "]: '" + dateHist - + "': After DATE_UPDATE ('" + update + "')"); + if ((dateSec - arFile.getUpdateSec()) > oneDaySec) { + validationResult.addError("HISTORY_DATE[" + (h + 1) + "," + (n + 1) + "]: '" + dateHist + + "': After DATE_UPDATE ('" + arFile.getUpdateDate() + "')"); } } } @@ -848,10 +695,10 @@ public void validateDates(int nProf, int nParam, int nCalib, int nHistory) throw // ..try newer first - should be more efficient String calib_date = "SCIENTIFIC_CALIB_DATE"; - Variable var = findVariable(calib_date); + Variable var = arFile.findVariable(calib_date); if (var == null) { calib_date = "CALIBRATION_DATE"; - var = findVariable(calib_date); + var = arFile.findVariable(calib_date); } ArrayChar cDate = (ArrayChar) var.read(); @@ -871,19 +718,20 @@ public void validateDates(int nProf, int nParam, int nCalib, int nHistory) throw Date date = ArgoDate.get(dateCal); if (date == null) { - formatErrors.add(calib_date + "[" + (n + 1) + "," + (c + 1) + "," + (p + 1) + "]: '" - + dateCal + "': Invalid date"); + validationResult.addError(calib_date + "[" + (n + 1) + "," + (c + 1) + "," + (p + 1) + + "]: '" + dateCal + "': Invalid date"); // } else if (haveCreation && date.before(dateCreation)) { - // formatErrors.add("CALIBRATION_DATE["+(n+1)+","+(c+1)+","+ + // validationResult.addError("CALIBRATION_DATE["+(n+1)+","+(c+1)+","+ // (p+1)+"]: '"+dateCal+ // "': Before DATE_CREATION ('"+creation+"')"); - } else if (haveUpdate) { + } else if (arFile.isHaveUpdateDate()) { long dateSec = date.getTime(); - if ((dateSec - updateSec) > oneDaySec) { - formatErrors.add(calib_date + "[" + (n + 1) + "," + (c + 1) + "," + (p + 1) + "]: '" - + dateCal + "': After DATE_UPDATE ('" + update + "')"); + if ((dateSec - arFile.getUpdateSec()) > oneDaySec) { + validationResult.addError(calib_date + "[" + (n + 1) + "," + (c + 1) + "," + (p + 1) + + "]: '" + dateCal + "': After 
DATE_UPDATE ('" + arFile.getUpdateDate() + + "')"); } } } // ..end if (dateCal) @@ -910,10 +758,10 @@ public void validateDates(int nProf, int nParam, int nCalib, int nHistory) throw public void validateDMode(int nProf, int nParam, int nCalib, int nHist) throws IOException { log.debug(".....validateDMode: start....."); - String dMode = readString("DATA_MODE", true); // ..true -> return NULLs if present - String[] dState = readStringArr("DATA_STATE_INDICATOR"); - ArrayChar params = (ArrayChar) findVariable("PARAMETER").read(); - ArrayChar cmts = (ArrayChar) findVariable("SCIENTIFIC_CALIB_COMMENT").read(); + String dMode = arFile.readString("DATA_MODE", true); // ..true -> return NULLs if present + String[] dState = arFile.readStringArr("DATA_STATE_INDICATOR"); + ArrayChar params = (ArrayChar) arFile.findVariable("PARAMETER").read(); + ArrayChar cmts = (ArrayChar) arFile.findVariable("SCIENTIFIC_CALIB_COMMENT").read(); // ArrayChar eqns = // (ArrayChar) findVariable("SCIENTIFIC_CALIB_EQUATION").read(); // ArrayChar coefs = @@ -924,10 +772,10 @@ public void validateDMode(int nProf, int nParam, int nCalib, int nHist) throws I // ..try newer first - should be more efficient String calib_date = "SCIENTIFIC_CALIB_DATE"; - Variable var = findVariable(calib_date); + Variable var = arFile.findVariable(calib_date); if (var == null) { calib_date = "CALIBRATION_DATE"; - var = findVariable(calib_date); + var = arFile.findVariable(calib_date); } ArrayChar dates = (ArrayChar) var.read(); @@ -944,8 +792,8 @@ public void validateDMode(int nProf, int nParam, int nCalib, int nHist) throws I if (dMode.charAt(n) == 'D') { String state = dState[n].trim(); if (!(state.equals("2C") || state.equals("2C+"))) { - formatErrors - .add("D-mode: DATA_STATE_INDICATOR[" + (n + 1) + "]: '" + state + "': Not set to \"2C\""); + validationResult.addError( + "D-mode: DATA_STATE_INDICATOR[" + (n + 1) + "]: '" + state + "': Not set to \"2C\""); } for (int c = 0; c < nCalib; c++) { calibParam.clear(); @@ -968,10 +816,10 @@ public void validateDMode(int nProf, int nParam, int nCalib, int nHist) throws I String qc; if (!profQC.containsKey(pQcName)) { - qc = readString(pQcName, true); // ..true->return NULLs if present + qc = arFile.readString(pQcName, true); // ..true->return NULLs if present if (qc == null) { - formatErrors.add("D-mode: " + pQcName + " does not exist for" + "PARAMETER[" + n - + "," + c + "," + p + "]: '" + param + "'"); + validationResult.addError("D-mode: " + pQcName + " does not exist for" + + "PARAMETER[" + n + "," + c + "," + p + "]: '" + param + "'"); } profQC.put(pQcName, qc); log.debug("adding PROFILE__QC for '" + pQcName + "'"); @@ -994,31 +842,31 @@ public void validateDMode(int nProf, int nParam, int nCalib, int nHist) throws I } if (pQC == 'X') { - formatErrors.add("D-mode: SCIENTIFIC_CALIB variables not " + validationResult.addError("D-mode: SCIENTIFIC_CALIB variables not " + "checked for PARAMETER '{}' due to missing PROFILE_param_QC"); } else { // if (pQC != ' ') { //....Qc manual pg 74. 
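Both validateDates and validateDMode repeat the same lookup: try the newer SCIENTIFIC_CALIB_DATE variable first and fall back to the older CALIBRATION_DATE name, now through arFile.findVariable. Purely as an illustration, and not part of this change, the pattern could be captured once:

    // Illustration only, not part of this change. Captures the repeated
    // "newer variable name first, older name as fallback" lookup used above.
    private Variable findCalibDateVariable() {
        Variable var = arFile.findVariable("SCIENTIFIC_CALIB_DATE");
        if (var == null) {
            var = arFile.findVariable("CALIBRATION_DATE");   // older files use this name
        }
        return var;
    }

Callers that also need the variable name for error messages, as both methods above do, would still have to carry it alongside, so the duplication is arguably harmless.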
if (cmt.length() == 0) { // ################# TEMPORARY WARNING ################ - formatWarnings.add("D-mode: SCIENTIFIC_CALIB_COMMENT[" + (n + 1) + "," + (c + 1) - + "," + (p + 1) + "]: Not set for '" + param + "'"); + validationResult.addWarning("D-mode: SCIENTIFIC_CALIB_COMMENT[" + (n + 1) + "," + + (c + 1) + "," + (p + 1) + "]: Not set for '" + param + "'"); log.warn( "TEMP WARNING: {}: D-mode: SCIENTIFIC_CALIB_COMMENT[{},{},{}] not set for {}", - file.getName(), n, c, p, param); + arFile.getFile().getName(), n, c, p, param); } if (date.length() == 0) { // ################# TEMPORARY WARNING ################ - formatWarnings.add("D-mode: " + calib_date + "[" + (n + 1) + "," + (c + 1) + "," - + (p + 1) + "]: Not set for '" + param + "'"); - log.warn("TEMP WARNING: {}: D-mode: {}[{},{},{}] not set for {}", file.getName(), - calib_date, n, c, p, param); + validationResult.addWarning("D-mode: " + calib_date + "[" + (n + 1) + "," + (c + 1) + + "," + (p + 1) + "]: Not set for '" + param + "'"); + log.warn("TEMP WARNING: {}: D-mode: {}[{},{},{}] not set for {}", + arFile.getFile().getName(), calib_date, n, c, p, param); } // if (eqn.length() == 0) { - // formatErrors.add("D-mode: SCIENTIFIC_CALIB_EQUATION["+ + // validationResult.addError("D-mode: SCIENTIFIC_CALIB_EQUATION["+ // (n+1)+","+(c+1)+","+(p+1)+"]: Not set for '"+param+"'"); // } // if (coef.length() == 0) { - // formatErrors.add("D-mode: SCIENTIFIC_CALIB_COEFFICIENT["+ + // validationResult.addError("D-mode: SCIENTIFIC_CALIB_COEFFICIENT["+ // (n+1)+","+(c+1)+","+(p+1)+"]: Not set for '"+param+"'"); // } @@ -1032,15 +880,15 @@ public void validateDMode(int nProf, int nParam, int nCalib, int nHist) throws I // ..check that calibration info is set for all parameters for (String prm : profParam.get(n)) { if (!calibParam.contains(prm)) { - formatErrors.add("D-mode: PARAMETER[" + (n + 1) + "," + (c + 1) + ",*,*]: Parameter '" + prm - + "' not included"); + validationResult.addError("D-mode: PARAMETER[" + (n + 1) + "," + (c + 1) + + ",*,*]: Parameter '" + prm + "' not included"); } } // ..check that calibration info is set for all parameters for (String prm : calibParam) { if (!profParam.get(n).contains(prm)) { - formatErrors.add("D-mode: PARAMETER[" + (n + 1) + "," + (c + 1) + ",*,*]: '" + prm + validationResult.addError("D-mode: PARAMETER[" + (n + 1) + "," + (c + 1) + ",*,*]: '" + prm + "': Not in STATION_PARAMETERS"); } } @@ -1050,7 +898,7 @@ public void validateDMode(int nProf, int nParam, int nCalib, int nHist) throws I // ..either 'R' or 'A' String state = dState[n].trim(); if (state.startsWith("2C")) { - formatErrors.add( + validationResult.addError( "R/A-mode: DATA_STATE_INDICATOR[" + (n + 1) + "]: '" + state + "': Can not be \"2C...\""); } } @@ -1063,7 +911,7 @@ public void validateDMode(int nProf, int nParam, int nCalib, int nHist) throws I log.debug("nHist: " + nHist); } if (nHist < 1) { - formatErrors.add("D-mode: HISTORY_* not set"); + validationResult.addError("D-mode: HISTORY_* not set"); } log.debug(".....validateDMode: end....."); @@ -1085,24 +933,24 @@ public void validateDMode(int nProf, int nParam, int nCalib, int nHist) throws I public void validateHighlyDesirable(int nProf) throws IOException { log.debug(".....validateHighlyDesirable: start....."); - if (spec.getVariable("INST_REFERENCE") != null) { + if (arFile.getFileSpec().getVariable("INST_REFERENCE") != null) { // ..INST_REF is in spec, check that it is set - String str[] = readStringArr("INST_REFERENCE"); + String str[] = 
arFile.readStringArr("INST_REFERENCE"); for (int n = 0; n < nProf; n++) { log.debug("INST_REFERENCE[" + n + "]: '" + str[n]); if (str[n].trim().length() == 0) { - formatWarnings.add("INST_REFERENCE[" + (n + 1) + "]: Not set"); + validationResult.addWarning("INST_REFERENCE[" + (n + 1) + "]: Not set"); } } } - if (spec.getVariable("POSITIONING_SYSTEM") != null) { + if (arFile.getFileSpec().getVariable("POSITIONING_SYSTEM") != null) { // ..POSIT_SYS is in spec, check that it is set - String str[] = readStringArr("POSITIONING_SYSTEM"); + String str[] = arFile.readStringArr("POSITIONING_SYSTEM"); for (int n = 0; n < nProf; n++) { log.debug("POSITIONING_SYSTEM[" + n + "]: '" + str[n] + "'"); @@ -1110,7 +958,8 @@ public void validateHighlyDesirable(int nProf) throws IOException { ArgoReferenceTable.ArgoReferenceEntry info = ArgoReferenceTable.POSITIONING_SYSTEM .contains(str[n].trim()); if (!info.isActive) { - formatWarnings.add("POSITIONING_SYSTEM[" + (n + 1) + "]: '" + str[n] + "' Status: " + info.message); + validationResult.addWarning( + "POSITIONING_SYSTEM[" + (n + 1) + "]: '" + str[n] + "' Status: " + info.message); } } } @@ -1146,13 +995,13 @@ public void validateHighlyDesirable(int nProf) throws IOException { public boolean validateMetaData(int nProf, ArgoReferenceTable.DACS dac, boolean singleCycle) throws IOException { log.debug(".....validateMetaData: start....."); - String[] plNum = readStringArr("PLATFORM_NUMBER"); - String dir = readString("DIRECTION", true);// ..true->return NULLs if present - String[] ds = readStringArr("DATA_STATE_INDICATOR"); - String[] dc = readStringArr("DATA_CENTRE"); - String[] wmo = readStringArr("WMO_INST_TYPE"); - int[] cyc = readIntArr("CYCLE_NUMBER"); - String[] vert = readStringArr("VERTICAL_SAMPLING_SCHEME"); + String[] plNum = arFile.readStringArr("PLATFORM_NUMBER"); + String dir = arFile.readString("DIRECTION", true);// ..true->return NULLs if present + String[] ds = arFile.readStringArr("DATA_STATE_INDICATOR"); + String[] dc = arFile.readStringArr("DATA_CENTRE"); + String[] wmo = arFile.readStringArr("WMO_INST_TYPE"); + int[] cyc = arFile.readIntArr("CYCLE_NUMBER"); + String[] vert = arFile.readStringArr("VERTICAL_SAMPLING_SCHEME"); String firstNum = new String(plNum[0].trim()); int firstCyc = cyc[0]; @@ -1167,15 +1016,15 @@ public boolean validateMetaData(int nProf, ArgoReferenceTable.DACS dac, boolean log.debug("PLATFORM_NUMBER[{}]: '{}'", n, plNum[n]); String s = plNum[n].trim(); - if (!s.matches("[1-9][0-9]{4}|[1-9]9[0-9]{5}")) { - formatErrors.add("PLATFORM_NUMBER[" + (n + 1) + "]: '" + s + "': Invalid"); + if (!super.validatePlatfomNumber(s)) { + validationResult.addError("PLATFORM_NUMBER[" + (n + 1) + "]: '" + s + "': Invalid"); } if (singleCycle) { if (!s.equals(firstNum)) { // ..this isn't a single cycle file -- we're done log.debug("format error: requested single profile - platform number differs"); - formatErrors.add("File is not a single-cycle file (mulitple platforms)"); + validationResult.addError("File is not a single-cycle file (mulitple platforms)"); return false; } else { @@ -1183,7 +1032,7 @@ public boolean validateMetaData(int nProf, ArgoReferenceTable.DACS dac, boolean if (cyc[n] != firstCyc) { // ..this isn't a single cycle file -- we're done log.debug("format error: requested single profile - cycle number differs"); - formatErrors.add("File is not a single-cycle file (multiple cycles)"); + validationResult.addError("File is not a single-cycle file (multiple cycles)"); return false; } } @@ -1193,7 +1042,7 @@ public boolean 
validateMetaData(int nProf, ArgoReferenceTable.DACS dac, boolean log.debug("DIRECTION[{}]: '{}'", n, dir.charAt(n)); if (dir.charAt(n) != 'A' && dir.charAt(n) != 'D') { - formatErrors.add("DIRECTION[" + (n + 1) + "]: '" + dir.charAt(n) + "': Invalid"); + validationResult.addError("DIRECTION[" + (n + 1) + "]: '" + dir.charAt(n) + "': Invalid"); } // .....DATA_STATE_INDICATOR..... @@ -1209,10 +1058,10 @@ public boolean validateMetaData(int nProf, ArgoReferenceTable.DACS dac, boolean // ..not checked above - float[] pres = readFloatArr("PRES", n); + float[] pres = arFile.readFloatArr("PRES", n); if (pres != null) { for (float d : pres) { - if (!ArgoDataFile.is_99_999_FillValue(d)) { + if (!ArgoFileValidator.is_99_999_FillValue(d)) { has_data = true; break; } @@ -1223,11 +1072,11 @@ public boolean validateMetaData(int nProf, ArgoReferenceTable.DACS dac, boolean log.debug("...data_state_indicator empty. searched PRES. has_data = {}", has_data); if (has_data) { - formatErrors.add("DATA_STATE_INDICATOR[" + (n + 1) + "]: '" + s + "' Not set"); + validationResult.addError("DATA_STATE_INDICATOR[" + (n + 1) + "]: '" + s + "' Not set"); } } else if (!(info = ArgoReferenceTable.DATA_STATE_INDICATOR.contains(s)).isActive) { - formatErrors.add("DATA_STATE_INDICATOR[" + (n + 1) + "]: '" + s + "' Invalid"); + validationResult.addError("DATA_STATE_INDICATOR[" + (n + 1) + "]: '" + s + "' Invalid"); } // .....DATA_CENTRE..... @@ -1235,12 +1084,13 @@ public boolean validateMetaData(int nProf, ArgoReferenceTable.DACS dac, boolean log.debug("DATA_CENTRE[{}]: '{}' DAC: '{}'", n, dc[n], dac); if (dac != null) { if (!ArgoReferenceTable.DacCenterCodes.get(dac).contains(dc[n].trim())) { - formatErrors.add("DATA_CENTRE[" + (n + 1) + "]: '" + dc[n] + "': Invalid for DAC '" + dac + "'"); + validationResult + .addError("DATA_CENTRE[" + (n + 1) + "]: '" + dc[n] + "': Invalid for DAC '" + dac + "'"); } } else { // ..incoming DAC not set if (!ArgoReferenceTable.DacCenterCodes.containsValue(dc[n].trim())) { - formatErrors.add("DATA_CENTRE[" + (n + 1) + "]: '" + dc[n] + "': Invalid (for all DACs)"); + validationResult.addError("DATA_CENTRE[" + (n + 1) + "]: '" + dc[n] + "': Invalid (for all DACs)"); } } @@ -1249,21 +1099,23 @@ public boolean validateMetaData(int nProf, ArgoReferenceTable.DACS dac, boolean log.debug("WMO_INST_TYPE[{}]: '{}'", n, wmo[n]); // ..ref_table 8 s = wmo[n].trim(); if (s.length() == 0) { - formatErrors.add("WMO_INST_TYPE[" + (n + 1) + "]: Not set"); + validationResult.addError("WMO_INST_TYPE[" + (n + 1) + "]: Not set"); } else { try { int N = Integer.valueOf(s); if ((info = ArgoReferenceTable.WMO_INST_TYPE.contains(N)).isValid()) { if (info.isDeprecated) { - formatWarnings.add("WMO_INST_TYPE[" + (n + 1) + "]: '" + s + "' Status: " + info.message); + validationResult + .addWarning("WMO_INST_TYPE[" + (n + 1) + "]: '" + s + "' Status: " + info.message); } } else { - formatErrors.add("WMO_INST_TYPE[" + (n + 1) + "]: '" + s + "' Status: " + info.message); + validationResult + .addError("WMO_INST_TYPE[" + (n + 1) + "]: '" + s + "' Status: " + info.message); } } catch (Exception e) { - formatErrors.add("WMO_INST_TYPE[" + (n + 1) + "]: '" + s + "' Invalid. Must be integer."); + validationResult.addError("WMO_INST_TYPE[" + (n + 1) + "]: '" + s + "' Invalid. 
Must be integer."); } } // end if (wmo) @@ -1280,10 +1132,10 @@ public boolean validateMetaData(int nProf, ArgoReferenceTable.DACS dac, boolean if (hasData == null) { // ..not checked above - float[] pres = readFloatArr("PRES", n); + float[] pres = arFile.readFloatArr("PRES", n); if (pres != null) { for (float d : pres) { - if (!ArgoDataFile.is_99_999_FillValue(d)) { + if (!ArgoFileValidator.is_99_999_FillValue(d)) { has_data = true; break; } @@ -1299,15 +1151,15 @@ public boolean validateMetaData(int nProf, ArgoReferenceTable.DACS dac, boolean } if (has_data) { - formatErrors.add("VERTICAL_SAMPLING_SCHEME[" + (n + 1) + "]: Not set"); + validationResult.addError("VERTICAL_SAMPLING_SCHEME[" + (n + 1) + "]: Not set"); } } else { if ((info = ArgoReferenceTable.VERTICAL_SAMPLING_SCHEME.contains(s)).isValid()) { if (info.isDeprecated) { - formatWarnings.add("VERTICAL_SAMPLING_SCHEME[" + (n + 1) + "]: Status: " + info.message + ": '" - + s.trim() + "'"); + validationResult.addWarning("VERTICAL_SAMPLING_SCHEME[" + (n + 1) + "]: Status: " + info.message + + ": '" + s.trim() + "'"); } if (n == 0) { @@ -1315,32 +1167,32 @@ public boolean validateMetaData(int nProf, ArgoReferenceTable.DACS dac, boolean String err = String.format( "VERTICAL_SAMPLING_SCHEME[%d]: Profile number 1 must be 'Primary sampling': '%s'", (n + 1), s.trim()); - // formatErrors.add(err); + // validationResult.addError(err); // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFile().getName(), err); } } else { if (s.startsWith("Primary sampling")) { String err = String.format( "VERTICAL_SAMPLING_SCHEME[%d]: Not profile 1. 
Must NOT be 'Primary sampling': '%s'", (n + 1), s.trim()); - // formatErrors.add(err); + // validationResult.addError(err); // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFile().getName(), err); } } } else { String err = String.format("VERTICAL_SAMPLING_SCHEME[%d]: Invalid: '%s'", (n + 1), s.trim()); - // formatErrors.add(err); + // validationResult.addError(err); // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFile().getName(), err); } } @@ -1391,14 +1243,14 @@ public boolean validateMetaData(int nProf, ArgoReferenceTable.DACS dac, boolean public void validateParams(int nProf, int nParam, int nLevel) throws IOException { log.debug(".....validateParams: start....."); - ArrayList allowedParam = spec.getPhysicalParamNames(); // ..allowed + ArrayList allowedParam = arFile.getFileSpec().getPhysicalParamNames(); // ..allowed profParam = new ArrayList>(nProf); // ..allowed int maxParamUsed = -1; // ..max number of PARAMs used - all profiles // int maxLevelUsed = -1; //..max number of LEVELS with data - all profiles // ..read station parameters and data_mode - String dMode = readString("DATA_MODE", true); // incl NULLs + String dMode = arFile.readString("DATA_MODE", true); // incl NULLs // ..need orgin/shape for 2-dim variables multiple time int[] origin2 = { 0, 0 }; // ..will anchor read to {profNum, level=0} @@ -1407,7 +1259,7 @@ public void validateParams(int nProf, int nParam, int nLevel) throws IOException // ...........MAIN LOOP: over each profile in the file............. for (int profNum = 0; profNum < nProf; profNum++) { - String[] stParam = readStringArr("STATION_PARAMETERS", profNum); + String[] stParam = arFile.readStringArr("STATION_PARAMETERS", profNum); boolean fatalError = false; @@ -1421,15 +1273,12 @@ public void validateParams(int nProf, int nParam, int nLevel) throws IOException } if (mode != 'A' && mode != 'D' && mode != 'R') { - formatErrors.add("DATA_MODE[" + (profNum + 1) + "]: '" + mode + "': Invalid"); + validationResult.addError("DATA_MODE[" + (profNum + 1) + "]: '" + mode + "': Invalid"); } - char[] m = { mode }; - data_mode = new String(m); - // .........check PARAMETER_DATA_MODE.............. 
- String param_mode = readString("PARAMETER_DATA_MODE", profNum, true);// incl NULLs + String param_mode = arFile.readString("PARAMETER_DATA_MODE", profNum, true);// incl NULLs if (param_mode != null) { // ..is included in this file: optional variable @@ -1442,8 +1291,8 @@ public void validateParams(int nProf, int nParam, int nLevel) throws IOException char md = param_mode.charAt(paramNum); if (md != 'A' && md != 'D' && md != 'R' && md != ' ') { - formatErrors.add("PARAMETER_DATA_MODE[" + (profNum + 1) + "," + (paramNum + 1) + "]: '" + md - + "': Invalid"); + validationResult.addError("PARAMETER_DATA_MODE[" + (profNum + 1) + "," + (paramNum + 1) + "]: '" + + md + "': Invalid"); } if (md == 'D' || final_mode == 'D') { @@ -1465,12 +1314,12 @@ public void validateParams(int nProf, int nParam, int nLevel) throws IOException // ..check bio-prof file: PARAM_DATA_MODE("PRES") == "R" - if (fileType == FileType.BIO_PROFILE && stParam[paramNum].trim().startsWith("PRES")) { + if (arFile.fileType() == FileType.BIO_PROFILE && stParam[paramNum].trim().startsWith("PRES")) { if (md != 'R') { log.debug("PRES[{}]: PARAMETER_DATA_MODE[{},{}] = '{}'. must be 'R'", profNum, profNum, paramNum, md); - formatErrors.add("PRES[" + (profNum + 1) + "]: PARAMETER_DATA_MODE[" + (profNum + 1) + "," - + (paramNum + 1) + "]: '" + md + "': Must be 'R'"); + validationResult.addError("PRES[" + (profNum + 1) + "]: PARAMETER_DATA_MODE[" + + (profNum + 1) + "," + (paramNum + 1) + "]: '" + md + "': Must be 'R'"); } } } // ..end for (paramNum) @@ -1480,15 +1329,15 @@ public void validateParams(int nProf, int nParam, int nLevel) throws IOException if (final_mode == ' ') { // ..all param_data_mode = ' '. data_mode better be 'R' if (mode != 'R') { - formatWarnings.add("DATA_MODE[" + (profNum + 1) + "] not 'R'. PARAMETER_DATA_MODE[" + validationResult.addWarning("DATA_MODE[" + (profNum + 1) + "] not 'R'. PARAMETER_DATA_MODE[" + (profNum + 1) + ",...] all ' ': Inconsistent"); log.debug("data_mode[{}] != 'R'. All param_data_mode[{},...] are ' '", profNum, profNum); } } else if (final_mode != mode) { // ..some param_data_mode was set. not consistent with data_mode - formatErrors.add("DATA_MODE[" + (profNum + 1) + "]/PARAMETER_DATA_MODE[" + (profNum + 1) + "," - + (final_nparam + 1) + "]: '" + mode + "'/'" + final_mode + "': Inconsistent"); + validationResult.addError("DATA_MODE[" + (profNum + 1) + "]/PARAMETER_DATA_MODE[" + (profNum + 1) + + "," + (final_nparam + 1) + "]: '" + mode + "'/'" + final_mode + "': Inconsistent"); log.debug("data_mode[{}]/param_data_mode[{},{}] inconsistent", profNum, profNum, final_nparam); } @@ -1496,14 +1345,14 @@ public void validateParams(int nProf, int nParam, int nLevel) throws IOException // .........check CONFIG_MISSION_NUMBER............ 
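As a side note on the DATA_MODE / PARAMETER_DATA_MODE consistency checks above, here is a minimal sketch of the reduction they imply, assuming the usual Argo precedence (any 'D' makes the profile delayed-mode, otherwise any 'A' makes it adjusted, otherwise 'R'); the helper name and signature are illustrative only and are not part of the checker's API.

// Illustrative sketch: reduce a profile's PARAMETER_DATA_MODE characters to the
// single mode that DATA_MODE is expected to carry ('D' over 'A' over 'R';
// blank means no parameter mode was set at all).
static char reduceParamDataMode(String paramDataMode) {
    char finalMode = ' ';
    for (int i = 0; i < paramDataMode.length(); i++) {
        char md = paramDataMode.charAt(i);
        if (md == 'D') {
            finalMode = 'D';                     // delayed mode dominates
        } else if (md == 'A' && finalMode != 'D') {
            finalMode = 'A';                     // adjusted, unless 'D' already seen
        } else if (md == 'R' && finalMode == ' ') {
            finalMode = 'R';                     // real-time, only if nothing stronger
        }
    }
    return finalMode;
}

Under that sketch, reduceParamDataMode("RRD") yields 'D', so a DATA_MODE of 'R' would be reported as inconsistent, while an all-blank PARAMETER_DATA_MODE only draws a warning when DATA_MODE is not 'R', matching the checks in the hunk above.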
- int msnNum = readInt("CONFIG_MISSION_NUMBER", profNum); + int msnNum = arFile.readInt("CONFIG_MISSION_NUMBER", profNum); log.debug("CONFIG_MISSION_NUMBER[{}]: '{}'", profNum, msnNum); if (msnNum == 99999) { - int cyc = readInt("CYCLE_NUMBER", profNum); + int cyc = arFile.readInt("CYCLE_NUMBER", profNum); if (cyc != 0 && mode == 'D') { - formatErrors.add("CONFIG_MISSION_NUMBER[" + (profNum + 1) + "]: '" + msnNum + validationResult.addError("CONFIG_MISSION_NUMBER[" + (profNum + 1) + "]: '" + msnNum + "': Cannot be FillValue in D-mode"); log.warn("CONFIG_MISSION_NUMBER[" + (profNum + 1) + "]: '" + msnNum + "': Cannot be FillValue in D-mode"); @@ -1552,8 +1401,8 @@ public void validateParams(int nProf, int nParam, int nLevel) throws IOException if (profParam.get(profNum).contains(param)) { // ..this is a duplicate entry - formatErrors.add("STATION_PARAMETERS[" + (profNum + 1) + "," + (paramNum + 1) + "]: '" - + param + "': Duplicate entry"); + validationResult.addError("STATION_PARAMETERS[" + (profNum + 1) + "," + (paramNum + 1) + + "]: '" + param + "': Duplicate entry"); } else { // ..add to list of for this profile @@ -1561,17 +1410,17 @@ public void validateParams(int nProf, int nParam, int nLevel) throws IOException nParamUsed++; } - if (spec.isDeprecatedPhysicalParam(param)) { + if (arFile.getFileSpec().isDeprecatedPhysicalParam(param)) { // ..this is a deprecated parameter name - formatWarnings.add("STATION_PARAMETERS[" + (profNum + 1) + "," + (paramNum + 1) + "]: '" - + param + "': Deprecated parameter name"); + validationResult.addWarning("STATION_PARAMETERS[" + (profNum + 1) + "," + (paramNum + 1) + + "]: '" + param + "': Deprecated parameter name"); } } else { // .. is illegal - formatErrors.add("STATION_PARAMETERS[" + (profNum + 1) + "," + (paramNum + 1) + "]: '" + param - + "': Invalid parameter name in this context"); + validationResult.addError("STATION_PARAMETERS[" + (profNum + 1) + "," + (paramNum + 1) + "]: '" + + param + "': Invalid parameter name in this context"); } // ..decide on the final "mode" for this param @@ -1603,16 +1452,16 @@ public void validateParams(int nProf, int nParam, int nLevel) throws IOException // ..report errors and warnings if (embeddedEmpty) { - formatWarnings.add("STATION_PARAMETERS[" + (profNum + 1) + ",*]: Empty entries in list" + "\n\tList: " - + paramList); + validationResult.addWarning("STATION_PARAMETERS[" + (profNum + 1) + ",*]: Empty entries in list" + + "\n\tList: " + paramList); } // ..check that all required parameters are defined in STATION_PARAMETERS // ..the parameters are only required in profile 0 if (profNum == 0) { for (String p : allowedParam) { - if (!spec.isOptional(p) && !profParam.get(profNum).contains(p)) { - formatErrors.add("STATION_PARAMETERS[" + (profNum + 1) + ",*]: Required PARAM ('" + p + if (!arFile.getFileSpec().isOptional(p) && !profParam.get(profNum).contains(p)) { + validationResult.addError("STATION_PARAMETERS[" + (profNum + 1) + ",*]: Required PARAM ('" + p + "') not specified"); fatalError = true; } @@ -1621,9 +1470,9 @@ public void validateParams(int nProf, int nParam, int nLevel) throws IOException // ......check that all STATION_PARAMETERS have variable........ for (String p : profParam.get(profNum)) { - Variable var = findVariable(p); + Variable var = arFile.findVariable(p); if (var == null) { - formatErrors.add("STATION_PARAMETERS[" + (profNum + 1) + ",*]: PARAM '" + p + validationResult.addError("STATION_PARAMETERS[" + (profNum + 1) + ",*]: PARAM '" + p + "' specified. 
Variables not in data file."); fatalError = true; } @@ -1640,7 +1489,7 @@ public void validateParams(int nProf, int nParam, int nLevel) throws IOException // ..see if the file contains the variable boolean hasData = false; - Variable var = findVariable(p); + Variable var = arFile.findVariable(p); if (var != null) { log.debug(p + "[{}]: in file. Not in STATION_PARAMETERS", profNum); @@ -1701,7 +1550,7 @@ public void validateParams(int nProf, int nParam, int nLevel) throws IOException double fVal = fillValue.doubleValue(); for (double d : data) { - if (!ArgoDataFile.is_FillValue(fVal, d)) { + if (!ArgoFileValidator.is_FillValue(fVal, d)) { hasData = true; break; } @@ -1712,7 +1561,7 @@ public void validateParams(int nProf, int nParam, int nLevel) throws IOException float fVal = fillValue.floatValue(); for (float d : data) { - if (!ArgoDataFile.is_FillValue(fVal, d)) { + if (!ArgoFileValidator.is_FillValue(fVal, d)) { hasData = true; break; } @@ -1734,8 +1583,8 @@ public void validateParams(int nProf, int nParam, int nLevel) throws IOException } if (hasData) { - formatErrors.add("STATION_PARAMETERS[" + (profNum + 1) + ",*]: Does not specify '" + p - + "'. Variable contains data."); + validationResult.addError("STATION_PARAMETERS[" + (profNum + 1) + ",*]: Does not specify '" + + p + "'. Variable contains data."); log.debug("{}[{}]: has data", p, profNum); } else { log.debug("{}[{}]: no data", p, profNum); @@ -1762,14 +1611,14 @@ public void validateParams(int nProf, int nParam, int nLevel) throws IOException maxParamUsed++; // maxLevelUsed++; //..convert from max index to max number if (maxParamUsed < nParam) { - formatWarnings.add("N_PARAM: Larger than necessary." + "\n\tN_PARAM = " + nParam + "\n\tPARAMs used = " - + maxParamUsed); + validationResult.addWarning("N_PARAM: Larger than necessary." + "\n\tN_PARAM = " + nParam + + "\n\tPARAMs used = " + maxParamUsed); } // ..check if N_LEVEL is set too large. 
/* * if (maxLevelUsed < nLevel) { - * formatWarnings.add("N_LEVEL: Larger than necessary."+ + * validationResult.addWarning("N_LEVEL: Larger than necessary."+ * "\n\tN_LEVEL = "+nLevel+ * "\n\tMaximum levels with data = "+maxLevelUsed); } */ @@ -1787,9 +1636,9 @@ private void checkParamParamQC(int nLevel, int[] origin2, int[] shape2, int prof PARAM_LOOP: for (String param : profParam.get(profNum)) { String varName = param.trim(); - Variable var = findVariable(varName); - Variable varQC = findVariable(varName.trim() + "_QC"); - Variable profVarQC = findVariable("PROFILE_" + varName + "_QC"); + Variable var = arFile.findVariable(varName); + Variable varQC = arFile.findVariable(varName.trim() + "_QC"); + Variable profVarQC = arFile.findVariable("PROFILE_" + varName + "_QC"); Number fillValue = var.findAttribute("_FillValue").getNumericValue(); fValue = fillValue.floatValue(); @@ -1929,7 +1778,7 @@ private void checkParamParamQC(int nLevel, int[] origin2, int[] shape2, int prof if (Float.isNaN(f)) { is_nan = true; - } else if (!ArgoDataFile.is_FillValue(fValue, f)) { + } else if (!ArgoFileValidator.is_FillValue(fValue, f)) { data = f; } } @@ -1997,7 +1846,7 @@ else if (data != Float.MAX_VALUE) { depQC++; } - if (ArgoDataFile.is_FillValue(fValue, prm[k])) { + if (ArgoFileValidator.is_FillValue(fValue, prm[k])) { // ..data is missing - QC better be too if (prm_qc[k] != '9' && prm_qc[k] != '0') { notMiss++; @@ -2007,7 +1856,7 @@ else if (data != Float.MAX_VALUE) { // ..data not missing - check QC value if (prm_qc[k] == '0') { - if (!spec.isOptional(varName)) { + if (!arFile.getFileSpec().isOptional(varName)) { noQC++; } } else if (prm_qc[k] > '4') { @@ -2028,7 +1877,7 @@ else if (data != Float.MAX_VALUE) { } else { // ..QC not is ref table 2, handle " " special case if (prm_qc[k] == ' ') { // ..qc set to NOT MEASURED, data better be missing - if (!ArgoDataFile.is_FillValue(fValue, prm[k])) { + if (!ArgoFileValidator.is_FillValue(fValue, prm[k])) { notNotMeas++; } } else { @@ -2049,51 +1898,52 @@ else if (data != Float.MAX_VALUE) { // ..report errors and warnings if (invQC > 0) { paramErr = true; - formatErrors.add(varName + "_QC[" + (profNum + 1) + "]: Invalid QC codes at " + invQC + " levels (of " - + prm.length + ")"); + validationResult.addError(varName + "_QC[" + (profNum + 1) + "]: Invalid QC codes at " + invQC + + " levels (of " + prm.length + ")"); } if (illQC > 0) { paramErr = true; - formatErrors.add(varName + "_QC[" + (profNum + 1) + "]: QC codes not '1' to '4' at " + illQC + validationResult.addError(varName + "_QC[" + (profNum + 1) + "]: QC codes not '1' to '4' at " + illQC + " levels with data (of " + prm.length + ")"); } if (depQC > 0) { - formatWarnings.add(varName + "_QC[" + (profNum + 1) + "]: Deprecated QC codes at " + depQC + validationResult.addWarning(varName + "_QC[" + (profNum + 1) + "]: Deprecated QC codes at " + depQC + " levels (of " + prm.length + ")"); } if (noQC > 0) { paramErr = true; - formatErrors.add(varName + "_QC[" + (profNum + 1) + "]: QC code '0' at " + noQC + " levels (of " - + prm.length + ")"); + validationResult.addError(varName + "_QC[" + (profNum + 1) + "]: QC code '0' at " + noQC + + " levels (of " + prm.length + ")"); } if (notMiss > 0) { paramErr = true; - formatErrors.add(varName + "_QC[" + (profNum + 1) + "]: Missing data but QC not missing at " + notMiss - + " levels (of " + prm.length + ")"); + validationResult.addError(varName + "_QC[" + (profNum + 1) + "]: Missing data but QC not missing at " + + notMiss + " levels (of " + prm.length + ")"); } 
if (notNotMeas > 0) { paramErr = true; - formatErrors.add(varName + "_QC[" + (profNum + 1) + "]: Blank (' ') QC when data is not missing at " - + notNotMeas + " levels (of " + prm.length + ")"); + validationResult + .addError(varName + "_QC[" + (profNum + 1) + "]: Blank (' ') QC when data is not missing at " + + notNotMeas + " levels (of " + prm.length + ")"); } - if (fileType == FileType.PROFILE) { + if (arFile.fileType() == FileType.PROFILE) { // ..in a core-file, only intermediate params can have 0 QC - if (!spec.isInterPhysParam(varName)) { + if (!arFile.getFileSpec().isInterPhysParam(varName)) { if (n_noqc > 0) { // .._QC can't be 0 in a core-file paramErr = true; - formatErrors.add(varName + "_QC[" + (profNum + 1) + "]: QC code '0' at " + n_noqc + validationResult.addError(varName + "_QC[" + (profNum + 1) + "]: QC code '0' at " + n_noqc + " levels (of " + prm.length + ")"); } } } if (nan > 0) { - formatErrors - .add(varName + "[" + (profNum + 1) + "]: NaNs at " + nan + " levels (of " + prm.length + ")"); + validationResult.addError( + varName + "[" + (profNum + 1) + "]: NaNs at " + nan + " levels (of " + prm.length + ")"); } if (inf > 0) { - formatErrors.add(varName + "[" + (profNum + 1) + "]: Infinite value at " + inf + " levels (of " + validationResult.addError(varName + "[" + (profNum + 1) + "]: Infinite value at " + inf + " levels (of " + prm.length + ")"); } @@ -2102,9 +1952,9 @@ else if (data != Float.MAX_VALUE) { // ..no errors in param -- check _adjusted varName = param.trim() + "_ADJUSTED"; - var = findVariable(varName); - varQC = findVariable(varName + "_QC"); - Variable varErr = findVariable(varName + "_ERROR"); + var = arFile.findVariable(varName); + varQC = arFile.findVariable(varName + "_QC"); + Variable varErr = arFile.findVariable(varName + "_ERROR"); // ..fillValue is the same for prm, prm_adj, prm_adj_err // ..Number fillValue = var.findAttribute("_FillValue").getNumericValue(); @@ -2234,7 +2084,7 @@ else if (data != Float.MAX_VALUE) { if (Float.isNaN(f)) { is_nan = true; - } else if (!ArgoDataFile.is_FillValue(fValue, f)) { + } else if (!ArgoFileValidator.is_FillValue(fValue, f)) { data = f; } } @@ -2376,21 +2226,21 @@ else if (data != Float.MAX_VALUE) { incNotMeas++; } else { - if (!ArgoDataFile.is_FillValue(fValue, prm_adj[k])) { + if (!ArgoFileValidator.is_FillValue(fValue, prm_adj[k])) { notNotMeas++; } } } else { // ..check if param (not param_adj!) is missing - if (ArgoDataFile.is_FillValue(fValue, prm[k])) { + if (ArgoFileValidator.is_FillValue(fValue, prm[k])) { // .....param is missing..... - if (!ArgoDataFile.is_FillValue(fValue, prm_adj[k])) { + if (!ArgoFileValidator.is_FillValue(fValue, prm_adj[k])) { // ..param_adjusted is NOT missing - error missPrm++; } - if (!ArgoDataFile.is_FillValue(fValue, prm_adj_err[k])) { + if (!ArgoFileValidator.is_FillValue(fValue, prm_adj_err[k])) { // ..param_adjusted_error is NOT missing - error errNotMiss++; } @@ -2402,7 +2252,7 @@ else if (data != Float.MAX_VALUE) { } else { // .....param is NOT missing...... 
- if (ArgoDataFile.is_FillValue(fValue, prm_adj[k])) { + if (ArgoFileValidator.is_FillValue(fValue, prm_adj[k])) { // ..param_adj is missing - QC must be 4 or 9 if (prm_adj_qc[k] != '4') { if (prm_adj_qc[k] != '9') { @@ -2415,7 +2265,7 @@ else if (data != Float.MAX_VALUE) { } } } - if (!ArgoDataFile.is_FillValue(fValue, prm_adj_err[k])) { + if (!ArgoFileValidator.is_FillValue(fValue, prm_adj_err[k])) { errNotMiss++; } @@ -2480,90 +2330,93 @@ else if (data != Float.MAX_VALUE) { // ..report errors and warnings if (invQC > 0) { param_adjErr = true; - formatErrors.add(varName + "_QC[" + (profNum + 1) + "]: Invalid QC codes at " + invQC + validationResult.addError(varName + "_QC[" + (profNum + 1) + "]: Invalid QC codes at " + invQC + " levels (of " + prm_adj.length + ")"); } if (depQC > 0) { - formatWarnings.add(varName + "_QC[" + (profNum + 1) + "]: Deprecated QC codes at " + depQC - + " levels (of " + prm_adj.length + ")"); + validationResult.addWarning(varName + "_QC[" + (profNum + 1) + "]: Deprecated QC codes at " + + depQC + " levels (of " + prm_adj.length + ")"); } if (incNotMeas > 0) { param_adjErr = true; - formatErrors.add("DATA_MODE '" + mode + "': " + varName + "_QC[" + (profNum + 1) + validationResult.addError("DATA_MODE '" + mode + "': " + varName + "_QC[" + (profNum + 1) + "]: Incompatible blank (' ') QC codes at " + incNotMeas + " levels (of " + prm_adj.length + ")"); } if (notNotMeas > 0) { param_adjErr = true; - formatErrors.add(varName + "_QC[" + (profNum + 1) + "]: Blank (' ') when data not missing at " - + notNotMeas + " levels (of " + prm_adj.length + ")"); + validationResult + .addError(varName + "_QC[" + (profNum + 1) + "]: Blank (' ') when data not missing at " + + notNotMeas + " levels (of " + prm_adj.length + ")"); } - if (n_noqc > 0 && fileType == FileType.PROFILE) { + if (n_noqc > 0 && arFile.fileType() == FileType.PROFILE) { // .._QC can't be 0 in a core-file param_adjErr = true; - formatErrors.add(varName + "_QC[" + (profNum + 1) + "]: QC code '0' at " + n_noqc + validationResult.addError(varName + "_QC[" + (profNum + 1) + "]: QC code '0' at " + n_noqc + " levels (of " + prm_adj.length + ")"); } if (missAdj > 0) { param_adjErr = true; - formatErrors.add("DATA_MODE '" + mode + "': " + param + "[" + (profNum + 1) + "] Not missing / " - + varName + " Missing (QC not 4 or 9): At " + +missAdj + " levels (of " + prm_adj.length - + ")"); + validationResult.addError("DATA_MODE '" + mode + "': " + param + "[" + (profNum + 1) + + "] Not missing / " + varName + " Missing (QC not 4 or 9): At " + +missAdj + + " levels (of " + prm_adj.length + ")"); } if (missMiss > 0) { param_adjErr = true; - formatErrors.add(varName + "_QC[" + (profNum + 1) + "] Missing / " + param + validationResult.addError(varName + "_QC[" + (profNum + 1) + "] Missing / " + param + "_QC Not missing: At " + missMiss + " levels (of " + prm_adj.length + ")"); } if (missNot > 0) { param_adjErr = true; - formatErrors.add("DATA_MODE '" + mode + "': " + varName + "[" + (profNum + 1) + validationResult.addError("DATA_MODE '" + mode + "': " + varName + "[" + (profNum + 1) + "]: Not missing when QC = 4 or 9 at " + missNot + " levels (of " + prm_adj.length + ")"); } if (missPrm > 0) { param_adjErr = true; - formatErrors.add("DATA_MODE '" + mode + "': " + param + "[" + (profNum + 1) + "] Missing / " - + varName + " Not missing: At " + missPrm + " levels (of " + prm_adj.length + ")"); + validationResult.addError( + "DATA_MODE '" + mode + "': " + param + "[" + (profNum + 1) + "] Missing / " + varName + + " Not missing: 
At " + missPrm + " levels (of " + prm_adj.length + ")"); } if (qcNotMiss > 0) { param_adjErr = true; - formatErrors.add(varName + "_QC[" + (profNum + 1) + "]: Missing data but QC not missing at " - + qcNotMiss + " levels (of " + prm_adj.length + ")"); + validationResult + .addError(varName + "_QC[" + (profNum + 1) + "]: Missing data but QC not missing at " + + qcNotMiss + " levels (of " + prm_adj.length + ")"); } if (errNotMiss > 0) { param_adjErr = true; - formatErrors.add(varName + "_ERROR[" + (profNum + 1) + validationResult.addError(varName + "_ERROR[" + (profNum + 1) + "]: Not missing when PARAM or _ADJUSTED is missing at " + errNotMiss + " levels (of " + prm_adj.length + ")"); } if (errMiss > 0) { param_adjErr = true; - formatErrors.add("DATA_MODE: '" + mode + "': " + varName + "_ERROR[" + (profNum + 1) + validationResult.addError("DATA_MODE: '" + mode + "': " + varName + "_ERROR[" + (profNum + 1) + "]: Incorrectly set to missing at " + errMiss + " levels (of " + prm_adj.length + ")"); } if (mode == 'D' && mismatchAdjErr > 0) { param_adjErr = true; - formatErrors.add("DATA_MODE: '" + mode + "': " + varName + "[" + (profNum + 1) + "]: " + validationResult.addError("DATA_MODE: '" + mode + "': " + varName + "[" + (profNum + 1) + "]: " + "Set/FillValue mismatch between _ADJUSTED and _ERROR at " + mismatchAdjErr + " levels (of " + prm_adj.length + ")"); } if (nan > 0) { - formatErrors.add(varName + "[" + (profNum + 1) + "]: NaNs at " + nan + " levels (of " + validationResult.addError(varName + "[" + (profNum + 1) + "]: NaNs at " + nan + " levels (of " + prm_adj.length + ")"); } if (nanErr > 0) { - formatErrors.add(varName + "_ERROR[" + (profNum + 1) + "]: NaNs at " + nanErr + " levels (of " - + prm_adj.length + ")"); + validationResult.addError(varName + "_ERROR[" + (profNum + 1) + "]: NaNs at " + nanErr + + " levels (of " + prm_adj.length + ")"); } if (inf > 0) { - formatErrors.add(varName + "[" + (profNum + 1) + "]: Infinite value at " + inf + " levels (of " - + prm_adj.length + ")"); + validationResult.addError(varName + "[" + (profNum + 1) + "]: Infinite value at " + inf + + " levels (of " + prm_adj.length + ")"); } if (infErr > 0) { - formatErrors.add(varName + "_ERROR[" + (profNum + 1) + "]: Infinite value at " + infErr + validationResult.addError(varName + "_ERROR[" + (profNum + 1) + "]: Infinite value at " + infErr + " levels (of " + prm_adj.length + ")"); } @@ -2571,7 +2424,7 @@ else if (data != Float.MAX_VALUE) { } else { // ......there were errors in param -- don't bother with param_adjusted..... - formatErrors.add("Warning: " + param + "_ADJUSTED[" + (profNum + 1) + validationResult.addError("Warning: " + param + "_ADJUSTED[" + (profNum + 1) + "] data not checked due to errors in " + param + " data"); } // ..end if (! 
paramErr) @@ -2581,15 +2434,16 @@ else if (data != Float.MAX_VALUE) { info = ArgoReferenceTable.PROFILE_QC_FLAG.contains(profQC); if (!info.isValid()) { - formatErrors.add("PROFILE_" + param + "_QC[" + (profNum + 1) + "]: '" + profQC + "': Invalid"); + validationResult.addError("PROFILE_" + param + "_QC[" + (profNum + 1) + "]: '" + profQC + "': Invalid"); } else { if (info.isDeprecated) { - formatWarnings.add("PROFILE_" + param + "_QC[" + (profNum + 1) + "]: '" + profQC + "': Deprecated"); + validationResult.addWarning( + "PROFILE_" + param + "_QC[" + (profNum + 1) + "]: '" + profQC + "': Deprecated"); } if (paramErr || param_adjErr) { - formatErrors.add("Warning: PROFILE_" + param + "_QC[" + (profNum + 1) + validationResult.addError("Warning: PROFILE_" + param + "_QC[" + (profNum + 1) + "] not checked due to errors in " + param + " data"); } else { @@ -2612,7 +2466,7 @@ else if (data != Float.MAX_VALUE) { } if (expProfQC != profQC) { - formatErrors.add("PROFILE_" + param + "_QC[" + (profNum + 1) + "]: Value = '" + profQC + validationResult.addError("PROFILE_" + param + "_QC[" + (profNum + 1) + "]: Value = '" + profQC + "'. Expected = '" + expProfQC + "'"); } } // ..end if paramErr | param_adjErr @@ -2649,10 +2503,10 @@ private boolean checkParamAdjusted_When_DataModeIsR_CHECK_PROFILE_0021(float[] p boolean paramErr = false; for (int k = 0; k < prm_adj.length; k++) { - if (!ArgoDataFile.is_FillValue(fValue, prm_adj[k])) { + if (!ArgoFileValidator.is_FillValue(fValue, prm_adj[k])) { missNot++; } - if (!ArgoDataFile.is_FillValue(fValue, prm_adj_err[k])) { + if (!ArgoFileValidator.is_FillValue(fValue, prm_adj_err[k])) { errNotMiss++; } if (prm_adj_qc[k] != ' ') { @@ -2662,18 +2516,18 @@ private boolean checkParamAdjusted_When_DataModeIsR_CHECK_PROFILE_0021(float[] p if (missNot > 0) { paramErr = true; - formatErrors.add("DATA_MODE '" + mode + "': " + varName + "[" + (profNum + 1) + "]: Not FillValue at " - + missNot + " levels (of " + prm.length + ")"); + validationResult.addError("DATA_MODE '" + mode + "': " + varName + "[" + (profNum + 1) + + "]: Not FillValue at " + missNot + " levels (of " + prm.length + ")"); } if (errNotMiss > 0) { paramErr = true; - formatErrors.add("DATA_MODE '" + mode + "': " + varName + "_ERROR[" + (profNum + 1) + "]: Not FillValue at " - + errNotMiss + " levels (of " + prm.length + ")"); + validationResult.addError("DATA_MODE '" + mode + "': " + varName + "_ERROR[" + (profNum + 1) + + "]: Not FillValue at " + errNotMiss + " levels (of " + prm.length + ")"); } if (qcNotMiss > 0) { paramErr = true; - formatErrors.add("DATA_MODE '" + mode + "': " + varName + "_QC[" + (profNum + 1) + "]: Not FillValue at " - + qcNotMiss + " levels (of " + prm.length + ")"); + validationResult.addError("DATA_MODE '" + mode + "': " + varName + "_QC[" + (profNum + 1) + + "]: Not FillValue at " + qcNotMiss + " levels (of " + prm.length + ")"); } return paramErr; @@ -2701,8 +2555,8 @@ public void validateQC(int nProf, int nParam, int nLevel) throws IOException { log.debug(".....validateQC....."); } - String juldQC = readString("JULD_QC", true);// ..true -> return NULLs if present - String posQC = readString("POSITION_QC", true);// ..true -> return NULLs if present + String juldQC = arFile.readString("JULD_QC", true);// ..true -> return NULLs if present + String posQC = arFile.readString("POSITION_QC", true);// ..true -> return NULLs if present // ...........loop over each profile in the file............. 
for (int n = 0; n < nProf; n++) { @@ -2712,176 +2566,23 @@ public void validateQC(int nProf, int nParam, int nLevel) throws IOException { ch = juldQC.charAt(n); if ((info = ArgoReferenceTable.QC_FLAG.contains(ch)).isValid()) { if (info.isDeprecated) { - formatWarnings.add("JULD_QC[" + (n + 1) + "]: '" + ch + "' Status: " + info.message); + validationResult.addWarning("JULD_QC[" + (n + 1) + "]: '" + ch + "' Status: " + info.message); } } else { - formatErrors.add("JULD_QC[" + (n + 1) + "]: '" + ch + "' Status: " + info.message); + validationResult.addError("JULD_QC[" + (n + 1) + "]: '" + ch + "' Status: " + info.message); } ch = posQC.charAt(n); if ((info = ArgoReferenceTable.QC_FLAG.contains(ch)).isValid()) { if (info.isDeprecated) { - formatWarnings.add("POSITION_QC[" + (n + 1) + "]: '" + ch + "' Status: " + info.message); + validationResult.addWarning("POSITION_QC[" + (n + 1) + "]: '" + ch + "' Status: " + info.message); } } else { - formatErrors.add("POSITION_QC[" + (n + 1) + "]: '" + ch + "' Status: " + info.message); - } - } - } - - /** - * Computes the "index-file psal adjustment" statistics defined as:
- *
- * • Mean of (psal_adjusted - psal) on the deepest 500 meters with good
- * psal_adjusted_qc (equal to 1)
- * • Standard deviation of (psal_adjusted - psal) on the deepest 500 meters
- * with good psal_adjusted_qc (equal to 1)
- *
- * NOTES: Only performed for a core-file for the first profile. - * - * @param nProf number of profiles in the file - * @param nParam number of parameters in the file - * @param nLevel number of levels in the file - * @throws IOException If an I/O error occurs - */ - public double[] computePsalAdjStats() { - log.debug(".....computePsalAdjStats....."); - - double[] stats = { 99999., 99999. }; - - if (fileType != FileType.PROFILE) { - log.debug("not a core-file. fileType = {}", fileType); - return (stats); - } - - float[] p_adj = readFloatArr("PRES_ADJUSTED", 0); - String p_adj_qc = readString("PRES_ADJUSTED_QC", 0, true); - - if (p_adj == null) { - log.debug("failed: no PRES_ADJUSTED variable"); - return (stats); - } - - // ..find the deepest good pres (must be > 500db) - float pDeep = 99999.f; - int nDeep = -1; - - for (int n = p_adj.length - 1; n > 0; n--) { - // if (p_adj[n] >= 500.f) { - if (p_adj_qc.charAt(n) == '1') { - pDeep = p_adj[n]; - nDeep = n; - break; - } - // } - } - - if (nDeep < 0) { - log.debug("failed: no good PRES_ADJUSTED > 500db"); - return (stats); - } - - log.debug("nDeep, pDeep = {}, {}", nDeep, pDeep); - - // ..find the starting point for the stats - float pShallow = 99999.f; - int nShallow = -1; - - float shallowest = pDeep - 500.f; - - for (int n = 0; n < p_adj.length; n++) { - if (p_adj[n] >= shallowest) { - pShallow = p_adj[n]; - nShallow = n; - break; - } - } - - if (nShallow < 0) { - log.debug("failed: could not find a starting PRES_ADJUSTED index"); - return (stats); - } - - log.debug("nShallow, pShallow = {}, {}", nShallow, pShallow); - - // ..is this mode = A or D (otherwise, the psal_adj) - - char m = readString("DATA_MODE", true).charAt(0); - log.debug("mode = {}", m); - - if (m == 'R') { - // ..no adjustment - stats[0] = 0.; - stats[1] = 0.; - log.debug("r-mode. no adjustment. 
stats = 0"); - return (stats); - } - - // ..get psal and psal_adj - - float[] s = readFloatArr("PSAL", 0); - float[] sadj = readFloatArr("PSAL_ADJUSTED", 0); - - String s_qc = readString("PSAL_QC", 0, true); - String sadj_qc = readString("PSAL_ADJUSTED_QC", 0, true); - - if (s == null) { - log.debug("failed: no PSAL data"); - return (stats); - } - - // ..compute mean - - double[] diff = new double[s.length]; - double sum = 0.; - int n_data = 0; - boolean[] include = new boolean[s.length]; - - for (int n = nShallow; n <= nDeep; n++) { - if (s_qc.charAt(n) == '1' && sadj_qc.charAt(n) == '1') { - include[n] = true; - n_data++; - diff[n] = sadj[n] - s[n]; - sum += diff[n]; - } else { - include[n] = false; - diff[n] = 1.e10; + validationResult.addError("POSITION_QC[" + (n + 1) + "]: '" + ch + "' Status: " + info.message); } } - - if (n_data < 2) { - log.debug("failed: fewer than 2 good data"); - return (stats); - } - - double mean = sum / n_data; - - log.debug("sum, n_data, mean = {}, {}, {}", sum, n_data, mean); - - // ..compute std dev - - sum = 0.; - - for (int n = nShallow; n <= nDeep; n++) { - if (include[n]) { - double d = diff[n] - mean; - sum += d * d; - } - } - - double sdev = Math.sqrt(sum / ((double) n_data - 1)); - - log.debug("sum, n_data, sdev = {}, {}, {}", sum, n_data, sdev); - - // ..done - - stats[0] = mean; - stats[1] = sdev; - - return (stats); } } // ..end class diff --git a/file_checker_exec/src/main/java/fr/coriolis/checker/filetypes/ArgoTechnicalFile.java b/file_checker_exec/src/main/java/fr/coriolis/checker/validators/ArgoTechnicalFileValidator.java similarity index 58% rename from file_checker_exec/src/main/java/fr/coriolis/checker/filetypes/ArgoTechnicalFile.java rename to file_checker_exec/src/main/java/fr/coriolis/checker/validators/ArgoTechnicalFileValidator.java index 83f7495..3bb104a 100644 --- a/file_checker_exec/src/main/java/fr/coriolis/checker/filetypes/ArgoTechnicalFile.java +++ b/file_checker_exec/src/main/java/fr/coriolis/checker/validators/ArgoTechnicalFileValidator.java @@ -1,7 +1,6 @@ -package fr.coriolis.checker.filetypes; +package fr.coriolis.checker.validators; import java.io.IOException; -import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; @@ -10,10 +9,9 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import fr.coriolis.checker.core.ArgoDataFile; import fr.coriolis.checker.specs.ArgoConfigTechParam; -import fr.coriolis.checker.specs.ArgoDate; import fr.coriolis.checker.specs.ArgoReferenceTable; -import ucar.ma2.ArrayChar; /** * Implements all of the features required to validate ArgoTechnicalFiles @@ -24,7 +22,7 @@ * @version $Id: ArgoTechnicalFile.java 1263 2021-06-14 17:59:56Z ignaszewski $ */ -public class ArgoTechnicalFile extends ArgoDataFile { +public class ArgoTechnicalFileValidator extends ArgoFileValidator { // ......................................... // VARIABLES @@ -32,26 +30,19 @@ public class ArgoTechnicalFile extends ArgoDataFile { // ..class variables // ..standard i/o shortcuts - private static final Logger log = LogManager.getLogger("ArgoTechnicalFile"); - - private final static long oneDaySec = 1L * 24L * 60L * 60L * 1000L; - - // ..object variables - private String data_mode; - - private ArrayList[] profParam; + private static final Logger log = LogManager.getLogger("ArgoTechnicalFileValidator"); // ....................................... // CONSTRUCTORS // ....................................... 
- protected ArgoTechnicalFile() throws IOException { - super(); + public ArgoTechnicalFileValidator(ArgoDataFile arFile) throws IOException { + super(arFile); } - public ArgoTechnicalFile(String specDir, String version) { - // super(specDir, FileType.PROFILE, version); - } +// public ArgoTechnicalFileValidator(String specDir, String version) { +// // super(specDir, FileType.PROFILE, version); +// } // .......................................... // METHODS @@ -70,15 +61,15 @@ public ArgoTechnicalFile(String specDir, String version) { * for the failure to open.) * @throws IOException If an I/O error occurs */ - public static ArgoTechnicalFile open(String inFile, String specDir, boolean fullSpec) throws IOException { - ArgoDataFile arFile = ArgoDataFile.open(inFile, specDir, fullSpec); - if (!(arFile instanceof ArgoTechnicalFile)) { - message = "ERROR: '" + inFile + "' not an Argo PROFILE file"; - return null; - } - - return (ArgoTechnicalFile) arFile; - } +// public static ArgoTechnicalFileValidator open(String inFile, String specDir, boolean fullSpec) throws IOException { +// ArgoDataFile arFile = ArgoDataFile.open(inFile, specDir, fullSpec); +// if (!(arFile instanceof ArgoTechnicalFileValidator)) { +// ValidationResult.lastMessage = "ERROR: '" + inFile + "' not an Argo PROFILE file"; +// return null; +// } +// +// return (ArgoTechnicalFileValidator) arFile; +// } /** * Validates the data in the technical file. This is a driver routine that @@ -95,38 +86,18 @@ public static ArgoTechnicalFile open(String inFile, String specDir, boolean full * reason). * @throws IOException If an I/O error occurs */ - public boolean validate(String dacName, boolean ckNulls) throws IOException { - ArgoReferenceTable.DACS dac = null; - - if (!verified) { - message = "File must be verified (verifyFormat) " + "successfully before validation"; + public boolean validateData(String dacName, boolean ckNulls) throws IOException { + boolean basicsChecks = super.basicDataValidation(ckNulls); + if (!basicsChecks) { return false; } - // .......check arguments....... - if (dacName.trim().length() > 0) { - for (ArgoReferenceTable.DACS d : ArgoReferenceTable.DACS.values()) { - if (d.name.equals(dacName)) { - dac = d; - break; - } - } - if (dac == null) { - message = "Unknown DAC name = '" + dacName + "'"; - return false; - } - } - - // ............Determine number of tech params............ - - if (ckNulls) { - validateStringNulls(); - } + // Validate tech meta data + validateMetaData(arFile.getValidatedDac()); - validateMetaData(dac); validateDates(); - if (format_version.startsWith("2.4") || format_version.startsWith("3")) { + if (arFile.fileVersion().startsWith("2.4") || arFile.fileVersion().startsWith("3")) { validateTechParams(); } @@ -149,79 +120,9 @@ public boolean validate(String dacName, boolean ckNulls) throws IOException { */ public void validateDates() throws IOException { log.debug(".....validateDates....."); - - String creation = ((ArrayChar) ncReader.findVariable("DATE_CREATION").read()).getString(); - String update = ((ArrayChar) ncReader.findVariable("DATE_UPDATE").read()).getString(); - Date fileTime = new Date(file.lastModified()); - long fileSec = fileTime.getTime(); - - if (log.isDebugEnabled()) { - log.debug("earliestDate: " + ArgoDate.format(earliestDate)); - log.debug("fileTime: " + ArgoDate.format(fileTime)); - log.debug("DATE_CREATION: " + creation); - log.debug("DATE_UPDATE: " + update); - } - - // ...........initial creation date checks:............. 
- // ..set, after earliestDate, and before file time - Date dateCreation = null; - boolean haveCreation = false; - long creationSec = 0; - - if (creation.trim().length() <= 0) { - formatErrors.add("DATE_CREATION: Not set"); - - } else { - dateCreation = ArgoDate.get(creation); - haveCreation = true; - - if (dateCreation == null) { - haveCreation = false; - formatErrors.add("DATE_CREATION: '" + creation + "': Invalid date"); - - } else { - creationSec = dateCreation.getTime(); - - if (dateCreation.before(earliestDate)) { - formatErrors.add("DATE_CREATION: '" + creation + "': Before allowed date ('" - + ArgoDate.format(earliestDate) + "')"); - - } else if ((creationSec - fileSec) > oneDaySec) { - formatErrors.add("DATE_CREATION: '" + creation + "': After system file time ('" - + ArgoDate.format(fileTime) + "')"); - } - } - } - - // ............initial update date checks:........... - // ..set, not before creation time, before file time - Date dateUpdate = null; - boolean haveUpdate = false; - long updateSec = 0; - - if (update.trim().length() <= 0) { - formatErrors.add("DATE_UPDATE: Not set"); - } else { - dateUpdate = ArgoDate.get(update); - haveUpdate = true; - - if (dateUpdate == null) { - formatErrors.add("DATE_UPDATE: '" + update + "': Invalid date"); - haveUpdate = false; - - } else { - updateSec = dateUpdate.getTime(); - - if (haveCreation && dateUpdate.before(dateCreation)) { - formatErrors.add("DATE_UPDATE: '" + update + "': Before DATE_CREATION ('" + creation + "')"); - } - - if ((updateSec - fileSec) > oneDaySec) { - formatErrors.add("DATE_UPDATE: '" + update + "': After system file time ('" - + ArgoDate.format(fileTime) + "')"); - } - } - } + Date fileTime = new Date(arFile.getFile().lastModified()); + // ...........creation and update dates checks:............. 
+ super.validateCreationUpdateDates(fileTime); }// ..end validateDates /** @@ -241,31 +142,14 @@ public void validateDates() throws IOException { public void validateMetaData(ArgoReferenceTable.DACS dac) throws IOException { log.debug(".....validateMetaData....."); - ArrayChar.D1 plNum = (ArrayChar.D1) ncReader.findVariable("PLATFORM_NUMBER").read(); - ArrayChar.D1 dc = (ArrayChar.D1) ncReader.findVariable("DATA_CENTRE").read(); - - String firstNum = new String(plNum.getString().trim()); - - log.debug("PLATFORM_NUMBER: '{}'", plNum.getString()); - - String s = plNum.getString().trim(); - if (!s.matches("[1-9][0-9]{4}|[1-9]9[0-9]{5}")) { - formatErrors.add("PLATFORM_NUMBER: '" + s + "': Invalid"); + // PLATFORM_NUMBER + String str = arFile.readString("PLATFORM_NUMBER").trim(); + if (!super.validatePlatfomNumber(str)) { + validationResult.addError("PLATFORM_NUMBER" + ": '" + str + "': Invalid"); } - log.debug("DATA_CENTRE: '" + dc.getString() + "'"); - - s = dc.getString().trim(); - if (dac != null) { - if (!ArgoReferenceTable.DacCenterCodes.get(dac).contains(s)) { - formatErrors.add("DATA_CENTRE: '" + s + "': Invalid for DAC " + dac); - } - - } else { // ..incoming DAC not set - if (!ArgoReferenceTable.DacCenterCodes.containsValue(s)) { - formatErrors.add("DATA_CENTRE: '" + s + "': Invalid (for all DACs)"); - } - } + // DATA_CENTRE + super.validateDataCentre(dac); }// ..end validateMetaData /** @@ -281,7 +165,7 @@ public void validateMetaData(ArgoReferenceTable.DACS dac) throws IOException { public void validateTechParams() throws IOException { log.debug(".....validateTechParams....."); - int nParam = getDimensionLength("N_TECH_PARAM"); + int nParam = arFile.getDimensionLength("N_TECH_PARAM"); log.debug("n_technical_parameter: {}", nParam); // ..read technical parameters and values @@ -289,7 +173,7 @@ public void validateTechParams() throws IOException { String nName = "TECHNICAL_PARAMETER_NAME"; // String vName = "TECHNICAL_PARAMETER_VALUE"; - String[] full_name = readStringArr(nName); + String[] full_name = arFile.readStringArr(nName); /* * 2021-06-01: value checks are currently not being worked on * @@ -309,7 +193,7 @@ public void validateTechParams() throws IOException { // ..poorly formed name - only report if not already reported if (!nameAlreadyChecked.contains(full)) { - formatErrors.add(nName + "[" + (n + 1) + "]: " + "Incorrectly formed name '" + full + "'"); + validationResult.addError(nName + "[" + (n + 1) + "]: " + "Incorrectly formed name '" + full + "'"); nameAlreadyChecked.add(full); } @@ -331,23 +215,24 @@ public void validateTechParams() throws IOException { if (!nameAlreadyChecked.contains(param)) { // ..this parameter name has not been checked - ArgoConfigTechParam.ArgoConfigTechParamMatch match = spec.ConfigTech.findTechParam(param); + ArgoConfigTechParam.ArgoConfigTechParamMatch match = arFile.getFileSpec().ConfigTech + .findTechParam(param); if (match == null) { // ..NOT an active name, NOT a deprecated name --> error String err = String.format("%s[%d]: Invalid name '%s'", nName, (n + 1), param); - // formatErrors.add(err); + // validationResult.addError(err); // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFileName(), err); log.debug("invalid param (not active or deprecated): '{}'", param); } 
else { if (match.isDeprecated) { // ..IS a deprecated name --> warning - formatWarnings.add(nName + "[" + (n + 1) + "]: " + "Deprecated name '" + param); + validationResult.addWarning(nName + "[" + (n + 1) + "]: " + "Deprecated name '" + param); log.debug("parameter is deprecated: '{}'", param); } @@ -361,11 +246,11 @@ public void validateTechParams() throws IOException { String err = String.format("%s[%d]: Invalid template/value '%s'/'%s' in '%s'", nName, (n + 1), tmplt, val, param); - // formatErrors.add(err); + // validationResult.addError(err); // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFileName(), err); log.debug("...invalid template/value '{}'/'{}'", tmplt, val); } @@ -384,11 +269,11 @@ public void validateTechParams() throws IOException { if (!ArgoReferenceTable.GENERIC_TEMPLATE_short_sensor_name.contains(str)) { String err = String.format("%s[%d]: Invalid short_sensor_name '%s' in '%s'", nName, (n + 1), str, param); - // formatErrors.add(err); + // validationResult.addError(err); // ################# TEMPORARY WARNING ################ - formatWarnings.add(err + " *** WILL BECOME AN ERROR ***"); - log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); + validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + log.warn("TEMP WARNING: {}: {}: {}", arFile.getDacName(), arFile.getFileName(), err); log.debug("...generic short_sensor_name lookup: INVALID = '{}'", str); } else { @@ -408,22 +293,22 @@ public void validateTechParams() throws IOException { if (!unitAlreadyChecked.containsKey(unit)) { // ..this unit name has NOT been checked - if (!spec.ConfigTech.isConfigTechUnit(unit)) { + if (!arFile.getFileSpec().ConfigTech.isConfigTechUnit(unit)) { // ..NOT an active unit - if (spec.ConfigTech.isDeprecatedConfigTechUnit(unit)) { + if (arFile.getFileSpec().ConfigTech.isDeprecatedConfigTechUnit(unit)) { // ..IS a deprecated unit --> warning validUnit = true; - formatWarnings.add( + validationResult.addWarning( nName + "[" + (n + 1) + "]: " + "Deprecated unit '" + unit + "' in '" + full + "'"); log.warn("'{}': unit is deprecated", unit); } else { // ..NOT an active unit, NOT a deprecated unit --> error validUnit = false; - formatErrors - .add(nName + "[" + (n + 1) + "]: " + "Invalid unit '" + unit + "' in '" + full + "'"); + validationResult.addError( + nName + "[" + (n + 1) + "]: " + "Invalid unit '" + unit + "' in '" + full + "'"); log.debug("unit is invalid (new or old)", unit); } @@ -452,11 +337,11 @@ public void validateTechParams() throws IOException { * err = * String.format("%s[%d]: Invalid value for '%s' (type '%s'): value = '%s'", * vName, (n+1), full, spec.ConfigTech.getConfigTechDataType(unit), - * value[n].trim()); //formatErrors.add(err); + * value[n].trim()); //validationResult.addError(err); * - * //################# TEMPORARY WARNING ################ formatWarnings.add(err - * + " *** WILL BECOME AN ERROR ***"); log.warn("TEMP WARNING: {}: {}: {}", - * dacName, file.getName(), err); + * //################# TEMPORARY WARNING ################ + * validationResult.addWarning(err + " *** WILL BECOME AN ERROR ***"); + * log.warn("TEMP WARNING: {}: {}: {}", dacName, file.getName(), err); * * log.debug("invalid value"); } } //..end if validUnit */ diff --git 
a/file_checker_exec/src/main/java/fr/coriolis/checker/filetypes/ArgoTrajectoryFile.java b/file_checker_exec/src/main/java/fr/coriolis/checker/validators/ArgoTrajectoryFileValidator.java similarity index 73% rename from file_checker_exec/src/main/java/fr/coriolis/checker/filetypes/ArgoTrajectoryFile.java rename to file_checker_exec/src/main/java/fr/coriolis/checker/validators/ArgoTrajectoryFileValidator.java index e47ae3c..e966b10 100644 --- a/file_checker_exec/src/main/java/fr/coriolis/checker/filetypes/ArgoTrajectoryFile.java +++ b/file_checker_exec/src/main/java/fr/coriolis/checker/validators/ArgoTrajectoryFileValidator.java @@ -1,32 +1,23 @@ -package fr.coriolis.checker.filetypes; +package fr.coriolis.checker.validators; import java.io.IOException; import java.io.PrintStream; -import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import fr.coriolis.checker.specs.ArgoAttribute; +import fr.coriolis.checker.core.ArgoDataFile; +import fr.coriolis.checker.core.ArgoDataFile.FileType; import fr.coriolis.checker.specs.ArgoDate; -import fr.coriolis.checker.specs.ArgoDimension; -import fr.coriolis.checker.specs.ArgoFileSpecification; import fr.coriolis.checker.specs.ArgoReferenceTable; -import fr.coriolis.checker.specs.ArgoVariable; import ucar.ma2.Array; import ucar.ma2.DataType; import ucar.ma2.Index; -import ucar.nc2.Attribute; -import ucar.nc2.NetcdfFileWriter; import ucar.nc2.Variable; /** @@ -48,67 +39,62 @@ * @version $Id: ArgoTrajectoryFile.java 1269 2021-06-14 20:34:45Z ignaszewski $ */ -public class ArgoTrajectoryFile extends ArgoDataFile { +public class ArgoTrajectoryFileValidator extends ArgoFileValidator { // ....................................... // VARIABLES // ....................................... // ..standard i/o shortcuts - private static PrintStream stdout = new PrintStream(System.out); private static PrintStream stderr = new PrintStream(System.err); - private static final Logger log = LogManager.getLogger("ArgoTrajectoryFile"); + private static final Logger log = LogManager.getLogger("ArgoTrajectoryFileValidator"); private final static int fillCycNum = 99999; - private final static long oneDaySec = 1L * 24L * 60L * 60L * 1000L; private final static String goodJuldQC = new String("01258"); - // ..object variables - private NetcdfFileWriter ncWriter; - // ....................................... // CONSTRUCTORS // ....................................... - protected ArgoTrajectoryFile() throws IOException { - super(); + public ArgoTrajectoryFileValidator(ArgoDataFile arFile) throws IOException { + super(arFile); } - protected ArgoTrajectoryFile(String specDir, String version) { - // super(specDir, FileType.TRAJECTORY, version); - } +// protected ArgoTrajectoryFileValidator(String specDir, String version) { +// // super(specDir, FileType.TRAJECTORY, version); +// } // .......................................... // METHODS // .......................................... 
- /** Retrieve the NetcdfFileWriter reference */ - public NetcdfFileWriter getNetcdfFileWriter() { - return ncWriter; - } +// /** Retrieve the NetcdfFileWriter reference */ +// public NetcdfFileWriter getNetcdfFileWriter() { +// return ncWriter; +// } /** * Retrieve a list of variables in the associated file */ - public List getVariableNames() { - LinkedList varNames = new LinkedList(); - - if (ncReader != null) { - for (Variable var : ncReader.getVariables()) { - varNames.add(var.getShortName()); - } - - } else if (ncWriter != null) { - for (Variable var : ncWriter.getNetcdfFile().getVariables()) { - varNames.add(var.getShortName()); - } - - } else { - varNames = null; - } - - return varNames; - } +// public List getVariableNames() { +// LinkedList varNames = new LinkedList(); +// +// if (ncReader != null) { +// for (Variable var : ncReader.getVariables()) { +// varNames.add(var.getShortName()); +// } +// +// } else if (ncWriter != null) { +// for (Variable var : ncWriter.getNetcdfFile().getVariables()) { +// varNames.add(var.getShortName()); +// } +// +// } else { +// varNames = null; +// } +// +// return varNames; +// } /** * Convenience method to add to String list for "pretty printing". @@ -116,13 +102,14 @@ public List getVariableNames() { * @param list the StringBuilder list * @param add the String to add */ - private void addToList(StringBuilder list, String add) { - if (list.length() == 0) { - list.append("'" + add + "'"); - } else { - list.append(", '" + add + "'"); - } - } +// @Override +// private void addToList(StringBuilder list, String add) { +// if (list.length() == 0) { +// list.append("'" + add + "'"); +// } else { +// list.append(", '" + add + "'"); +// } +// } /** * Convenience method to add to String list for "pretty printing". @@ -130,26 +117,6 @@ private void addToList(StringBuilder list, String add) { * @param list the StringBuilder list * @param add the String to add */ - private void addToList(StringBuilder list, int add) { - if (list.length() == 0) { - list.append(add); - } else { - list.append(", " + add); - } - } - - /** - * Closes an existing file. - * - * @throws IOException If an I/O error occurs - */ - @Override - public void close() throws IOException { - if (ncWriter != null) { - ncWriter.close(); - } - super.close(); - } // ..end close() /** * Creates a new Argo trajectory file. The "template" for the file is the @@ -168,329 +135,329 @@ public void close() throws IOException { * in the CDL attributes to number are * encountered. */ - public static ArgoTrajectoryFile createNew(String fileName, String specDir, String version, int N_PARAM, - int N_CYCLE, int N_HISTORY, Set parameters) throws IOException, NumberFormatException { - log.debug(".....createNew: start....."); - - ArgoTrajectoryFile arFile = new ArgoTrajectoryFile(); - - // ..create the template specification - arFile.spec = ArgoDataFile.openSpecification(false, specDir, ArgoDataFile.FileType.TRAJECTORY, version); - if (arFile.spec == null) { - return null; - } - - arFile.fileType = ArgoDataFile.FileType.TRAJECTORY; - - // ..remove any parameters that are not in this specification - - Iterator i = parameters.iterator(); - while (i.hasNext()) { - String p = i.next(); - if (!arFile.spec.isPhysicalParamName(p)) { - i.remove(); - // arFile.formatWarnings.add("Parameter: '"+p+ - // "' not in specification. 
Removed."); - log.debug("requested parameter '{}' not in spec: removed"); - } - } - - N_PARAM = parameters.size(); - - // ..create new file - - arFile.ncWriter = NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf3, fileName); - arFile.ncWriter.setFill(true); - - // ..fill in object variables - arFile.file = null; // ..don't know why I need this ..... yet - arFile.fileType = ArgoDataFile.FileType.TRAJECTORY; - arFile.format_version = version; - arFile.ncFileName = fileName; - // arFile.spec is filled in by openSpec... - - // .....add globabl attributes..... - - for (ArgoAttribute a : arFile.spec.getGlobalAttributes()) { - String name = a.getName(); - String value = (String) a.getValue(); - - if (value.matches(ArgoFileSpecification.ATTR_SPECIAL_REGEX)) { - // ..the definition starts with one of the special ATTR_IGNORE codes - - if (value.length() > ArgoFileSpecification.ATTR_SPECIAL_LENGTH) { - // ..there is more than just the code on the line - // ..defining the default value, use it - - value = value.substring(ArgoFileSpecification.ATTR_SPECIAL_LENGTH); - log.debug("global attribute with special code: '{}'", value); - - } else { - // ..nothing but the code on the line - // ..ignore the attribute - value = null; - } - } - - if (name.equals("history")) { - value = (new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'")).format(new Date()).toString(); - log.debug("history global attribute: '{}'", value); - } - - if (value != null) { - arFile.ncWriter.addGroupAttribute(null, new Attribute(name, value)); - log.debug("add global attribute: '{}' = '{}'", name, value); - } - } - - // .........add Dimensions............... - // ..don't allow <= 0 dimensions - if (N_PARAM <= 0) { - N_PARAM = 1; - } - if (N_CYCLE <= 0) { - N_CYCLE = 1; - } - if (N_HISTORY <= 0) { - N_HISTORY = 1; - } - - for (ArgoDimension d : arFile.spec.getDimensions()) { - String name = d.getName(); - int value = d.getValue(); - - if (name.equals("N_PARAM")) { - value = N_PARAM; - } else if (name.equals("N_CYCLE")) { - value = N_CYCLE; - } else if (name.equals("N_HISTORY")) { - value = N_HISTORY; - } - - if (name.equals("N_MEASUREMENT")) { - arFile.ncWriter.addUnlimitedDimension(name); - } else { - arFile.ncWriter.addDimension(null, name, value); - } - - log.debug("add dimension: '{}' = '{}'", name, value); - } - - // .........add Variables............... - - // ......ordered list..... - // ..this bit of code arranges the variables in the "expected order" - // ..this is technically completely unnecessary - // ..the ordering of the variables in the file should not matter - // ..however, at least one user is complaining about the current files - // ..and I am guessing that variable ordering is the problem. 
- - // ..the way the "spec" files are parsed, the PARAM variables end up - // ..at the end of the variables list - // ..so I am trying to distribute the variables in the "expected order" - - // ..good coding by users would eliminate the need for this - - // ..the idea is to create an ordered list of variable names that are - // ..then used in the next section - - log.debug("...build ordered list of variables..."); - - ArrayList orderedList = new ArrayList(200); - - for (ArgoVariable v : arFile.spec.getVariables()) { - String name = v.getName(); - - if (v.isParamVar()) { - // ..when we get to the PARAM variables, we are done - // ..they are always at the end of the list and we handle - // ..them separately in the if-blocks below - break; - } - - orderedList.add(v); - log.debug("add {}", name); - - // ..insert the variables after MEASUREMENT_CODE - - if (name.equals("MEASUREMENT_CODE")) { - log.debug("insert here"); - for (ArgoVariable w : arFile.spec.getVariables()) { - if (w.isParamVar()) { - log.debug("isParamVar"); - orderedList.add(w); - log.debug("add {}", w.getName()); - } - } - } - } - - log.debug("...ordered list complete..."); - - // ....end ordered list.... - - Boolean keep; - // ...if we didn't need the list to be ordered... - // for (ArgoVariable v : arFile.spec.getVariables()) { - - for (ArgoVariable v : orderedList) { - String name = v.getName(); - String prm = v.getParamName(); - - if (prm != null) { - // ..this is a physical parameter variable - // ..is it's parameter name in the parameter list - - if (parameters.contains(prm)) { - keep = true; - } else { - keep = false; - log.debug("skip variable: '{}'", name); - } - - } else { - // ..not a physical parameter, so keep it - keep = true; - } - - if (keep) { - DataType type = v.getType(); - String dims = v.getDimensionsString(); - - Variable var = arFile.ncWriter.addVariable(null, name, type, dims); - log.debug("add variable: '{}': '{}' '{}'", name, type, dims); - - // ..add attributes for this variable - for (ArgoAttribute a : v.getAttributes()) { - String aname = a.getName(); - - if (a.isNumeric()) { - var.addAttribute(new Attribute(aname, (Number) a.getValue())); - log.debug("add attribute: '{}:{}' = '{}'", name, aname, a.getValue()); - - } else if (a.isString()) { - String value = (String) a.getValue(); - Object def = null; - - if (value.matches(ArgoFileSpecification.ATTR_SPECIAL_REGEX)) { - // ..the definition starts with one of the special ATTR_IGNORE codes - - if (value.length() > ArgoFileSpecification.ATTR_SPECIAL_LENGTH) { - // ..there is more than just the code on the line - // ..defining the default value, use it - - String val = value.substring(ArgoFileSpecification.ATTR_SPECIAL_LENGTH); - log.debug("attribute with special code: '{}' '{}'", value, val); - - if (val.startsWith("numeric:")) { - // ..this should be encoded as a number - - val = val.substring("numeric:".length()); - log.debug("...number: '{}'", val); - - try { - switch (v.getType()) { - case DOUBLE: - def = Double.valueOf(val); - var.addAttribute(new Attribute(aname, (Number) def)); - break; - case FLOAT: - def = Float.valueOf(val); - var.addAttribute(new Attribute(aname, (Number) def)); - break; - case INT: - def = Integer.valueOf(val); - var.addAttribute(new Attribute(aname, (Number) def)); - break; - case SHORT: - def = Short.valueOf(val); - var.addAttribute(new Attribute(aname, (Number) def)); - break; - default: // ..assume it is a string - def = val; - var.addAttribute(new Attribute(aname, (String) def)); - } - } catch (NumberFormatException e) { 
- throw new NumberFormatException( - "Attribute " + name + ":" + aname + ": Unable to convert to number"); - } - - } else { - // ..default value is a string - var.addAttribute(new Attribute(aname, val)); - } - - // } else { - // ..nothing but the special code on the line - // ..ignore the attribute - // def = null; - } - - } else { - // ..not a special value -- insert as is - var.addAttribute(new Attribute(aname, value)); - log.debug("add attribute: '{}:{}' = '{}'", name, aname, value); - } - - } else { - log.error("attribute not Number or String: '{}:{}'", name, aname); - continue; - } - - } - } - } - - // .....create the file -- end "define mode" - - arFile.ncWriter.create(); - arFile.ncWriter.close(); - - log.debug(".....createNew: end....."); - return arFile; - } - - /** - * Opens an existing file without opening the specification - * - * @param inFile the string name of the file to open - * @return the file object reference. Returns null if the file is not opened - * successfully. (ArgoTrajectoryFile.getMessage() will return the reason - * for the failure to open.) - * @throws IOException If an I/O error occurs - */ - public static ArgoTrajectoryFile open(String inFile) throws IOException { - ArgoDataFile arFile = ArgoDataFile.open(inFile); - if (!(arFile instanceof ArgoTrajectoryFile)) { - message = "ERROR: '" + inFile + "' not an Argo TRAJECTORY file"; - return null; - } - - return (ArgoTrajectoryFile) arFile; - } - - /** - * Opens an existing file and the associated Argo specification). - * - * @param inFile the string name of the file to open - * @param specDir the string name of the directory containing the format - * specification files - * @param fullSpec true = open the full specification; false = open the template - * specification - * @return the file object reference. Returns null if the file is not opened - * successfully. (ArgoTrajectoryFile.getMessage() will return the reason - * for the failure to open.) - * @throws IOException If an I/O error occurs - */ - public static ArgoTrajectoryFile open(String inFile, String specDir, boolean fullSpec) throws IOException { - ArgoDataFile arFile = ArgoDataFile.open(inFile, specDir, fullSpec); - if (!(arFile instanceof ArgoTrajectoryFile)) { - message = "ERROR: '" + inFile + "' not an Argo TRAJECTORY file"; - return null; - } - - return (ArgoTrajectoryFile) arFile; - } +// public static ArgoTrajectoryFileValidator createNew(String fileName, String specDir, String version, int N_PARAM, +// int N_CYCLE, int N_HISTORY, Set parameters) throws IOException, NumberFormatException { +// log.debug(".....createNew: start....."); +// +// ArgoTrajectoryFileValidator arFile = new ArgoTrajectoryFileValidator(); +// +// // ..create the template specification +// arFile.spec = ArgoDataFile.openSpecification(false, specDir, ArgoDataFile.FileType.TRAJECTORY, version); +// if (arFile.spec == null) { +// return null; +// } +// +// arFile.fileType = ArgoDataFile.FileType.TRAJECTORY; +// +// // ..remove any parameters that are not in this specification +// +// Iterator i = parameters.iterator(); +// while (i.hasNext()) { +// String p = i.next(); +// if (!arFile.spec.isPhysicalParamName(p)) { +// i.remove(); +// // arFile.validationResult.addWarning("Parameter: '"+p+ +// // "' not in specification. 
Removed."); +// log.debug("requested parameter '{}' not in spec: removed"); +// } +// } +// +// N_PARAM = parameters.size(); +// +// // ..create new file +// +// arFile.ncWriter = NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf3, fileName); +// arFile.ncWriter.setFill(true); +// +// // ..fill in object variables +// arFile.file = null; // ..don't know why I need this ..... yet +// arFile.fileType = ArgoDataFile.FileType.TRAJECTORY; +// arFile.format_version = version; +// arFile.ncFileName = fileName; +// // arFile.spec is filled in by openSpec... +// +// // .....add globabl attributes..... +// +// for (ArgoAttribute a : arFile.spec.getGlobalAttributes()) { +// String name = a.getName(); +// String value = (String) a.getValue(); +// +// if (value.matches(ArgoFileSpecification.ATTR_SPECIAL_REGEX)) { +// // ..the definition starts with one of the special ATTR_IGNORE codes +// +// if (value.length() > ArgoFileSpecification.ATTR_SPECIAL_LENGTH) { +// // ..there is more than just the code on the line +// // ..defining the default value, use it +// +// value = value.substring(ArgoFileSpecification.ATTR_SPECIAL_LENGTH); +// log.debug("global attribute with special code: '{}'", value); +// +// } else { +// // ..nothing but the code on the line +// // ..ignore the attribute +// value = null; +// } +// } +// +// if (name.equals("history")) { +// value = (new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'")).format(new Date()).toString(); +// log.debug("history global attribute: '{}'", value); +// } +// +// if (value != null) { +// arFile.ncWriter.addGroupAttribute(null, new Attribute(name, value)); +// log.debug("add global attribute: '{}' = '{}'", name, value); +// } +// } +// +// // .........add Dimensions............... +// // ..don't allow <= 0 dimensions +// if (N_PARAM <= 0) { +// N_PARAM = 1; +// } +// if (N_CYCLE <= 0) { +// N_CYCLE = 1; +// } +// if (N_HISTORY <= 0) { +// N_HISTORY = 1; +// } +// +// for (ArgoDimension d : arFile.spec.getDimensions()) { +// String name = d.getName(); +// int value = d.getValue(); +// +// if (name.equals("N_PARAM")) { +// value = N_PARAM; +// } else if (name.equals("N_CYCLE")) { +// value = N_CYCLE; +// } else if (name.equals("N_HISTORY")) { +// value = N_HISTORY; +// } +// +// if (name.equals("N_MEASUREMENT")) { +// arFile.ncWriter.addUnlimitedDimension(name); +// } else { +// arFile.ncWriter.addDimension(null, name, value); +// } +// +// log.debug("add dimension: '{}' = '{}'", name, value); +// } +// +// // .........add Variables............... +// +// // ......ordered list..... +// // ..this bit of code arranges the variables in the "expected order" +// // ..this is technically completely unnecessary +// // ..the ordering of the variables in the file should not matter +// // ..however, at least one user is complaining about the current files +// // ..and I am guessing that variable ordering is the problem. 
+// +// // ..the way the "spec" files are parsed, the PARAM variables end up +// // ..at the end of the variables list +// // ..so I am trying to distribute the variables in the "expected order" +// +// // ..good coding by users would eliminate the need for this +// +// // ..the idea is to create an ordered list of variable names that are +// // ..then used in the next section +// +// log.debug("...build ordered list of variables..."); +// +// ArrayList orderedList = new ArrayList(200); +// +// for (ArgoVariable v : arFile.spec.getVariables()) { +// String name = v.getName(); +// +// if (v.isParamVar()) { +// // ..when we get to the PARAM variables, we are done +// // ..they are always at the end of the list and we handle +// // ..them separately in the if-blocks below +// break; +// } +// +// orderedList.add(v); +// log.debug("add {}", name); +// +// // ..insert the variables after MEASUREMENT_CODE +// +// if (name.equals("MEASUREMENT_CODE")) { +// log.debug("insert here"); +// for (ArgoVariable w : arFile.spec.getVariables()) { +// if (w.isParamVar()) { +// log.debug("isParamVar"); +// orderedList.add(w); +// log.debug("add {}", w.getName()); +// } +// } +// } +// } +// +// log.debug("...ordered list complete..."); +// +// // ....end ordered list.... +// +// Boolean keep; +// // ...if we didn't need the list to be ordered... +// // for (ArgoVariable v : arFile.spec.getVariables()) { +// +// for (ArgoVariable v : orderedList) { +// String name = v.getName(); +// String prm = v.getParamName(); +// +// if (prm != null) { +// // ..this is a physical parameter variable +// // ..is it's parameter name in the parameter list +// +// if (parameters.contains(prm)) { +// keep = true; +// } else { +// keep = false; +// log.debug("skip variable: '{}'", name); +// } +// +// } else { +// // ..not a physical parameter, so keep it +// keep = true; +// } +// +// if (keep) { +// DataType type = v.getType(); +// String dims = v.getDimensionsString(); +// +// Variable var = arFile.ncWriter.addVariable(null, name, type, dims); +// log.debug("add variable: '{}': '{}' '{}'", name, type, dims); +// +// // ..add attributes for this variable +// for (ArgoAttribute a : v.getAttributes()) { +// String aname = a.getName(); +// +// if (a.isNumeric()) { +// var.addAttribute(new Attribute(aname, (Number) a.getValue())); +// log.debug("add attribute: '{}:{}' = '{}'", name, aname, a.getValue()); +// +// } else if (a.isString()) { +// String value = (String) a.getValue(); +// Object def = null; +// +// if (value.matches(ArgoFileSpecification.ATTR_SPECIAL_REGEX)) { +// // ..the definition starts with one of the special ATTR_IGNORE codes +// +// if (value.length() > ArgoFileSpecification.ATTR_SPECIAL_LENGTH) { +// // ..there is more than just the code on the line +// // ..defining the default value, use it +// +// String val = value.substring(ArgoFileSpecification.ATTR_SPECIAL_LENGTH); +// log.debug("attribute with special code: '{}' '{}'", value, val); +// +// if (val.startsWith("numeric:")) { +// // ..this should be encoded as a number +// +// val = val.substring("numeric:".length()); +// log.debug("...number: '{}'", val); +// +// try { +// switch (v.getType()) { +// case DOUBLE: +// def = Double.valueOf(val); +// var.addAttribute(new Attribute(aname, (Number) def)); +// break; +// case FLOAT: +// def = Float.valueOf(val); +// var.addAttribute(new Attribute(aname, (Number) def)); +// break; +// case INT: +// def = Integer.valueOf(val); +// var.addAttribute(new Attribute(aname, (Number) def)); +// break; +// case SHORT: +// 
def = Short.valueOf(val); +// var.addAttribute(new Attribute(aname, (Number) def)); +// break; +// default: // ..assume it is a string +// def = val; +// var.addAttribute(new Attribute(aname, (String) def)); +// } +// } catch (NumberFormatException e) { +// throw new NumberFormatException( +// "Attribute " + name + ":" + aname + ": Unable to convert to number"); +// } +// +// } else { +// // ..default value is a string +// var.addAttribute(new Attribute(aname, val)); +// } +// +// // } else { +// // ..nothing but the special code on the line +// // ..ignore the attribute +// // def = null; +// } +// +// } else { +// // ..not a special value -- insert as is +// var.addAttribute(new Attribute(aname, value)); +// log.debug("add attribute: '{}:{}' = '{}'", name, aname, value); +// } +// +// } else { +// log.error("attribute not Number or String: '{}:{}'", name, aname); +// continue; +// } +// +// } +// } +// } +// +// // .....create the file -- end "define mode" +// +// arFile.ncWriter.create(); +// arFile.ncWriter.close(); +// +// log.debug(".....createNew: end....."); +// return arFile; +// } +// +// /** +// * Opens an existing file without opening the specification +// * +// * @param inFile the string name of the file to open +// * @return the file object reference. Returns null if the file is not opened +// * successfully. (ArgoTrajectoryFile.getMessage() will return the reason +// * for the failure to open.) +// * @throws IOException If an I/O error occurs +// */ +// public static ArgoTrajectoryFileValidator open(String inFile) throws IOException { +// ArgoDataFile arFile = ArgoDataFile.open(inFile); +// if (!(arFile instanceof ArgoTrajectoryFileValidator)) { +// ValidationResult.lastMessage = "ERROR: '" + inFile + "' not an Argo TRAJECTORY file"; +// return null; +// } +// +// return (ArgoTrajectoryFileValidator) arFile; +// } +// +// /** +// * Opens an existing file and the associated Argo specification). +// * +// * @param inFile the string name of the file to open +// * @param specDir the string name of the directory containing the format +// * specification files +// * @param fullSpec true = open the full specification; false = open the template +// * specification +// * @return the file object reference. Returns null if the file is not opened +// * successfully. (ArgoTrajectoryFile.getMessage() will return the reason +// * for the failure to open.) +// * @throws IOException If an I/O error occurs +// */ +// public static ArgoTrajectoryFileValidator open(String inFile, String specDir, boolean fullSpec) throws IOException { +// ArgoDataFile arFile = ArgoDataFile.open(inFile, specDir, fullSpec); +// if (!(arFile instanceof ArgoTrajectoryFileValidator)) { +// ValidationResult.lastMessage = "ERROR: '" + inFile + "' not an Argo TRAJECTORY file"; +// return null; +// } +// +// return (ArgoTrajectoryFileValidator) arFile; +// } /** * Validates the data in the trajectory file. This is a driver routine that @@ -528,36 +495,18 @@ public static ArgoTrajectoryFile open(String inFile, String specDir, boolean ful * reason). 
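The attribute-default handling above (both the removed copy and the commented-out copy) interprets a spec value prefixed with "numeric:" as a typed number rather than a string, converting it according to the variable's DataType. A compact sketch of that conversion follows; the helper name addDefaultAttribute is hypothetical, the DataType cases are the same ones the hunk handles, and any NumberFormatException is left to propagate as in the original.

import ucar.ma2.DataType;
import ucar.nc2.Attribute;
import ucar.nc2.Variable;

final class AttributeDefaults {
    // Hypothetical helper illustrating the "numeric:" default handling seen above.
    static void addDefaultAttribute(Variable var, String attrName, String spec, DataType type) {
        if (!spec.startsWith("numeric:")) {
            var.addAttribute(new Attribute(attrName, spec));   // ..plain string default
            return;
        }
        String val = spec.substring("numeric:".length());
        Number def;
        switch (type) {                                        // ..convert per variable type
            case DOUBLE: def = Double.valueOf(val); break;
            case FLOAT:  def = Float.valueOf(val);  break;
            case INT:    def = Integer.valueOf(val); break;
            case SHORT:  def = Short.valueOf(val);  break;
            default:                                           // ..fall back to a string default
                var.addAttribute(new Attribute(attrName, val));
                return;
        }
        var.addAttribute(new Attribute(attrName, def));        // ..NumberFormatException propagates
    }
}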
* @throws IOException If an I/O error occurs */ - public boolean validate(String dacName, boolean ckNulls) throws IOException { - log.debug(".....validate: start....."); - - ArgoReferenceTable.DACS dac = null; - - if (!verified) { - message = new String("File must be verified (verifyFormat) " + "successfully before validation"); + public boolean validateData(String dacName, boolean ckNulls) throws IOException { + boolean basicsChecks = super.basicDataValidation(ckNulls); + if (!basicsChecks) { return false; } - // .......check arguments....... - if (dacName.trim().length() > 0) { - for (ArgoReferenceTable.DACS d : ArgoReferenceTable.DACS.values()) { - if (d.name.equals(dacName)) { - dac = d; - break; - } - } - if (dac == (ArgoReferenceTable.DACS) null) { - message = new String("Unknown DAC name = '" + dacName + "'"); - return false; - } - } - // ............Determine number of profiles............ - int nCycle = getDimensionLength("N_CYCLE"); - int nHistory = getDimensionLength("N_HISTORY"); - int nMeasure = getDimensionLength("N_MEASUREMENT"); - int nParam = getDimensionLength("N_PARAM"); + int nCycle = arFile.getDimensionLength("N_CYCLE"); + int nHistory = arFile.getDimensionLength("N_HISTORY"); + int nMeasure = arFile.getDimensionLength("N_MEASUREMENT"); + int nParam = arFile.getDimensionLength("N_PARAM"); boolean pass; @@ -568,11 +517,7 @@ public boolean validate(String dacName, boolean ckNulls) throws IOException { log.debug("N_PARAM: {}", nParam); } - if (ckNulls) { - validateStringNulls(); - } - - pass = validateMetaData(dac); + pass = validateMetaData(arFile.getValidatedDac()); if (!pass) { return true; } @@ -596,9 +541,10 @@ public boolean validate(String dacName, boolean ckNulls) throws IOException { if (!pass) { // ..the mapping from cyc_num to cyc_num_index is too important to continue w/o - // will be.. formatErrors.add("CYCLE_NUMBER errors exist. Remaining checks + // will be.. validationResult.addError("CYCLE_NUMBER errors exist. Remaining + // checks // skipped"); - formatWarnings.add("CYCLE_NUMBER errors exist. Remaining checks skipped"); + validationResult.addWarning("CYCLE_NUMBER errors exist. Remaining checks skipped"); return true; } @@ -670,14 +616,14 @@ public boolean validateCycleNumber(int nMeasure, int nCycle, char overallDM, cha // .....cycle_number_index and cycle_number_index_adj.......... log.debug("cyc_num_ind-check: start"); - int[] cycleIndex = readIntArr("CYCLE_NUMBER_INDEX"); + int[] cycleIndex = arFile.readIntArr("CYCLE_NUMBER_INDEX"); int[] cycleIndexAdj = null; boolean core = false; - if (fileType == FileType.TRAJECTORY) { + if (arFile.fileType() == FileType.TRAJECTORY) { // ..implies this is a core-file NOT a bio-file core = true; - cycleIndexAdj = readIntArr("CYCLE_NUMBER_INDEX_ADJUSTED"); + cycleIndexAdj = arFile.readIntArr("CYCLE_NUMBER_INDEX_ADJUSTED"); } ErrorTracker adjSet = new ErrorTracker(); @@ -771,33 +717,33 @@ public boolean validateCycleNumber(int nMeasure, int nCycle, char overallDM, cha if (adjSet.counter > 0) { pass = false; - adjSet.addMessage(formatWarnings, // will be.. formatErrors, + adjSet.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "CYCLE_NUMBER_INDEX_ADJUSTED: Set in real-time at", "cycles"); } if (dupIndex.counter > 0) { pass = false; - dupIndex.addMessage(formatWarnings, // will be.. formatErrors, + dupIndex.addMessage(validationResult.getWarnings(), // will be.. 
formatErrors, "CYCLE_NUMBER_INDEX / CYCLE_NUMBER_ADJUSTED_INDEX:" + " Duplicate cycle number at ", "cycles"); } if (invAdjCyc.counter > 0) { pass = false; - invAdjCyc.addMessage(formatWarnings, // will be.. formatErrors, + invAdjCyc.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "CYCLE_NUMBER_ADJUSTED_INDEX: Invalid cycle number at", "cycles"); } if (invCyc.counter > 0) { pass = false; - invCyc.addMessage(formatWarnings, // will be.. formatErrors, + invCyc.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "CYCLE_NUMBER_INDEX: Invalid cycle number at", "cycles"); } if (missDCyc.counter > 0) { pass = false; - missDCyc.addMessage(formatWarnings, // will be.. formatErrors, + missDCyc.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "CYCLE_NUMBER_ADJUSTED_INDEX: Missing in delayed-mode at", "cycles"); } if (missRCyc.counter > 0) { pass = false; - missRCyc.addMessage(formatWarnings, // will be.. formatErrors, + missRCyc.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "CYCLE_NUMBER_INDEX: Missing in real-time at", "cycles"); } @@ -805,11 +751,11 @@ public boolean validateCycleNumber(int nMeasure, int nCycle, char overallDM, cha log.debug("cyc_num_ind-check: end"); log.debug("cyc_num: start"); - int[] cycle = readIntArr("CYCLE_NUMBER"); + int[] cycle = arFile.readIntArr("CYCLE_NUMBER"); int[] cycleAdj = null; if (core) { - cycleAdj = readIntArr("CYCLE_NUMBER_ADJUSTED"); + cycleAdj = arFile.readIntArr("CYCLE_NUMBER_ADJUSTED"); } adjSet.reset(); @@ -933,37 +879,37 @@ public boolean validateCycleNumber(int nMeasure, int nCycle, char overallDM, cha if (adjSet.counter > 0) { pass = false; - adjSet.addMessage(formatWarnings, // will be.. formatErrors, + adjSet.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "CYCLE_NUMBER_ADJUSTED: Set in real-time at", "measurements"); } if (invAdjCyc.counter > 0) { pass = false; - invAdjCyc.addMessage(formatWarnings, // will be.. formatErrors, + invAdjCyc.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "CYCLE_NUMBER_ADJUSTED: Invalid cycle number at", "measurments"); } if (invCyc.counter > 0) { pass = false; - invCyc.addMessage(formatWarnings, // will be.. formatErrors, + invCyc.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "CYCLE_NUMBER: Invalid cycle number at", "measurements"); } if (invAdjLaunch.counter > 0) { pass = false; - invAdjLaunch.addMessage(formatWarnings, // will be.. formatErrors, + invAdjLaunch.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "CYCLE_NUMBER_ADJUSTED: Cycle -1 not in first index at", "measurements"); } if (invLaunch.counter > 0) { pass = false; - invLaunch.addMessage(formatWarnings, // will be.. formatErrors, + invLaunch.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "CYCLE_NUMBER: Cycle -1 not in first index at", "measurements"); } if (missDCyc.counter > 0) { pass = false; - missDCyc.addMessage(formatWarnings, // will be.. formatErrors, + missDCyc.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "CYCLE_NUMBER_ADJUSTED: Missing in delayed-mode at", "measurements"); } if (missRCyc.counter > 0) { pass = false; - missRCyc.addMessage(formatWarnings, // will be.. formatErrors, + missRCyc.addMessage(validationResult.getWarnings(), // will be.. 
formatErrors, "CYCLE_NUMBER: Missing in real-time at ", "cycles"); } @@ -1040,7 +986,7 @@ public boolean validateCycleNumber(int nMeasure, int nCycle, char overallDM, cha if (missCyc.counter > 0) { pass = false; - missCyc.addMessage(formatWarnings, // will be.. formatErrors, + missCyc.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "Cycle number in CYCLE_NUMBER/*_ADJUSTED is not " + "in CYCLE_NUMBER_INDEX/*_ADJUSTED: At ", " measurements"); } @@ -1082,7 +1028,7 @@ public boolean validateCycleNumber(int nMeasure, int nCycle, char overallDM, cha if (missCyc.counter > 0) { pass = false; - missCyc.addMessage(formatWarnings, // will be.. formatErrors, + missCyc.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "Cycle number in CYCLE_NUMBER_INDEX/*_ADJUSTED is not " + "in CYCLE_NUMBER/*_ADJUSTED: At ", "cycles"); } @@ -1112,7 +1058,7 @@ public char validateDataMode(char[] mode) throws IOException { // .....get DATA_MODE and check it as long we're at it..... - String data_mode = readString("DATA_MODE", true); // ..true -> return NULLS + String data_mode = arFile.readString("DATA_MODE", true); // ..true -> return NULLS ErrorTracker invalid = new ErrorTracker(); @@ -1136,7 +1082,7 @@ public char validateDataMode(char[] mode) throws IOException { } } - invalid.addMessage(formatErrors, "DATA_MODE: Invalid at ", "cycles"); + invalid.addMessage(validationResult.getErrors(), "DATA_MODE: Invalid at ", "cycles"); log.debug(".....validateDataMode: end....."); return overallDM; @@ -1168,91 +1114,23 @@ public void validateDates(int nParam, int nHistory) throws IOException { // ..check Reference_Date String name = "REFERENCE_DATE_TIME"; - String ref = readString(name); + String ref = arFile.readString(name); log.debug(name + ": " + ref); - if (!ref.matches(spec.getMeta(name))) { - formatErrors.add(name + ": '" + ref + "': Does not match specification ('" + spec.getMeta(name) + "')"); - } - - String creation = readString("DATE_CREATION"); - String update = readString("DATE_UPDATE"); - Date fileTime = new Date(file.lastModified()); - long fileSec = fileTime.getTime(); - - if (log.isDebugEnabled()) { - log.debug("earliestDate: " + ArgoDate.format(earliestDate)); - log.debug("fileTime: " + ArgoDate.format(fileTime)); - log.debug("DATE_CREATION: " + creation); - log.debug("DATE_UPDATE: " + update); - } - - // ...........initial creation date checks:............. - // ..set, after earliestDate, and before file time - Date dateCreation = null; - boolean haveCreation = false; - long creationSec = 0; - - if (creation.trim().length() <= 0) { - formatErrors.add("DATE_CREATION: Not set"); - - } else { - dateCreation = ArgoDate.get(creation); - haveCreation = true; - - if (dateCreation == null) { - haveCreation = false; - formatErrors.add("DATE_CREATION: '" + creation + "': Invalid date"); - - } else { - creationSec = dateCreation.getTime(); - - if (dateCreation.before(earliestDate)) { - formatErrors.add("DATE_CREATION: '" + creation + "': Before allowed date ('" - + ArgoDate.format(earliestDate) + "')"); - - } else if ((creationSec - fileSec) > oneDaySec) { - formatErrors.add("DATE_CREATION: '" + creation + "': After GDAC receipt time ('" - + ArgoDate.format(fileTime) + "')"); - } - } + if (!ref.matches(arFile.getFileSpec().getMeta(name))) { + validationResult.addError(name + ": '" + ref + "': Does not match specification ('" + + arFile.getFileSpec().getMeta(name) + "')"); } - // ............initial update date checks:........... 
- // ..set, not before creation time, before file time - Date dateUpdate = null; - boolean haveUpdate = false; - long updateSec = 0; - - if (update.trim().length() <= 0) { - formatErrors.add("DATE_UPDATE: Not set"); - } else { - dateUpdate = ArgoDate.get(update); - haveUpdate = true; - - if (dateUpdate == null) { - formatErrors.add("DATE_UPDATE: '" + update + "': Invalid date"); - haveUpdate = false; - - } else { - updateSec = dateUpdate.getTime(); - - if (haveCreation && dateUpdate.before(dateCreation)) { - formatErrors.add("DATE_UPDATE: '" + update + "': Before DATE_CREATION ('" + creation + "')"); - } - - if ((updateSec - fileSec) > oneDaySec) { - formatErrors.add("DATE_UPDATE: '" + update + "': After GDAC receipt time ('" - + ArgoDate.format(fileTime) + "')"); - } - } - } + Date fileTime = new Date(arFile.getFile().lastModified()); + // ...........creation and update dates checks:............. + super.validateCreationUpdateDates(fileTime); // ...................history date checks................... // ..if set, after DATE_CREATION, before DATE_UPSATE if (nHistory > 0) { - String[] dateHist = readStringArr("HISTORY_DATE"); + String[] dateHist = arFile.readStringArr("HISTORY_DATE"); for (int h = 0; h < nHistory; h++) { String d = dateHist[h].trim(); @@ -1262,13 +1140,13 @@ public void validateDates(int nParam, int nHistory) throws IOException { Date date = ArgoDate.get(d); if (date == null) { - formatErrors.add("HISTORY_DATE[" + (h + 1) + "]: '" + dateHist[h] + "': Invalid date"); + validationResult.addError("HISTORY_DATE[" + (h + 1) + "]: '" + dateHist[h] + "': Invalid date"); - } else if (haveUpdate) { + } else if (arFile.isHaveUpdateDate()) { long dateSec = date.getTime(); - if ((dateSec - updateSec) > oneDaySec) { - formatErrors.add("HISTORY_DATE[" + (h + 1) + "]: '" + dateHist[h] - + "': After DATE_UPDATE ('" + update + "')"); + if ((dateSec - arFile.getUpdateSec()) > oneDaySec) { + validationResult.addError("HISTORY_DATE[" + (h + 1) + "]: '" + dateHist[h] + + "': After DATE_UPDATE ('" + arFile.getUpdateDate() + "')"); } } } @@ -1311,7 +1189,7 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, boolean core = false; ArgoReferenceTable.ArgoReferenceEntry info; - if (fileType == FileType.TRAJECTORY) { + if (arFile.fileType() == FileType.TRAJECTORY) { // ..implies this is 1) a v3.2+ or 2) a pre-v3.2 core-file (NOT a bio-file) core = true; } @@ -1321,11 +1199,11 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, // ..............cycle_number............... log.debug("..validate CYCLE_NUMBER"); - int[] cycle = readIntArr("CYCLE_NUMBER"); + int[] cycle = arFile.readIntArr("CYCLE_NUMBER"); int[] cycle_adj = null; if (core) { - cycle_adj = readIntArr("CYCLE_NUMBER_ADJUSTED"); + cycle_adj = arFile.readIntArr("CYCLE_NUMBER_ADJUSTED"); } int[] finalCycle = new int[nMeasure]; @@ -1342,7 +1220,7 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, // ..........MEASUREMENT_CODE............ log.debug("..validate MEASUREMENT_CODE"); - int[] m_code = readIntArr("MEASUREMENT_CODE"); + int[] m_code = arFile.readIntArr("MEASUREMENT_CODE"); ErrorTracker delCode = new ErrorTracker(); ErrorTracker depCode = new ErrorTracker(); @@ -1416,10 +1294,11 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, log.debug("depCode.counter = {}", depCode.counter); log.debug("delCode.counter = {}", delCode.counter); - invCode.addMessage(formatWarnings, // will be.. 
formatErrors, + invCode.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "MEASUREMENT_CODE: Invalid measurement codes at ", "measurements"); - depCode.addMessage(formatWarnings, "MEASUREMENT_CODE: Deprecated measurement codes at ", "measurements"); - delCode.addMessage(formatWarnings, // will be.. formatErrors, + depCode.addMessage(validationResult.getWarnings(), "MEASUREMENT_CODE: Deprecated measurement codes at ", + "measurements"); + delCode.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "MEASUREMENT_CODE: Obsolete measurement codes at ", "measurements"); // ..........JULD............ @@ -1427,15 +1306,15 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, // ..read juld and associated variables - double[] juld = readDoubleArr("JULD"); + double[] juld = arFile.readDoubleArr("JULD"); - String str = readString("JULD_QC", true); // ..true -> include any NULLs + String str = arFile.readString("JULD_QC", true); // ..true -> include any NULLs char[] juld_qc = new char[nMeasure]; for (int n = 0; n < nMeasure; n++) { juld_qc[n] = str.charAt(n); } - str = readString("JULD_STATUS", true); // ..true -> include any NULLs + str = arFile.readString("JULD_STATUS", true); // ..true -> include any NULLs char[] juld_status = new char[nMeasure]; log.debug("juld_status length = {}", str.length()); log.debug("juld_status = '{}'", str); @@ -1496,7 +1375,7 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, * juld_status[n], juld_qc[n]); } } */ - if (ArgoDataFile.is_999_999_FillValue(juld[n])) { + if (ArgoFileValidator.is_999_999_FillValue(juld[n])) { // ..data is missing - QC better be too if (!(juld_qc[n] == '9' || juld_qc[n] == ' ')) { notMiss.increment(n); @@ -1522,25 +1401,26 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, if (invQC.counter > 0) { // fail = true; - invQC.addMessage(formatWarnings, // will be.. formatErrors, + invQC.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "JULD_QC: Invalid QC code at", "measurements"); } if (depQC.counter > 0) { // fail = true; - depQC.addMessage(formatWarnings, "JULD_QC: Deprecated QC code at", "measurements"); + depQC.addMessage(validationResult.getWarnings(), "JULD_QC: Deprecated QC code at", "measurements"); } if (invStatus.counter > 0) { // fail = true; - invStatus.addMessage(formatWarnings, // will be.. formatErrors, + invStatus.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "JULD_STATUS: Invalid status code at", "measurements"); } if (depStatus.counter > 0) { // fail = true; - depStatus.addMessage(formatWarnings, "JULD_STATUS: Deprecated status code at", "measurements"); + depStatus.addMessage(validationResult.getWarnings(), "JULD_STATUS: Deprecated status code at", + "measurements"); } if (incStatus.counter > 0) { // fail = true; - incStatus.addMessage(formatWarnings, // will be.. formatErrors, + incStatus.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "JULD_QC / JULD_STATUS: Use of ' ' or '9' is inconsistent at", "measurements"); } /* @@ -1552,12 +1432,12 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, */ if (notMiss.counter > 0) { // fail = true; - notMiss.addMessage(formatWarnings, // will be.. formatErrors, + notMiss.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "JULD: FillValue where QC not ' ' or '9': At", "measurements"); } if (noQC.counter > 0) { // fail = true; - noQC.addMessage(formatWarnings, // will be.. 
formatErrors, + noQC.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "JULD: Not FillValue where QC ' ' or '9': At", "measurements"); } @@ -1575,15 +1455,15 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, if (core) { // ..read juld_adjusted and associated variables - juld_adj = readDoubleArr("JULD_ADJUSTED"); + juld_adj = arFile.readDoubleArr("JULD_ADJUSTED"); - str = readString("JULD_ADJUSTED_QC", true); // ..true -> include any NULLs + str = arFile.readString("JULD_ADJUSTED_QC", true); // ..true -> include any NULLs juld_adj_qc = new char[nMeasure]; for (int n = 0; n < nMeasure; n++) { juld_adj_qc[n] = str.charAt(n); } - str = readString("JULD_ADJUSTED_STATUS", true); // ..true -> include any NULLs + str = arFile.readString("JULD_ADJUSTED_STATUS", true); // ..true -> include any NULLs juld_adj_status = new char[nMeasure]; for (int n = 0; n < nMeasure; n++) { juld_adj_status[n] = str.charAt(n); @@ -1593,7 +1473,7 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, char mode[]; - str = readString("JULD_DATA_MODE"); + str = arFile.readString("JULD_DATA_MODE"); if (str == null) { // ..pre-v3.2 file log.debug("JULD_DATA_MODE missing - pre-v3.2"); @@ -1672,7 +1552,7 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, * juld_adj[n], juld_adj_status[n], juld_adj_qc[n]); } } */ - if (ArgoDataFile.is_999_999_FillValue(juld_adj[n])) { + if (ArgoFileValidator.is_999_999_FillValue(juld_adj[n])) { // ..data is missing - QC better be too if (!(juld_adj_qc[n] == '9' || juld_adj_qc[n] == ' ')) { notMiss.increment(n); @@ -1691,7 +1571,7 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, // ..case 2: juld not missing, juld_adj not missing: // .. - mode = 'A' or 'D' - if (!ArgoDataFile.is_999_999_FillValue(juld[n])) { + if (!ArgoFileValidator.is_999_999_FillValue(juld[n])) { // ..juld not missing, juld_adj not missing -- mode must be 'A' or 'D' // ..but ignore the "launch cycle" (-1) @@ -1715,26 +1595,28 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, if (invQC.counter > 0) { // fail = true; - invQC.addMessage(formatWarnings, // will be.. formatErrors, + invQC.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "JULD_ADJUSTED_QC: Invalid QC code at", "measurements"); } if (depQC.counter > 0) { // fail = true; - depQC.addMessage(formatWarnings, "JULD_ADJUSTED_QC: Deprecated QC code at", "measurements"); + depQC.addMessage(validationResult.getWarnings(), "JULD_ADJUSTED_QC: Deprecated QC code at", + "measurements"); } if (invStatus.counter > 0) { // fail = true; - invStatus.addMessage(formatWarnings, // will be.. formatErrors, + invStatus.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "JULD_ADJUSTED_STATUS: Invalid status code at", "measurements"); } if (depStatus.counter > 0) { // fail = true; - depStatus.addMessage(formatWarnings, "JULD_ADJUSTED_STATUS: Deprecated status code at", "measurements"); + depStatus.addMessage(validationResult.getWarnings(), "JULD_ADJUSTED_STATUS: Deprecated status code at", + "measurements"); } if (incStatus.counter > 0) { // fail = true; - incStatus.addMessage(formatWarnings, // will be.. formatErrors, + incStatus.addMessage(validationResult.getWarnings(), // will be.. 
formatErrors, "JULD_ADJUSTED_QC / JULD_ADJUSTED_STATUS: Use of ' '/'9' is inconsistent at", "measurements"); } @@ -1755,25 +1637,25 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, */ if (notMiss.counter > 0) { // fail = true; - notMiss.addMessage(formatWarnings, // will be.. formatErrors, + notMiss.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "JULD_ADJUSTED: FillValue where QC not ' ' or '9': At", "measurements"); } if (noQC.counter > 0) { // fail = true; - noQC.addMessage(formatWarnings, // will be.. formatErrors, + noQC.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "JULD_ADJUSTED: Not FillValue where QC ' ' or '9': At", "measurements"); } if (adjNotAorD.counter > 0) { // fail = true; - adjNotAorD.addMessage(formatWarnings, // will be.. formatErrors, + adjNotAorD.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "JULD_ADJUSTED: Not FillValue where DATA_MODE not 'A' or 'D': At", " measurements"); } log.debug("fail = {}", fail); /* - * if (fail == true) { - * formatErrors.add("Above JULD/JULD_ADJUSTED errors prohibit further tests"); + * if (fail == true) { validationResult. + * addError("Above JULD/JULD_ADJUSTED errors prohibit further tests"); * log.debug(".....validateMC_and_JULD: end (due to prior juld errors)....."); * return false; } */ @@ -1784,7 +1666,7 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, // .............check for reasonable juld dates............. log.debug("..validate reasonable dates"); - String update = readString("DATE_UPDATE"); + String update = arFile.readString("DATE_UPDATE"); Date dateUpdate = ArgoDate.get(update); long updateSec = dateUpdate.getTime(); @@ -1808,7 +1690,7 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, if (qc_index >= 0) { // ..QC indicates "good" - if (!ArgoDataFile.is_999_999_FillValue(juld[n])) { + if (!ArgoFileValidator.is_999_999_FillValue(juld[n])) { // ..check that JULD is after earliestDate and before DATE_UPDATE @@ -1836,7 +1718,7 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, qc_index = goodJuldQC.indexOf(juld_adj_qc[n]); if (qc_index >= 0) { - if (!ArgoDataFile.is_999_999_FillValue(juld_adj[n])) { + if (!ArgoFileValidator.is_999_999_FillValue(juld_adj[n])) { // ..check that JULD_ADJUSTED is after earliestDate and before DATE_UPDATE @@ -1858,22 +1740,22 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, if (juldBeforeEarliest.counter > 0) { // fail = true; - juldBeforeEarliest.addMessage(formatWarnings, // will be.. formatErrors, + juldBeforeEarliest.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "JULD before " + earliestDate + " at", " measurements"); } if (juldAfterUpdate.counter > 0) { // fail = true; - juldAfterUpdate.addMessage(formatWarnings, // will be.. formatErrors, + juldAfterUpdate.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "JULD after update time at", "measurements"); } if (juld_adjBeforeEarliest.counter > 0) { // fail = true; - juld_adjBeforeEarliest.addMessage(formatWarnings, // will be.. formatErrors, + juld_adjBeforeEarliest.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "JULD_ADJUSTED before " + earliestDate + " at", "measurements"); } if (juld_adjAfterUpdate.counter > 0) { // fail = true; - juld_adjAfterUpdate.addMessage(formatWarnings, // will be.. formatErrors, + juld_adjAfterUpdate.addMessage(validationResult.getWarnings(), // will be.. 
formatErrors, "JULD_ADJUSTED after update time at", "measurements"); } @@ -1889,7 +1771,7 @@ public boolean validateMC_and_JULD(int nMeasure, char[] mode_nMeasure, // ..juld_adj is not in bio-trajectory if (core) { - if (ArgoDataFile.is_999_999_FillValue(juld_adj[n])) { + if (ArgoFileValidator.is_999_999_FillValue(juld_adj[n])) { fv[n].juld = juld[n]; fv[n].juld_status = juld_status[n]; fv[n].juld_qc = juld_qc[n]; @@ -1938,87 +1820,76 @@ public boolean validateMetaData(ArgoReferenceTable.DACS dac) throws IOException String name; String str; - name = "PLATFORM_NUMBER"; - str = readString(name).trim(); - log.debug("{}: '{}'", name, str); - if (!str.matches("[1-9][0-9]{4}|[1-9]9[0-9]{5}")) { - formatErrors.add(name + ": '" + str + "': Invalid"); + name = "PLATFORM_NUMBER"; // ..valid wmo id + str = arFile.readString(name).trim(); + if (!super.validatePlatfomNumber(str)) { + validationResult.addError("PLATFORM_NUMBER" + ": '" + str + "': Invalid"); } - name = "DATA_CENTRE"; - str = readString(name).trim(); - log.debug("{}: '{}' DAC: '{}'", name, str, dac); - if (dac != (ArgoReferenceTable.DACS) null) { - if (!ArgoReferenceTable.DacCenterCodes.get(dac).contains(str)) { - formatErrors.add(name + ": '" + str + "': Invalid for DAC '" + dac + "'"); - } - } else { // ..incoming DAC not set - if (!ArgoReferenceTable.DacCenterCodes.containsValue(str)) { - formatErrors.add(name + ": '" + str + "': Invalid (for all DACs)"); - } - } + // DATA_CENTRE + super.validateDataCentre(dac); name = "DATA_STATE_INDICATOR"; // ..ref table 6 - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); info = ArgoReferenceTable.DATA_STATE_INDICATOR.contains(str); if (info.isValid()) { if (info.isDeprecated) { - formatWarnings.add(name + ": '" + str + "' Status: " + info.message); + validationResult.addWarning(name + ": '" + str + "' Status: " + info.message); } } else { - formatErrors.add(name + ": '" + str + "' Status: " + info.message); + validationResult.addError(name + ": '" + str + "' Status: " + info.message); } name = "FIRMWARE_VERSION"; // ..not empty - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() <= 0) { - formatErrors.add(name + ": Empty"); + validationResult.addError(name + ": Empty"); } name = "FLOAT_SERIAL_NO"; // ..not empty - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() <= 0) { - formatErrors.add(name + ": Empty"); + validationResult.addError(name + ": Empty"); } name = "PLATFORM_TYPE"; // ..ref table 23 - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); info = ArgoReferenceTable.PLATFORM_TYPE.contains(str); if (info.isValid()) { if (info.isDeprecated) { - formatWarnings.add(name + ": '" + str + "' Status: " + info.message); + validationResult.addWarning(name + ": '" + str + "' Status: " + info.message); } } else { - formatErrors.add(name + ": '" + str + "' Status: " + info.message); + validationResult.addError(name + ": '" + str + "' Status: " + info.message); } name = "POSITIONING_SYSTEM"; // ..ref table 9 - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug(name + ": '{}'", str); info = ArgoReferenceTable.POSITIONING_SYSTEM.contains(str); if (info.isValid()) { if (info.isDeprecated) { - formatWarnings.add(name + ": '" + str + "' Status: " + info.message); + validationResult.addWarning(name + ": '" + str + "' Status: " + 
info.message); } } else { - formatErrors.add(name + ": '" + str + "' Status: " + info.message); + validationResult.addError(name + ": '" + str + "' Status: " + info.message); } name = "WMO_INST_TYPE"; - str = readString(name).trim(); + str = arFile.readString(name).trim(); log.debug("{}: '{}'", name, str); if (str.length() == 0) { - formatErrors.add(name + ": Not set"); + validationResult.addError(name + ": Not set"); } else { try { @@ -2027,14 +1898,14 @@ public boolean validateMetaData(ArgoReferenceTable.DACS dac) throws IOException info = ArgoReferenceTable.WMO_INST_TYPE.contains(N); if (info.isValid()) { if (info.isDeprecated) { - formatWarnings.add(name + ": '" + str + "' Status: " + info.message); + validationResult.addWarning(name + ": '" + str + "' Status: " + info.message); } } else { - formatErrors.add(name + ": '" + str + "' Status: " + info.message); + validationResult.addError(name + ": '" + str + "' Status: " + info.message); } } catch (Exception e) { - formatErrors.add(name + ": '" + str + "' Invalid. Must be integer."); + validationResult.addError(name + ": '" + str + "' Invalid. Must be integer."); } } @@ -2059,9 +1930,9 @@ public void validateNCycle(int nCycle, char[] mode) { // ..bio-traj exception: GROUNDED not in bio-traj files - if (fileType == FileType.TRAJECTORY) { + if (arFile.fileType() == FileType.TRAJECTORY) { varName = "GROUNDED"; - String g = readString(varName, true); // ..true -> include any NULLs + String g = arFile.readString(varName, true); // ..true -> include any NULLs for (int n = 0; n < nCycle; n++) { info = ArgoReferenceTable.GROUNDED.contains(g.charAt(n)); @@ -2077,13 +1948,13 @@ public void validateNCycle(int nCycle, char[] mode) { // will be.. inv.addMessage(formatErrors, varName+": Invalid code at ", // "cycles"); - inv.addMessage(formatWarnings, varName + ": Invalid code at ", "cycles"); + inv.addMessage(validationResult.getWarnings(), varName + ": Invalid code at ", "cycles"); - dep.addMessage(formatWarnings, varName + ": Deprecated code at ", "cycles"); + dep.addMessage(validationResult.getWarnings(), varName + ": Deprecated code at ", "cycles"); } varName = "CONFIG_MISSION_NUMBER"; - int[] c = readIntArr(varName); + int[] c = arFile.readIntArr(varName); // ..can be FillValue in real time // ..cycle 0 can be FillValue in any case, so start loop at 1 @@ -2102,9 +1973,9 @@ public void validateNCycle(int nCycle, char[] mode) { // will be.. inv.addMessage(formatErrors, varName+": Invalid value at", // "cycles"); - inv.addMessage(formatWarnings, varName + ": Invalid value at", "cycles"); + inv.addMessage(validationResult.getWarnings(), varName + ": Invalid value at", "cycles"); - set.addMessage(formatErrors, varName + ": Not set in D-mode at", "cycles"); + set.addMessage(validationResult.getErrors(), varName + ": Not set in D-mode at", "cycles"); log.debug(".....validateNCycle: end....."); } @@ -2126,7 +1997,7 @@ public void validateNCycleJuld(int nMeasure, int nCycle, // char[] mode, HashMap CycNumIndex_cycle2index, Final_NMeasurement_Variables[] finalNMVar) { log.debug(".....validateNCycleJuld: start....."); - if (fileType != FileType.TRAJECTORY) { + if (arFile.fileType() != FileType.TRAJECTORY) { // ..implies this is a core-file NOT a bio-file log.debug("bio-trajectory file. 
skip all checks"); log.debug(".....validateNCycleJuld: end....."); @@ -2185,9 +2056,9 @@ public void validateNCycleJuld(int nMeasure, int nCycle, // char[] mode, log.debug("...reading '{}'", var); - double[] juldVar = readDoubleArr(var); + double[] juldVar = arFile.readDoubleArr(var); - String str = readString(var + "_STATUS", true); // ..true -> include any NULLs + String str = arFile.readString(var + "_STATUS", true); // ..true -> include any NULLs char[] juldVar_status = new char[nCycle]; for (int n = 0; n < nCycle; n++) { juldVar_status[n] = str.charAt(n); @@ -2235,8 +2106,9 @@ public void validateNCycleJuld(int nMeasure, int nCycle, // char[] mode, Integer ndx = CycNumIndex_cycle2index.get(cycNum); if (ndx == null) { - // will be.. formatErrors.add("No JULD_*[*] variable for cycle "+cycNum); - formatWarnings.add("No JULD_*[*] variable for cycle " + cycNum); + // will be.. validationResult.addError("No JULD_*[*] variable for cycle + // "+cycNum); + validationResult.addWarning("No JULD_*[*] variable for cycle " + cycNum); log.error( "validateNCycleJuld: CycNumIndex_cycle2index[{}] is null. " + "This should not happen", cycNum); @@ -2296,7 +2168,7 @@ public void validateNCycleJuld(int nMeasure, int nCycle, // char[] mode, if (!juldCheck.checked[n]) { // ..this value wasn't compared to a JULD value - if (!ArgoDataFile.is_999_999_FillValue(juldVar[n])) { + if (!ArgoFileValidator.is_999_999_FillValue(juldVar[n])) { notJuld++; if (a_notJuld < 0) { a_notJuld = n + 1; @@ -2316,34 +2188,35 @@ public void validateNCycleJuld(int nMeasure, int nCycle, // char[] mode, // .....report for this MC........ if (juldCheck.incJuld.counter > 0) { - juldCheck.incJuld.addMessage(formatWarnings, // will be.. formatErrors, + juldCheck.incJuld.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "JULD (MC " + M_CODE + ") / " + var + ": Inconsistent at ", "cycles", "(N_MEASUREMENT, N_CYCLE)"); - // formatErrors.add("JULD (MC "+M_CODE+") / "+var+ + // validationResult.addError("JULD (MC "+M_CODE+") / "+var+ // ": Inconsistent at "+ // juldCheck.incJuld+" cycles; index of first case = "+ // juldCheck.a_incJuld+", "+juldCheck.b_incJuld+ // " (N_MEASUREMENT, N_CYCLE)"); } if (juldCheck.incJuld_s.counter > 0) { - juldCheck.incJuld_s.addMessage(formatWarnings, // will be.. formatErrors, + juldCheck.incJuld_s.addMessage(validationResult.getWarnings(), // will be.. 
formatErrors, "JULD_STATUS (MC " + M_CODE + ") / " + var + "_STATUS: Inconsistent at ", "cycles", "(N_MEASUREMENT, N_CYCLE)"); - // formatErrors.add("JULD_STATUS (MC "+M_CODE+") / "+var+ + // validationResult.addError("JULD_STATUS (MC "+M_CODE+") / "+var+ // "_STATUS: Inconsistent at "+ // juldCheck.incJuld_s+" cycles; index of first case = "+ // juldCheck.a_incJuld_s+", "+juldCheck.b_incJuld_s+ // " (N_MEASUREMENT, N_CYCLE)"); } if (notJuld > 0) { - // * should be* formatErrors.add(var+" (MC "+M_CODE+ - formatWarnings.add(var + " (MC " + M_CODE + "): Not FillValue where there is no associated JULD at " - + notJuld + " cycles; index of first case = " + a_notJuld); + // * should be* validationResult.addError(var+" (MC "+M_CODE+ + validationResult + .addWarning(var + " (MC " + M_CODE + "): Not FillValue where there is no associated JULD at " + + notJuld + " cycles; index of first case = " + a_notJuld); } if (notJuld_s > 0) { - formatWarnings.add( + validationResult.addWarning( var + "_STATUS (MC " + M_CODE + "): Not FillValue where there is no associated JULD_STATUS at " + notJuld_s + " cycles; index of first case = " + a_notJuld_s); } @@ -2381,7 +2254,7 @@ public void validateParams(int nMeasure, char[] mode_nMeasure, ArrayList boolean core = false; - if (fileType == FileType.TRAJECTORY) { + if (arFile.fileType() == FileType.TRAJECTORY) { // ..implies: 1) a v3.2+ file or 2) a pre-v3.2 core-file NOT a bio-file core = true; } @@ -2402,7 +2275,7 @@ public void validateParams(int nMeasure, char[] mode_nMeasure, ArrayList continue; // ..TRAJ_PARAMS can have blank entries } - Variable var = findVariable(varName); + Variable var = arFile.getNcReader().findVariable(varName); Number fillValue = var.findAttribute("_FillValue").getNumericValue(); float fValue = fillValue.floatValue(); @@ -2465,7 +2338,7 @@ public void validateParams(int nMeasure, char[] mode_nMeasure, ArrayList // ..get the QC - String str = readString(varName + "_QC", true); // ..true -> include any NULLs + String str = arFile.readString(varName + "_QC", true); // ..true -> include any NULLs if (str != null) { prm_qc = new char[nMeasure]; @@ -2480,7 +2353,7 @@ public void validateParams(int nMeasure, char[] mode_nMeasure, ArrayList // ..Set the data mode for this parameter - String[] trajPrmDM = readStringArr("TRAJECTORY_PARAMETER_DATA_MODE"); + String[] trajPrmDM = arFile.readStringArr("TRAJECTORY_PARAMETER_DATA_MODE"); if (trajPrmDM == null) { log.debug("TRAJECTORY_PARAMETER_DATA_MODE missing - pre-v3.2"); } else { @@ -2526,7 +2399,7 @@ public void validateParams(int nMeasure, char[] mode_nMeasure, ArrayList depQC.increment(n); } - if (ArgoDataFile.is_FillValue(fValue, prm[n])) { + if (ArgoFileValidator.is_FillValue(fValue, prm[n])) { // ..data is missing - QC better be too if (prm_qc[n] != '9' && prm_qc[n] != ' ') { notMiss.increment(n); @@ -2553,38 +2426,39 @@ public void validateParams(int nMeasure, char[] mode_nMeasure, ArrayList if (invQC.counter > 0) { fail = true; - invQC.addMessage(formatWarnings, // will be.. formatErrors, + invQC.addMessage(validationResult.getWarnings(), // will be.. formatErrors, varName + "_QC: Invalid QC code at", "measurements"); } - invQC.addMessage(formatWarnings, varName + "_QC: Deprecated QC code at", "measurements"); + invQC.addMessage(validationResult.getWarnings(), varName + "_QC: Deprecated QC code at", "measurements"); if (missQC.counter > 0) { fail = true; - missQC.addMessage(formatWarnings, // will be.. formatErrors, + missQC.addMessage(validationResult.getWarnings(), // will be.. 
formatErrors, varName + ": Not FillValue where QC is ' ' or 9 at", "measurements"); } if (notMiss.counter > 0) { fail = true; - notMiss.addMessage(formatWarnings, // will be.. formatErrors, + notMiss.addMessage(validationResult.getWarnings(), // will be.. formatErrors, varName + ": FillValue where QC is not ' ' or 9 at", "measurements"); } if (nan.counter > 0) { fail = true; - nan.addMessage(formatWarnings, // will be.. formatErrors, + nan.addMessage(validationResult.getWarnings(), // will be.. formatErrors, varName + ": NaN at", "measurements"); } if (fail) { - // will be.. formatErrors.add("PARAM/_QC errors prevent checking _ADJUSTED"); - formatWarnings.add("PARAM/_QC errors prevent checking _ADJUSTED"); + // will be.. validationResult.addError("PARAM/_QC errors prevent checking + // _ADJUSTED"); + validationResult.addWarning("PARAM/_QC errors prevent checking _ADJUSTED"); continue PARAM_LOOP; } // .............param_adjusted........... varName = param.trim() + "_ADJUSTED"; - var = findVariable(varName); + var = arFile.getNcReader().findVariable(varName); // ..bio-traj exception (pre-v3.2): PRES does not have *_ADJUSTED variables // ..Intermediate PARAMS may or may not have them @@ -2597,7 +2471,7 @@ public void validateParams(int nMeasure, char[] mode_nMeasure, ArrayList continue PARAM_LOOP; } - Variable varErr = findVariable(varName + "_ERROR"); + Variable varErr = arFile.getNcReader().findVariable(varName + "_ERROR"); float[] prm_adj; char[] prm_adj_qc; @@ -2661,7 +2535,7 @@ public void validateParams(int nMeasure, char[] mode_nMeasure, ArrayList // ..get the QC - str = readString(varName + "_QC", true); // ..true -> include any NULLs + str = arFile.readString(varName + "_QC", true); // ..true -> include any NULLs prm_adj_qc = new char[nMeasure]; for (int n = 0; n < nMeasure; n++) { prm_adj_qc[n] = str.charAt(n); @@ -2734,7 +2608,7 @@ public void validateParams(int nMeasure, char[] mode_nMeasure, ArrayList // ........... r-mode ............ - if (!ArgoDataFile.is_FillValue(fValue, prm_adj[n])) { + if (!ArgoFileValidator.is_FillValue(fValue, prm_adj[n])) { rNotMiss.increment(n); } @@ -2742,7 +2616,7 @@ public void validateParams(int nMeasure, char[] mode_nMeasure, ArrayList rQcNotMiss.increment(n); } - if (!ArgoDataFile.is_FillValue(fValue, prm_adj_err[n])) { + if (!ArgoFileValidator.is_FillValue(fValue, prm_adj_err[n])) { rErrNotMiss.increment(n); } @@ -2772,7 +2646,7 @@ public void validateParams(int nMeasure, char[] mode_nMeasure, ArrayList incNotMeas.increment(n); } else { - if (!ArgoDataFile.is_FillValue(fValue, prm_adj[n])) { + if (!ArgoFileValidator.is_FillValue(fValue, prm_adj[n])) { notNotMeas.increment(n); } } @@ -2781,15 +2655,15 @@ public void validateParams(int nMeasure, char[] mode_nMeasure, ArrayList // ..check if param (not param_adj!) is missing - if (ArgoDataFile.is_FillValue(fValue, prm[n])) { + if (ArgoFileValidator.is_FillValue(fValue, prm[n])) { // .....param is missing..... - if (!ArgoDataFile.is_FillValue(fValue, prm_adj[n])) { + if (!ArgoFileValidator.is_FillValue(fValue, prm_adj[n])) { // ..param_adjusted is NOT missing - error notMissAdj.increment(n); } - if (!ArgoDataFile.is_FillValue(fValue, prm_adj_err[n])) { + if (!ArgoFileValidator.is_FillValue(fValue, prm_adj_err[n])) { // ..param_adjusted_error is NOT missing - error notMissErr.increment(n); } @@ -2801,13 +2675,13 @@ public void validateParams(int nMeasure, char[] mode_nMeasure, ArrayList } else { // .....param is NOT missing...... 
- if (ArgoDataFile.is_FillValue(fValue, prm_adj[n])) { + if (ArgoFileValidator.is_FillValue(fValue, prm_adj[n])) { // ..param_adj is missing - QC must be 4 or 9 if (prm_adj_qc[n] != '4' && prm_adj_qc[n] != '9') { missAdj.increment(n); } - if (!ArgoDataFile.is_FillValue(fValue, prm_adj_err[n])) { + if (!ArgoFileValidator.is_FillValue(fValue, prm_adj_err[n])) { errNotMiss.increment(n); } @@ -2861,53 +2735,53 @@ public void validateParams(int nMeasure, char[] mode_nMeasure, ArrayList rNotMiss.counter, rQcNotMiss.counter); } - adjNotMiss.addMessage(formatWarnings, // will be.. formatErrors, + adjNotMiss.addMessage(validationResult.getWarnings(), // will be.. formatErrors, varName + ": Not FillValue where QC is 4 or 9 at", "measurements"); - errNotMiss.addMessage(formatWarnings, // will be.. formatErrors, + errNotMiss.addMessage(validationResult.getWarnings(), // will be.. formatErrors, varName + "_ERROR: Not FillValue where " + varName + " is FillValue at", "measurements"); - errNotMissAmode.addMessage(formatWarnings, // will be.. formatErrors, + errNotMissAmode.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "A-mode: " + varName + "_ERROR: Not FillValue at", "measurements"); - errNotSetDmode.addMessage(formatWarnings, // will be.. formatErrors, + errNotSetDmode.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "D-mode: " + varName + "_ERROR: FillValue at", "measurements"); - incNotMeas.addMessage(formatWarnings, // will be.. formatErrors, + incNotMeas.addMessage(validationResult.getWarnings(), // will be.. formatErrors, varName + "_QC: Inconsistent ' ' with PARAM_QC at", "measurements"); - invQC.addMessage(formatWarnings, // will be.. formatErrors, + invQC.addMessage(validationResult.getWarnings(), // will be.. formatErrors, varName + "_QC: Invalid at ", "measurements"); - depQC.addMessage(formatWarnings, varName + "_QC: Deprecated at ", "measurements"); + depQC.addMessage(validationResult.getWarnings(), varName + "_QC: Deprecated at ", "measurements"); - missAdj.addMessage(formatWarnings, // will be.. formatErrors, + missAdj.addMessage(validationResult.getWarnings(), // will be.. formatErrors, varName + "_QC: FillValue where QC not ' ' or '9' at", "measurements"); - nan.addMessage(formatWarnings, // will be.. formatErrors, + nan.addMessage(validationResult.getWarnings(), // will be.. formatErrors, varName + ": NaN at ", "measurements"); - nanErr.addMessage(formatWarnings, // will be.. formatErrors, + nanErr.addMessage(validationResult.getWarnings(), // will be.. formatErrors, varName + "_ERROR: NaN at ", "measurements"); - notMissAdj.addMessage(formatWarnings, // will be.. formatErrors, + notMissAdj.addMessage(validationResult.getWarnings(), // will be.. formatErrors, varName + ": Not FillValue where PARAM is FillValue at", "measurements"); - notMissAdjQc.addMessage(formatWarnings, // will be.. formatErrors, + notMissAdjQc.addMessage(validationResult.getWarnings(), // will be.. formatErrors, varName + "_QC: Not 9 where PARAM is FillValue at", "measurements"); - notMissErr.addMessage(formatWarnings, // will be.. formatErrors, + notMissErr.addMessage(validationResult.getWarnings(), // will be.. formatErrors, varName + "_ERROR: Not FillValue where PARAM is FillValue at", "measurements"); - notNotMeas.addMessage(formatWarnings, // will be.. formatErrors, + notNotMeas.addMessage(validationResult.getWarnings(), // will be.. 
formatErrors, varName + ": Not FillValue where QC is set to ' ' at", "measurements"); - rErrNotMiss.addMessage(formatWarnings, // will be.. formatErrors, + rErrNotMiss.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "R-mode: " + varName + "_ERROR: Not FillValue at ", "measurements"); - rNotMiss.addMessage(formatWarnings, // will be.. formatErrors, + rNotMiss.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "R-mode: " + varName + ": Not FillValue at ", "measurements"); - rQcNotMiss.addMessage(formatWarnings, // will be.. formatErrors, + rQcNotMiss.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "R-mode: " + varName + ": Not ' ' or '9' at", "measurements"); } // ..end PARAM_LOOP @@ -2961,7 +2835,7 @@ private float[] collapse_extra(Array array, int nMeasure, Variable var, float fi if (f == Float.NaN) { is_nan = true; - } else if (!ArgoDataFile.is_FillValue(fillValue, f)) { + } else if (!ArgoFileValidator.is_FillValue(fillValue, f)) { data = f; } } @@ -3004,16 +2878,16 @@ public void validatePosition(int nMeasure) throws IOException { log.debug(".....validatePosition: start....."); log.debug("nMeasure = {}", nMeasure); - double lat[] = readDoubleArr("LATITUDE"); - double lon[] = readDoubleArr("LONGITUDE"); + double lat[] = arFile.readDoubleArr("LATITUDE"); + double lon[] = arFile.readDoubleArr("LONGITUDE"); - String str = readString("POSITION_QC", true); // ..true -> include any NULLs + String str = arFile.readString("POSITION_QC", true); // ..true -> include any NULLs char[] pos_qc = new char[nMeasure]; for (int n = 0; n < nMeasure; n++) { pos_qc[n] = str.charAt(n); } - str = readString("POSITION_ACCURACY", true); // ..true -> include any NULLs + str = arFile.readString("POSITION_ACCURACY", true); // ..true -> include any NULLs char[] pos_acc = new char[nMeasure]; for (int n = 0; n < nMeasure; n++) { pos_acc[n] = str.charAt(n); @@ -3041,9 +2915,9 @@ public void validatePosition(int nMeasure) throws IOException { // will be.. invCode.addMessage(formatErrors, "POSITION_QC: Invalid QC code at // ", "measurements"); - invCode.addMessage(formatWarnings, "POSITION_QC: Invalid QC code at ", "measurements"); + invCode.addMessage(validationResult.getWarnings(), "POSITION_QC: Invalid QC code at ", "measurements"); - depCode.addMessage(formatWarnings, "POSITION_QC: Deprecated QC code at ", "measurements"); + depCode.addMessage(validationResult.getWarnings(), "POSITION_QC: Deprecated QC code at ", "measurements"); // ........accuracy code check........... @@ -3064,9 +2938,9 @@ public void validatePosition(int nMeasure) throws IOException { } } - invCode.addMessage(formatWarnings, // will be.. formatErrors, + invCode.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "POSITION_ACCURACY: Invalid code at ", " measurements"); - depCode.addMessage(formatWarnings, "POSITION_ACCURACY: Deprecated code at ", " measurements"); + depCode.addMessage(validationResult.getWarnings(), "POSITION_ACCURACY: Deprecated code at ", " measurements"); // ........lat/lon/qc checks.............. @@ -3088,10 +2962,10 @@ public void validatePosition(int nMeasure) throws IOException { } } - notMissQC.addMessage(formatWarnings, // will be.. formatErrors, + notMissQC.addMessage(validationResult.getWarnings(), // will be.. formatErrors, "LAT/LON missing: QC is not 9 or ' ' at ", "measurements"); - notMissPos.addMessage(formatWarnings, // will be.. formatErrors, + notMissPos.addMessage(validationResult.getWarnings(), // will be.. 
formatErrors, "POSITION_QC = 9 or ' ': LAT/LON not missing at ", " measurements"); log.debug(".....validatePosition: end....."); @@ -3125,11 +2999,11 @@ public ArrayList validateTrajectoryParameters(int nParam) throws IOExcep boolean embeddedEmpty = false; int last_empty = -1; - ArrayList allowedParam = spec.getPhysicalParamNames(); // ..allowed + ArrayList allowedParam = arFile.getFileSpec().getPhysicalParamNames(); // ..allowed ArrayList paramList = new ArrayList(nParam); // .....check that trajectory parameters are valid..... - String trajParam[] = readStringArr("TRAJECTORY_PARAMETERS"); + String trajParam[] = arFile.readStringArr("TRAJECTORY_PARAMETERS"); for (int paramNum = 0; paramNum < nParam; paramNum++) { paramList.add(""); // ..initialize this position in the list @@ -3158,24 +3032,24 @@ public ArrayList validateTrajectoryParameters(int nParam) throws IOExcep if (paramList.contains(param)) { // ..this is a duplicate entry - formatErrors - .add("TRAJECTORY_PARAMETERS[" + (paramNum + 1) + "]: '" + param + "': Duplicate entry"); + validationResult.addError( + "TRAJECTORY_PARAMETERS[" + (paramNum + 1) + "]: '" + param + "': Duplicate entry"); log.debug("param #{}: '{}': duplicate", paramNum, param); } else { log.debug("param #{}: '{}': accepted", paramNum, param); } - if (spec.isDeprecatedPhysicalParam(param)) { + if (arFile.getFileSpec().isDeprecatedPhysicalParam(param)) { // ..this is a deprecated parameter name - formatWarnings.add("TRAJECTORY_PARAMETERS[" + (paramNum + 1) + "]: '" + param + validationResult.addWarning("TRAJECTORY_PARAMETERS[" + (paramNum + 1) + "]: '" + param + "': Deprecated parameter name"); } } else { // .. is illegal - formatErrors.add( + validationResult.addError( "TRAJECTORY_PARAMETERS[" + (paramNum + 1) + "]: '" + param + "': Invalid parameter name"); log.debug("param #{}: '{}': invalid", paramNum, param); } @@ -3188,7 +3062,7 @@ public ArrayList validateTrajectoryParameters(int nParam) throws IOExcep // ..report errors and warnings if (embeddedEmpty) { - formatWarnings.add("TRAJECTORY_PARAMETERS: Empty entries in list" + "\n\tList: " + paramList); + validationResult.addWarning("TRAJECTORY_PARAMETERS: Empty entries in list" + "\n\tList: " + paramList); } // ......check that all TRAJECTORY_PARAMETERS have variable........ @@ -3196,9 +3070,10 @@ public ArrayList validateTrajectoryParameters(int nParam) throws IOExcep if (p.length() == 0) { continue; // ..TRAJ_PARAMS can contain blank entries } - Variable var = findVariable(p); + Variable var = arFile.getNcReader().findVariable(p); if (var == null) { - formatErrors.add("TRAJECTORY_PARAMETERS: PARAM '" + p + "' specified. Variables not in data file."); + validationResult + .addError("TRAJECTORY_PARAMETERS: PARAM '" + p + "' specified. Variables not in data file."); // fatalError = true; } } @@ -3209,7 +3084,7 @@ public ArrayList validateTrajectoryParameters(int nParam) throws IOExcep // ..p is an allowed param. p is NOT in TRAJECTORY_PARAMETERS // ..is there a variable for it? 
- Variable var = findVariable(p); + Variable var = arFile.getNcReader().findVariable(p); if (var != null) { // ..this variable exists in the file // ..technically, that is OK as long as it is all FillValue @@ -3220,7 +3095,7 @@ public ArrayList validateTrajectoryParameters(int nParam) throws IOExcep Number fillValue = var.findAttribute("_FillValue").getNumericValue(); if (type == DataType.FLOAT) { - float[] data = readFloatArr(p); + float[] data = arFile.readFloatArr(p); float fVal = fillValue.floatValue(); for (float d : data) { @@ -3231,7 +3106,7 @@ public ArrayList validateTrajectoryParameters(int nParam) throws IOExcep } } else if (type == DataType.DOUBLE) { - double[] data = readDoubleArr(p); + double[] data = arFile.readDoubleArr(p); double fVal = fillValue.doubleValue(); for (double d : data) { @@ -3246,7 +3121,7 @@ public ArrayList validateTrajectoryParameters(int nParam) throws IOExcep } if (hasData) { - formatErrors.add("TRAJECTORY_PARAMETERS: Does not specify '" + p + validationResult.addError("TRAJECTORY_PARAMETERS: Does not specify '" + p + "'. Variable exists and contains data."); log.debug("{}: not in TRAJECTORY_PARAMETERS. exists and has data", p); } else { diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/DumpProfile.java b/file_checker_exec/src/test/java/com/usgdac/legacy/DumpProfile.java deleted file mode 100644 index 2a810ae..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/DumpProfile.java +++ /dev/null @@ -1,805 +0,0 @@ -package com.usgdac.legacy; - -import java.io.BufferedReader; -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileReader; -import java.io.IOException; -import java.io.PrintStream; -import java.io.PrintWriter; -import java.text.DecimalFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -import fr.coriolis.checker.filetypes.ArgoDataFile; -import fr.coriolis.checker.filetypes.ArgoProfileFile; -import fr.coriolis.checker.specs.ArgoDate; -import fr.coriolis.checker.specs.ArgoFileSpecification; -import fr.coriolis.checker.specs.ArgoVariable; -import ucar.ma2.DataType; - -public class DumpProfile { - - public static void main(String args[]) throws IOException { - // .....extract the options.... - String listFile = null; - int next; - - boolean only_first = false; - boolean no_trailing = false; - boolean id_inc_prof = false; - boolean group_prof = false; - - for (next = 0; next < args.length; next++) { - if (args[next].equals("-help")) { - Help(); - System.exit(0); - } else if (args[next].equals("-only-first") || args[next].equals("-1")) { - only_first = true; - } else if (args[next].equals("-no-trailing-missing") || args[next].equals("-b")) { - no_trailing = true; - } else if (args[next].equals("-id-includes-prof") || args[next].equals("-i")) { - id_inc_prof = true; - } else if (args[next].equals("-group-prof") || args[next].equals("-g")) { - group_prof = true; - } else if (args[next].equals("-list-file") || args[next].equals("-l")) { - next++; - if (next < args.length) { - listFile = args[next]; - } - // } else if (args[next].startsWith("-inc-history")) { - // addHistory = true; - } else if (args[next].startsWith("-")) { - stderr.println("Invalid argument: '" + args[next] + "'"); - System.exit(1); - } else { - break; - } - } - - // .....parse the positional parameters..... 
- - log.info("DumpProfile: START"); - - if (args.length < (2 + next)) { - log.error("too few arguments: " + args.length); - Help(); - stderr.println("Too few arguments: " + args.length); - System.exit(1); - } - - String specDirName = args[next++]; - String outFileName = args[next++]; - - List inFileList = null; // ..list of input files - if (next < args.length) { - inFileList = new ArrayList(args.length - next); - for (; next < args.length; next++) { - inFileList.add(args[next]); - } - } - - log.debug("outFileName = '" + outFileName + "'"); - log.debug("number of inFileList = " + (inFileList == null ? "null" : inFileList.size())); - - stdout.println("\nDumpProfile inputs:"); - stdout.println(" Output file name: " + outFileName); - - // .........get list of input files........ - // ..input files are chosen in the following priority order - // ..1) an input-file-list (overrides all other lists) - // ..2) file name arguments (already parsed above, if specified) - - if (listFile != null) { - // ..a list file was specified - open and read it - // ..this overrides all other "input lists" - File f = new File(listFile); - if (!f.isFile()) { - stderr.println("\nERROR: -list-file does not exist: '" + listFile + "'"); - log.error("-list-file does not exist: '" + listFile + "'"); - System.exit(1); - } else if (!f.canRead()) { - log.error("-list-file '" + listFile + "' cannot be read"); - stderr.println("\nERROR: -list-file cannot be read: '" + listFile + "'"); - System.exit(1); - } - - // ..open and read the file - BufferedReader file = new BufferedReader(new FileReader(listFile)); - inFileList = new ArrayList(200); - String line; - while ((line = file.readLine()) != null) { - if (line.trim().length() > 0) { - inFileList.add(line.trim()); - } - } - log.info("Read {} entries from -list-file '{}'", inFileList.size(), listFile); - - stdout.println(" Input files read from: '" + listFile + "'"); - - } else if (inFileList == null) { - stderr.println("\nERROR: No input files specified\n"); - log.error("No input files specified"); - System.exit(1); - - } else { - stdout.println(" Input files read from command line"); - } - - stdout.println(" Number of input files: " + inFileList.size()); - - /* - * //..output file if ((new File(outFileName)).exists()) { stderr.println - * ("\nERROR: Output file MUST NOT EXIST :'" + outFileName + "'\n"); - * log.error("Output file MUST NOT EXIST: '" + outFileName +"'"); - * System.exit(1); } - */ - - // ..create the output file - PrintWriter outFile = new PrintWriter(new BufferedWriter(new java.io.FileWriter(outFileName))); - - // .....write shtuff to the file..... - log.debug(".....writing data to file....."); - - // .....loop over input files --- read from innie, write to outie - - int nFile = -1; - int nInProf; - int nextOutProf = 0; - - stdout.println("\nProcessing File:"); - stdout.println(" FileNum N_PROF N_CALIB N_HISTORY N_PARAM N_LEVELS File Name"); - - for (String inFileName : inFileList) { - nFile++; - - ArgoProfileFile arFile = null; - boolean openSuccessful = false; - try { - arFile = ArgoProfileFile.open(inFileName, specDirName, true); - } catch (Exception e) { - log.error("ArgoDataFile.open exception:\n" + e); - stdout.println("ERROR: Exception opening Argo file:"); - e.printStackTrace(stderr); - System.exit(1); - } - - if (arFile == null) { - stdout.printf(" %6d Open failed. 
Not an Argo file\n"); - continue; - } - - ArgoFileSpecification spec = arFile.getFileSpec(); - - int nProf = arFile.findDimension("N_PROF").getLength(); - int nCalib = arFile.findDimension("N_CALIB").getLength(); - int nHistory = arFile.findDimension("N_HISTORY").getLength(); - int nParam = arFile.findDimension("N_PARAM").getLength(); - int nLevel = arFile.findDimension("N_LEVELS").getLength(); - - if (log.isDebugEnabled()) { - log.debug("N_PROF = {}", nProf); - log.debug("N_CALIB = {}", nCalib); - log.debug("N_HISTORY = {}", nHistory); - log.debug("N_PARAM = {}", nParam); - log.debug("N_LEVELS = {}", nLevel); - } - - stdout.printf(" %6d %8d %8d %10d %8d %9d %s\n", nFile, nProf, nCalib, nHistory, nParam, nLevel, - inFileName.trim()); - - ArrayList varNameList = arFile.getVariableNames(); - - // .............group-print requested.............. - // .. need to determine the variables that will be group-printed - // .. need to determine the "main" variable for the group - - HashSet groupMainVar = new HashSet(); - HashSet groupPrintVar = new HashSet(); - - if (group_prof) { - log.debug(".....group_prof requested....."); - - for (String varName : varNameList) { - ArgoVariable specVar = spec.getVariable(varName); - - if (specVar.isParamVar()) { - // ..potential "group-print" variable - - String gName = spec.inGroup(varName); - String altName = null; - - // ..the possible group names are: null, PARAM, PARAM_QC, PARAM_ADJUSTED - // ..decode PARAM from that - - String pName = gName; - - if (gName == null) { - pName = varName; - } else if (pName.endsWith("_QC")) { - pName = gName.substring(0, pName.length() - 3); - altName = pName + "_ADJUSTED"; - } else if (gName.endsWith("_ADJUSTED")) { - pName = gName.substring(0, pName.length() - 9); - altName = pName + "_QC"; - } - - // ..check the PARAM to see if it is group-printable (ie, rank 2) - - if (varName.equals(pName)) { - - // ..this is the "main" variable - // ..need to see if it has extra dimensions -- can't group-print those - - int r = arFile.findVariable(varName).getRank(); - - if (r == 2) { - // ..we can group-print this group - groupMainVar.add(pName); - - if (gName != null) { - groupPrintVar.addAll(spec.groupMembers(gName)); - } - if (altName != null) { - groupPrintVar.addAll(spec.groupMembers(altName)); - } - - log.debug("group-print variable '{}' (groups '{}', '{}'). rank = {}", pName, gName, - altName, r); - - } else { - stdout.println(" " + varName + ": cannot group-print variables > rank 2"); - log.debug("skip group-print for '{}'. rank = {}", pName, r); - } - } - } - } - } - - // .............loop over the profiles..................... - - for (int n = 0; n < (only_first ? 
1 : nProf); n++) { - // ..Need plfm_number, cycle, position and time for the header - - log.debug("******* PROFILE #{} ********", n); - - String plfm_num = arFile.readString("PLATFORM_NUMBER", n); - int cycle = arFile.readInt("CYCLE_NUMBER", n); - - double lat = arFile.readDouble("LATITUDE", n); - double lon = arFile.readDouble("LONGITUDE", n); - - double juld = arFile.readDouble("JULD", n); - Date dateJuld = ArgoDate.get(juld); - String juldDTG = ArgoDate.format(dateJuld); - - if (id_inc_prof) { - outFile.printf("\n===> %s_%03d_%03d %10.3f %10.3f %s\n", plfm_num.trim(), cycle, n, lat, lon, - juldDTG); - } else { - outFile.printf("\n===> %s_%03d %10.3f %10.3f %s\n", plfm_num.trim(), cycle, lat, lon, juldDTG); - } - - outFile.printf(" %-25s %6d\n", "Dimensions: N_PROF", nProf); - outFile.printf(" %-25s %6d\n", "Dimensions: N_PARAM", nParam); - outFile.printf(" %-25s %6d\n", "Dimensions: N_LEVELS", nLevel); - outFile.printf(" %-25s %6d\n", "Dimensions: N_CALIB", nCalib); - outFile.printf(" %-25s %6d\n", "Dimensions: N_HISTORY", nHistory); - outFile.printf(" %-25s %6d (of %6d)\n", "profile-number", n + 1, nProf); - - // ............loop over variables............ - - for (String varName : varNameList) { - log.debug("VARIABLE: {}", varName); - - if (varName.startsWith("HISTORY")) { - log.debug(" skip history variable '{}'", varName); - continue; - } - - ArgoVariable specVar = spec.getVariable(varName); - - if (specVar == null) { - stdout.println(" WARNING: Variable not part of spec: '" + varName + "'"); - log.debug(" skip var not in spec '{}'", varName); - continue; - } - - DataType varType = specVar.getType(); - - // ..handle it based on the type of data - // ..if-else structure based on knowledge of file structure - - if (group_prof && groupPrintVar.contains(varName)) { - // ..requested to "group-print" param variables - // ..and this variable has been determined to be a group-print variable - - if (groupMainVar.contains(varName)) { - // ..this is the main var for the group --- print it now - dumpGroup(arFile, outFile, varName, varType, n, no_trailing); - log.debug(" group-print = '{}'", varName); - } - - } else if (specVar.isPerCalib()) { - log.debug(" isPerCalib: '{}' rank: {}", varName, specVar.getRank()); - - // ..*********variables dependent on N_CALIB************* - - // ..only the rank 4 (N_PROF, N_CALIB, N_PARAM, str) variables - // ..PARAMETER + the SCI*_CALIB_* - - boolean hdr = true; - for (int m = 0; m < nCalib; m++) { - String str[] = arFile.readStringArr(varName, n, m); - - if (str != null) { - if (hdr) { - outFile.printf(" %-25s\n", varName); - hdr = false; - } - - int len = str.length; - for (int k = 0; k < len; k++) { - outFile.printf(" %5d,%5d) %s\n", m, k, str[k]); - } - - } else { - stdout.println(" " + varName + " not in the file"); - break; - } - } - - } else if (specVar.isPerLevel()) { - - // ..***********variables dependent on N_LEVEL************** - - // ..only the phys variables - "(N_PROF, N_LEVEL [, n_values])" data - - log.debug(" variable isPerLevel: '{}' rank: {}", varName, specVar.getRank()); - - int vRank = 2; - if (specVar.canHaveExtraDimensions()) { - vRank = arFile.findVariable(varName).getRank(); - } - - if (vRank == 2) { - - if (varType == DataType.CHAR) { - String str = arFile.readString(varName, n); - - if (str != null) { - outFile.printf(" %-25s %s\n", varName, str.trim()); - } else { - stdout.println(" " + varName + " not in the file"); - } - - /* - * } else if (varType == DataType.INT) { int v[] = arFile.readIntArr(varName, - * n); - * - * if (v != 
null) { outFile.printf(" %-25s", varName); for (int m = 0; m < - * v.length; m++) { outFile.printf(" %5d) %15d\n", m, v[m]); } } else { - * stdout.println(" "+varName+" not in the file"); } - */ - } else if (varType == DataType.DOUBLE) { - // stdout.println("readDoubleArr "+varName+" "+n); - double v[] = arFile.readDoubleArr(varName, n); - - if (v != null) { - int len = v.length; - if (no_trailing) { - len = 0; - for (int m = v.length - 1; m >= 0; m--) { - if (!ArgoDataFile.is_99_999_FillValue(v[m])) { - len = m + 1; - break; - } - } - } - - if (len <= 0) { - outFile.printf(" %-25s No non-FillValue data for this profile\n", varName); - - } else { - outFile.printf(" %-25s\n", varName); - - for (int m = 0; m < len; m++) { - outFile.printf(" %5d) %15.5f\n", m, v[m]); - } - } - } else { - stdout.println(" " + varName + " not in the file"); - } - - } else if (varType == DataType.FLOAT) { - // stdout.println("readFloatArr "+varName+" "+n); - float v[] = arFile.readFloatArr(varName, n); - - if (v != null) { - int len = v.length - 1; - if (no_trailing) { - len = 0; - for (int m = v.length - 1; m >= 0; m--) { - if (!ArgoDataFile.is_99_999_FillValue(v[m])) { - len = m + 1; - break; - } - } - } - - if (len == 0) { - outFile.printf(" %-25s No non-FillValue data for this profile\n", varName); - - } else { - outFile.printf(" %-25s\n", varName); - for (int m = 0; m < len; m++) { - outFile.printf(" %5d) %15.5f\n", m, v[m]); - } - } - } else { - stdout.println(" " + varName + " not read"); - stdout.println(" message: " + ArgoDataFile.getMessage()); - } - - } else { - stdout.println(" " + varName + ": data type '" + varType + "' not handled"); - } - - } else if (vRank == 3) { - - // ..this is an "extra dimension" variable - - if (varType == DataType.CHAR) { - stdout.println(" " + varName + ": not currently handling CHAR rank " + vRank); - } else if (varType == DataType.DOUBLE) { - stdout.println(" " + varName + ": not currently handling DOUBLE rank " + vRank); - } else if (varType == DataType.FLOAT) { - - float prm[][] = new float[nLevel][]; - - for (int k = 0; k < nLevel; k++) { - float v[] = arFile.readFloatArr(varName, n, k); - prm[k] = Arrays.copyOf(v, v.length); - } - - int len = nLevel; - - if (no_trailing) { - len = 0; - k_loop: for (int k = nLevel - 1; k >= 0; k--) { - for (int m = 0; m < prm[k].length; m++) { - if (!ArgoDataFile.is_99_999_FillValue(prm[k][m])) { - len = k + 1; - break k_loop; - } - } - } - } - - if (len <= 0) { - outFile.printf(" %-25s No non-FillValue data for this profile\n", varName); - - } else { - outFile.printf(" %-25s\n", varName); - - for (int k = 0; k < len; k++) { - for (int m = 0; m < prm[k].length; m++) { - outFile.printf(" %5d, %5d) %15.5f\n", k, m, prm[k][m]); - } - } - } - - } else { - stdout.println(" " + varName + ": data type '" + varType + "' not handled"); - } - - } else { - stdout.println(" " + varName + ": not currently handling rank " + vRank); - outFile.printf(" %25s CANNOT PRINT RANK %d variable\n", varName, vRank); - } - - } else if (specVar.isPerParam()) { - - // *************variables dependent on N_PARAM**************** - - // ..in combination with if-else above... 
char (N_PROF, N_PARAM, [str]) - // ..- station_param(n_prof, n_param, str) - // ..- parameter_data_mode(n_prof, n_param) [optional] - - log.debug(" isPerParam: '{}' rank: {}", varName, specVar.getRank()); - - if (varName.equals("PARAMETER_DATA_MODE")) { - String str = arFile.readString(varName, n); - - if (str != null) { - outFile.printf(" %-25s %s\n", varName, str.trim()); - } else { - stdout.println(" " + varName + ": not in the file"); - } - - } else { - String str[] = arFile.readStringArr(varName, n); - - if (str != null) { - outFile.printf(" %-25s\n", varName); - - int len = str.length; - /* - * if (no_trailing) { for (int m = v.length-1; m >= 0; m--) { if (! - * ArgoDataFile.is_99_999_FillValue(v[m])) { len = m + 1; break; } } } - */ - for (int m = 0; m < len; m++) { - outFile.printf(" %5d) %s\n", m, str[m].trim()); - } - } else { - stdout.println(" " + varName + ": not in the file"); - } - } - - } else if (specVar.isPerProfile()) { - - // *****************variable dependent on N_PROF***************** - // ..in combination with above: [N_PROF] data - - log.debug(" isPerProfile"); - - if (varType == DataType.CHAR) { - String str; - if (specVar.isString()) { - // ..it is a char[n_prof, stringx] - str = arFile.readString(varName, n); - - } else { - // ..it is a char[n_prof] - str = arFile.readString(varName); - str = str.substring(n, n + 1); - } - - if (str != null) { - outFile.printf(" %-25s %s\n", varName, str.trim()); - } else { - stdout.println(" " + varName + " not in the file"); - } - - } else if (varType == DataType.INT) { - int i = arFile.readInt(varName, n); - - if (i != INT_MAX) { - outFile.printf(" %-25s %15d\n", varName, i); - } else { - stdout.println(" " + varName + " not in the file"); - } - - } else if (varType == DataType.DOUBLE) { - double d = arFile.readDouble(varName, n); - - if (d != DBL_NAN) { - outFile.printf(" %-25s %15.5f\n", varName, d); - } else { - stdout.println(" " + varName + " not in the file"); - } - - } else { - stdout.println(" data type '" + varType + "' not handled for " + varName); - } - - } // ..end if isPerProfile - else { - log.debug(" independent variable: '{}' rank: {}", varName, specVar.getRank()); - - String str = arFile.readString(varName); - - if (str != null) { - outFile.printf(" %-25s %s\n", varName, str.trim()); - } else { - stdout.println(" " + varName + ": not in the file"); - } - } - - } // ..end for varName - } // ..end for nProf - - arFile.close(); - } // ..end for (inFileName) - - // ..............end of data writes..................... - - outFile.close(); - - } // ..end main - - // ................................... - // .. Help .. - // ................................... 
- public static void Help() { - stdout.println("\n" + "Purpose: Dumps Argo Profile files\n" + "\n" - + "Usage: java DumpProfile [options] spec-dir " + "dump-file input-files ...\n" + "\n" + "Options:\n" - + " -list-file File containing list of files to process\n" + " -l\n" - + " -only-first (-1) Only dump first profile in data file(s)\n" - + " default: Dump all profiles\n" - + " -no-trailing-missing (-b) Does not print missing levels at the end of profiles\n" - + " -id-includes-prof (-i) Ob-id (header line) includes the prof # within the file\n" - + " -group-prof (-g) Group profile variables together\n" + "\n" + "Arguments:\n" - + " spec-dir Directory to specification files\n" + " dump-file Output file\n" - + " input-files Input Argo NetCDF profile file(s)\n" + "\n" + "Input Files:\n" - + " Names of input files are determined as follows (priority order):\n" - + " 1) -list-file List of names will be read from \n" - + " 2) [file-names] argument Files listed on command-line will be processed\n" + "\n"); - return; - } - - public static void dumpGroup(ArgoProfileFile in, PrintWriter out, String groupName, DataType varType, int nProf, - boolean no_trailing_missing) { - log.debug("....start dumpGroup '{}'......", groupName); - - // ..read PROFILE_..._QC - - String name = "PROFILE_" + groupName + "_QC"; - String prof_qc = in.readString(name); - - // ..read PARAM and related variables - - float[] prm = null; - String sPrm = groupName; - String prm_qc = null; - String sPQc = groupName + "_QC"; - float[] prm_adj = null; - String sAdj = groupName + "_ADJUSTED"; - float[] prm_err = null; - String sErr = groupName + "_ADJUSTED_ERROR"; - String adj_qc = null; - String sAQc = groupName + "_ADJUSTED_QC"; - - if (varType == DataType.DOUBLE) { - // stdout.println("readDoubleArr "+groupName+" "+n); - double v[] = in.readDoubleArr(sPrm, nProf); - if (v != null) { - prm = new float[v.length]; - for (int m = 0; m < v.length; m++) { - prm[m] = (float) v[m]; - } - } - - v = in.readDoubleArr(sAdj, nProf); - if (v != null) { - prm_adj = new float[v.length]; - for (int m = 0; m < v.length; m++) { - prm_adj[m] = (float) v[m]; - } - } - - v = in.readDoubleArr(sErr, nProf); - if (v != null) { - prm_err = new float[v.length]; - for (int m = 0; m < v.length; m++) { - prm_err[m] = (float) v[m]; - } - } - - } else if (varType == DataType.FLOAT) { - // stdout.println("readFloatArr "+groupName+" "+n); - prm = in.readFloatArr(sPrm, nProf); - prm_adj = in.readFloatArr(sAdj, nProf); - prm_err = in.readFloatArr(sErr, nProf); - - } - - prm_qc = in.readString(sPQc, nProf); - adj_qc = in.readString(sAQc, nProf); - - if (prm == null) { - stdout.println("WARNING: STATION_PARAMETER 'groupName' is not a PARAM variable in the file "); - return; - } - - // .....determine where to end profile list..... - - int LEN = prm.length; - - if (no_trailing_missing) { - LEN = -1; - for (int m = prm.length - 1; m > -1; m--) { - if (prm != null && !ArgoDataFile.is_99_999_FillValue(prm[m])) { - LEN = m + 1; - break; - } - if (prm_adj != null && !ArgoDataFile.is_99_999_FillValue(prm_adj[m])) { - LEN = m + 1; - break; - } - if (prm_err != null && !ArgoDataFile.is_99_999_FillValue(prm_err[m])) { - LEN = m + 1; - break; - } - } - } - - // ............output.......... 
- // ..PROFILE_..._QC - if (prof_qc != null) { - out.printf(" %-25s %c\n", name, prof_qc.charAt(nProf)); - } - - // ..profile - - if (LEN <= 0) { - out.printf(" %-25s No non-FillValue data for this profile\n", sPrm); - - } else { - - out.println(" " + sPrm); - if (LEN > 0) { - out.printf(" %5s %15s %4s %15s %15s %4s\n", "N", sPrm, "_QC", "_ADJUSTED", "_ERROR", "_QC"); - } - - for (int m = 0; m < LEN; m++) { - out.printf(" %5d) %15.5f", m, prm[m]); - - if (prm_qc != null) { - out.printf(" %c", prm_qc.charAt(m)); - } else { - out.print(" -"); - } - - if (prm_adj != null) { - out.print(String.format(" %15.5f", prm_adj[m])); - } else { - out.print(" -"); - } - - if (prm_err != null) { - out.print(String.format(" %15.5f", prm_err[m])); - } else { - out.print(" -"); - } - - if (adj_qc != null) { - out.printf(" %c", adj_qc.charAt(m)); - } else { - out.print(" -"); - } - - out.print("\n"); - } - } - } - - // ......................Variable Declarations................ - - // ..standard i/o shortcuts - static PrintStream stdout = new PrintStream(System.out); - static PrintStream stderr = new PrintStream(System.err); - - static { - System.setProperty("logfile.name", "DumpProfile_LOG"); - } - - private static final Logger log = LogManager.getLogger("DumpProfile"); - - // ..class variables - static String outFileName; - static String specFileName; - - // static boolean addHistory = false; - static HashMap n_prof; - static HashMap n_param; - static HashMap n_levels; - static HashMap n_calib; - // static HashMap n_history; - static Set parameters = new HashSet(); - - static int INT_MAX = Integer.MAX_VALUE; - static double DBL_NAN = Double.NaN; - - private final static DecimalFormat fFmt = new DecimalFormat("####0.0000;-####0.0000"); - private final static DecimalFormat nFmt = new DecimalFormat("#####"); - -} // ..end class diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/DumpTraj.java b/file_checker_exec/src/test/java/com/usgdac/legacy/DumpTraj.java deleted file mode 100644 index 0afd86d..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/DumpTraj.java +++ /dev/null @@ -1,582 +0,0 @@ -package com.usgdac.legacy; - -import java.io.BufferedReader; -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileReader; -import java.io.IOException; -import java.io.PrintStream; -import java.io.PrintWriter; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -import fr.coriolis.checker.filetypes.ArgoDataFile; -import fr.coriolis.checker.filetypes.ArgoTrajectoryFile; -import fr.coriolis.checker.specs.ArgoFileSpecification; -import fr.coriolis.checker.specs.ArgoReferenceTable; -import ucar.ma2.DataType; -import ucar.nc2.NetcdfFile; -import ucar.nc2.Variable; - -public class DumpTraj { - - public static void main(String args[]) throws IOException { - // .....extract the options.... 
- String listFile = null; - int next; - - boolean ind_num = false; - boolean only_cycle = false; - boolean only_measure = false; - boolean no_trailing = false; - - for (next = 0; next < args.length; next++) { - if (args[next].equals("-help")) { - Help(); - System.exit(0); - } else if (args[next].equals("-index-number") || args[next].equals("-i")) { - ind_num = true; - } else if (args[next].equals("-n_measure") || args[next].equals("-m")) { - only_measure = true; - } else if (args[next].equals("-n_cycle") || args[next].equals("-c")) { - only_cycle = true; - } else if (args[next].equals("-no-trailing-missing") || args[next].equals("-b")) { - no_trailing = true; - } else if (args[next].equals("-list-file") || args[next].equals("-l")) { - next++; - if (next < args.length) { - listFile = args[next]; - } - // } else if (args[next].startsWith("-inc-history")) { - // addHistory = true; - } else if (args[next].startsWith("-")) { - stderr.println("Invalid argument: '" + args[next] + "'"); - System.exit(1); - } else { - break; - } - } - - // .....parse the positional parameters..... - - log.info("DumpTraj: START"); - - if (args.length < (2 + next)) { - log.error("too few arguments: " + args.length); - Help(); - stderr.println("Too few arguments: " + args.length); - System.exit(1); - } - - String specDirName = args[next++]; - String outFileName = args[next++]; - - List inFileList = null; // ..list of input files - if (next < args.length) { - inFileList = new ArrayList(args.length - next); - for (; next < args.length; next++) { - inFileList.add(args[next]); - } - } - - log.debug("outFileName = '" + outFileName + "'"); - log.debug("number of inFileList = " + (inFileList == null ? "null" : inFileList.size())); - - stdout.println("\nDumpTraj inputs:"); - stdout.println(" Output file name: " + outFileName); - - // .........get list of input files........ 
- // ..input files are chosen in the following priority order - // ..1) an input-file-list (overrides all other lists) - // ..2) file name arguments (already parsed above, if specified) - - if (listFile != null) { - // ..a list file was specified - open and read it - // ..this overrides all other "input lists" - File f = new File(listFile); - if (!f.isFile()) { - stderr.println("\nERROR: -list-file does not exist: '" + listFile + "'"); - log.error("-list-file does not exist: '" + listFile + "'"); - System.exit(1); - } else if (!f.canRead()) { - log.error("-list-file '" + listFile + "' cannot be read"); - stderr.println("\nERROR: -list-file cannot be read: '" + listFile + "'"); - System.exit(1); - } - - // ..open and read the file - BufferedReader file = new BufferedReader(new FileReader(listFile)); - inFileList = new ArrayList(200); - String line; - while ((line = file.readLine()) != null) { - if (line.trim().length() > 0) { - inFileList.add(line.trim()); - } - } - log.info("Read {} entries from -list-file '{}'", inFileList.size(), listFile); - - stdout.println(" Input files read from: '" + listFile + "'"); - - } else if (inFileList == null) { - stderr.println("\nERROR: No input files specified\n"); - log.error("No input files specified"); - System.exit(1); - - } else { - stdout.println(" Input files read from command line"); - } - - stdout.println(" Number of input files: " + inFileList.size()); - - // ..output file - // if ((new File(outFileName)).exists()) { - // stderr.println ("\nERROR: Output file MUST NOT EXIST :'" + outFileName + - // "'\n"); - // log.error("Output file MUST NOT EXIST: '" + outFileName +"'"); - // System.exit(1); - // } - - // ..create the output file - PrintWriter outFile = new PrintWriter(new BufferedWriter(new java.io.FileWriter(outFileName))); - - // .....write shtuff to the file..... 
- log.debug(".....writing data to file....."); - - // .....loop over input files --- read from innie, write to outie - - int nFile = -1; - int nInProf; - int nextOutProf = 0; - - stdout.println("\nProcessing File:"); - stdout.println(" FileNum N_MEASUREMENT N_CYCLE N_PARAM N_HISTORY File Name"); - - for (String inFileName : inFileList) { - nFile++; - - ArgoTrajectoryFile arFile = null; - boolean openSuccessful = false; - try { - arFile = ArgoTrajectoryFile.open(inFileName, specDirName, true); - } catch (Exception e) { - log.error("ArgoDataFile.open exception:\n" + e); - stdout.println("ERROR: Exception opening Argo file:"); - e.printStackTrace(stderr); - System.exit(1); - } - - NetcdfFile ncFile = arFile.getNetcdfFile(); - ArgoFileSpecification spec = arFile.getFileSpec(); - - int nMeasure = arFile.getDimensionLength("N_MEASUREMENT"); - int nCycle = arFile.getDimensionLength("N_CYCLE"); - int nHistory = arFile.getDimensionLength("N_HISTORY"); - int nParam = arFile.getDimensionLength("N_PARAM"); - - if (log.isDebugEnabled()) { - log.debug("N_MEASUREMENT = {}", nMeasure); - log.debug("N_CYCLE = {}", nCycle); - log.debug("N_HISTORY = {}", nHistory); - log.debug("N_PARAM = {}", nParam); - } - - stdout.printf(" %6d %15d %8d %8d %10d %s\n", nFile, nMeasure, nCycle, nParam, nHistory, inFileName.trim()); - - outFile.printf("\n===> " + inFileName.trim()); - - String var; - var = "DATA_TYPE"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "FORMAT_VERSION"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "HANDBOOK_VERSION"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "REFERENCE_DATE_TIME"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "DATE_CREATION"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "DATE_UPDATE"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "PLATFORM_NUMBER"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "PROJECT_NAME"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "PI_NAME"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "DATA_CENTRE"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "DATA_STATE_INDICATOR"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "PLATFORM_TYPE"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "FLOAT_SERIAL_NO"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "FIRMWARE_VERSION"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "WMO_INST_TYPE"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "POSITIONING_SYSTEM"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - - var = "TRAJECTORY_PARAMETERS"; - outFile.printf(" %-25s\n", var); - for (int n = 0; n < nParam; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %5d) %s\n", m, arFile.readString(var, n)); - } - - outFile.println("\n ..........DIMENSIONS..........\n"); - - outFile.printf(" %-25s %10d\n", "N_MEASUREMENT", nMeasure); - outFile.printf(" %-25s %10d\n", "N_CYCLE", nCycle); - outFile.printf(" %-25s %10d\n", "N_HISTORY", nHistory); - outFile.printf(" %-25s %10d\n", "N_PARAM", nParam); - - outFile.println("\n ..........N_MEASUREMENT VARIABLES..........\n"); - - var = "CYCLE_NUMBER"; - int[] idata1 = arFile.readIntArr(var); - int[] idata2; - var = "MEASUREMENT_CODE"; - int[] idata3 = arFile.readIntArr(var); - - outFile.printf(" 
CYCLE_NUMBER/CYCLE_NUMBER_ADJUSTED/MEASUREMENT_CODE\n"); - - if (arFile.fileType() == ArgoDataFile.FileType.TRAJECTORY) { - var = "CYCLE_NUMBER_ADJUSTED"; - idata2 = arFile.readIntArr(var); - - for (int n = 0; n < nMeasure; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %6d %6d %6d\n", m, idata1[n], idata2[n], idata3[n]); - } - } else { - for (int n = 0; n < nMeasure; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %6d ------ %6d\n", m, idata1[n], idata3[n]); - } - } - - var = "JULD"; - double[] data1 = arFile.readDoubleArr(var); - String data1_code = arFile.readString(var + "_STATUS"); - String data1_qc = arFile.readString(var + "_QC"); - double[] data2; - - outFile.printf( - " JULD variables n) MC JULD/JULD_STATUS/JULD_QC | JULD_ADJUSTED/JULD_ADJUSTED_STATUS/JULD_ADJUSTED_QC\n"); - - if (arFile.fileType() == ArgoDataFile.FileType.TRAJECTORY) { - var = "JULD_ADJUSTED"; - data2 = arFile.readDoubleArr(var); - String data2_code = arFile.readString(var + "_STATUS"); - String data2_qc = arFile.readString(var + "_QC"); - - for (int n = 0; n < nMeasure; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %17d) %5d %15.6f '%1s' '%1s' | %15.6f '%1s' '%1s'\n", m, idata3[n], - data1[n], data1_code.charAt(n), data1_qc.charAt(n), data2[n], data2_code.charAt(n), - data2_qc.charAt(n)); - } - } else { - for (int n = 0; n < nMeasure; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %17d) %5d %15.6f '%1s' '%1s' | --------------- '-' '-'\n", m, idata3[n], - data1[n], data1_code.charAt(n), data1_qc.charAt(n)); - } - } - - var = "LATITUDE"; - data1 = arFile.readDoubleArr(var); - - var = "LONGITUDE"; - data2 = arFile.readDoubleArr(var); - - var = "POSITION"; - data1_qc = arFile.readString(var + "_QC"); - data1_code = arFile.readString(var + "_ACCURACY"); - - outFile.printf(" LATITUDE/LONGITUDE/POSITION_QC/POSITION_ACCURACY"); - for (int n = 0; n < nMeasure; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %17d) %15.6f %15.6f '%1s' '%1s'\n", m, data1[n], data2[n], data1_qc.charAt(n), - data1_code.charAt(n)); - } - - // ......physical parameter variables.......... 
- - for (String pName : spec.getPhysicalParamNames()) { - - Variable v = arFile.findVariable(pName); - if (v == null) { - continue; - } - - double[] dprm = null; - float[] fprm = null; - short[] sprm = null; - String prm_qc = null; - double[] dprm_adj = null; - float[] fprm_adj = null; - short[] sprm_adj = null; - double[] dprm_adj_err = null; - float[] fprm_adj_err = null; - short[] sprm_adj_err = null; - String prm_adj_qc = null; - - DataType type = v.getDataType(); - - if (type == DataType.FLOAT) { - fprm = arFile.readFloatArr(pName); - fprm_adj = arFile.readFloatArr(pName + "_ADJUSTED"); - fprm_adj_err = arFile.readFloatArr(pName + "_ADJUSTED_ERROR"); - - } else if (type == DataType.DOUBLE) { - - dprm = arFile.readDoubleArr(pName); - dprm_adj = arFile.readDoubleArr(pName + "_ADJUSTED"); - dprm_adj_err = arFile.readDoubleArr(pName + "_ADJUSTED_ERROR"); - - } else if (type == DataType.SHORT) { - - sprm = arFile.readShortArr(pName); - sprm_adj = arFile.readShortArr(pName + "_ADJUSTED"); - sprm_adj_err = arFile.readShortArr(pName + "_ADJUSTED_ERROR"); - - } else { - stdout.println("\n" + pName + ": Unexpected data type = " + type); - continue; - } - - prm_qc = arFile.readString(pName + "_QC"); - prm_adj_qc = arFile.readString(pName + "_ADJUSTED_QC"); - - stdout.println(pName); - - if (prm_qc == null) { // ..PRES_QC is not in bio-traj files - StringBuilder q = new StringBuilder(nMeasure); - for (int n = 0; n < nMeasure; n++) { - q.append('-'); - } - - prm_qc = new String(q); - } - - outFile.printf(" %-10s %11s / QC | %s_ADJUSTED / _ERROR / _QC\n", pName, pName, pName); - - if (dprm != null) { - if (dprm_adj == null) { - for (int n = 0; n < dprm.length; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %10.4f '%1s' | ---------- ---------- '-'\n", m, dprm[n], - prm_qc.charAt(n)); - } - - } else { - for (int n = 0; n < dprm.length; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %10.4f '%1s' | %10.4f %10.4f '%1s'\n", m, dprm[n], - prm_qc.charAt(n), dprm_adj[n], dprm_adj_err[n], prm_adj_qc.charAt(n)); - } - } - - } else if (fprm != null) { - if (fprm_adj == null) { - for (int n = 0; n < fprm.length; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %10.4f '%1s' | ---------- ---------- '-'\n", m, fprm[n], - prm_qc.charAt(n)); - } - - } else { - for (int n = 0; n < fprm.length; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %10.4f '%1s' | %10.4f %10.4f '%1s'\n", m, fprm[n], - prm_qc.charAt(n), fprm_adj[n], fprm_adj_err[n], prm_adj_qc.charAt(n)); - } - } - - } else if (sprm != null) { - if (sprm_adj == null) { - for (int n = 0; n < sprm.length; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %10d '%1s' | ---------- ---------- '-'\n", m, sprm[n], - prm_qc.charAt(n)); - } - - } else { - for (int n = 0; n < sprm.length; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %10d '%1s' | %10d %10d '%1s'\n", m, sprm[n], prm_qc.charAt(n), - sprm_adj[n], sprm_adj_err[n], prm_adj_qc.charAt(n)); - } - } - } - } - - /*********************************************************************** - * N_CYCLE - ***********************************************************************/ - - outFile.println("\n ..........N_CYCLE VARIABLES..........\n"); - - var = "CYCLE_NUMBER_INDEX"; - idata1 = arFile.readIntArr(var); - - var = "CYCLE_NUMBER_INDEX_ADJUSTED"; - idata2 = arFile.readIntArr(var); - - var = "DATA_MODE"; - String str1 = 
arFile.readString(var); - - var = "CONFIG_MISSION_NUMBER"; - idata3 = arFile.readIntArr(var); - - var = "GROUNDED"; - String str2 = arFile.readString(var); - - outFile.println(" CYCLE_NUMBER_INDEX / CYCLE_NUMBER_INDEX_ADJUSTED / DATA_MODE" - + " / CONFIG_MISSION_NUMER / GROUNDED"); - for (int n = 0; n < nCycle; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %6d %6d '%1s' %6d '%1s'\n", m, idata1[n], idata2[n], str1.charAt(n), - idata3[n], str2.charAt(n)); - } - - for (ArgoReferenceTable.ArgoReferenceEntry e : ArgoReferenceTable.MEASUREMENT_CODE_toJuldVariable - .values()) { - // outFile.println(e.getColumn(1)+" "+e.getColumn(2)); - String v = e.getColumn(2); - - log.debug("...reading '{}'", e); - - double[] juld = arFile.readDoubleArr(v); - str1 = arFile.readString(v + "_STATUS"); - - outFile.println(" " + v + "/" + v + "_STATUS"); - for (int n = 0; n < nCycle; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %15.6f '%1s'\n", m, juld[n], str1.charAt(n)); - } - } - - arFile.close(); - } // ..end for (inFileName) - - // ..............end of data writes..................... - - outFile.close(); - - } // ..end main - - // ................................... - // .. Help .. - // ................................... - public static void Help() { - stdout.println("\n" + "Purpose: Dumps Argo Trajectory files\n" + "\n" - + "Usage: java DumpTraj [options] spec-dir " + "dump-file input-files ...\n" + "\n" + "Options:\n" - + " -list-file File containing list of files to process\n" + " -l\n" - + " -n_measure (-m) List only N_MEASUREMENT variables\n" - + " -n_cycle (-c) List only N_CYCLE variables\n" - + " -index-number (-i) List index numbers (1-based; not 'array indices')\n" - + " default: list array indices\n" - + " -no-trailing-missing (-b) Do not print missing values at the end of a trajectory\n" - + " ***NO-OP CURRENTLY***\n" + "\n" + "Arguments:\n" - + " spec-dir Directory to specification files\n" - + " dump-file Output file (must not exist)\n" - + " input-files Input Argo NetCDF profile file(s)\n" + "\n" + "Input Files:\n" - + " Names of input files are determined as follows (priority order):\n" - + " 1) -list-file List of names will be read from \n" - + " 2) [file-names] argument Files listed on command-line will be processed\n" + "\n"); - return; - } - - // ......................Variable Declarations................ 
- - static { - System.setProperty("logfile.name", "Dumper_LOG"); - } - - // ..standard i/o shortcuts - static PrintStream stdout = new PrintStream(System.out); - static PrintStream stderr = new PrintStream(System.err); - - private static final Logger log = LogManager.getLogger("DumpProfile"); - - // ..class variables - static String outFileName; - static String specFileName; - - // static boolean addHistory = false; - static HashMap n_prof; - static HashMap n_param; - static HashMap n_levels; - static HashMap n_calib; - // static HashMap n_history; - static Set parameters = new HashSet(); - - static int INT_MAX = Integer.MAX_VALUE; - static double DBL_NAN = Double.NaN; -} // ..end class diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/DumpTraj32.java b/file_checker_exec/src/test/java/com/usgdac/legacy/DumpTraj32.java deleted file mode 100644 index 77e9e76..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/DumpTraj32.java +++ /dev/null @@ -1,581 +0,0 @@ -package com.usgdac.legacy; - -import java.io.BufferedReader; -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileReader; -import java.io.IOException; -import java.io.PrintStream; -import java.io.PrintWriter; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -import fr.coriolis.checker.filetypes.ArgoTrajectoryFile; -import fr.coriolis.checker.specs.ArgoFileSpecification; -import fr.coriolis.checker.specs.ArgoReferenceTable; -import ucar.ma2.DataType; -import ucar.nc2.Variable; - -public class DumpTraj32 { - - public static void main(String args[]) throws IOException { - // .....extract the options.... - String listFile = null; - int next; - - boolean ind_num = false; - boolean only_cycle = false; - boolean only_measure = false; - boolean no_trailing = false; - - for (next = 0; next < args.length; next++) { - if (args[next].equals("-help")) { - Help(); - System.exit(0); - } else if (args[next].equals("-index-number") || args[next].equals("-i")) { - ind_num = true; - } else if (args[next].equals("-n_measure") || args[next].equals("-m")) { - only_measure = true; - } else if (args[next].equals("-n_cycle") || args[next].equals("-c")) { - only_cycle = true; - } else if (args[next].equals("-no-trailing-missing") || args[next].equals("-b")) { - no_trailing = true; - } else if (args[next].equals("-list-file") || args[next].equals("-l")) { - next++; - if (next < args.length) { - listFile = args[next]; - } - // } else if (args[next].startsWith("-inc-history")) { - // addHistory = true; - } else if (args[next].startsWith("-")) { - stderr.println("Invalid argument: '" + args[next] + "'"); - System.exit(1); - } else { - break; - } - } - - // .....parse the positional parameters..... - - log.info("DumpTraj: START"); - - if (args.length < (2 + next)) { - log.error("too few arguments: " + args.length); - Help(); - stderr.println("Too few arguments: " + args.length); - System.exit(1); - } - - String specDirName = args[next++]; - String outFileName = args[next++]; - - List inFileList = null; // ..list of input files - if (next < args.length) { - inFileList = new ArrayList(args.length - next); - for (; next < args.length; next++) { - inFileList.add(args[next]); - } - } - - log.debug("outFileName = '" + outFileName + "'"); - log.debug("number of inFileList = " + (inFileList == null ? 
"null" : inFileList.size())); - - stdout.println("\nDumpTraj inputs:"); - stdout.println(" Output file name: " + outFileName); - - // .........get list of input files........ - // ..input files are chosen in the following priority order - // ..1) an input-file-list (overrides all other lists) - // ..2) file name arguments (already parsed above, if specified) - - if (listFile != null) { - // ..a list file was specified - open and read it - // ..this overrides all other "input lists" - File f = new File(listFile); - if (!f.isFile()) { - stderr.println("\nERROR: -list-file does not exist: '" + listFile + "'"); - log.error("-list-file does not exist: '" + listFile + "'"); - System.exit(1); - } else if (!f.canRead()) { - log.error("-list-file '" + listFile + "' cannot be read"); - stderr.println("\nERROR: -list-file cannot be read: '" + listFile + "'"); - System.exit(1); - } - - // ..open and read the file - BufferedReader file = new BufferedReader(new FileReader(listFile)); - inFileList = new ArrayList(200); - String line; - while ((line = file.readLine()) != null) { - if (line.trim().length() > 0) { - inFileList.add(line.trim()); - } - } - file.close(); - log.info("Read {} entries from -list-file '{}'", inFileList.size(), listFile); - - stdout.println(" Input files read from: '" + listFile + "'"); - - } else if (inFileList == null) { - stderr.println("\nERROR: No input files specified\n"); - log.error("No input files specified"); - System.exit(1); - - } else { - stdout.println(" Input files read from command line"); - } - - stdout.println(" Number of input files: " + inFileList.size()); - - // ..output file - // if ((new File(outFileName)).exists()) { - // stderr.println ("\nERROR: Output file MUST NOT EXIST :'" + outFileName + - // "'\n"); - // log.error("Output file MUST NOT EXIST: '" + outFileName +"'"); - // System.exit(1); - // } - - // ..create the output file - PrintWriter outFile = new PrintWriter(new BufferedWriter(new java.io.FileWriter(outFileName))); - - // .....write shtuff to the file..... 
- log.debug(".....writing data to file....."); - - // .....loop over input files --- read from innie, write to outie - - int nFile = -1; - - stdout.println("\nProcessing File:"); - stdout.println( - " FileNum N_MEASUREMENT N_CYCLE N_PARAM N_HISTORY " + "N_CALIB_JULD N_CALIB_PARAM File Name"); - - for (String inFileName : inFileList) { - nFile++; - - ArgoTrajectoryFile arFile = null; - try { - arFile = ArgoTrajectoryFile.open(inFileName, specDirName, true); - } catch (Exception e) { - log.error("ArgoDataFile.open exception:\n" + e); - stdout.println("ERROR: Exception opening Argo file:"); - e.printStackTrace(stderr); - System.exit(1); - } - - ArgoFileSpecification spec = arFile.getFileSpec(); - - int nCalibParam = arFile.getDimensionLength("N_CALIB_PARAM"); - int nCalibJuld = arFile.getDimensionLength("N_CALIB_JULD"); - int nCycle = arFile.getDimensionLength("N_CYCLE"); - int nHistory = arFile.getDimensionLength("N_HISTORY"); - int nMeasure = arFile.getDimensionLength("N_MEASUREMENT"); - int nParam = arFile.getDimensionLength("N_PARAM"); - // int nValuesXX = arFile.getDimensionLength("N_VALUESxx"); - - if (log.isDebugEnabled()) { - log.debug("N_CALIB_JULD = {}", nCalibJuld); - log.debug("N_CALIB_PARAM = {}", nCalibParam); - log.debug("N_CYCLE = {}", nCycle); - log.debug("N_HISTORY = {}", nHistory); - log.debug("N_MEASUREMENT = {}", nMeasure); - log.debug("N_PARAM = {}", nParam); - // log.debug("N_VALUESxx = {}", nValuesXX); - } - - stdout.printf(" %6d %15d %8d %8d %10d %s\n", nFile, nMeasure, nCycle, nParam, nHistory, nCalibParam, - nCalibJuld, inFileName.trim()); - - outFile.printf("\n===> " + inFileName.trim()); - - String var; - var = "DATA_TYPE"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "FORMAT_VERSION"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "HANDBOOK_VERSION"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "REFERENCE_DATE_TIME"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "DATE_CREATION"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "DATE_UPDATE"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "PLATFORM_NUMBER"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "PROJECT_NAME"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "PI_NAME"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "DATA_CENTRE"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "DATA_STATE_INDICATOR"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "PLATFORM_TYPE"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "FLOAT_SERIAL_NO"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "FIRMWARE_VERSION"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "WMO_INST_TYPE"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - var = "POSITIONING_SYSTEM"; - outFile.printf(" %-25s %s\n", var, arFile.readString(var)); - - var = "TRAJECTORY_PARAMETERS"; - outFile.printf(" %-25s\n", var); - - ArrayList paramList = new ArrayList(); - for (int n = 0; n < nParam; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - String v = arFile.readString(var, n).trim(); - - outFile.printf(" %5d) %s\n", m, v); - paramList.add(v); - } - - outFile.println("\n ..........DIMENSIONS..........\n"); - - outFile.printf(" %-25s %10d\n", "N_CALIB_JULD", nCalibJuld); - outFile.printf(" %-25s %10d\n", "N_CALIB_PARAM", 
nCalibParam); - outFile.printf(" %-25s %10d\n", "N_CYCLE", nCycle); - outFile.printf(" %-25s %10d\n", "N_HISTORY", nHistory); - outFile.printf(" %-25s %10d\n", "N_MEASUREMENT", nMeasure); - outFile.printf(" %-25s %10d\n", "N_PARAM", nParam); - - outFile.println("\n ..........N_MEASUREMENT VARIABLES..........\n"); - - var = "CYCLE_NUMBER"; - int[] idata1 = arFile.readIntArr(var); - int[] idata2; - var = "MEASUREMENT_CODE"; - int[] idata3 = arFile.readIntArr(var); - - outFile.printf(" CYCLE_NUMBER/CYCLE_NUMBER_ADJUSTED/MEASUREMENT_CODE\n"); - - var = "CYCLE_NUMBER_ADJUSTED"; - idata2 = arFile.readIntArr(var); - - for (int n = 0; n < nMeasure; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %6d %6d %6d\n", m, idata1[n], idata2[n], idata3[n]); - } - - var = "JULD"; - double[] data1 = arFile.readDoubleArr(var); - String data1_code = arFile.readString(var + "_STATUS"); - String data1_qc = arFile.readString(var + "_QC"); - double[] data2; - - outFile.printf(" JULD variables n) MC JULD/JULD_STATUS/JULD_QC | " - + "JULD_ADJUSTED/JULD_ADJUSTED_STATUS/JULD_ADJUSTED_QC | JULD_DATA_MODE\n"); - - var = "JULD_ADJUSTED"; - data2 = arFile.readDoubleArr(var); - String data2_code = arFile.readString(var + "_STATUS"); - String data2_qc = arFile.readString(var + "_QC"); - - var = "JULD_DATA_MODE"; - String data1_mode = arFile.readString(var); - - for (int n = 0; n < nMeasure; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %17d) %5d %15.6f '%1s' '%1s' | %15.6f '%1s' '%1s' | '%1s'\n", m, idata3[n], - data1[n], data1_code.charAt(n), data1_qc.charAt(n), data2[n], data2_code.charAt(n), - data2_qc.charAt(n), data1_mode.charAt(n)); - } - - var = "LATITUDE"; - data1 = arFile.readDoubleArr(var); - - var = "LONGITUDE"; - data2 = arFile.readDoubleArr(var); - - var = "POSITION"; - data1_qc = arFile.readString(var + "_QC"); - data1_code = arFile.readString(var + "_ACCURACY"); - - outFile.printf(" LATITUDE/LONGITUDE/POSITION_QC/POSITION_ACCURACY"); - for (int n = 0; n < nMeasure; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %17d) %15.6f %15.6f '%1s' '%1s'\n", m, data1[n], data2[n], data1_qc.charAt(n), - data1_code.charAt(n)); - } - - // ..get the traj_param_data_mode - - String[] trajPrmDM = arFile.readStringArr("TRAJECTORY_PARAMETER_DATA_MODE"); - if (trajPrmDM == null) { - log.debug("TRAJECTORY_PARAMETER_DATA_MODE missing from file"); - stdout.printf("*** TRAJECTORY_PARAMETER_DATA_MODE missing from file\n"); - } - - // ......physical parameter variables.......... 
- - for (String pName : spec.getPhysicalParamNames()) { - - Variable v = arFile.findVariable(pName); - if (v == null) { - continue; - } - - double[] dprm = null; - float[] fprm = null; - short[] sprm = null; - String prm_qc = null; - double[] dprm_adj = null; - float[] fprm_adj = null; - short[] sprm_adj = null; - double[] dprm_adj_err = null; - float[] fprm_adj_err = null; - short[] sprm_adj_err = null; - String prm_adj_qc = null; - - DataType type = v.getDataType(); - - if (type == DataType.FLOAT) { - fprm = arFile.readFloatArr(pName); - fprm_adj = arFile.readFloatArr(pName + "_ADJUSTED"); - fprm_adj_err = arFile.readFloatArr(pName + "_ADJUSTED_ERROR"); - - } else if (type == DataType.DOUBLE) { - - dprm = arFile.readDoubleArr(pName); - dprm_adj = arFile.readDoubleArr(pName + "_ADJUSTED"); - dprm_adj_err = arFile.readDoubleArr(pName + "_ADJUSTED_ERROR"); - - } else if (type == DataType.SHORT) { - - sprm = arFile.readShortArr(pName); - sprm_adj = arFile.readShortArr(pName + "_ADJUSTED"); - sprm_adj_err = arFile.readShortArr(pName + "_ADJUSTED_ERROR"); - - } else { - stdout.println("\n" + pName + ": Unexpected data type = " + type); - continue; - } - - prm_qc = arFile.readString(pName + "_QC"); - prm_adj_qc = arFile.readString(pName + "_ADJUSTED_QC"); - - stdout.println(pName); - - int pNdx = paramList.indexOf(pName); - if (pNdx < 0) { - stdout.printf("*** Parameter List does not contain '%s'\n", pName); - } - log.debug("pNdx = {}", pNdx); - - outFile.printf(" %-10s %11s / QC | %s_ADJUSTED / _ERROR / _QC | TRAJ_PARAM_DATA_MODE\n", pName, pName, - pName); - - if (dprm != null) { - if (dprm_adj == null) { - for (int n = 0; n < dprm.length; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %10.4f '%1s' | ---------- ---------- '-' | '%1s'\n", m, dprm[n], - prm_qc.charAt(n), trajPrmDM[n].charAt(pNdx)); - } - - } else { - for (int n = 0; n < dprm.length; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %10.4f '%1s' | %10.4f %10.4f '%1s' | '%1s'\n", m, dprm[n], - prm_qc.charAt(n), dprm_adj[n], dprm_adj_err[n], prm_adj_qc.charAt(n), - trajPrmDM[n].charAt(pNdx)); - } - } - - } else if (fprm != null) { - if (fprm_adj == null) { - for (int n = 0; n < fprm.length; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %10.4f '%1s' | ---------- ---------- '-' | '%1s'\n", m, fprm[n], - prm_qc.charAt(n), trajPrmDM[n].charAt(pNdx)); - } - - } else { - for (int n = 0; n < fprm.length; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %10.4f '%1s' | %10.4f %10.4f '%1s' | '%1s'\n", m, fprm[n], - prm_qc.charAt(n), fprm_adj[n], fprm_adj_err[n], prm_adj_qc.charAt(n), - trajPrmDM[n].charAt(pNdx)); - } - } - - } else if (sprm != null) { - if (sprm_adj == null) { - for (int n = 0; n < sprm.length; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %10d '%1s' | ---------- ---------- '-' | '%1s'\n", m, sprm[n], - prm_qc.charAt(n), trajPrmDM[n].charAt(pNdx)); - } - - } else { - for (int n = 0; n < sprm.length; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %10d '%1s' | %10d %10d '%1s' | '%1s'\n", m, sprm[n], - prm_qc.charAt(n), sprm_adj[n], sprm_adj_err[n], prm_adj_qc.charAt(n), - trajPrmDM[n].charAt(pNdx)); - } - } - } - } - - /*********************************************************************** - * N_CYCLE - ***********************************************************************/ - - outFile.println("\n ..........N_CYCLE 
VARIABLES..........\n"); - - var = "CYCLE_NUMBER_INDEX"; - idata1 = arFile.readIntArr(var); - - var = "CYCLE_NUMBER_INDEX_ADJUSTED"; - idata2 = arFile.readIntArr(var); - - var = "DATA_MODE"; - String str1 = arFile.readString(var); - - var = "CONFIG_MISSION_NUMBER"; - idata3 = arFile.readIntArr(var); - - var = "GROUNDED"; - String str2 = arFile.readString(var); - - outFile.println(" CYCLE_NUMBER_INDEX / CYCLE_NUMBER_INDEX_ADJUSTED / DATA_MODE" - + " / CONFIG_MISSION_NUMER / GROUNDED"); - for (int n = 0; n < nCycle; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %6d %6d '%1s' %6d '%1s'\n", m, idata1[n], idata2[n], str1.charAt(n), - idata3[n], str2.charAt(n)); - } - - for (ArgoReferenceTable.ArgoReferenceEntry e : ArgoReferenceTable.MEASUREMENT_CODE_toJuldVariable - .values()) { - // outFile.println(e.getColumn(1)+" "+e.getColumn(2)); - String v = e.getColumn(2); - - log.debug("...reading '{}'", e); - - double[] juld = arFile.readDoubleArr(v); - str1 = arFile.readString(v + "_STATUS"); - - outFile.println(" " + v + "/" + v + "_STATUS"); - for (int n = 0; n < nCycle; n++) { - int m = n; - if (ind_num) { - m = n + 1; - } - - outFile.printf(" %10d) %15.6f '%1s'\n", m, juld[n], str1.charAt(n)); - } - } - - arFile.close(); - } // ..end for (inFileName) - - // ..............end of data writes..................... - - outFile.close(); - - } // ..end main - - // ................................... - // .. Help .. - // ................................... - public static void Help() { - stdout.println("\n" + "Purpose: Dumps Argo Trajectory v3.2 files\n" + "\n" - + "Usage: java DumpTraj [options] spec-dir " + "dump-file input-files ...\n" + "\n" + "Options:\n" - + " -list-file File containing list of files to process\n" + " -l\n" - + " -n_measure (-m) List only N_MEASUREMENT variables\n" - + " -n_cycle (-c) List only N_CYCLE variables\n" - + " -index-number (-i) List index numbers (1-based; not 'array indices')\n" - + " default: list array indices\n" - + " -no-trailing-missing (-b) Do not print missing values at the end of a trajectory\n" - + " ***NO-OP CURRENTLY***\n" + "\n" + "Arguments:\n" - + " spec-dir Directory to specification files\n" - + " dump-file Output file (must not exist)\n" - + " input-files Input Argo NetCDF profile file(s)\n" + "\n" + "Input Files:\n" - + " Names of input files are determined as follows (priority order):\n" - + " 1) -list-file List of names will be read from \n" - + " 2) [file-names] argument Files listed on command-line will be processed\n" + "\n"); - return; - } - - // ......................Variable Declarations................ 
- - static { - System.setProperty("logfile.name", "Dumper_LOG"); - } - - // ..standard i/o shortcuts - static PrintStream stdout = new PrintStream(System.out); - static PrintStream stderr = new PrintStream(System.err); - - private static final Logger log = LogManager.getLogger("DumpProfile"); - - // ..class variables - static String outFileName; - static String specFileName; - - // static boolean addHistory = false; - static HashMap n_prof; - static HashMap n_param; - static HashMap n_levels; - static HashMap n_calib; - // static HashMap n_history; - static Set parameters = new HashSet(); - - static int INT_MAX = Integer.MAX_VALUE; - static double DBL_NAN = Double.NaN; -} // ..end class diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestArgoDate.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestArgoDate.java deleted file mode 100644 index 63e97df..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestArgoDate.java +++ /dev/null @@ -1,56 +0,0 @@ -package com.usgdac.legacy; - -import java.io.*; -import java.util.Date; - -import fr.coriolis.checker.specs.ArgoDate; - -public class TestArgoDate { - - public static void main (String[] args) - { - - Help(); - - BufferedReader cin = new BufferedReader(new InputStreamReader(System.in)); - String dtg = null; - - while (true) { - System.out.println(""); - System.out.print("Enter dtg: "); - - try { - String d = cin.readLine(); - if (d.length() > 0) { - dtg = d; - } - - } catch (Exception e) { - System.out.println("Exception during read."); - System.exit(1); - } - - if (dtg.equals("quit")) { - System.out.println(""); - System.out.println("...terminating..."); - System.exit(0); - } - - Date date = ArgoDate.get(dtg); - if (date == null) { - System.out.println("failed"); - } else { - System.out.println(date); - } - - } - } - - public static void Help() { - System.out.println(" at prompt reuses previous entry"); - System.out.println("Enter \"quit\" to terminate"); - System.out.println(""); - } - - -} \ No newline at end of file diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestArgoDatePattern.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestArgoDatePattern.java deleted file mode 100644 index 030e5f3..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestArgoDatePattern.java +++ /dev/null @@ -1,77 +0,0 @@ -package com.usgdac.legacy; - -import java.io.*; - -import fr.coriolis.checker.specs.ArgoDate; - -public class TestArgoDatePattern { - - public static void main (String[] args) - { - - Help(); - - BufferedReader cin = new BufferedReader(new InputStreamReader(System.in)); - String pattern = null; - String value = null; - - while (true) { - System.out.println(""); - System.out.print("Enter pattern: "); - - try { - String p = cin.readLine(); - if (p.length() > 0) { - pattern = p; - } - - } catch (Exception e) { - System.out.println("Exception during read."); - System.exit(1); - } - - if (pattern.equals("quit")) { - System.out.println(""); - System.out.println("...terminating..."); - System.exit(0); - } - - System.out.print("Enter value: "); - - try { - String v = cin.readLine(); - if (v.length() > 0) { - value = v; - } - } catch (Exception e) { - System.out.println("Exception during read."); - System.exit(1); - } - - System.out.println(""); - System.out.println("Checking: Pattern = '"+pattern+"' Value = '"+value+"'"); - - Boolean success; - success = ArgoDate.checkArgoDatePattern(pattern, value); - - if (success == null) { - System.out.println("Unknown pattern"); - - } else if 
(success) { - System.out.println("Known pattern: Valid date string"); - - } else { - System.out.println("Known pattern: Invalid date string"); - } - - } - } - - public static void Help() { - System.out.println(" at prompt reuses previous entry"); - System.out.println("Enter \"quit\" to terminate"); - System.out.println(""); - } - - -} \ No newline at end of file diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestArgoFileSpecification.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestArgoFileSpecification.java deleted file mode 100644 index 1d517c6..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestArgoFileSpecification.java +++ /dev/null @@ -1,146 +0,0 @@ -package com.usgdac.legacy; - -import java.io.File; -import java.io.IOException; -import java.io.PrintStream; -import java.lang.invoke.MethodHandles; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -import fr.coriolis.checker.filetypes.ArgoDataFile; -import fr.coriolis.checker.filetypes.ArgoDataFile.FileType; -import fr.coriolis.checker.specs.ArgoFileSpecification; - -/** - */ -public class TestArgoFileSpecification { - - // ........................................................... - // ......................Variable Declarations................ - // ........................................................... - - // ..standard i/o shortcuts - static PrintStream stdout = new PrintStream(System.out); - static PrintStream stderr = new PrintStream(System.err); - - // ..class variables - static String outFileName; - static String specFileName; - - private static final Class ThisClass; - private static final String ClassName; - private static final Logger log; - - static { - ThisClass = MethodHandles.lookup().lookupClass(); - ClassName = ThisClass.getSimpleName(); - - System.setProperty("logfile.name", ClassName + "_LOG"); - log = LogManager.getLogger(ClassName); - } - - // ............................................................. - // .. .. - // .. main .. - // .. .. - // ............................................................. - - public static void main(String args[]) throws IOException { - // .....extract the options.... - int next; - - for (next = 0; next < args.length; next++) { - if (args[next].equals("-help")) { - Help(); - System.exit(0); - } else if (args[next].startsWith("-")) { - stderr.println("Invalid argument: '" + args[next] + "'"); - System.exit(1); - } else { - break; - } - } - - // .....parse the positional parameters..... - - if (args.length < (3 + next)) { - log.error("too few arguments: " + args.length); - Help(); - stderr.println("Too few arguments: " + args.length); - System.exit(1); - } - - String specDirName = args[next++]; - String fileTypeName = args[next++]; - String specVersion = args[next++]; - // String outFile = args[next++]; - - log.debug("specDirName = '{}'", specDirName); - log.debug("fileTypeName = '{}'", fileTypeName); - log.debug("specVersion = '{}'", specVersion); - // log.debug("outFile = '{}'", outFile); - - stdout.println("\n" + ClassName + " inputs:"); - stdout.println(" Specification directory: " + specDirName); - stdout.println(" File Type: " + fileTypeName); - stdout.println(" Specification version: " + specVersion); - // stdout.println(" Output file: "+outFile); - - // .....check the spec directory..... 
- - if (!(new File(specDirName).isDirectory())) { - stderr.println("ERROR: Specification directory is not a directory ('" + specDirName + "')"); - log.error("specification directory is not a directory"); - System.exit(1); - } - - // .....set the file type..... - FileType fileType = null; - - if (fileTypeName.equals("meta")) { - fileType = FileType.METADATA; - - } else if (fileTypeName.equals("prof")) { - fileType = FileType.PROFILE; - - } else if (fileTypeName.equals("traj")) { - fileType = FileType.TRAJECTORY; - - } else if (fileTypeName.equals("tech")) { - fileType = FileType.TECHNICAL; - - } else if (fileTypeName.equals("b-prof")) { - fileType = FileType.BIO_PROFILE; - - } else if (fileTypeName.equals("b-traj")) { - fileType = FileType.BIO_TRAJECTORY; - - } else { - stderr.println("ERROR: Invalid file type specified: '" + fileTypeName + "'"); - System.exit(1); - } - - // ......open the specification....... - - ArgoFileSpecification spec = ArgoDataFile.openSpecification - // full-spec type version - (true, specDirName, fileType, specVersion); - - } // ..end main - - // ................................... - // .. Help .. - // ................................... - public static void Help() { - stdout.println("\n" + "Purpose: Creates a merged core-/bio-profile Argo NetCDF file from the input files\n" - + "\n" + "Usage: java " + ClassName + " [options] spec-dir file-type format-version\n" + "\n" - + "Options:\n" + " -help\n" + "\n" + "Arguments:\n" - + " spec-dir Directory to specification files\n" - + " file-type meta, prof, tech, traj, b-prof, b-traj\n" - + " format-version Version of file format for the multi-profile file\n" + "\n"); - // " out-file Output file\n"+ - return; - } - -} // ..end class diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestBatteryVars.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestBatteryVars.java deleted file mode 100644 index 9a350bb..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestBatteryVars.java +++ /dev/null @@ -1,118 +0,0 @@ -package com.usgdac.legacy; - -import java.io.IOException; -import java.io.PrintStream; -import java.lang.invoke.MethodHandles; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -import fr.coriolis.checker.filetypes.ArgoDataFile; -import fr.coriolis.checker.filetypes.ArgoMetadataFile; -import fr.coriolis.checker.filetypes.ArgoDataFile.FileType; - -/** - * Tests the validateBattery method of ArgoMetadataFile - * - * @author Mark Ignaszewski - * @version $Id: ValidateSubmit.java 372 2015-12-02 19:02:31Z ignaszewski $ - */ -public class TestBatteryVars { - - // ......................Variable Declarations................ - - // ..standard i/o shortcuts - static PrintStream stdout = new PrintStream(System.out); - static PrintStream stderr = new PrintStream(System.err); - - // ..log file - static final Class ThisClass; - static final String ClassName; - - private static final Logger log; - - static { - ThisClass = MethodHandles.lookup().lookupClass(); - ClassName = ThisClass.getSimpleName(); - - System.setProperty("logfile.name", ClassName + "_LOG"); - - log = LogManager.getLogger(ClassName); - } - - public static void main(String args[]) throws IOException { - - String spec_dir = null; - - // .....extract the options.... 
- for (String file : args) { - if (file.equals("-help")) { - Help(); - System.exit(0); - } - - if (spec_dir == null) { - spec_dir = file; - stdout.println("\n\n==> SPEC-DIR: " + spec_dir); - continue; - } - - stdout.println("\n\n==> FILE: " + file); - - ArgoDataFile argo = null; - - try { - argo = ArgoDataFile.open(file, spec_dir, true); - - } catch (Exception e) { - stdout.println("ArgoDataFile.open exception:\n" + e); - e.printStackTrace(stdout); - - continue; - } - - // ..null file means it did not meet the min criteria to be an argo file - if (argo == null) { - stdout.println("ArgoDataFile.open failed: " + ArgoDataFile.getMessage()); - continue; - } - - if (argo.fileType() != FileType.METADATA) { - stdout.println("Who you trying to kid? This isn't a metadata file."); - continue; - } - - ArgoMetadataFile meta = (ArgoMetadataFile) argo; - meta.validateBattery(); - - // .....print diagnostics...... - - int nMessage = 0; - for (String str : meta.formatErrors()) { - stdout.println(file + ": ERROR: " + str); - nMessage++; - } - - for (String str : meta.formatWarnings()) { - stdout.println(file + ": WARNING: " + str); - nMessage++; - } - - if (nMessage == 0) { - stdout.println(file + ": no messages"); - } - - // ..reset - meta.clearFormatErrors(); - meta.clearFormatWarnings(); - - } // ..end for (file) - } - - public static void Help() { - stdout.println("\n" + "Purpose: Tests the 'BATTERY validation' of ArgoMetadataFile\n" + "\n" - + "Usage: java TestBattery spec-dir file-names...\n" + "Options:\n" - + " -help | -H | -U Help -- this message\n" + "\n"); - } - -} diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestGdacFileName.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestGdacFileName.java deleted file mode 100644 index 41fc26e..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestGdacFileName.java +++ /dev/null @@ -1,83 +0,0 @@ -package com.usgdac.legacy; - -import java.io.IOException; -import java.io.PrintStream; - -import fr.coriolis.checker.filetypes.ArgoDataFile; - -/** - * Tests the getGdacFileName and validateFileName methods of ArgoDataFile - *
- * - * @author Mark Ignaszewski - * @version $Id: ValidateSubmit.java 372 2015-12-02 19:02:31Z ignaszewski $ - */ -public class TestGdacFileName { - - public static void main(String args[]) throws IOException { - - // .....extract the options.... - for (String file : args) { - if (file.equals("-help")) { - Help(); - System.exit(0); - } - - stdout.println("\n\n==> FILE: " + file); - - ArgoDataFile argo = null; - - try { - argo = ArgoDataFile.open(file); - - } catch (Exception e) { - stdout.println("ArgoDataFile.open exception:\n" + e); - e.printStackTrace(stdout); - - continue; - } - - // ..null file means it did not meet the min criteria to be an argo file - if (argo == null) { - stdout.println("ArgoDataFile.open failed: " + ArgoDataFile.getMessage()); - continue; - } - - // ..get and validate file name - String gdac = argo.getGdacFileName(); - stdout.println("GDAC File Name: '" + gdac + "'"); - - boolean valid = argo.validateGdacFileName(); - if (valid) { - stdout.println("File name is VALID"); - } else { - stdout.println("File name is INvalid"); - } - - if (argo.nFormatErrors() == 0) { - stdout.println("No errors"); - - } else { - stdout.println("ERRORS:\n"); - for (String err : argo.formatErrors()) { - stdout.println(err + "\n"); - } - } - } - - } - - public static void Help() { - stdout.println("\n" + "Purpose: Tests the GDAC file name methods of ArgoDataFile\n" + "\n" - + "Usage: java TestGdacFileName file-names...\n" + "Options:\n" - + " -help | -H | -U Help -- this message\n" + "\n"); - } - - // ......................Variable Declarations................ - - private static ArgoDataFile argo; - - // ..standard i/o shortcuts - static PrintStream stdout = new PrintStream(System.out); - static PrintStream stderr = new PrintStream(System.err); -} diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestMFS.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestMFS.java deleted file mode 100644 index 1227183..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestMFS.java +++ /dev/null @@ -1,39 +0,0 @@ -package com.usgdac.legacy; -import ucar.nc2.*; -import ucar.ma2.*; -import java.io.*; -import java.util.*; - - -public class TestMFS -{ - - public static void main (String args[]) - throws IOException - { - NetcdfFileWriter out = null; - - //..open netCDF file for writing - - try { - out = NetcdfFileWriter.openExisting("out.nc"); - } catch (Exception e) { - System.out.println("write caused exception: "+e); - System.exit(1); - } - - Variable var1 = out.findVariable("DATA_TYPE"); - - Array arr = ArrayChar.makeFromString("test string", 16); - - try { - out.write(var1, arr); - } catch (Exception e) { - System.out.println("write caused exception: "+e); - } - - out.close(); - } - - -} //..end class diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestPattern.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestPattern.java deleted file mode 100644 index 5e7e52c..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestPattern.java +++ /dev/null @@ -1,98 +0,0 @@ -package com.usgdac.legacy; -import java.io.*; -import java.util.*; -import java.util.regex.*; - - -public class TestPattern -{ - - //..standard i/o shortcuts - static PrintStream stdout = new PrintStream(System.out); - static PrintStream stderr = new PrintStream(System.err); - - public static void main (String args[]) - throws IOException - { - int next; - for (next = 0; next < args.length; next++) { - if (args[next].equals("-help")) { - Help(); - System.exit(0); - } else if 
(args[next].startsWith("-")) { - stderr.println("Invalid argument: '"+args[next]+"'"); - System.exit(1); - } else { - break; - } - } - - if (args.length - next < 1) { - stderr.println("Not enough arguments"); - System.exit(1); - } - - String pat = args[next]; - - stdout.printf("\nPattern = '%s'\n\n", pat); - - Pattern pattern; - - pattern = Pattern.compile(pat); - - BufferedReader in = new BufferedReader(new InputStreamReader(System.in)); - - while (true) { - stdout.print("Enter test string (\"zzzendzzz\" to stop): "); - String str = in.readLine(); - stdout.printf("String = '%s'\n", str); - - if (str.equals("zzzendzzz")) break; - - Matcher m = pattern.matcher(str); - - if (m.matches()) { - stdout.printf("\n MATCHES\n\n"); - } else { - stdout.printf("\n NO match\n\n"); - } - - int nGroup = m.groupCount(); - stdout.printf(" groupCount = %d\n\n", nGroup); - - int nMatch = 0; - while (m.find()) { - nMatch++; - stdout.printf(" %d)\n", nMatch); - - for (int n = 0; n <= nGroup; n++) { - stdout.printf(" group #%d) '%s'\n", n, m.group(n)); - } - - stdout.println(); - } - - } - - } - - //................................... - //.. Help .. - //................................... - public static void Help () - { - stdout.println ( - "\n"+ - "Purpose: Tests a Pattern. Strings to test against pattern are read from terminal.\n" + - "\n"+ - "Usage: java TestPattern pattern \n"+ - "\n"+ - "Options:\n"+ - "\n"+ - "Arguments:\n"+ - " pattern REGEXP pattern to test\n" + - "\n"); - return; - } - -} //..end class diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestPsalStats.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestPsalStats.java deleted file mode 100644 index 04f0d5e..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestPsalStats.java +++ /dev/null @@ -1,79 +0,0 @@ -package com.usgdac.legacy; - -import java.io.IOException; -import java.io.PrintStream; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import fr.coriolis.checker.filetypes.ArgoDataFile; -import fr.coriolis.checker.filetypes.ArgoProfileFile; - -/** - * Tests the getGdacFileName and validateFileName methods of ArgoDataFile - *
- * - * @author Mark Ignaszewski - * @version $Id: ValidateSubmit.java 372 2015-12-02 19:02:31Z ignaszewski $ - */ -public class TestPsalStats { - - // ......................Variable Declarations................ - - private static ArgoDataFile argo; - - // ..standard i/o shortcuts - static PrintStream stdout = new PrintStream(System.out); - static PrintStream stderr = new PrintStream(System.err); - - static { - System.setProperty("logfile.name", "TestPsalStats_LOG"); - } - - static Logger log = LoggerFactory.getLogger("TestPsalStats"); - - // .........................main............................... - - public static void main(String args[]) throws IOException { - - // .....extract the options.... - for (String file : args) { - if (file.equals("-help")) { - Help(); - System.exit(0); - } - - ArgoProfileFile argo = null; - - try { - argo = ArgoProfileFile.open(file); - - } catch (Exception e) { - stdout.println("ArgoDataFile.open exception:\n" + e); - e.printStackTrace(stdout); - - continue; - } - - // ..null file means it did not meet the min criteria to be an argo file - if (argo == null) { - stdout.println("ArgoDataFile.open failed: " + ArgoDataFile.getMessage()); - continue; - } - - // ..compute PSAL statistics - - double[] stats = argo.computePsalAdjStats(); - - stdout.printf("FILE: %-30s: %10.6f %10.6f\n", file, stats[0], stats[1]); - } - - } - - public static void Help() { - stdout.println("\n" + "Purpose: Tests the GDAC file name methods of ArgoDataFile\n" + "\n" - + "Usage: java TestGdacFileName file-names...\n" + "Options:\n" - + " -help | -H | -U Help -- this message\n" + "\n"); - } - -} diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestRd1.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestRd1.java deleted file mode 100644 index 2d9188b..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestRd1.java +++ /dev/null @@ -1,38 +0,0 @@ -package com.usgdac.legacy; -import ucar.nc2.*; -import ucar.ma2.*; -import java.io.*; -import java.util.*; - - -//..uses test.nc from the TestWr*.java -//..reads all - -public class TestRd1 -{ - - public static void main (String args[]) - throws IOException - { - NetcdfFile in; - - //..open netCDF file for writing - in = NetcdfFile.open("test.nc"); - - Variable var1 = in.findVariable("var1"); - - Array v = var1.read(); - - int shape[] = v.getShape(); - - for (int n = 0; n < shape.length; n++) { - System.out.println("shape["+n+"]: "+shape[n]); - } - - System.out.println(v); - - in.close(); - } - - -} //..end class diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestRd2.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestRd2.java deleted file mode 100644 index f53edf8..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestRd2.java +++ /dev/null @@ -1,51 +0,0 @@ -package com.usgdac.legacy; -import ucar.nc2.*; -import ucar.ma2.*; -import java.io.*; -import java.util.*; - - -//..uses test.nc from the TestWr*.java -//..reads rows 0 & 1 - -public class TestRd2 -{ - - public static void main (String args[]) - throws IOException - { - NetcdfFile in; - - //..open netCDF file for writing - in = NetcdfFile.open("test.nc"); - - Variable var1 = in.findVariable("var1"); - - int shape[] = var1.getShape(); - for (int n = 0; n < shape.length; n++) { - System.out.println("variable shape["+n+"]: "+shape[n]); - } - - int origin[] = {0, 0}; - shape[0] = 2; - - - Array v = null; - try { - v = var1.read(origin, shape); - } catch (Exception e) { - System.out.println(e); - } - - int ashape[] = 
v.getShape(); - for (int n = 0; n < ashape.length; n++) { - System.out.println("array shape["+n+"]: "+ashape[n]); - } - - System.out.println(v); - - in.close(); - } - - -} //..end class diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestReadString.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestReadString.java deleted file mode 100644 index b1113cd..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestReadString.java +++ /dev/null @@ -1,360 +0,0 @@ -package com.usgdac.legacy; - -import java.io.BufferedWriter; -import java.io.IOException; -import java.io.PrintStream; -import java.io.PrintWriter; -import java.util.ArrayList; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import fr.coriolis.checker.filetypes.ArgoProfileFile; - -public class TestReadString { - - public static void main(String args[]) throws IOException { - // .....extract the options.... - int next; - - for (next = 0; next < args.length; next++) { - if (args[next].equals("-help")) { - Help(); - System.exit(0); - // } else if (args[next].startsWith("-inc-history")) { - // addHistory = true; - } else if (args[next].startsWith("-")) { - stderr.println("Invalid argument: '" + args[next] + "'"); - System.exit(1); - } else { - break; - } - } - - // .....parse the positional parameters..... - - log.info("TestReadString: START"); - - if (args.length < (2 + next)) { - log.error("too few arguments: " + args.length); - Help(); - stderr.println("Too few arguments: " + args.length); - System.exit(1); - } - - String outFileName = args[next++]; - - List inFileList = null; // ..list of input files - if (next < args.length) { - inFileList = new ArrayList(args.length - next); - for (; next < args.length; next++) { - inFileList.add(args[next]); - } - } - - log.debug("outFileName = '" + outFileName + "'"); - log.debug("number of inFileList = " + (inFileList == null ? "null" : inFileList.size())); - - stdout.println("\nTestReadString inputs:"); - stdout.println(" Output file name: " + outFileName); - - // .........get list of input files........ - // ..input files are chosen in the following priority order - // ..1) an input-file-list (overrides all other lists) - // ..2) file name arguments (already parsed above, if specified) - - if (inFileList == null) { - stderr.println("\nERROR: No input files specified\n"); - log.error("No input files specified"); - System.exit(1); - - } else { - stdout.println(" Input files read from command line"); - } - - stdout.println(" Number of input files: " + inFileList.size()); - - // ..create the output file - PrintWriter outFile = new PrintWriter(new BufferedWriter(new java.io.FileWriter(outFileName))); - - // .....write shtuff to the file..... 
- log.debug(".....writing data to file....."); - - // .....loop over input files --- read from innie, write to outie - - int nFile = -1; - - stdout.println("\nProcessing File:"); - stdout.println(" FileNum File Name"); - - for (String inFileName : inFileList) { - nFile++; - - stdout.printf("%8d %s\n", nFile, inFileName); - outFile.printf("\n=========== FILE: %s (%d)\n", inFileName, nFile); - - ArgoProfileFile arFile = null; - - try { - arFile = ArgoProfileFile.open(inFileName); - } catch (Exception e) { - log.error("ArgoDataFile.open exception:\n" + e); - stdout.println("ERROR: Exception opening Argo file:"); - e.printStackTrace(stderr); - System.exit(1); - } - - { - outFile.println("\n*************** readString(name) ****************\n"); - - String[] VAR = { "DATA_TYPE", "FORMAT_VERSION", "HANDBOOK_VERSION", "DIRECTION", "DATA_MODE", - "JULD_QC" }; - - for (String var : VAR) { - - String str = arFile.readString(var); - String strN = arFile.readString(var, true); - - if (str.length() != strN.length()) { - outFile.println("NULLS detected: " + var); - } - - outFile.printf("%-20s: length = %5d: '%s'\n", var, str.length(), str); - outFile.printf("%-20s: length = %5d: '%s'\n\n", var, strN.length(), strN); - } - } - - { - outFile.println("\n*************** readString(name, n) ****************\n"); - - // int nProf = arFile.getDimensionLength("N_PROF"); - - String[] VAR = { "PLATFORM_NUMBER", "PROJECT_NAME", "PI_NAME", "DATA_STATE_INDICATOR", "WMO_INST_TYPE", - "DATA_CENTRE" }; - - for (String var : VAR) { - - String str = arFile.readString(var, 0); - String strN = arFile.readString(var, 0, true); - - if (str == null) { - stdout.println(">> readString(" + var + ",0): Failed"); - outFile.println(">> readString(" + var + ",0): Failed"); - continue; - } - if (strN == null) { - stdout.println(">> readString(" + var + ",0, true): Failed"); - outFile.println(">> readString(" + var + ",0, true): Failed"); - continue; - } - - if (str.length() != strN.length()) { - outFile.println("NULLS detected: " + var); - } - - outFile.printf("%-20s[0]: length = %5d: '%s'\n", var, str.length(), str); - outFile.printf("%-20s[0]: length = %5d: '%s'\n\n", var, strN.length(), strN); - } - } - - { - outFile.println("\n********** readString(name, n, m) ************\n"); - - String[] VAR = { "STATION_PARAMETERS", "HISTORY_INSTITUTION", "HISTORY_DATE", "HISTORY_PARAMETER" }; - - for (String var : VAR) { - - String str = arFile.readString(var, 0, 0); - String strN = arFile.readString(var, 0, 0, true); - - if (str == null) { - stdout.println(">> readString(" + var + ",0,0): Failed"); - outFile.println(">> readString(" + var + ",0,0): Failed"); - continue; - } - if (strN == null) { - stdout.println(">> readString(" + var + ",0,0, true): Failed"); - outFile.println(">> readString(" + var + ",0,0, true): Failed"); - continue; - } - - if (str.length() != strN.length()) { - outFile.println("NULLS detected: " + var); - } - - outFile.printf("%-20s[0]: length = %5d: '%s'\n", var, str.length(), str); - outFile.printf("%-20s[0]: length = %5d: '%s'\n\n", var, strN.length(), strN); - } - } - - { - outFile.println("\n********** readString(name, n, m, k) ************\n"); - - String[] VAR = { "PARAMETER", "SCIENTIFIC_CALIB_EQUATION", "SCIENTIFIC_CALIB_DATE", - "SCIENTIFIC_CALIB_COMMENT" }; - - for (String var : VAR) { - - String str = arFile.readString(var, 0, 0, 0); - String strN = arFile.readString(var, 0, 0, 0, true); - - if (str == null) { - stdout.println(">> readString(" + var + ",0,0,0): Failed"); - outFile.println(">> readString(" + 
var + ",0,0,0): Failed"); - continue; - } - if (strN == null) { - stdout.println(">> readString(" + var + ",0,0,0, true): Failed"); - outFile.println(">> readString(" + var + ",0,0,0, true): Failed"); - continue; - } - - if (str.length() != strN.length()) { - outFile.println("NULLS detected: " + var); - } - - outFile.printf("%-20s[0]: length = %5d: '%s'\n", var, str.length(), str); - outFile.printf("%-20s[0]: length = %5d: '%s'\n\n", var, strN.length(), strN); - } - } - - { - outFile.println("\n********** readStringArr(name) ************\n"); - - String[] VAR = { "PLATFORM_NUMBER", "WMO_INST_TYPE", "DATA_CENTRE" }; - - for (String var : VAR) { - - String[] str = arFile.readStringArr(var); - String[] strN = arFile.readStringArr(var, true); - - if (str == null) { - stdout.println(">> readStringArr(" + var + "): Failed"); - outFile.println(">> readStringArr(" + var + "): Failed"); - continue; - } - if (strN == null) { - stdout.println(">> readStringArr(" + var + ", true): Failed"); - outFile.println(">> readStringArr(" + var + ", true): Failed"); - continue; - } - - outFile.printf("%-20s:\n", var); - - for (int n = 0; n < str.length; n++) { - if (str[n].length() != strN[n].length()) { - outFile.println("Array NULLS detected: " + var); - } - - outFile.printf(" %5d) length = %5d: '%s'\n", n, str[n].length(), str[n]); - outFile.printf(" length = %5d: '%s'\n\n", strN[n].length(), strN[n]); - } - } - } - - { - outFile.println("\n********** readStringArr(name, n) ************\n"); - - String[] VAR = { "STATION_PARAMETERS", "HISTORY_INSTITUTION", "HISTORY_DATE", "HISTORY_PARAMETER" }; - - for (String var : VAR) { - - String[] str = arFile.readStringArr(var, 0); - String[] strN = arFile.readStringArr(var, 0, true); - - if (str == null) { - stdout.println(">> readStringArr(" + var + ",0): Failed"); - outFile.println(">> readStringArr(" + var + ",0): Failed"); - continue; - } - if (strN == null) { - stdout.println(">> readStringArr(" + var + ",0, true): Failed"); - outFile.println(">> readStringArr(" + var + ",0, true): Failed"); - continue; - } - - outFile.printf("%-20s[0]:\n", var); - - for (int n = 0; n < str.length; n++) { - if (str[n].length() != strN[n].length()) { - outFile.println("Array NULLS detected: " + var); - } - - outFile.printf(" %5d) length = %5d: '%s'\n", n, str[n].length(), str[n]); - outFile.printf(" length = %5d: '%s'\n\n", strN[n].length(), strN[n]); - } - } - } - - { - outFile.println("\n********** readStringArr(name, n, m) ************\n"); - - String[] VAR = { "PARAMETER", "SCIENTIFIC_CALIB_EQUATION", "SCIENTIFIC_CALIB_DATE", - "SCIENTIFIC_CALIB_COMMENT" }; - - for (String var : VAR) { - - String[] str = arFile.readStringArr(var, 0, 0); - String[] strN = arFile.readStringArr(var, 0, 0, true); - - if (str == null) { - stdout.println(">> readStringArr(" + var + ",0,0): Failed"); - outFile.println(">> readStringArr(" + var + ",0,0): Failed"); - continue; - } - if (strN == null) { - stdout.println(">> readStringArr(" + var + ",0,0, true): Failed"); - outFile.println(">> readStringArr(" + var + ",0,0, true): Failed"); - continue; - } - - outFile.printf("%-20s[0,0]:\n", var); - - for (int n = 0; n < str.length; n++) { - if (str[n].length() != strN[n].length()) { - outFile.println("Array NULLS detected: " + var); - } - - outFile.printf(" %5d) length = %5d: '%s'\n", n, str[n].length(), str[n]); - outFile.printf(" length = %5d: '%s'\n\n", strN[n].length(), strN[n]); - } - } - } - - arFile.close(); - } // ..end for (inFileName) - - // ..............end of data writes..................... 
- - outFile.close(); - - } // ..end main - - // ................................... - // .. Help .. - // ................................... - public static void Help() { - stdout.println("\n" + "Purpose: Dumps Argo Profile files\n" + "\n" - + "Usage: java TestReadString [options] output-file profile-files ...\n" + "\n" + "Options:\n" + "\n" - + "Arguments:\n" + " output-file Output file\n" - + " profile-files Input Argo NetCDF profile file(s)\n" + "\n"); - return; - } - - // ......................Variable Declarations................ - - // ..standard i/o shortcuts - static PrintStream stdout = new PrintStream(System.out); - static PrintStream stderr = new PrintStream(System.err); - - static { - System.setProperty("logfile.name", "TestReadString_LOG"); - } - - static Logger log = LoggerFactory.getLogger("TestReadString"); - - // ..class variables - static String outFileName; - -} // ..end class diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile.java deleted file mode 100644 index 6628b3b..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile.java +++ /dev/null @@ -1,426 +0,0 @@ -package com.usgdac.legacy; - -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.PrintStream; -import java.lang.invoke.MethodHandles; -import java.util.Properties; - -import javax.xml.stream.XMLStreamException; -import javax.xml.transform.TransformerConfigurationException; -import javax.xml.transform.TransformerException; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -import fr.coriolis.checker.filetypes.ArgoDataFile; -import fr.coriolis.checker.filetypes.ArgoProfileFile; -import fr.coriolis.checker.output.ResultsFile; - -/** - */ -public class TestResultsFile { - - // ........................................................... - // ......................Variable Declarations................ - // ........................................................... - - // ..standard i/o shortcuts - static PrintStream stdout = new PrintStream(System.out); - static PrintStream stderr = new PrintStream(System.err); - - // ..class variables - static String outFileName; - static String specFileName; - - private static final String propFileName; - - private static Properties property; - - private static final Class ThisClass; - private static final String ClassName; - - private static final Logger log; - - static { - ThisClass = MethodHandles.lookup().lookupClass(); - ClassName = ThisClass.getSimpleName(); - propFileName = ClassName + ".properties"; - - System.setProperty("logfile.name", ClassName + "_LOG"); - log = LogManager.getLogger(ClassName); - } - - static boolean doIndexInfo = false; - - // ............................................................. - // .. .. - // .. main .. - // .. .. - // ............................................................. - - public static void main(String args[]) - throws IOException, XMLStreamException, TransformerException, TransformerConfigurationException { - // .....get the Properties.... 
- - try { - ClassLoader loader = ThisClass.getClassLoader(); - InputStream in = loader.getResourceAsStream(propFileName); - - property = new Properties(); - property.load(in); - in.close(); - - } catch (Exception e) { - stderr.println("\n*** No property file: '" + propFileName + "' ***\n"); - System.exit(1); - } - - String specDirName = property.getProperty("SpecDirectory", "unknown"); - String dacName = property.getProperty("Dac", "unknown"); - - // .....extract the options.... - int next; - boolean doXml = false; - - for (next = 0; next < args.length; next++) { - if (args[next].equals("-help")) { - Help(); - System.exit(0); - } else if (args[next].equals("-xml")) { - doXml = true; - } else if (args[next].equals("-text")) { - doXml = false; - } else if (args[next].startsWith("-")) { - stderr.println("Invalid argument: '" + args[next] + "'"); - System.exit(1); - } else { - break; - } - } - - stdout.println("\n" + ClassName + " inputs:"); - stdout.println(" Specification directory: " + specDirName); - // stdout.println(" Output file: "+outFile); - - // .....check the spec directory..... - - if (!(new File(specDirName).isDirectory())) { - stderr.println("ERROR: Specification directory is not a directory ('" + specDirName + "')"); - log.error("specification directory is not a directory"); - System.exit(1); - } - - // ..set some test settings - - String fcVersion = "6.0"; - String spVersion = "1.2.4"; - - ArgoDataFile argo = null; - - String inFileName; - String outFileName; - ResultsFile results; - - // .......................................................... - // ................test exception output..................... - // .......................................................... - - String test = "exception"; - inFileName = "1929394_blah.nc"; - outFileName = "results." + test; - - stdout.println("\nTest '" + test + "':"); - stdout.println("...output file: '" + outFileName + "'"); - log.debug("Test exception output: '{}'", outFileName); - - results = new ResultsFile(doXml, outFileName, fcVersion, spVersion, inFileName); - - log.debug("cause an exception from ArgoDataFile.open"); - - try { - argo = ArgoDataFile.open("gobbledygook"); - - stdout.println("\n *** TEST FAILED to throw exception!! ***\n"); - log.debug("...TEST FAILED to throw exception!!"); - - } catch (Exception e) { - stdout.println("...intentionally created an exception"); - log.debug("...intentionally created an exception"); - - results.openError(e); - } - - results.close(); - - stdout.println("...test completed"); - - // .......................................................... - // .............test "not an argo file" output............... - // .......................................................... - - test = "not_argo_file"; - inFileName = property.getProperty("NotAnArgoFile", "unknown"); - outFileName = "results." + test; - - stdout.println("\nTest '" + test + "':"); - stdout.println("...input file: '" + inFileName + "'"); - stdout.println("...output file: '" + outFileName + "'"); - - log.debug("Test 'not an Argo file' output: in, out = '{}', '{}'", inFileName, outFileName); - - results = new ResultsFile(doXml, outFileName, fcVersion, spVersion, inFileName); - - try { - argo = ArgoDataFile.open(inFileName); - stdout.println("...test file opened"); - log.debug("...test file opened"); - - } catch (Exception e) { - stdout.println("\n *** TEST FAILED!!! Could not open the test file ***\n"); - e.printStackTrace(); - log.debug("...TEST FAILED!!! 
Could not open the test file: '{}'", inFileName); - } - - if (argo == null) { - stdout.println("...argo.open returned null as expected"); - results.notArgoFile(dacName); - - } else { - stdout.println("\n *** TEST FAILED!!! argo.open was incorrectly successful ***\n"); - } - - results.close(); - - stdout.println("...test completed"); - - // .......................................................... - // ........test "format open, no specification" output....... - // .......................................................... - - test = "no_specification"; - inFileName = property.getProperty("NoSpecificationFile", "unknown"); - outFileName = "results." + test; - - stdout.println("\nTest '" + test + "':"); - stdout.println("...input file: '" + inFileName + "'"); - stdout.println("...output file: '" + outFileName + "'"); - - log.debug("Test 'no specification' output: in, out = '{}', '{}'", inFileName, outFileName); - - results = new ResultsFile(doXml, outFileName, fcVersion, spVersion, inFileName); - - try { - argo = ArgoDataFile.open(inFileName, specDirName, true); - - stdout.println("...test file file opened"); - log.debug("...test file opened"); - - } catch (Exception e) { - stdout.println("\n *** TEST FAILED!!! Could not open the test file ***\n"); - e.printStackTrace(); - log.debug("...TEST FAILED!!! Could not open the test file: '{}'", inFileName); - } - - String phase = "FORMAT-VERIFICATION"; - - if (argo == null) { - stdout.println("...test file failed argo.open as desired"); - log.debug("...test file failed argo.open as desired"); - - results.formatErrorMessage(phase); - - } else { - stdout.println("\n *** TEST FAILED!!! Test file incorrectly succeeded argo.open ***\n"); - log.debug("...TEST FAILED!!! Test file incorrectly succeeded argo.open ***\n"); - } - - results.close(); - - stdout.println("...test completed"); - - // .......................................................... - // .........test "format validation failed" output........... - // .......................................................... - - test = "format_invalid"; - inFileName = property.getProperty("FormatInvalidFile", "unknown"); - outFileName = "results." + test; - - stdout.println("\nTest '" + test + "':"); - stdout.println("...input file: '" + inFileName + "'"); - stdout.println("...output file: '" + outFileName + "'"); - - log.debug("Test 'format invalid' output: in, out = '{}', '{}'", inFileName, outFileName); - - results = new ResultsFile(doXml, outFileName, fcVersion, spVersion, inFileName); - - try { - argo = ArgoDataFile.open(inFileName, specDirName, true); - - } catch (Exception e) { - stdout.println("\n *** TEST FAILED!!! Could not open the test file ***\n"); - e.printStackTrace(); - log.debug("...TEST FAILED!!! Could not open the test file: '{}'", inFileName); - } - - phase = "FORMAT-VERIFICATION"; - boolean checkTest = true; - - if (argo == null) { - checkTest = false; - stdout.println("\n *** TEST FAILED!!! Incorrectly failed in argo.open ***\n"); - log.debug("...TEST FAILED!!! Incorrectly failed in argto.,open ***\n"); - - } else { - stdout.println("...test file file opened"); - log.debug("...test file opened"); - } - - if (checkTest) { - stdout.println("...verify format"); - log.debug("...verify format"); - - checkTest = argo.verifyFormat(dacName); - - if (!checkTest) { - stdout.println("\n *** TEST FAILED!!! argo.verifyFormat incorrectly returned false ***\n"); - log.debug(" *** TEST FAILED!!! 
argo.verifyFormat incorrectly returned false ***"); - } - } - - if (checkTest) { - boolean formatPassed = (argo.nFormatErrors() == 0); - - if (!formatPassed) { - stdout.println("...test file failed format validation as desired"); - log.debug("...test file failed format validation as desired"); - - boolean doPsalStats = false; - - results.statusAndPhase(formatPassed, phase); - results.metaData(dacName, argo, formatPassed, doPsalStats); - results.errorsAndWarnings(argo); - - } else { - stdout.println("\n *** TEST FAILED!!! Test file incorrectly passed format validation ***\n"); - log.debug("...TEST FAILED!!! Test file incorrectly passed format validation ***\n"); - } - } - - results.close(); - - stdout.println("...test completed"); - - // .......................................................... - // .....test "data consistency errors/warnings" output....... - // .......................................................... - - test = "data_error"; - inFileName = property.getProperty("DataErrorFile", "unknown"); - outFileName = "results." + test; - - stdout.println("\nTest '" + test + "':"); - stdout.println("...input file: '" + inFileName + "'"); - stdout.println("...output file: '" + outFileName + "'"); - - log.debug("Test 'format invalid' output: in, out = '{}', '{}'", inFileName, outFileName); - - results = new ResultsFile(doXml, outFileName, fcVersion, spVersion, inFileName); - - try { - argo = ArgoDataFile.open(inFileName, specDirName, true); - - } catch (Exception e) { - stdout.println("\n *** TEST FAILED!!! Could not open the test file ***\n"); - e.printStackTrace(); - log.debug("...TEST FAILED!!! Could not open the test file: '{}'", inFileName); - } - - phase = "FORMAT-VERIFICATION"; - checkTest = true; - - if (argo == null) { - checkTest = false; - stdout.println("\n *** TEST FAILED!!! Incorrectly failed in argo.open ***\n"); - log.debug("...TEST FAILED!!! Incorrectly failed in argto.,open ***\n"); - } - - if (checkTest) { - stdout.println("...verify format"); - log.debug("...verify format"); - - checkTest = argo.verifyFormat(dacName); - - if (!checkTest) { - stdout.println("\n *** TEST FAILED!!! argo.verifyFormat incorrectly returned false ***\n"); - log.debug("...TEST FAILED!!! argo.verifyFormat incorrectly returned false ***"); - } - } - - boolean formatPassed = false; - - if (checkTest) { - formatPassed = (argo.nFormatErrors() == 0); - - if (!formatPassed) { - stdout.println("\n *** TEST FAILED!!! Incorrectly found format errors ***\n"); - log.debug("...TEST FAILED!!! Incorrectly found format errors ***\n"); - - } else { - stdout.println("...passed format check as desired"); - log.debug("...passed format check as desired"); - } - } - - if (formatPassed) { - dacName = "aoml"; - checkTest = ((ArgoProfileFile) argo).validate(false, dacName, true); - - if (!checkTest) { - stdout.println("\n *** TEST FAILED!!! Test file incorrectly argo.validate = false ***\n"); - log.debug("\n *** TEST FAILED!!! Test file incorrectly argo.validate = false ***\n"); - - stdout.println(ArgoDataFile.getMessage()); - } - - boolean dataPassed = (argo.nFormatErrors() == 0 && argo.nFormatWarnings() == 0); - - if (!dataPassed) { - stdout.println("...test file failed data check as desired"); - log.debug("...test file failed data check as desired"); - - boolean doPsalStats = false; - - results.statusAndPhase(formatPassed, phase); - results.metaData(dacName, argo, formatPassed, doPsalStats); - results.errorsAndWarnings(argo); - - } else { - stdout.println("\n *** TEST FAILED!!! 
Test file incorrectly passed data verification ***\n"); - log.debug("...TEST FAILED!!! Test file incorrectly passed data verification ***\n"); - } - } - - results.close(); - - stdout.println("...test completed"); - - } // ..end main - - // ................................... - // .. Help .. - // ................................... - public static void Help() { - stdout.println("\n" + "Purpose: Creates a merged core-/bio-profile Argo NetCDF file from the input files\n" - + "\n" + "Usage: java " + ClassName + " [options] spec-dir file-type format-version\n" + "\n" - + "Options:\n" + " -xml XML results file\n" + " -text Text results file\n" - + " default\n" + " -help\n" + "\n" + "Arguments:\n" - + " spec-dir Directory to specification files\n" + " file-type meta, prof, tech, traj\n" - + " format-version Version of file format for the multi-profile file\n" + "\n"); - // " out-file Output file\n"+ - return; - } - -} // ..end class diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile_DataError.cdl b/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile_DataError.cdl deleted file mode 100644 index cd910a8..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile_DataError.cdl +++ /dev/null @@ -1,600 +0,0 @@ -//****** set a PARAMETER_DATA_MODE setting to "D" / DATA_MODE = "R" -//****** add a TEMP_DOXY_ADJUSTED value in an to a "R" variable ***** - -netcdf BR1900722_002 { -dimensions: - DATE_TIME = 14 ; - STRING2 = 2 ; - STRING4 = 4 ; - STRING8 = 8 ; - STRING16 = 16 ; - STRING32 = 32 ; - STRING64 = 64 ; - STRING256 = 256 ; - N_PROF = 1 ; - N_PARAM = 4 ; - N_LEVELS = 71 ; - N_CALIB = 1 ; - N_HISTORY = UNLIMITED ; // (1 currently) -variables: - char DATA_TYPE(STRING32) ; - DATA_TYPE:long_name = "Data type" ; - DATA_TYPE:conventions = "Argo reference table 1" ; - DATA_TYPE:_FillValue = " " ; - char FORMAT_VERSION(STRING4) ; - FORMAT_VERSION:long_name = "File format version" ; - FORMAT_VERSION:_FillValue = " " ; - char HANDBOOK_VERSION(STRING4) ; - HANDBOOK_VERSION:long_name = "Data handbook version" ; - HANDBOOK_VERSION:_FillValue = " " ; - char REFERENCE_DATE_TIME(DATE_TIME) ; - REFERENCE_DATE_TIME:long_name = "Date of reference for Julian days" ; - REFERENCE_DATE_TIME:conventions = "YYYYMMDDHHMISS" ; - REFERENCE_DATE_TIME:_FillValue = " " ; - char PLATFORM_NUMBER(N_PROF, STRING8) ; - PLATFORM_NUMBER:long_name = "Float unique identifier" ; - PLATFORM_NUMBER:conventions = "WMO float identifier : A9IIIII" ; - PLATFORM_NUMBER:_FillValue = " " ; - char PROJECT_NAME(N_PROF, STRING64) ; - PROJECT_NAME:long_name = "Name of the project" ; - PROJECT_NAME:_FillValue = " " ; - char PI_NAME(N_PROF, STRING64) ; - PI_NAME:long_name = "Name of the principal investigator" ; - PI_NAME:_FillValue = " " ; - char STATION_PARAMETERS(N_PROF, N_PARAM, STRING64) ; - STATION_PARAMETERS:long_name = "List of available parameters for the station" ; - STATION_PARAMETERS:conventions = "Argo reference table 3" ; - STATION_PARAMETERS:_FillValue = " " ; - int CYCLE_NUMBER(N_PROF) ; - CYCLE_NUMBER:long_name = "Float cycle number" ; - CYCLE_NUMBER:conventions = "0...N, 0 : launch cycle (if exists), 1 : first complete cycle" ; - CYCLE_NUMBER:_FillValue = 99999 ; - char DIRECTION(N_PROF) ; - DIRECTION:long_name = "Direction of the station profiles" ; - DIRECTION:conventions = "A: ascending profiles, D: descending profiles" ; - DIRECTION:_FillValue = " " ; - char DATA_CENTRE(N_PROF, STRING2) ; - DATA_CENTRE:long_name = "Data centre in charge of float data processing" ; - 
DATA_CENTRE:conventions = "Argo reference table 4" ; - DATA_CENTRE:_FillValue = " " ; - char DATE_CREATION(DATE_TIME) ; - DATE_CREATION:long_name = "Date of file creation" ; - DATE_CREATION:conventions = "YYYYMMDDHHMISS" ; - DATE_CREATION:_FillValue = " " ; - char DATE_UPDATE(DATE_TIME) ; - DATE_UPDATE:long_name = "Date of update of this file" ; - DATE_UPDATE:conventions = "YYYYMMDDHHMISS" ; - DATE_UPDATE:_FillValue = " " ; - char DC_REFERENCE(N_PROF, STRING32) ; - DC_REFERENCE:long_name = "Station unique identifier in data centre" ; - DC_REFERENCE:conventions = "Data centre convention" ; - DC_REFERENCE:_FillValue = " " ; - char DATA_STATE_INDICATOR(N_PROF, STRING4) ; - DATA_STATE_INDICATOR:long_name = "Degree of processing the data have passed through" ; - DATA_STATE_INDICATOR:conventions = "Argo reference table 6" ; - DATA_STATE_INDICATOR:_FillValue = " " ; - char DATA_MODE(N_PROF) ; - DATA_MODE:long_name = "Delayed mode or real time data" ; - DATA_MODE:conventions = "R : real time; D : delayed mode; A : real time with adjustment" ; - DATA_MODE:_FillValue = " " ; - char PARAMETER_DATA_MODE(N_PROF, N_PARAM) ; - PARAMETER_DATA_MODE:long_name = "Delayed mode or real time data" ; - PARAMETER_DATA_MODE:conventions = "R : real time; D : delayed mode; A : real time with adjustment" ; - PARAMETER_DATA_MODE:_FillValue = " " ; - char PLATFORM_TYPE(N_PROF, STRING32) ; - PLATFORM_TYPE:long_name = "Type of float" ; - PLATFORM_TYPE:conventions = "Argo reference table 23" ; - PLATFORM_TYPE:_FillValue = " " ; - char FLOAT_SERIAL_NO(N_PROF, STRING32) ; - FLOAT_SERIAL_NO:long_name = "Serial number of the float" ; - FLOAT_SERIAL_NO:_FillValue = " " ; - char FIRMWARE_VERSION(N_PROF, STRING32) ; - FIRMWARE_VERSION:long_name = "Instrument firmware version" ; - FIRMWARE_VERSION:_FillValue = " " ; - char WMO_INST_TYPE(N_PROF, STRING4) ; - WMO_INST_TYPE:long_name = "Coded instrument type" ; - WMO_INST_TYPE:conventions = "Argo reference table 8" ; - WMO_INST_TYPE:_FillValue = " " ; - double JULD(N_PROF) ; - JULD:long_name = "Julian day (UTC) of the station relative to REFERENCE_DATE_TIME" ; - JULD:standard_name = "time" ; - JULD:units = "days since 1950-01-01 00:00:00 UTC" ; - JULD:conventions = "Relative julian days with decimal part (as parts of day)" ; - JULD:resolution = 1.e-08 ; - JULD:_FillValue = 999999. ; - JULD:axis = "T" ; - char JULD_QC(N_PROF) ; - JULD_QC:long_name = "Quality on date and time" ; - JULD_QC:conventions = "Argo reference table 2" ; - JULD_QC:_FillValue = " " ; - double JULD_LOCATION(N_PROF) ; - JULD_LOCATION:long_name = "Julian day (UTC) of the location relative to REFERENCE_DATE_TIME" ; - JULD_LOCATION:units = "days since 1950-01-01 00:00:00 UTC" ; - JULD_LOCATION:conventions = "Relative julian days with decimal part (as parts of day)" ; - JULD_LOCATION:resolution = 1.e-08 ; - JULD_LOCATION:_FillValue = 999999. ; - double LATITUDE(N_PROF) ; - LATITUDE:long_name = "Latitude of the station, best estimate" ; - LATITUDE:standard_name = "latitude" ; - LATITUDE:units = "degree_north" ; - LATITUDE:_FillValue = 99999. ; - LATITUDE:valid_min = -90. ; - LATITUDE:valid_max = 90. ; - LATITUDE:axis = "Y" ; - double LONGITUDE(N_PROF) ; - LONGITUDE:long_name = "Longitude of the station, best estimate" ; - LONGITUDE:standard_name = "longitude" ; - LONGITUDE:units = "degree_east" ; - LONGITUDE:_FillValue = 99999. ; - LONGITUDE:valid_min = -180. ; - LONGITUDE:valid_max = 180. 
; - LONGITUDE:axis = "X" ; - char POSITION_QC(N_PROF) ; - POSITION_QC:long_name = "Quality on position (latitude and longitude)" ; - POSITION_QC:conventions = "Argo reference table 2" ; - POSITION_QC:_FillValue = " " ; - char POSITIONING_SYSTEM(N_PROF, STRING8) ; - POSITIONING_SYSTEM:long_name = "Positioning system" ; - POSITIONING_SYSTEM:_FillValue = " " ; - char VERTICAL_SAMPLING_SCHEME(N_PROF, STRING256) ; - VERTICAL_SAMPLING_SCHEME:long_name = "Vertical sampling scheme" ; - VERTICAL_SAMPLING_SCHEME:conventions = "Argo reference table 16" ; - VERTICAL_SAMPLING_SCHEME:_FillValue = " " ; - int CONFIG_MISSION_NUMBER(N_PROF) ; - CONFIG_MISSION_NUMBER:long_name = "Unique number denoting the missions performed by the float" ; - CONFIG_MISSION_NUMBER:conventions = "1...N, 1 : first complete mission" ; - CONFIG_MISSION_NUMBER:_FillValue = 99999 ; - char PROFILE_TEMP_DOXY_QC(N_PROF) ; - PROFILE_TEMP_DOXY_QC:long_name = "Global quality flag of TEMP_DOXY profile" ; - PROFILE_TEMP_DOXY_QC:conventions = "Argo reference table 2a" ; - PROFILE_TEMP_DOXY_QC:_FillValue = " " ; - char PROFILE_BPHASE_DOXY_QC(N_PROF) ; - PROFILE_BPHASE_DOXY_QC:long_name = "Global quality flag of BPHASE_DOXY profile" ; - PROFILE_BPHASE_DOXY_QC:conventions = "Argo reference table 2a" ; - PROFILE_BPHASE_DOXY_QC:_FillValue = " " ; - char PROFILE_DOXY_QC(N_PROF) ; - PROFILE_DOXY_QC:long_name = "Global quality flag of DOXY profile" ; - PROFILE_DOXY_QC:conventions = "Argo reference table 2a" ; - PROFILE_DOXY_QC:_FillValue = " " ; - float PRES(N_PROF, N_LEVELS) ; - PRES:long_name = "Sea water pressure, equals 0 at sea-level" ; - PRES:standard_name = "sea_water_pressure" ; - PRES:units = "decibar" ; - PRES:valid_min = 0.f ; - PRES:valid_max = 12000.f ; - PRES:resolution = 0.1f ; - PRES:C_format = "%7.1f" ; - PRES:FORTRAN_format = "F7.1f" ; - PRES:_FillValue = 99999.f ; - PRES:axis = "Z" ; - float TEMP_DOXY(N_PROF, N_LEVELS) ; - TEMP_DOXY:long_name = "Sea temperature from oxygen sensor ITS-90 scale" ; - TEMP_DOXY:standard_name = "temperature_of_sensor_for_oxygen_in_sea_water" ; - TEMP_DOXY:units = "degree_Celsius" ; - TEMP_DOXY:valid_min = -2.f ; - TEMP_DOXY:valid_max = 40.f ; - TEMP_DOXY:resolution = 0.001f ; - TEMP_DOXY:C_format = "%9.3f" ; - TEMP_DOXY:FORTRAN_format = "F9.3f" ; - TEMP_DOXY:_FillValue = 99999.f ; - char TEMP_DOXY_QC(N_PROF, N_LEVELS) ; - TEMP_DOXY_QC:long_name = "quality flag" ; - TEMP_DOXY_QC:conventions = "Argo reference table 2" ; - TEMP_DOXY_QC:_FillValue = " " ; - float TEMP_DOXY_ADJUSTED(N_PROF, N_LEVELS) ; - TEMP_DOXY_ADJUSTED:long_name = "Sea temperature from oxygen sensor ITS-90 scale" ; - TEMP_DOXY_ADJUSTED:standard_name = "temperature_of_sensor_for_oxygen_in_sea_water" ; - TEMP_DOXY_ADJUSTED:units = "degree_Celsius" ; - TEMP_DOXY_ADJUSTED:valid_min = -2.f ; - TEMP_DOXY_ADJUSTED:valid_max = 40.f ; - TEMP_DOXY_ADJUSTED:resolution = 0.001f ; - TEMP_DOXY_ADJUSTED:C_format = "%9.3f" ; - TEMP_DOXY_ADJUSTED:FORTRAN_format = "F9.3f" ; - TEMP_DOXY_ADJUSTED:_FillValue = 99999.f ; - char TEMP_DOXY_ADJUSTED_QC(N_PROF, N_LEVELS) ; - TEMP_DOXY_ADJUSTED_QC:long_name = "quality flag" ; - TEMP_DOXY_ADJUSTED_QC:conventions = "Argo reference table 2" ; - TEMP_DOXY_ADJUSTED_QC:_FillValue = " " ; - float TEMP_DOXY_ADJUSTED_ERROR(N_PROF, N_LEVELS) ; - TEMP_DOXY_ADJUSTED_ERROR:long_name = "Contains the error on the adjusted values as determined by the delayed mode QC process" ; - TEMP_DOXY_ADJUSTED_ERROR:units = "degree_Celsius" ; - TEMP_DOXY_ADJUSTED_ERROR:resolution = 0.001f ; - TEMP_DOXY_ADJUSTED_ERROR:C_format = "%9.3f" ; 
- TEMP_DOXY_ADJUSTED_ERROR:FORTRAN_format = "F9.3f" ; - TEMP_DOXY_ADJUSTED_ERROR:_FillValue = 99999.f ; - float BPHASE_DOXY(N_PROF, N_LEVELS) ; - BPHASE_DOXY:long_name = "Uncalibrated phase shift reported by oxygen sensor" ; - BPHASE_DOXY:units = "degree" ; - BPHASE_DOXY:valid_min = 10.f ; - BPHASE_DOXY:valid_max = 70.f ; - BPHASE_DOXY:resolution = 0.01f ; - BPHASE_DOXY:C_format = "%8.2f" ; - BPHASE_DOXY:FORTRAN_format = "F8.2f" ; - BPHASE_DOXY:_FillValue = 99999.f ; - char BPHASE_DOXY_QC(N_PROF, N_LEVELS) ; - BPHASE_DOXY_QC:long_name = "quality flag" ; - BPHASE_DOXY_QC:conventions = "Argo reference table 2" ; - BPHASE_DOXY_QC:_FillValue = " " ; - float BPHASE_DOXY_ADJUSTED(N_PROF, N_LEVELS) ; - BPHASE_DOXY_ADJUSTED:long_name = "Uncalibrated phase shift reported by oxygen sensor" ; - BPHASE_DOXY_ADJUSTED:units = "degree" ; - BPHASE_DOXY_ADJUSTED:valid_min = 10.f ; - BPHASE_DOXY_ADJUSTED:valid_max = 70.f ; - BPHASE_DOXY_ADJUSTED:resolution = 0.01f ; - BPHASE_DOXY_ADJUSTED:C_format = "%8.2f" ; - BPHASE_DOXY_ADJUSTED:FORTRAN_format = "F8.2f" ; - BPHASE_DOXY_ADJUSTED:_FillValue = 99999.f ; - char BPHASE_DOXY_ADJUSTED_QC(N_PROF, N_LEVELS) ; - BPHASE_DOXY_ADJUSTED_QC:long_name = "quality flag" ; - BPHASE_DOXY_ADJUSTED_QC:conventions = "Argo reference table 2" ; - BPHASE_DOXY_ADJUSTED_QC:_FillValue = " " ; - float BPHASE_DOXY_ADJUSTED_ERROR(N_PROF, N_LEVELS) ; - BPHASE_DOXY_ADJUSTED_ERROR:long_name = "Contains the error on the adjusted values as determined by the delayed mode QC process" ; - BPHASE_DOXY_ADJUSTED_ERROR:units = "degree" ; - BPHASE_DOXY_ADJUSTED_ERROR:resolution = 0.01f ; - BPHASE_DOXY_ADJUSTED_ERROR:C_format = "%8.2f" ; - BPHASE_DOXY_ADJUSTED_ERROR:FORTRAN_format = "F8.2f" ; - BPHASE_DOXY_ADJUSTED_ERROR:_FillValue = 99999.f ; - float DOXY(N_PROF, N_LEVELS) ; - DOXY:long_name = "Dissolved oxygen" ; - DOXY:standard_name = "moles_of_oxygen_per_unit_mass_in_sea_water" ; - DOXY:units = "micromole/kg" ; - DOXY:valid_min = 0.f ; - DOXY:valid_max = 600.f ; - DOXY:resolution = 0.001f ; - DOXY:C_format = "%9.3f" ; - DOXY:FORTRAN_format = "F9.3f" ; - DOXY:_FillValue = 99999.f ; - char DOXY_QC(N_PROF, N_LEVELS) ; - DOXY_QC:long_name = "quality flag" ; - DOXY_QC:conventions = "Argo reference table 2" ; - DOXY_QC:_FillValue = " " ; - float DOXY_ADJUSTED(N_PROF, N_LEVELS) ; - DOXY_ADJUSTED:long_name = "Dissolved oxygen" ; - DOXY_ADJUSTED:standard_name = "moles_of_oxygen_per_unit_mass_in_sea_water" ; - DOXY_ADJUSTED:units = "micromole/kg" ; - DOXY_ADJUSTED:valid_min = 0.f ; - DOXY_ADJUSTED:valid_max = 600.f ; - DOXY_ADJUSTED:resolution = 0.001f ; - DOXY_ADJUSTED:C_format = "%9.3f" ; - DOXY_ADJUSTED:FORTRAN_format = "F9.3f" ; - DOXY_ADJUSTED:_FillValue = 99999.f ; - char DOXY_ADJUSTED_QC(N_PROF, N_LEVELS) ; - DOXY_ADJUSTED_QC:long_name = "quality flag" ; - DOXY_ADJUSTED_QC:conventions = "Argo reference table 2" ; - DOXY_ADJUSTED_QC:_FillValue = " " ; - float DOXY_ADJUSTED_ERROR(N_PROF, N_LEVELS) ; - DOXY_ADJUSTED_ERROR:long_name = "Contains the error on the adjusted values as determined by the delayed mode QC process" ; - DOXY_ADJUSTED_ERROR:units = "micromole/kg" ; - DOXY_ADJUSTED_ERROR:resolution = 0.001f ; - DOXY_ADJUSTED_ERROR:C_format = "%9.3f" ; - DOXY_ADJUSTED_ERROR:FORTRAN_format = "F9.3f" ; - DOXY_ADJUSTED_ERROR:_FillValue = 99999.f ; - char PARAMETER(N_PROF, N_CALIB, N_PARAM, STRING64) ; - PARAMETER:long_name = "List of parameters with calibration information" ; - PARAMETER:conventions = "Argo reference table 3" ; - PARAMETER:_FillValue = " " ; - char SCIENTIFIC_CALIB_EQUATION(N_PROF, 
N_CALIB, N_PARAM, STRING256) ; - SCIENTIFIC_CALIB_EQUATION:long_name = "Calibration equation for this parameter" ; - SCIENTIFIC_CALIB_EQUATION:_FillValue = " " ; - char SCIENTIFIC_CALIB_COEFFICIENT(N_PROF, N_CALIB, N_PARAM, STRING256) ; - SCIENTIFIC_CALIB_COEFFICIENT:long_name = "Calibration coefficients for this equation" ; - SCIENTIFIC_CALIB_COEFFICIENT:_FillValue = " " ; - char SCIENTIFIC_CALIB_COMMENT(N_PROF, N_CALIB, N_PARAM, STRING256) ; - SCIENTIFIC_CALIB_COMMENT:long_name = "Comment applying to this parameter calibration" ; - SCIENTIFIC_CALIB_COMMENT:_FillValue = " " ; - char SCIENTIFIC_CALIB_DATE(N_PROF, N_CALIB, N_PARAM, DATE_TIME) ; - SCIENTIFIC_CALIB_DATE:long_name = "Date of calibration" ; - SCIENTIFIC_CALIB_DATE:conventions = "YYYYMMDDHHMISS" ; - SCIENTIFIC_CALIB_DATE:_FillValue = " " ; - char HISTORY_INSTITUTION(N_HISTORY, N_PROF, STRING4) ; - HISTORY_INSTITUTION:long_name = "Institution which performed action" ; - HISTORY_INSTITUTION:conventions = "Argo reference table 4" ; - HISTORY_INSTITUTION:_FillValue = " " ; - char HISTORY_STEP(N_HISTORY, N_PROF, STRING4) ; - HISTORY_STEP:long_name = "Step in data processing" ; - HISTORY_STEP:conventions = "Argo reference table 12" ; - HISTORY_STEP:_FillValue = " " ; - char HISTORY_SOFTWARE(N_HISTORY, N_PROF, STRING4) ; - HISTORY_SOFTWARE:long_name = "Name of software which performed action" ; - HISTORY_SOFTWARE:conventions = "Institution dependent" ; - HISTORY_SOFTWARE:_FillValue = " " ; - char HISTORY_SOFTWARE_RELEASE(N_HISTORY, N_PROF, STRING4) ; - HISTORY_SOFTWARE_RELEASE:long_name = "Version/release of software which performed action" ; - HISTORY_SOFTWARE_RELEASE:conventions = "Institution dependent" ; - HISTORY_SOFTWARE_RELEASE:_FillValue = " " ; - char HISTORY_REFERENCE(N_HISTORY, N_PROF, STRING64) ; - HISTORY_REFERENCE:long_name = "Reference of database" ; - HISTORY_REFERENCE:conventions = "Institution dependent" ; - HISTORY_REFERENCE:_FillValue = " " ; - char HISTORY_DATE(N_HISTORY, N_PROF, DATE_TIME) ; - HISTORY_DATE:long_name = "Date the history record was created" ; - HISTORY_DATE:conventions = "YYYYMMDDHHMISS" ; - HISTORY_DATE:_FillValue = " " ; - char HISTORY_ACTION(N_HISTORY, N_PROF, STRING4) ; - HISTORY_ACTION:long_name = "Action performed on data" ; - HISTORY_ACTION:conventions = "Argo reference table 7" ; - HISTORY_ACTION:_FillValue = " " ; - char HISTORY_PARAMETER(N_HISTORY, N_PROF, STRING64) ; - HISTORY_PARAMETER:long_name = "Station parameter action is performed on" ; - HISTORY_PARAMETER:conventions = "Argo reference table 3" ; - HISTORY_PARAMETER:_FillValue = " " ; - float HISTORY_START_PRES(N_HISTORY, N_PROF) ; - HISTORY_START_PRES:long_name = "Start pressure action applied on" ; - HISTORY_START_PRES:units = "decibar" ; - HISTORY_START_PRES:_FillValue = 99999.f ; - float HISTORY_STOP_PRES(N_HISTORY, N_PROF) ; - HISTORY_STOP_PRES:long_name = "Stop pressure action applied on" ; - HISTORY_STOP_PRES:units = "decibar" ; - HISTORY_STOP_PRES:_FillValue = 99999.f ; - float HISTORY_PREVIOUS_VALUE(N_HISTORY, N_PROF) ; - HISTORY_PREVIOUS_VALUE:long_name = "Parameter/Flag previous value before action" ; - HISTORY_PREVIOUS_VALUE:_FillValue = 99999.f ; - char HISTORY_QCTEST(N_HISTORY, N_PROF, STRING16) ; - HISTORY_QCTEST:long_name = "Documentation of tests performed, tests failed (in hex form)" ; - HISTORY_QCTEST:conventions = "Write tests performed when ACTION=QCP$; tests failed when ACTION=QCF$" ; - HISTORY_QCTEST:_FillValue = " " ; - -// global attributes: - :title = "Argo float vertical profile" ; - :institution = "AOML" 
; - :source = "Argo float" ; - :history = "2012-05-20 AOML 2.2 creation; 2015-05-29T19:36:21Z UW 3.1 conversion" ; - :references = "http://www.argodatamgt.org/Documentation" ; - :comment = "free text" ; - :user_manual_version = "3.1" ; - :Conventions = "Argo-3.1 CF-1.6" ; - :featureType = "trajectoryProfile" ; -data: - - DATA_TYPE = "B-Argo profile " ; - - FORMAT_VERSION = "3.1 " ; - - HANDBOOK_VERSION = "1.2 " ; - - REFERENCE_DATE_TIME = "19500101000000" ; - - PLATFORM_NUMBER = - "1900722 " ; - - PROJECT_NAME = - "US ARGO PROJECT " ; - - PI_NAME = - "STEPHEN RISER " ; - - STATION_PARAMETERS = - "PRES ", - "TEMP_DOXY ", - "BPHASE_DOXY ", - "DOXY " ; - - CYCLE_NUMBER = 2 ; - - DIRECTION = "A" ; - - DATA_CENTRE = - "AO" ; - - DATE_CREATION = "20120520122653" ; - - DATE_UPDATE = "20150529123621" ; - - DC_REFERENCE = - "1726_17964_002 " ; - - DATA_STATE_INDICATOR = - "2B " ; - -//********* reset P_D_M to make DATA_MODE inconsistent *********** - DATA_MODE = "R" ; - - PARAMETER_DATA_MODE = - "RDRR" ; - - PLATFORM_TYPE = - "APEX " ; - - FLOAT_SERIAL_NO = - "2596 " ; - - FIRMWARE_VERSION = - "012606 " ; - - WMO_INST_TYPE = - "846 " ; - - JULD = 20758.2808217439 ; - - JULD_QC = "1" ; - - JULD_LOCATION = 20758.3432986438 ; - - LATITUDE = -40.39 ; - - LONGITUDE = 73.528 ; - - POSITION_QC = "1" ; - - POSITIONING_SYSTEM = - "ARGOS " ; - - VERTICAL_SAMPLING_SCHEME = - "Primary sampling: discrete [] " ; - - CONFIG_MISSION_NUMBER = 1 ; - - PROFILE_TEMP_DOXY_QC = " " ; - - PROFILE_BPHASE_DOXY_QC = " " ; - - PROFILE_DOXY_QC = " " ; - - PRES = - 6.5, 10.3, 20.1, 30.5, 39.7, 50.4, 60.4, 70.5, - 80.4, 90.1, 100.0, 110.0, 120.4, 130.2, 140.4, 150.5, - 159.8, 169.7, 180.3, 190.1, 200.2, 210.2, 220.3, 229.2, - 240.4, 250.4, 259.5, 269.8, 279.9, 290.1, 299.6, 309.6, - 319.7, 329.9, 339.5, 349.8, 360.2, 380.6, 400.0, 450.0, - 500.3, 550.1, 600.4, 650.3, 700.1, 749.6, 800.3, 850.3, - 900.1, 950.3, 1000.4, 1050.0, 1100.2, 1150.4, 1200.0, 1249.8, - 1298.4, 1350.5, 1400.2, 1450.0, 1499.9, 1549.8, 1600.2, 1649.8, - 1700.4, 1750.0, 1800.5, 1850.1, 1899.5, 1949.5, 1999.8 ; - - TEMP_DOXY = - 12.940, 12.940, 12.800, 12.750, 12.720, 12.710, - 12.700, 12.680, 12.640, 12.590, 12.570, 12.550, - 12.530, 12.510, 12.490, 12.440, 12.440, 12.430, - 12.410, 12.360, 12.320, 12.310, 12.240, 12.170, - 12.110, 12.070, 12.050, 12.020, 11.960, 11.940, - 11.860, 11.760, 11.710, 11.610, 11.510, 11.380, - 11.280, 10.900, 11.020, 10.360, 10.200, 9.750, - 9.210, 8.740, 8.050, 7.400, 6.790, 6.120, - 5.510, 5.140, 4.730, 4.400, 4.130, 3.840, - 3.620, 3.520, 3.410, 3.290, 3.180, 3.080, - 3.000, 2.910, 2.860, 2.780, 2.720, 2.670, - 2.620, 2.590, 2.570, 2.540, 2.490 ; - - TEMP_DOXY_QC = - "00000000000000000000000000000000000000000000000000000000000000000000000" ; - -//****** add an adjusted value to a "R" variable ***** - TEMP_DOXY_ADJUSTED = - 12.940, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ ; - - TEMP_DOXY_ADJUSTED_QC = - " " ; - - TEMP_DOXY_ADJUSTED_ERROR = - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ ; - - BPHASE_DOXY = - 31.52, 31.52, 31.61, 31.66, 31.71, 31.79, 31.87, - 31.92, 32.00, 32.12, 32.19, 32.21, 32.25, 32.28, - 32.34, 32.45, 32.52, 32.55, 32.57, 32.62, 32.65, - 32.73, 32.87, 32.95, 33.02, 33.07, 
33.14, 33.12, - 33.17, 33.38, 33.45, 33.46, 33.43, 33.49, 33.69, - 33.62, 33.51, 33.43, 33.95, 34.91, 35.58, 36.18, - 36.82, 37.38, 38.05, 38.35, 38.71, 38.61, 38.91, - 39.30, 39.54, 39.88, 40.31, 40.83, 41.42, 41.71, - 42.11, 42.51, 42.78, 43.06, 43.29, 43.46, 43.55, - 43.62, 43.65, 43.65, 43.60, 43.57, 43.54, 43.46, - 43.38 ; - - BPHASE_DOXY_QC = - "00000000000000000000000000000000000000000000000000000000000000000000000" ; - - BPHASE_DOXY_ADJUSTED = - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ ; - - BPHASE_DOXY_ADJUSTED_QC = - " " ; - - BPHASE_DOXY_ADJUSTED_ERROR = - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ ; - - DOXY = - 231.635, 231.663, 231.275, 230.873, 230.289, 229.008, - 227.727, 227.076, 226.064, 224.428, 223.425, 223.310, - 222.850, 222.564, 221.763, 220.357, 219.215, 218.853, - 218.753, 218.390, 218.294, 217.087, 215.382, 214.699, - 214.108, 213.679, 212.748, 213.393, 213.137, 209.920, - 209.503, 210.235, 211.206, 211.127, 208.761, 211.050, - 213.765, 218.510, 209.043, 199.645, 191.159, 186.227, - 181.586, 177.686, 174.020, 174.977, 174.899, 181.368, - 182.132, 179.861, 179.946, 178.107, 174.690, 170.332, - 164.740, 162.130, 158.316, 154.657, 152.476, 150.147, - 148.273, 147.138, 146.657, 146.560, 146.795, 147.292, - 148.351, 149.074, 149.742, 151.036, 152.461 ; - - DOXY_QC = - "00000000000000000000000000000000000000000000000000000000000000000000000" ; - - DOXY_ADJUSTED = - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ ; - - DOXY_ADJUSTED_QC = - " " ; - - DOXY_ADJUSTED_ERROR = - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ ; - - PARAMETER = - "PRES ", - "TEMP_DOXY ", - "BPHASE_DOXY ", - "DOXY " ; - - SCIENTIFIC_CALIB_EQUATION = - "none ", - "none ", - "none ", - "none " ; - - SCIENTIFIC_CALIB_COEFFICIENT = - "none ", - "none ", - "none ", - "none " ; - - SCIENTIFIC_CALIB_COMMENT = - "none ", - "none ", - "none ", - "none " ; - - SCIENTIFIC_CALIB_DATE = - " ", - " ", - " ", - " " ; - - HISTORY_INSTITUTION = - "AO " ; - - HISTORY_STEP = - "ARFM" ; - - HISTORY_SOFTWARE = - " " ; - - HISTORY_SOFTWARE_RELEASE = - " " ; - - HISTORY_REFERENCE = - " " ; - - HISTORY_DATE = - "20120520122653" ; - - HISTORY_ACTION = - " " ; - - HISTORY_PARAMETER = - " " ; - - HISTORY_START_PRES = - _ ; - - HISTORY_STOP_PRES = - _ ; - - HISTORY_PREVIOUS_VALUE = - _ ; - - HISTORY_QCTEST = - " " ; -} diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile_DataWarning.cdl b/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile_DataWarning.cdl deleted file mode 100644 index b9ccb4f..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile_DataWarning.cdl +++ /dev/null @@ -1,600 +0,0 @@ -//****** set a PARAMETER_DATA_MODE setting to "D" / DATA_MODE = "R" -//****** add a TEMP_DOXY_ADJUSTED value in an to a "R" variable ***** - -netcdf BR1900722_002 { -dimensions: - DATE_TIME = 14 
; - STRING2 = 2 ; - STRING4 = 4 ; - STRING8 = 8 ; - STRING16 = 16 ; - STRING32 = 32 ; - STRING64 = 64 ; - STRING256 = 256 ; - N_PROF = 1 ; - N_PARAM = 4 ; - N_LEVELS = 71 ; - N_CALIB = 1 ; - N_HISTORY = UNLIMITED ; // (1 currently) -variables: - char DATA_TYPE(STRING32) ; - DATA_TYPE:long_name = "Data type" ; - DATA_TYPE:conventions = "Argo reference table 1" ; - DATA_TYPE:_FillValue = " " ; - char FORMAT_VERSION(STRING4) ; - FORMAT_VERSION:long_name = "File format version" ; - FORMAT_VERSION:_FillValue = " " ; - char HANDBOOK_VERSION(STRING4) ; - HANDBOOK_VERSION:long_name = "Data handbook version" ; - HANDBOOK_VERSION:_FillValue = " " ; - char REFERENCE_DATE_TIME(DATE_TIME) ; - REFERENCE_DATE_TIME:long_name = "Date of reference for Julian days" ; - REFERENCE_DATE_TIME:conventions = "YYYYMMDDHHMISS" ; - REFERENCE_DATE_TIME:_FillValue = " " ; - char PLATFORM_NUMBER(N_PROF, STRING8) ; - PLATFORM_NUMBER:long_name = "Float unique identifier" ; - PLATFORM_NUMBER:conventions = "WMO float identifier : A9IIIII" ; - PLATFORM_NUMBER:_FillValue = " " ; - char PROJECT_NAME(N_PROF, STRING64) ; - PROJECT_NAME:long_name = "Name of the project" ; - PROJECT_NAME:_FillValue = " " ; - char PI_NAME(N_PROF, STRING64) ; - PI_NAME:long_name = "Name of the principal investigator" ; - PI_NAME:_FillValue = " " ; - char STATION_PARAMETERS(N_PROF, N_PARAM, STRING64) ; - STATION_PARAMETERS:long_name = "List of available parameters for the station" ; - STATION_PARAMETERS:conventions = "Argo reference table 3" ; - STATION_PARAMETERS:_FillValue = " " ; - int CYCLE_NUMBER(N_PROF) ; - CYCLE_NUMBER:long_name = "Float cycle number" ; - CYCLE_NUMBER:conventions = "0...N, 0 : launch cycle (if exists), 1 : first complete cycle" ; - CYCLE_NUMBER:_FillValue = 99999 ; - char DIRECTION(N_PROF) ; - DIRECTION:long_name = "Direction of the station profiles" ; - DIRECTION:conventions = "A: ascending profiles, D: descending profiles" ; - DIRECTION:_FillValue = " " ; - char DATA_CENTRE(N_PROF, STRING2) ; - DATA_CENTRE:long_name = "Data centre in charge of float data processing" ; - DATA_CENTRE:conventions = "Argo reference table 4" ; - DATA_CENTRE:_FillValue = " " ; - char DATE_CREATION(DATE_TIME) ; - DATE_CREATION:long_name = "Date of file creation" ; - DATE_CREATION:conventions = "YYYYMMDDHHMISS" ; - DATE_CREATION:_FillValue = " " ; - char DATE_UPDATE(DATE_TIME) ; - DATE_UPDATE:long_name = "Date of update of this file" ; - DATE_UPDATE:conventions = "YYYYMMDDHHMISS" ; - DATE_UPDATE:_FillValue = " " ; - char DC_REFERENCE(N_PROF, STRING32) ; - DC_REFERENCE:long_name = "Station unique identifier in data centre" ; - DC_REFERENCE:conventions = "Data centre convention" ; - DC_REFERENCE:_FillValue = " " ; - char DATA_STATE_INDICATOR(N_PROF, STRING4) ; - DATA_STATE_INDICATOR:long_name = "Degree of processing the data have passed through" ; - DATA_STATE_INDICATOR:conventions = "Argo reference table 6" ; - DATA_STATE_INDICATOR:_FillValue = " " ; - char DATA_MODE(N_PROF) ; - DATA_MODE:long_name = "Delayed mode or real time data" ; - DATA_MODE:conventions = "R : real time; D : delayed mode; A : real time with adjustment" ; - DATA_MODE:_FillValue = " " ; - char PARAMETER_DATA_MODE(N_PROF, N_PARAM) ; - PARAMETER_DATA_MODE:long_name = "Delayed mode or real time data" ; - PARAMETER_DATA_MODE:conventions = "R : real time; D : delayed mode; A : real time with adjustment" ; - PARAMETER_DATA_MODE:_FillValue = " " ; - char PLATFORM_TYPE(N_PROF, STRING32) ; - PLATFORM_TYPE:long_name = "Type of float" ; - PLATFORM_TYPE:conventions = "Argo reference table 23" 
; - PLATFORM_TYPE:_FillValue = " " ; - char FLOAT_SERIAL_NO(N_PROF, STRING32) ; - FLOAT_SERIAL_NO:long_name = "Serial number of the float" ; - FLOAT_SERIAL_NO:_FillValue = " " ; - char FIRMWARE_VERSION(N_PROF, STRING32) ; - FIRMWARE_VERSION:long_name = "Instrument firmware version" ; - FIRMWARE_VERSION:_FillValue = " " ; - char WMO_INST_TYPE(N_PROF, STRING4) ; - WMO_INST_TYPE:long_name = "Coded instrument type" ; - WMO_INST_TYPE:conventions = "Argo reference table 8" ; - WMO_INST_TYPE:_FillValue = " " ; - double JULD(N_PROF) ; - JULD:long_name = "Julian day (UTC) of the station relative to REFERENCE_DATE_TIME" ; - JULD:standard_name = "time" ; - JULD:units = "days since 1950-01-01 00:00:00 UTC" ; - JULD:conventions = "Relative julian days with decimal part (as parts of day)" ; - JULD:resolution = 1.e-08 ; - JULD:_FillValue = 999999. ; - JULD:axis = "T" ; - char JULD_QC(N_PROF) ; - JULD_QC:long_name = "Quality on date and time" ; - JULD_QC:conventions = "Argo reference table 2" ; - JULD_QC:_FillValue = " " ; - double JULD_LOCATION(N_PROF) ; - JULD_LOCATION:long_name = "Julian day (UTC) of the location relative to REFERENCE_DATE_TIME" ; - JULD_LOCATION:units = "days since 1950-01-01 00:00:00 UTC" ; - JULD_LOCATION:conventions = "Relative julian days with decimal part (as parts of day)" ; - JULD_LOCATION:resolution = 1.e-08 ; - JULD_LOCATION:_FillValue = 999999. ; - double LATITUDE(N_PROF) ; - LATITUDE:long_name = "Latitude of the station, best estimate" ; - LATITUDE:standard_name = "latitude" ; - LATITUDE:units = "degree_north" ; - LATITUDE:_FillValue = 99999. ; - LATITUDE:valid_min = -90. ; - LATITUDE:valid_max = 90. ; - LATITUDE:axis = "Y" ; - double LONGITUDE(N_PROF) ; - LONGITUDE:long_name = "Longitude of the station, best estimate" ; - LONGITUDE:standard_name = "longitude" ; - LONGITUDE:units = "degree_east" ; - LONGITUDE:_FillValue = 99999. ; - LONGITUDE:valid_min = -180. ; - LONGITUDE:valid_max = 180. 
; - LONGITUDE:axis = "X" ; - char POSITION_QC(N_PROF) ; - POSITION_QC:long_name = "Quality on position (latitude and longitude)" ; - POSITION_QC:conventions = "Argo reference table 2" ; - POSITION_QC:_FillValue = " " ; - char POSITIONING_SYSTEM(N_PROF, STRING8) ; - POSITIONING_SYSTEM:long_name = "Positioning system" ; - POSITIONING_SYSTEM:_FillValue = " " ; - char VERTICAL_SAMPLING_SCHEME(N_PROF, STRING256) ; - VERTICAL_SAMPLING_SCHEME:long_name = "Vertical sampling scheme" ; - VERTICAL_SAMPLING_SCHEME:conventions = "Argo reference table 16" ; - VERTICAL_SAMPLING_SCHEME:_FillValue = " " ; - int CONFIG_MISSION_NUMBER(N_PROF) ; - CONFIG_MISSION_NUMBER:long_name = "Unique number denoting the missions performed by the float" ; - CONFIG_MISSION_NUMBER:conventions = "1...N, 1 : first complete mission" ; - CONFIG_MISSION_NUMBER:_FillValue = 99999 ; - char PROFILE_TEMP_DOXY_QC(N_PROF) ; - PROFILE_TEMP_DOXY_QC:long_name = "Global quality flag of TEMP_DOXY profile" ; - PROFILE_TEMP_DOXY_QC:conventions = "Argo reference table 2a" ; - PROFILE_TEMP_DOXY_QC:_FillValue = " " ; - char PROFILE_BPHASE_DOXY_QC(N_PROF) ; - PROFILE_BPHASE_DOXY_QC:long_name = "Global quality flag of BPHASE_DOXY profile" ; - PROFILE_BPHASE_DOXY_QC:conventions = "Argo reference table 2a" ; - PROFILE_BPHASE_DOXY_QC:_FillValue = " " ; - char PROFILE_DOXY_QC(N_PROF) ; - PROFILE_DOXY_QC:long_name = "Global quality flag of DOXY profile" ; - PROFILE_DOXY_QC:conventions = "Argo reference table 2a" ; - PROFILE_DOXY_QC:_FillValue = " " ; - float PRES(N_PROF, N_LEVELS) ; - PRES:long_name = "Sea water pressure, equals 0 at sea-level" ; - PRES:standard_name = "sea_water_pressure" ; - PRES:units = "decibar" ; - PRES:valid_min = 0.f ; - PRES:valid_max = 12000.f ; - PRES:resolution = 0.1f ; - PRES:C_format = "%7.1f" ; - PRES:FORTRAN_format = "F7.1f" ; - PRES:_FillValue = 99999.f ; - PRES:axis = "Z" ; - float TEMP_DOXY(N_PROF, N_LEVELS) ; - TEMP_DOXY:long_name = "Sea temperature from oxygen sensor ITS-90 scale" ; - TEMP_DOXY:standard_name = "temperature_of_sensor_for_oxygen_in_sea_water" ; - TEMP_DOXY:units = "degree_Celsius" ; - TEMP_DOXY:valid_min = -2.f ; - TEMP_DOXY:valid_max = 40.f ; - TEMP_DOXY:resolution = 0.001f ; - TEMP_DOXY:C_format = "%9.3f" ; - TEMP_DOXY:FORTRAN_format = "F9.3f" ; - TEMP_DOXY:_FillValue = 99999.f ; - char TEMP_DOXY_QC(N_PROF, N_LEVELS) ; - TEMP_DOXY_QC:long_name = "quality flag" ; - TEMP_DOXY_QC:conventions = "Argo reference table 2" ; - TEMP_DOXY_QC:_FillValue = " " ; - float TEMP_DOXY_ADJUSTED(N_PROF, N_LEVELS) ; - TEMP_DOXY_ADJUSTED:long_name = "Sea temperature from oxygen sensor ITS-90 scale" ; - TEMP_DOXY_ADJUSTED:standard_name = "temperature_of_sensor_for_oxygen_in_sea_water" ; - TEMP_DOXY_ADJUSTED:units = "degree_Celsius" ; - TEMP_DOXY_ADJUSTED:valid_min = -2.f ; - TEMP_DOXY_ADJUSTED:valid_max = 40.f ; - TEMP_DOXY_ADJUSTED:resolution = 0.001f ; - TEMP_DOXY_ADJUSTED:C_format = "%9.3f" ; - TEMP_DOXY_ADJUSTED:FORTRAN_format = "F9.3f" ; - TEMP_DOXY_ADJUSTED:_FillValue = 99999.f ; - char TEMP_DOXY_ADJUSTED_QC(N_PROF, N_LEVELS) ; - TEMP_DOXY_ADJUSTED_QC:long_name = "quality flag" ; - TEMP_DOXY_ADJUSTED_QC:conventions = "Argo reference table 2" ; - TEMP_DOXY_ADJUSTED_QC:_FillValue = " " ; - float TEMP_DOXY_ADJUSTED_ERROR(N_PROF, N_LEVELS) ; - TEMP_DOXY_ADJUSTED_ERROR:long_name = "Contains the error on the adjusted values as determined by the delayed mode QC process" ; - TEMP_DOXY_ADJUSTED_ERROR:units = "degree_Celsius" ; - TEMP_DOXY_ADJUSTED_ERROR:resolution = 0.001f ; - TEMP_DOXY_ADJUSTED_ERROR:C_format = "%9.3f" ; 
- TEMP_DOXY_ADJUSTED_ERROR:FORTRAN_format = "F9.3f" ; - TEMP_DOXY_ADJUSTED_ERROR:_FillValue = 99999.f ; - float BPHASE_DOXY(N_PROF, N_LEVELS) ; - BPHASE_DOXY:long_name = "Uncalibrated phase shift reported by oxygen sensor" ; - BPHASE_DOXY:units = "degree" ; - BPHASE_DOXY:valid_min = 10.f ; - BPHASE_DOXY:valid_max = 70.f ; - BPHASE_DOXY:resolution = 0.01f ; - BPHASE_DOXY:C_format = "%8.2f" ; - BPHASE_DOXY:FORTRAN_format = "F8.2f" ; - BPHASE_DOXY:_FillValue = 99999.f ; - char BPHASE_DOXY_QC(N_PROF, N_LEVELS) ; - BPHASE_DOXY_QC:long_name = "quality flag" ; - BPHASE_DOXY_QC:conventions = "Argo reference table 2" ; - BPHASE_DOXY_QC:_FillValue = " " ; - float BPHASE_DOXY_ADJUSTED(N_PROF, N_LEVELS) ; - BPHASE_DOXY_ADJUSTED:long_name = "Uncalibrated phase shift reported by oxygen sensor" ; - BPHASE_DOXY_ADJUSTED:units = "degree" ; - BPHASE_DOXY_ADJUSTED:valid_min = 10.f ; - BPHASE_DOXY_ADJUSTED:valid_max = 70.f ; - BPHASE_DOXY_ADJUSTED:resolution = 0.01f ; - BPHASE_DOXY_ADJUSTED:C_format = "%8.2f" ; - BPHASE_DOXY_ADJUSTED:FORTRAN_format = "F8.2f" ; - BPHASE_DOXY_ADJUSTED:_FillValue = 99999.f ; - char BPHASE_DOXY_ADJUSTED_QC(N_PROF, N_LEVELS) ; - BPHASE_DOXY_ADJUSTED_QC:long_name = "quality flag" ; - BPHASE_DOXY_ADJUSTED_QC:conventions = "Argo reference table 2" ; - BPHASE_DOXY_ADJUSTED_QC:_FillValue = " " ; - float BPHASE_DOXY_ADJUSTED_ERROR(N_PROF, N_LEVELS) ; - BPHASE_DOXY_ADJUSTED_ERROR:long_name = "Contains the error on the adjusted values as determined by the delayed mode QC process" ; - BPHASE_DOXY_ADJUSTED_ERROR:units = "degree" ; - BPHASE_DOXY_ADJUSTED_ERROR:resolution = 0.01f ; - BPHASE_DOXY_ADJUSTED_ERROR:C_format = "%8.2f" ; - BPHASE_DOXY_ADJUSTED_ERROR:FORTRAN_format = "F8.2f" ; - BPHASE_DOXY_ADJUSTED_ERROR:_FillValue = 99999.f ; - float DOXY(N_PROF, N_LEVELS) ; - DOXY:long_name = "Dissolved oxygen" ; - DOXY:standard_name = "moles_of_oxygen_per_unit_mass_in_sea_water" ; - DOXY:units = "micromole/kg" ; - DOXY:valid_min = 0.f ; - DOXY:valid_max = 600.f ; - DOXY:resolution = 0.001f ; - DOXY:C_format = "%9.3f" ; - DOXY:FORTRAN_format = "F9.3f" ; - DOXY:_FillValue = 99999.f ; - char DOXY_QC(N_PROF, N_LEVELS) ; - DOXY_QC:long_name = "quality flag" ; - DOXY_QC:conventions = "Argo reference table 2" ; - DOXY_QC:_FillValue = " " ; - float DOXY_ADJUSTED(N_PROF, N_LEVELS) ; - DOXY_ADJUSTED:long_name = "Dissolved oxygen" ; - DOXY_ADJUSTED:standard_name = "moles_of_oxygen_per_unit_mass_in_sea_water" ; - DOXY_ADJUSTED:units = "micromole/kg" ; - DOXY_ADJUSTED:valid_min = 0.f ; - DOXY_ADJUSTED:valid_max = 600.f ; - DOXY_ADJUSTED:resolution = 0.001f ; - DOXY_ADJUSTED:C_format = "%9.3f" ; - DOXY_ADJUSTED:FORTRAN_format = "F9.3f" ; - DOXY_ADJUSTED:_FillValue = 99999.f ; - char DOXY_ADJUSTED_QC(N_PROF, N_LEVELS) ; - DOXY_ADJUSTED_QC:long_name = "quality flag" ; - DOXY_ADJUSTED_QC:conventions = "Argo reference table 2" ; - DOXY_ADJUSTED_QC:_FillValue = " " ; - float DOXY_ADJUSTED_ERROR(N_PROF, N_LEVELS) ; - DOXY_ADJUSTED_ERROR:long_name = "Contains the error on the adjusted values as determined by the delayed mode QC process" ; - DOXY_ADJUSTED_ERROR:units = "micromole/kg" ; - DOXY_ADJUSTED_ERROR:resolution = 0.001f ; - DOXY_ADJUSTED_ERROR:C_format = "%9.3f" ; - DOXY_ADJUSTED_ERROR:FORTRAN_format = "F9.3f" ; - DOXY_ADJUSTED_ERROR:_FillValue = 99999.f ; - char PARAMETER(N_PROF, N_CALIB, N_PARAM, STRING64) ; - PARAMETER:long_name = "List of parameters with calibration information" ; - PARAMETER:conventions = "Argo reference table 3" ; - PARAMETER:_FillValue = " " ; - char SCIENTIFIC_CALIB_EQUATION(N_PROF, 
N_CALIB, N_PARAM, STRING256) ; - SCIENTIFIC_CALIB_EQUATION:long_name = "Calibration equation for this parameter" ; - SCIENTIFIC_CALIB_EQUATION:_FillValue = " " ; - char SCIENTIFIC_CALIB_COEFFICIENT(N_PROF, N_CALIB, N_PARAM, STRING256) ; - SCIENTIFIC_CALIB_COEFFICIENT:long_name = "Calibration coefficients for this equation" ; - SCIENTIFIC_CALIB_COEFFICIENT:_FillValue = " " ; - char SCIENTIFIC_CALIB_COMMENT(N_PROF, N_CALIB, N_PARAM, STRING256) ; - SCIENTIFIC_CALIB_COMMENT:long_name = "Comment applying to this parameter calibration" ; - SCIENTIFIC_CALIB_COMMENT:_FillValue = " " ; - char SCIENTIFIC_CALIB_DATE(N_PROF, N_CALIB, N_PARAM, DATE_TIME) ; - SCIENTIFIC_CALIB_DATE:long_name = "Date of calibration" ; - SCIENTIFIC_CALIB_DATE:conventions = "YYYYMMDDHHMISS" ; - SCIENTIFIC_CALIB_DATE:_FillValue = " " ; - char HISTORY_INSTITUTION(N_HISTORY, N_PROF, STRING4) ; - HISTORY_INSTITUTION:long_name = "Institution which performed action" ; - HISTORY_INSTITUTION:conventions = "Argo reference table 4" ; - HISTORY_INSTITUTION:_FillValue = " " ; - char HISTORY_STEP(N_HISTORY, N_PROF, STRING4) ; - HISTORY_STEP:long_name = "Step in data processing" ; - HISTORY_STEP:conventions = "Argo reference table 12" ; - HISTORY_STEP:_FillValue = " " ; - char HISTORY_SOFTWARE(N_HISTORY, N_PROF, STRING4) ; - HISTORY_SOFTWARE:long_name = "Name of software which performed action" ; - HISTORY_SOFTWARE:conventions = "Institution dependent" ; - HISTORY_SOFTWARE:_FillValue = " " ; - char HISTORY_SOFTWARE_RELEASE(N_HISTORY, N_PROF, STRING4) ; - HISTORY_SOFTWARE_RELEASE:long_name = "Version/release of software which performed action" ; - HISTORY_SOFTWARE_RELEASE:conventions = "Institution dependent" ; - HISTORY_SOFTWARE_RELEASE:_FillValue = " " ; - char HISTORY_REFERENCE(N_HISTORY, N_PROF, STRING64) ; - HISTORY_REFERENCE:long_name = "Reference of database" ; - HISTORY_REFERENCE:conventions = "Institution dependent" ; - HISTORY_REFERENCE:_FillValue = " " ; - char HISTORY_DATE(N_HISTORY, N_PROF, DATE_TIME) ; - HISTORY_DATE:long_name = "Date the history record was created" ; - HISTORY_DATE:conventions = "YYYYMMDDHHMISS" ; - HISTORY_DATE:_FillValue = " " ; - char HISTORY_ACTION(N_HISTORY, N_PROF, STRING4) ; - HISTORY_ACTION:long_name = "Action performed on data" ; - HISTORY_ACTION:conventions = "Argo reference table 7" ; - HISTORY_ACTION:_FillValue = " " ; - char HISTORY_PARAMETER(N_HISTORY, N_PROF, STRING64) ; - HISTORY_PARAMETER:long_name = "Station parameter action is performed on" ; - HISTORY_PARAMETER:conventions = "Argo reference table 3" ; - HISTORY_PARAMETER:_FillValue = " " ; - float HISTORY_START_PRES(N_HISTORY, N_PROF) ; - HISTORY_START_PRES:long_name = "Start pressure action applied on" ; - HISTORY_START_PRES:units = "decibar" ; - HISTORY_START_PRES:_FillValue = 99999.f ; - float HISTORY_STOP_PRES(N_HISTORY, N_PROF) ; - HISTORY_STOP_PRES:long_name = "Stop pressure action applied on" ; - HISTORY_STOP_PRES:units = "decibar" ; - HISTORY_STOP_PRES:_FillValue = 99999.f ; - float HISTORY_PREVIOUS_VALUE(N_HISTORY, N_PROF) ; - HISTORY_PREVIOUS_VALUE:long_name = "Parameter/Flag previous value before action" ; - HISTORY_PREVIOUS_VALUE:_FillValue = 99999.f ; - char HISTORY_QCTEST(N_HISTORY, N_PROF, STRING16) ; - HISTORY_QCTEST:long_name = "Documentation of tests performed, tests failed (in hex form)" ; - HISTORY_QCTEST:conventions = "Write tests performed when ACTION=QCP$; tests failed when ACTION=QCF$" ; - HISTORY_QCTEST:_FillValue = " " ; - -// global attributes: - :title = "Argo float vertical profile" ; - :institution = "AOML" 
; - :source = "Argo float" ; - :history = "2012-05-20 AOML 2.2 creation; 2015-05-29T19:36:21Z UW 3.1 conversion" ; - :references = "http://www.argodatamgt.org/Documentation" ; - :comment = "free text" ; - :user_manual_version = "3.1" ; - :Conventions = "Argo-3.1 CF-1.6" ; - :featureType = "trajectoryProfile" ; -data: - - DATA_TYPE = "B-Argo profile " ; - - FORMAT_VERSION = "3.1 " ; - - HANDBOOK_VERSION = "1.2 " ; - - REFERENCE_DATE_TIME = "19500101000000" ; - - PLATFORM_NUMBER = - "1900722 " ; - - PROJECT_NAME = - "US ARGO PROJECT " ; - - PI_NAME = - "STEPHEN RISER " ; - - STATION_PARAMETERS = - "PRES ", - "TEMP_DOXY ", - "BPHASE_DOXY ", - "DOXY " ; - - CYCLE_NUMBER = 2 ; - - DIRECTION = "A" ; - - DATA_CENTRE = - "AO" ; - - DATE_CREATION = "20120520122653" ; - - DATE_UPDATE = "20150529123621" ; - - DC_REFERENCE = - "1726_17964_002 " ; - - DATA_STATE_INDICATOR = - "2B " ; - -//********* reset P_D_M to make DATA_MODE inconsistent *********** - DATA_MODE = "R" ; - - PARAMETER_DATA_MODE = - "RRDR" ; - - PLATFORM_TYPE = - "APEX " ; - - FLOAT_SERIAL_NO = - "2596 " ; - - FIRMWARE_VERSION = - "012606 " ; - - WMO_INST_TYPE = - "846 " ; - - JULD = 20758.2808217439 ; - - JULD_QC = "1" ; - - JULD_LOCATION = 20758.3432986438 ; - - LATITUDE = -40.39 ; - - LONGITUDE = 73.528 ; - - POSITION_QC = "1" ; - - POSITIONING_SYSTEM = - "ARGOS " ; - - VERTICAL_SAMPLING_SCHEME = - "Primary sampling: discrete [] " ; - - CONFIG_MISSION_NUMBER = 1 ; - - PROFILE_TEMP_DOXY_QC = " " ; - - PROFILE_BPHASE_DOXY_QC = " " ; - - PROFILE_DOXY_QC = " " ; - - PRES = - 6.5, 10.3, 20.1, 30.5, 39.7, 50.4, 60.4, 70.5, - 80.4, 90.1, 100.0, 110.0, 120.4, 130.2, 140.4, 150.5, - 159.8, 169.7, 180.3, 190.1, 200.2, 210.2, 220.3, 229.2, - 240.4, 250.4, 259.5, 269.8, 279.9, 290.1, 299.6, 309.6, - 319.7, 329.9, 339.5, 349.8, 360.2, 380.6, 400.0, 450.0, - 500.3, 550.1, 600.4, 650.3, 700.1, 749.6, 800.3, 850.3, - 900.1, 950.3, 1000.4, 1050.0, 1100.2, 1150.4, 1200.0, 1249.8, - 1298.4, 1350.5, 1400.2, 1450.0, 1499.9, 1549.8, 1600.2, 1649.8, - 1700.4, 1750.0, 1800.5, 1850.1, 1899.5, 1949.5, 1999.8 ; - - TEMP_DOXY = - 12.940, 12.940, 12.800, 12.750, 12.720, 12.710, - 12.700, 12.680, 12.640, 12.590, 12.570, 12.550, - 12.530, 12.510, 12.490, 12.440, 12.440, 12.430, - 12.410, 12.360, 12.320, 12.310, 12.240, 12.170, - 12.110, 12.070, 12.050, 12.020, 11.960, 11.940, - 11.860, 11.760, 11.710, 11.610, 11.510, 11.380, - 11.280, 10.900, 11.020, 10.360, 10.200, 9.750, - 9.210, 8.740, 8.050, 7.400, 6.790, 6.120, - 5.510, 5.140, 4.730, 4.400, 4.130, 3.840, - 3.620, 3.520, 3.410, 3.290, 3.180, 3.080, - 3.000, 2.910, 2.860, 2.780, 2.720, 2.670, - 2.620, 2.590, 2.570, 2.540, 2.490 ; - - TEMP_DOXY_QC = - "00000000000000000000000000000000000000000000000000000000000000000000000" ; - -//****** add an adjusted value to a "R" variable ***** - TEMP_DOXY_ADJUSTED = - 12.940, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ ; - - TEMP_DOXY_ADJUSTED_QC = - " " ; - - TEMP_DOXY_ADJUSTED_ERROR = - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ ; - - BPHASE_DOXY = - 31.52, 31.52, 31.61, 31.66, 31.71, 31.79, 31.87, - 31.92, 32.00, 32.12, 32.19, 32.21, 32.25, 32.28, - 32.34, 32.45, 32.52, 32.55, 32.57, 32.62, 32.65, - 32.73, 32.87, 32.95, 33.02, 33.07, 
33.14, 33.12, - 33.17, 33.38, 33.45, 33.46, 33.43, 33.49, 33.69, - 33.62, 33.51, 33.43, 33.95, 34.91, 35.58, 36.18, - 36.82, 37.38, 38.05, 38.35, 38.71, 38.61, 38.91, - 39.30, 39.54, 39.88, 40.31, 40.83, 41.42, 41.71, - 42.11, 42.51, 42.78, 43.06, 43.29, 43.46, 43.55, - 43.62, 43.65, 43.65, 43.60, 43.57, 43.54, 43.46, - 43.38 ; - - BPHASE_DOXY_QC = - "00000000000000000000000000000000000000000000000000000000000000000000000" ; - - BPHASE_DOXY_ADJUSTED = - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ ; - - BPHASE_DOXY_ADJUSTED_QC = - " " ; - - BPHASE_DOXY_ADJUSTED_ERROR = - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ ; - - DOXY = - 231.635, 231.663, 231.275, 230.873, 230.289, 229.008, - 227.727, 227.076, 226.064, 224.428, 223.425, 223.310, - 222.850, 222.564, 221.763, 220.357, 219.215, 218.853, - 218.753, 218.390, 218.294, 217.087, 215.382, 214.699, - 214.108, 213.679, 212.748, 213.393, 213.137, 209.920, - 209.503, 210.235, 211.206, 211.127, 208.761, 211.050, - 213.765, 218.510, 209.043, 199.645, 191.159, 186.227, - 181.586, 177.686, 174.020, 174.977, 174.899, 181.368, - 182.132, 179.861, 179.946, 178.107, 174.690, 170.332, - 164.740, 162.130, 158.316, 154.657, 152.476, 150.147, - 148.273, 147.138, 146.657, 146.560, 146.795, 147.292, - 148.351, 149.074, 149.742, 151.036, 152.461 ; - - DOXY_QC = - "00000000000000000000000000000000000000000000000000000000000000000000000" ; - - DOXY_ADJUSTED = - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ ; - - DOXY_ADJUSTED_QC = - " " ; - - DOXY_ADJUSTED_ERROR = - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, - _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ ; - - PARAMETER = - "PRES ", - "TEMP_DOXY ", - "BPHASE_DOXY ", - "DOXY " ; - - SCIENTIFIC_CALIB_EQUATION = - "none ", - "none ", - "none ", - "none " ; - - SCIENTIFIC_CALIB_COEFFICIENT = - "none ", - "none ", - "none ", - "none " ; - - SCIENTIFIC_CALIB_COMMENT = - "none ", - "none ", - "none ", - "none " ; - - SCIENTIFIC_CALIB_DATE = - " ", - " ", - " ", - " " ; - - HISTORY_INSTITUTION = - "AO " ; - - HISTORY_STEP = - "ARFM" ; - - HISTORY_SOFTWARE = - " " ; - - HISTORY_SOFTWARE_RELEASE = - " " ; - - HISTORY_REFERENCE = - " " ; - - HISTORY_DATE = - "20120520122653" ; - - HISTORY_ACTION = - " " ; - - HISTORY_PARAMETER = - " " ; - - HISTORY_START_PRES = - _ ; - - HISTORY_STOP_PRES = - _ ; - - HISTORY_PREVIOUS_VALUE = - _ ; - - HISTORY_QCTEST = - " " ; -} diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile_FormatInvalid.cdl b/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile_FormatInvalid.cdl deleted file mode 100644 index 5828a60..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile_FormatInvalid.cdl +++ /dev/null @@ -1,329 +0,0 @@ -netcdf \7900685_meta { -dimensions: - DATE_TIME = 14 ; - STRING2 = 2 ; - STRING4 = 4 ; - STRING8 = 8 ; - STRING16 = 16 ; - STRING32 = 32 ; - STRING64 = 64 ; - STRING128 = 128 ; - STRING256 
= 256 ; - STRING1024 = 1024 ; - N_PARAM = 1 ; - N_SENSOR = 1 ; - N_CONFIG_PARAM = 1 ; - N_LAUNCH_CONFIG_PARAM = 1 ; - N_POSITIONING_SYSTEM = 1 ; - N_TRANS_SYSTEM = 1 ; - N_MISSIONS = UNLIMITED ; // (4 currently) -variables: - char DATA_TYPE(STRING16) ; - DATA_TYPE:long_name = "Data type" ; - DATA_TYPE:conventions = "***** bad setting for this attribute *****"; - DATA_TYPE:_FillValue = " " ; - char FORMAT_VERSION(STRING16) ; - FORMAT_VERSION:long_name = "File format version" ; - FORMAT_VERSION:_FillValue = " " ; - char HANDBOOK_VERSION(STRING4) ; - HANDBOOK_VERSION:long_name = "Data handbook version" ; - HANDBOOK_VERSION:_FillValue = " " ; - char DATE_CREATION(DATE_TIME) ; - DATE_CREATION:long_name = "Date of file creation" ; - DATE_CREATION:conventions = "YYYYMMDDHHMISS" ; - DATE_CREATION:comment = " " ; - DATE_CREATION:_FillValue = " " ; - char DATE_UPDATE(DATE_TIME) ; - DATE_UPDATE:long_name = "Date of update of this file" ; - DATE_UPDATE:conventions = "YYYYMMDDHHMISS" ; - DATE_UPDATE:_FillValue = " " ; - char PLATFORM_NUMBER(STRING8) ; - PLATFORM_NUMBER:long_name = "Float unique identifier" ; - PLATFORM_NUMBER:conventions = "WMO float identifier : A9IIIII" ; - PLATFORM_NUMBER:_FillValue = " " ; - char PTT(STRING256) ; - PTT:long_name = "Transmission identifier (ARGOS, ORBCOMM, etc.)" ; - PTT:_FillValue = " " ; - char TRANS_SYSTEM(N_TRANS_SYSTEM, STRING16) ; - TRANS_SYSTEM:long_name = "Telecommunications system used" ; - TRANS_SYSTEM:_FillValue = " " ; - char TRANS_SYSTEM_ID(N_TRANS_SYSTEM, STRING32) ; - TRANS_SYSTEM_ID:long_name = "Program identifier used by the transmission system" ; - TRANS_SYSTEM_ID:_FillValue = " " ; - char TRANS_FREQUENCY(N_TRANS_SYSTEM, STRING16) ; - TRANS_FREQUENCY:long_name = "Frequency of transmission from the float" ; - TRANS_FREQUENCY:units = "hertz" ; - TRANS_FREQUENCY:_FillValue = " " ; - char POSITIONING_SYSTEM(N_POSITIONING_SYSTEM, STRING8) ; - POSITIONING_SYSTEM:long_name = "Positioning system" ; - POSITIONING_SYSTEM:_FillValue = " " ; - char PLATFORM_FAMILY(STRING256) ; - PLATFORM_FAMILY:long_name = "Category of instrument" ; - PLATFORM_FAMILY:conventions = "Argo reference table 22" ; - PLATFORM_FAMILY:_FillValue = " " ; - char PLATFORM_TYPE(STRING32) ; - PLATFORM_TYPE:long_name = "Type of float" ; - PLATFORM_TYPE:conventions = "Argo reference table 23" ; - PLATFORM_TYPE:_FillValue = " " ; - char PLATFORM_MAKER(STRING256) ; - PLATFORM_MAKER:long_name = "Name of the manufacturer" ; - PLATFORM_MAKER:conventions = "Argo reference table 24" ; - PLATFORM_MAKER:_FillValue = " " ; - char FIRMWARE_VERSION(STRING32) ; - FIRMWARE_VERSION:long_name = "Firmware version for the float" ; - FIRMWARE_VERSION:_FillValue = " " ; - char MANUAL_VERSION(STRING16) ; - MANUAL_VERSION:long_name = "Manual version for the float" ; - MANUAL_VERSION:_FillValue = " " ; - char FLOAT_SERIAL_NO(STRING32) ; - FLOAT_SERIAL_NO:long_name = "Serial number of the float" ; - FLOAT_SERIAL_NO:_FillValue = " " ; - char STANDARD_FORMAT_ID(STRING16) ; - STANDARD_FORMAT_ID:long_name = "Standard format number to describe the data format type for each float" ; - STANDARD_FORMAT_ID:_FillValue = " " ; - char DAC_FORMAT_ID(STRING16) ; - DAC_FORMAT_ID:long_name = "Format number used by the DAC to describe the data format type for each float" ; - DAC_FORMAT_ID:_FillValue = " " ; - char WMO_INST_TYPE(STRING4) ; - WMO_INST_TYPE:long_name = "Coded instrument type" ; - WMO_INST_TYPE:conventions = "Argo reference table 8" ; - WMO_INST_TYPE:_FillValue = " " ; - char PROJECT_NAME(STRING64) ; - 
PROJECT_NAME:long_name = "Program under which the float was deployed" ; - PROJECT_NAME:_FillValue = " " ; - char DATA_CENTRE(STRING2) ; - DATA_CENTRE:long_name = "Data centre in charge of float real-time processing" ; - DATA_CENTRE:conventions = "Argo reference table 4" ; - DATA_CENTRE:_FillValue = " " ; - char PI_NAME(STRING64) ; - PI_NAME:long_name = "Name of the principal investigator" ; - PI_NAME:_FillValue = " " ; - char ANOMALY(STRING256) ; - ANOMALY:long_name = "Describe any anomalies or problems the float may have had" ; - ANOMALY:_FillValue = " " ; - char BATTERY_TYPE(STRING64) ; - BATTERY_TYPE:long_name = "Type of battery packs in the float" ; - BATTERY_TYPE:_FillValue = " " ; - char BATTERY_PACKS(STRING64) ; - BATTERY_PACKS:long_name = "Configuration of battery packs in the float" ; - BATTERY_PACKS:_FillValue = " " ; - char CONTROLLER_BOARD_TYPE_PRIMARY(STRING32) ; - CONTROLLER_BOARD_TYPE_PRIMARY:long_name = "Type of primary controller board" ; - CONTROLLER_BOARD_TYPE_PRIMARY:_FillValue = " " ; - char CONTROLLER_BOARD_TYPE_SECONDARY(STRING32) ; - CONTROLLER_BOARD_TYPE_SECONDARY:long_name = "Type of secondary controller board" ; - CONTROLLER_BOARD_TYPE_SECONDARY:_FillValue = " " ; - char CONTROLLER_BOARD_SERIAL_NO_PRIMARY(STRING32) ; - CONTROLLER_BOARD_SERIAL_NO_PRIMARY:long_name = "Serial number of the primary controller board" ; - CONTROLLER_BOARD_SERIAL_NO_PRIMARY:_FillValue = " " ; - char CONTROLLER_BOARD_SERIAL_NO_SECONDARY(STRING32) ; - CONTROLLER_BOARD_SERIAL_NO_SECONDARY:long_name = "Serial number of the secondary controller board" ; - CONTROLLER_BOARD_SERIAL_NO_SECONDARY:_FillValue = " " ; - char SPECIAL_FEATURES(STRING1024) ; - SPECIAL_FEATURES:long_name = "Extra features of the float (algorithms, compressee etc.)" ; - SPECIAL_FEATURES:_FillValue = " " ; - char FLOAT_OWNER(STRING64) ; - FLOAT_OWNER:long_name = "Float owner" ; - FLOAT_OWNER:_FillValue = " " ; - char OPERATING_INSTITUTION(STRING64) ; - OPERATING_INSTITUTION:long_name = "Operating institution of the float" ; - OPERATING_INSTITUTION:_FillValue = " " ; - char CUSTOMISATION(STRING1024) ; - CUSTOMISATION:long_name = "Float customisation, i.e. (institution and modifications)" ; - CUSTOMISATION:_FillValue = " " ; - char LAUNCH_DATE(DATE_TIME) ; - LAUNCH_DATE:long_name = "Date (UTC) of the deployment" ; - LAUNCH_DATE:conventions = "YYYYMMDDHHMISS" ; - LAUNCH_DATE:_FillValue = " " ; - double LAUNCH_LATITUDE ; - LAUNCH_LATITUDE:long_name = "Latitude of the float when deployed" ; - LAUNCH_LATITUDE:standard_name = "latitude" ; - LAUNCH_LATITUDE:units = "degree_north" ; - LAUNCH_LATITUDE:_FillValue = 99999. ; - LAUNCH_LATITUDE:valid_min = -90. ; - LAUNCH_LATITUDE:valid_max = 90. ; - LAUNCH_LATITUDE:axis = "Y" ; - double LAUNCH_LONGITUDE ; - LAUNCH_LONGITUDE:long_name = "Longitude of the float when deployed" ; - LAUNCH_LONGITUDE:standard_name = "longitude" ; - LAUNCH_LONGITUDE:units = "degree_east" ; - LAUNCH_LONGITUDE:_FillValue = 99999. ; - LAUNCH_LONGITUDE:valid_min = -180. ; - LAUNCH_LONGITUDE:valid_max = 180. 
; - LAUNCH_LONGITUDE:axis = "X" ; - char LAUNCH_QC ; - LAUNCH_QC:long_name = "Quality on launch date, time and location" ; - LAUNCH_QC:conventions = "Argo reference table 2" ; - LAUNCH_QC:_FillValue = " " ; - char START_DATE(DATE_TIME) ; - START_DATE:long_name = "Date (UTC) of the first descent of the float" ; - START_DATE:conventions = "YYYYMMDDHHMISS" ; - START_DATE:_FillValue = " " ; - char START_DATE_QC ; - START_DATE_QC:long_name = "Quality on start date" ; - START_DATE_QC:conventions = "Argo reference table 2" ; - START_DATE_QC:_FillValue = " " ; - char STARTUP_DATE(DATE_TIME) ; - STARTUP_DATE:long_name = "Date (UTC) of the activation of the float" ; - STARTUP_DATE:conventions = "YYYYMMDDHHMISS" ; - STARTUP_DATE:_FillValue = " " ; - char STARTUP_DATE_QC ; - STARTUP_DATE_QC:long_name = "Quality on startup date" ; - STARTUP_DATE_QC:conventions = "Argo reference table 2" ; - STARTUP_DATE_QC:_FillValue = " " ; - char DEPLOYMENT_PLATFORM(STRING32) ; - DEPLOYMENT_PLATFORM:long_name = "Identifier of the deployment platform" ; - DEPLOYMENT_PLATFORM:_FillValue = " " ; - char DEPLOYMENT_CRUISE_ID(STRING32) ; - DEPLOYMENT_CRUISE_ID:long_name = "Identification number or reference number of the cruise used to deploy the float" ; - DEPLOYMENT_CRUISE_ID:_FillValue = " " ; - char DEPLOYMENT_REFERENCE_STATION_ID(STRING256) ; - DEPLOYMENT_REFERENCE_STATION_ID:long_name = "Identifier or reference number of co-located stations used to verify the first profile" ; - DEPLOYMENT_REFERENCE_STATION_ID:_FillValue = " " ; - char END_MISSION_DATE(DATE_TIME) ; - END_MISSION_DATE:long_name = "Date (UTC) of the end of mission of the float" ; - END_MISSION_DATE:conventions = "YYYYMMDDHHMISS" ; - END_MISSION_DATE:_FillValue = " " ; - char END_MISSION_STATUS ; - END_MISSION_STATUS:long_name = "Status of the end of mission of the float" ; - END_MISSION_STATUS:conventions = "T:No more transmission received, R:Retrieved" ; - END_MISSION_STATUS:_FillValue = " " ; - char LAUNCH_CONFIG_PARAMETER_NAME(N_LAUNCH_CONFIG_PARAM, STRING128) ; - LAUNCH_CONFIG_PARAMETER_NAME:long_name = "Name of configuration parameter at launch" ; - LAUNCH_CONFIG_PARAMETER_NAME:_FillValue = " " ; - double LAUNCH_CONFIG_PARAMETER_VALUE(N_LAUNCH_CONFIG_PARAM) ; - LAUNCH_CONFIG_PARAMETER_VALUE:long_name = "Value of configuration parameter at launch" ; - LAUNCH_CONFIG_PARAMETER_VALUE:_FillValue = 99999. ; - char CONFIG_PARAMETER_NAME(N_CONFIG_PARAM, STRING128) ; - CONFIG_PARAMETER_NAME:long_name = "Name of configuration parameter" ; - CONFIG_PARAMETER_NAME:_FillValue = " " ; - double CONFIG_PARAMETER_VALUE(N_MISSIONS, N_CONFIG_PARAM) ; - CONFIG_PARAMETER_VALUE:long_name = "Value of configuration parameter" ; - CONFIG_PARAMETER_VALUE:_FillValue = 99999. 
; - int CONFIG_MISSION_NUMBER(N_MISSIONS) ; - CONFIG_MISSION_NUMBER:long_name = "Unique number denoting the missions performed by the float" ; - CONFIG_MISSION_NUMBER:conventions = "1...N, 1 : first complete mission" ; - CONFIG_MISSION_NUMBER:_FillValue = 99999 ; - char CONFIG_MISSION_COMMENT(N_MISSIONS, STRING256) ; - CONFIG_MISSION_COMMENT:long_name = "Comment on configuration" ; - CONFIG_MISSION_COMMENT:_FillValue = " " ; - char SENSOR(N_SENSOR, STRING32) ; - SENSOR:long_name = "Name of the sensor mounted on the float" ; - SENSOR:conventions = "Argo reference table 25" ; - SENSOR:_FillValue = " " ; - char SENSOR_MAKER(N_SENSOR, STRING256) ; - SENSOR_MAKER:long_name = "Name of the sensor manufacturer" ; - SENSOR_MAKER:conventions = "Argo reference table 26" ; - SENSOR_MAKER:_FillValue = " " ; - char SENSOR_MODEL(N_SENSOR, STRING256) ; - SENSOR_MODEL:long_name = "Type of sensor" ; - SENSOR_MODEL:conventions = "Argo reference table 27" ; - SENSOR_MODEL:_FillValue = " " ; - char SENSOR_SERIAL_NO(N_SENSOR, STRING16) ; - SENSOR_SERIAL_NO:long_name = "Serial number of the sensor" ; - SENSOR_SERIAL_NO:_FillValue = " " ; - char PARAMETER(N_PARAM, STRING64) ; - PARAMETER:long_name = "Name of parameter computed from float measurements" ; - PARAMETER:conventions = "Argo reference table 3" ; - PARAMETER:_FillValue = " " ; - char PARAMETER_SENSOR(N_PARAM, STRING128) ; - PARAMETER_SENSOR:long_name = "Name of the sensor that measures this parameter" ; - PARAMETER_SENSOR:conventions = "Argo reference table 25" ; - PARAMETER_SENSOR:_FillValue = " " ; - char PARAMETER_UNITS(N_PARAM, STRING32) ; - PARAMETER_UNITS:long_name = "Units of accuracy and resolution of the parameter" ; - PARAMETER_UNITS:_FillValue = " " ; - char PARAMETER_ACCURACY(N_PARAM, STRING32) ; - PARAMETER_ACCURACY:long_name = "Accuracy of the parameter" ; - PARAMETER_ACCURACY:_FillValue = " " ; - char PARAMETER_RESOLUTION(N_PARAM, STRING32) ; - PARAMETER_RESOLUTION:long_name = "Resolution of the parameter" ; - PARAMETER_RESOLUTION:_FillValue = " " ; - char PREDEPLOYMENT_CALIB_EQUATION(N_PARAM, STRING1024) ; - PREDEPLOYMENT_CALIB_EQUATION:long_name = "Calibration equation for this parameter" ; - PREDEPLOYMENT_CALIB_EQUATION:_FillValue = " " ; - char PREDEPLOYMENT_CALIB_COEFFICIENT(N_PARAM, STRING1024) ; - PREDEPLOYMENT_CALIB_COEFFICIENT:long_name = "Calibration coefficients for this equation" ; - PREDEPLOYMENT_CALIB_COEFFICIENT:_FillValue = " " ; - char PREDEPLOYMENT_CALIB_COMMENT(N_PARAM, STRING1024) ; - PREDEPLOYMENT_CALIB_COMMENT:long_name = "Comment applying to this parameter calibration" ; - PREDEPLOYMENT_CALIB_COMMENT:_FillValue = " " ; - -// global attributes: - :title = "Argo float metadata file" ; - :institution = "Scripps Institution of Oceanography" ; - :source = "Argo float" ; - :history = "2018-03-08T20:35:37Z creation; 2018-04-10T22:21:06Z most recent update" ; - :references = "http://www.argodatamgt.org/Documentation" ; - :comment = "free text" ; - :user_manual_version = "3.10" ; - :Conventions = "Argo-3.1 CF-1.6" ; - :comment_on_dac_decoder_version = "Decoded by SIO: Argo SIO SOLOII V2.4" ; - -data: - - DATA_TYPE = "Argo meta-data " ; - - FORMAT_VERSION = "3.1 " ; - - HANDBOOK_VERSION = "1.2 " ; - - DATE_CREATION = "20180308203537" ; - - DATE_UPDATE = "20180410222106" ; - - PLATFORM_NUMBER = "7900685 " ; - - PTT = "n/a " ; - - TRANS_SYSTEM = - "IRIDIUM " ; - - TRANS_SYSTEM_ID = - "n/a " ; - - TRANS_FREQUENCY = - "n/a " ; - - POSITIONING_SYSTEM = - "GPS " ; - - PLATFORM_FAMILY = "FLOAT " ; - - PLATFORM_TYPE = "SOLO_II " ; - - 
PLATFORM_MAKER = "SIO_IDG " ; - - FIRMWARE_VERSION = "V02.40; SBE602 15Aug17 " ; - - MANUAL_VERSION = "V02.40 " ; - - FLOAT_SERIAL_NO = "8637 " ; - - STANDARD_FORMAT_ID = "202024 " ; - - DAC_FORMAT_ID = "SOLO2IR_TS36 " ; - - WMO_INST_TYPE = "853 " ; - - PROJECT_NAME = "US ARGO PROJECT " ; - - DATA_CENTRE = "AO" ; - - PI_NAME = "DEAN ROEMMICH " ; - - ANOMALY = " " ; - - BATTERY_TYPE = "Lithium " ; - - BATTERY_PACKS = "1 CPU (sn=2511); 3 PUMP (sn=17212 17208 17); " ; - - CONTROLLER_BOARD_TYPE_PRIMARY = "GG32 Rev:6.2 " ; - - CONTROLLER_BOARD_TYPE_SECONDARY = " " ; - - CONTROLLER_BOARD_SERIAL_NO_PRIMARY = "2496 " ; - - CONTROLLER_BOARD_SERIAL_NO_SECONDARY = " " ; - -} diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile_NoSpecification.cdl b/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile_NoSpecification.cdl deleted file mode 100644 index 7fb0349..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile_NoSpecification.cdl +++ /dev/null @@ -1,329 +0,0 @@ -netcdf \7900685_meta { -dimensions: - DATE_TIME = 14 ; - STRING2 = 2 ; - STRING4 = 4 ; - STRING8 = 8 ; - STRING16 = 16 ; - STRING32 = 32 ; - STRING64 = 64 ; - STRING128 = 128 ; - STRING256 = 256 ; - STRING1024 = 1024 ; - N_PARAM = 1 ; - N_SENSOR = 1 ; - N_CONFIG_PARAM = 1 ; - N_LAUNCH_CONFIG_PARAM = 1 ; - N_POSITIONING_SYSTEM = 1 ; - N_TRANS_SYSTEM = 1 ; - N_MISSIONS = UNLIMITED ; // (4 currently) -variables: - char DATA_TYPE(STRING16) ; - DATA_TYPE:long_name = "Data type" ; - DATA_TYPE:conventions = "***** bad setting for this attribute *****"; - DATA_TYPE:_FillValue = " " ; - char FORMAT_VERSION(STRING16) ; - FORMAT_VERSION:long_name = "File format version" ; - FORMAT_VERSION:_FillValue = " " ; - char HANDBOOK_VERSION(STRING4) ; - HANDBOOK_VERSION:long_name = "Data handbook version" ; - HANDBOOK_VERSION:_FillValue = " " ; - char DATE_CREATION(DATE_TIME) ; - DATE_CREATION:long_name = "Date of file creation" ; - DATE_CREATION:conventions = "YYYYMMDDHHMISS" ; - DATE_CREATION:comment = " " ; - DATE_CREATION:_FillValue = " " ; - char DATE_UPDATE(DATE_TIME) ; - DATE_UPDATE:long_name = "Date of update of this file" ; - DATE_UPDATE:conventions = "YYYYMMDDHHMISS" ; - DATE_UPDATE:_FillValue = " " ; - char PLATFORM_NUMBER(STRING8) ; - PLATFORM_NUMBER:long_name = "Float unique identifier" ; - PLATFORM_NUMBER:conventions = "WMO float identifier : A9IIIII" ; - PLATFORM_NUMBER:_FillValue = " " ; - char PTT(STRING256) ; - PTT:long_name = "Transmission identifier (ARGOS, ORBCOMM, etc.)" ; - PTT:_FillValue = " " ; - char TRANS_SYSTEM(N_TRANS_SYSTEM, STRING16) ; - TRANS_SYSTEM:long_name = "Telecommunications system used" ; - TRANS_SYSTEM:_FillValue = " " ; - char TRANS_SYSTEM_ID(N_TRANS_SYSTEM, STRING32) ; - TRANS_SYSTEM_ID:long_name = "Program identifier used by the transmission system" ; - TRANS_SYSTEM_ID:_FillValue = " " ; - char TRANS_FREQUENCY(N_TRANS_SYSTEM, STRING16) ; - TRANS_FREQUENCY:long_name = "Frequency of transmission from the float" ; - TRANS_FREQUENCY:units = "hertz" ; - TRANS_FREQUENCY:_FillValue = " " ; - char POSITIONING_SYSTEM(N_POSITIONING_SYSTEM, STRING8) ; - POSITIONING_SYSTEM:long_name = "Positioning system" ; - POSITIONING_SYSTEM:_FillValue = " " ; - char PLATFORM_FAMILY(STRING256) ; - PLATFORM_FAMILY:long_name = "Category of instrument" ; - PLATFORM_FAMILY:conventions = "Argo reference table 22" ; - PLATFORM_FAMILY:_FillValue = " " ; - char PLATFORM_TYPE(STRING32) ; - PLATFORM_TYPE:long_name = "Type of float" ; - PLATFORM_TYPE:conventions = "Argo reference table 23" ; 
- PLATFORM_TYPE:_FillValue = " " ; - char PLATFORM_MAKER(STRING256) ; - PLATFORM_MAKER:long_name = "Name of the manufacturer" ; - PLATFORM_MAKER:conventions = "Argo reference table 24" ; - PLATFORM_MAKER:_FillValue = " " ; - char FIRMWARE_VERSION(STRING32) ; - FIRMWARE_VERSION:long_name = "Firmware version for the float" ; - FIRMWARE_VERSION:_FillValue = " " ; - char MANUAL_VERSION(STRING16) ; - MANUAL_VERSION:long_name = "Manual version for the float" ; - MANUAL_VERSION:_FillValue = " " ; - char FLOAT_SERIAL_NO(STRING32) ; - FLOAT_SERIAL_NO:long_name = "Serial number of the float" ; - FLOAT_SERIAL_NO:_FillValue = " " ; - char STANDARD_FORMAT_ID(STRING16) ; - STANDARD_FORMAT_ID:long_name = "Standard format number to describe the data format type for each float" ; - STANDARD_FORMAT_ID:_FillValue = " " ; - char DAC_FORMAT_ID(STRING16) ; - DAC_FORMAT_ID:long_name = "Format number used by the DAC to describe the data format type for each float" ; - DAC_FORMAT_ID:_FillValue = " " ; - char WMO_INST_TYPE(STRING4) ; - WMO_INST_TYPE:long_name = "Coded instrument type" ; - WMO_INST_TYPE:conventions = "Argo reference table 8" ; - WMO_INST_TYPE:_FillValue = " " ; - char PROJECT_NAME(STRING64) ; - PROJECT_NAME:long_name = "Program under which the float was deployed" ; - PROJECT_NAME:_FillValue = " " ; - char DATA_CENTRE(STRING2) ; - DATA_CENTRE:long_name = "Data centre in charge of float real-time processing" ; - DATA_CENTRE:conventions = "Argo reference table 4" ; - DATA_CENTRE:_FillValue = " " ; - char PI_NAME(STRING64) ; - PI_NAME:long_name = "Name of the principal investigator" ; - PI_NAME:_FillValue = " " ; - char ANOMALY(STRING256) ; - ANOMALY:long_name = "Describe any anomalies or problems the float may have had" ; - ANOMALY:_FillValue = " " ; - char BATTERY_TYPE(STRING64) ; - BATTERY_TYPE:long_name = "Type of battery packs in the float" ; - BATTERY_TYPE:_FillValue = " " ; - char BATTERY_PACKS(STRING64) ; - BATTERY_PACKS:long_name = "Configuration of battery packs in the float" ; - BATTERY_PACKS:_FillValue = " " ; - char CONTROLLER_BOARD_TYPE_PRIMARY(STRING32) ; - CONTROLLER_BOARD_TYPE_PRIMARY:long_name = "Type of primary controller board" ; - CONTROLLER_BOARD_TYPE_PRIMARY:_FillValue = " " ; - char CONTROLLER_BOARD_TYPE_SECONDARY(STRING32) ; - CONTROLLER_BOARD_TYPE_SECONDARY:long_name = "Type of secondary controller board" ; - CONTROLLER_BOARD_TYPE_SECONDARY:_FillValue = " " ; - char CONTROLLER_BOARD_SERIAL_NO_PRIMARY(STRING32) ; - CONTROLLER_BOARD_SERIAL_NO_PRIMARY:long_name = "Serial number of the primary controller board" ; - CONTROLLER_BOARD_SERIAL_NO_PRIMARY:_FillValue = " " ; - char CONTROLLER_BOARD_SERIAL_NO_SECONDARY(STRING32) ; - CONTROLLER_BOARD_SERIAL_NO_SECONDARY:long_name = "Serial number of the secondary controller board" ; - CONTROLLER_BOARD_SERIAL_NO_SECONDARY:_FillValue = " " ; - char SPECIAL_FEATURES(STRING1024) ; - SPECIAL_FEATURES:long_name = "Extra features of the float (algorithms, compressee etc.)" ; - SPECIAL_FEATURES:_FillValue = " " ; - char FLOAT_OWNER(STRING64) ; - FLOAT_OWNER:long_name = "Float owner" ; - FLOAT_OWNER:_FillValue = " " ; - char OPERATING_INSTITUTION(STRING64) ; - OPERATING_INSTITUTION:long_name = "Operating institution of the float" ; - OPERATING_INSTITUTION:_FillValue = " " ; - char CUSTOMISATION(STRING1024) ; - CUSTOMISATION:long_name = "Float customisation, i.e. 
(institution and modifications)" ; - CUSTOMISATION:_FillValue = " " ; - char LAUNCH_DATE(DATE_TIME) ; - LAUNCH_DATE:long_name = "Date (UTC) of the deployment" ; - LAUNCH_DATE:conventions = "YYYYMMDDHHMISS" ; - LAUNCH_DATE:_FillValue = " " ; - double LAUNCH_LATITUDE ; - LAUNCH_LATITUDE:long_name = "Latitude of the float when deployed" ; - LAUNCH_LATITUDE:standard_name = "latitude" ; - LAUNCH_LATITUDE:units = "degree_north" ; - LAUNCH_LATITUDE:_FillValue = 99999. ; - LAUNCH_LATITUDE:valid_min = -90. ; - LAUNCH_LATITUDE:valid_max = 90. ; - LAUNCH_LATITUDE:axis = "Y" ; - double LAUNCH_LONGITUDE ; - LAUNCH_LONGITUDE:long_name = "Longitude of the float when deployed" ; - LAUNCH_LONGITUDE:standard_name = "longitude" ; - LAUNCH_LONGITUDE:units = "degree_east" ; - LAUNCH_LONGITUDE:_FillValue = 99999. ; - LAUNCH_LONGITUDE:valid_min = -180. ; - LAUNCH_LONGITUDE:valid_max = 180. ; - LAUNCH_LONGITUDE:axis = "X" ; - char LAUNCH_QC ; - LAUNCH_QC:long_name = "Quality on launch date, time and location" ; - LAUNCH_QC:conventions = "Argo reference table 2" ; - LAUNCH_QC:_FillValue = " " ; - char START_DATE(DATE_TIME) ; - START_DATE:long_name = "Date (UTC) of the first descent of the float" ; - START_DATE:conventions = "YYYYMMDDHHMISS" ; - START_DATE:_FillValue = " " ; - char START_DATE_QC ; - START_DATE_QC:long_name = "Quality on start date" ; - START_DATE_QC:conventions = "Argo reference table 2" ; - START_DATE_QC:_FillValue = " " ; - char STARTUP_DATE(DATE_TIME) ; - STARTUP_DATE:long_name = "Date (UTC) of the activation of the float" ; - STARTUP_DATE:conventions = "YYYYMMDDHHMISS" ; - STARTUP_DATE:_FillValue = " " ; - char STARTUP_DATE_QC ; - STARTUP_DATE_QC:long_name = "Quality on startup date" ; - STARTUP_DATE_QC:conventions = "Argo reference table 2" ; - STARTUP_DATE_QC:_FillValue = " " ; - char DEPLOYMENT_PLATFORM(STRING32) ; - DEPLOYMENT_PLATFORM:long_name = "Identifier of the deployment platform" ; - DEPLOYMENT_PLATFORM:_FillValue = " " ; - char DEPLOYMENT_CRUISE_ID(STRING32) ; - DEPLOYMENT_CRUISE_ID:long_name = "Identification number or reference number of the cruise used to deploy the float" ; - DEPLOYMENT_CRUISE_ID:_FillValue = " " ; - char DEPLOYMENT_REFERENCE_STATION_ID(STRING256) ; - DEPLOYMENT_REFERENCE_STATION_ID:long_name = "Identifier or reference number of co-located stations used to verify the first profile" ; - DEPLOYMENT_REFERENCE_STATION_ID:_FillValue = " " ; - char END_MISSION_DATE(DATE_TIME) ; - END_MISSION_DATE:long_name = "Date (UTC) of the end of mission of the float" ; - END_MISSION_DATE:conventions = "YYYYMMDDHHMISS" ; - END_MISSION_DATE:_FillValue = " " ; - char END_MISSION_STATUS ; - END_MISSION_STATUS:long_name = "Status of the end of mission of the float" ; - END_MISSION_STATUS:conventions = "T:No more transmission received, R:Retrieved" ; - END_MISSION_STATUS:_FillValue = " " ; - char LAUNCH_CONFIG_PARAMETER_NAME(N_LAUNCH_CONFIG_PARAM, STRING128) ; - LAUNCH_CONFIG_PARAMETER_NAME:long_name = "Name of configuration parameter at launch" ; - LAUNCH_CONFIG_PARAMETER_NAME:_FillValue = " " ; - double LAUNCH_CONFIG_PARAMETER_VALUE(N_LAUNCH_CONFIG_PARAM) ; - LAUNCH_CONFIG_PARAMETER_VALUE:long_name = "Value of configuration parameter at launch" ; - LAUNCH_CONFIG_PARAMETER_VALUE:_FillValue = 99999. 
; - char CONFIG_PARAMETER_NAME(N_CONFIG_PARAM, STRING128) ; - CONFIG_PARAMETER_NAME:long_name = "Name of configuration parameter" ; - CONFIG_PARAMETER_NAME:_FillValue = " " ; - double CONFIG_PARAMETER_VALUE(N_MISSIONS, N_CONFIG_PARAM) ; - CONFIG_PARAMETER_VALUE:long_name = "Value of configuration parameter" ; - CONFIG_PARAMETER_VALUE:_FillValue = 99999. ; - int CONFIG_MISSION_NUMBER(N_MISSIONS) ; - CONFIG_MISSION_NUMBER:long_name = "Unique number denoting the missions performed by the float" ; - CONFIG_MISSION_NUMBER:conventions = "1...N, 1 : first complete mission" ; - CONFIG_MISSION_NUMBER:_FillValue = 99999 ; - char CONFIG_MISSION_COMMENT(N_MISSIONS, STRING256) ; - CONFIG_MISSION_COMMENT:long_name = "Comment on configuration" ; - CONFIG_MISSION_COMMENT:_FillValue = " " ; - char SENSOR(N_SENSOR, STRING32) ; - SENSOR:long_name = "Name of the sensor mounted on the float" ; - SENSOR:conventions = "Argo reference table 25" ; - SENSOR:_FillValue = " " ; - char SENSOR_MAKER(N_SENSOR, STRING256) ; - SENSOR_MAKER:long_name = "Name of the sensor manufacturer" ; - SENSOR_MAKER:conventions = "Argo reference table 26" ; - SENSOR_MAKER:_FillValue = " " ; - char SENSOR_MODEL(N_SENSOR, STRING256) ; - SENSOR_MODEL:long_name = "Type of sensor" ; - SENSOR_MODEL:conventions = "Argo reference table 27" ; - SENSOR_MODEL:_FillValue = " " ; - char SENSOR_SERIAL_NO(N_SENSOR, STRING16) ; - SENSOR_SERIAL_NO:long_name = "Serial number of the sensor" ; - SENSOR_SERIAL_NO:_FillValue = " " ; - char PARAMETER(N_PARAM, STRING64) ; - PARAMETER:long_name = "Name of parameter computed from float measurements" ; - PARAMETER:conventions = "Argo reference table 3" ; - PARAMETER:_FillValue = " " ; - char PARAMETER_SENSOR(N_PARAM, STRING128) ; - PARAMETER_SENSOR:long_name = "Name of the sensor that measures this parameter" ; - PARAMETER_SENSOR:conventions = "Argo reference table 25" ; - PARAMETER_SENSOR:_FillValue = " " ; - char PARAMETER_UNITS(N_PARAM, STRING32) ; - PARAMETER_UNITS:long_name = "Units of accuracy and resolution of the parameter" ; - PARAMETER_UNITS:_FillValue = " " ; - char PARAMETER_ACCURACY(N_PARAM, STRING32) ; - PARAMETER_ACCURACY:long_name = "Accuracy of the parameter" ; - PARAMETER_ACCURACY:_FillValue = " " ; - char PARAMETER_RESOLUTION(N_PARAM, STRING32) ; - PARAMETER_RESOLUTION:long_name = "Resolution of the parameter" ; - PARAMETER_RESOLUTION:_FillValue = " " ; - char PREDEPLOYMENT_CALIB_EQUATION(N_PARAM, STRING1024) ; - PREDEPLOYMENT_CALIB_EQUATION:long_name = "Calibration equation for this parameter" ; - PREDEPLOYMENT_CALIB_EQUATION:_FillValue = " " ; - char PREDEPLOYMENT_CALIB_COEFFICIENT(N_PARAM, STRING1024) ; - PREDEPLOYMENT_CALIB_COEFFICIENT:long_name = "Calibration coefficients for this equation" ; - PREDEPLOYMENT_CALIB_COEFFICIENT:_FillValue = " " ; - char PREDEPLOYMENT_CALIB_COMMENT(N_PARAM, STRING1024) ; - PREDEPLOYMENT_CALIB_COMMENT:long_name = "Comment applying to this parameter calibration" ; - PREDEPLOYMENT_CALIB_COMMENT:_FillValue = " " ; - -// global attributes: - :title = "Argo float metadata file" ; - :institution = "Scripps Institution of Oceanography" ; - :source = "Argo float" ; - :history = "2018-03-08T20:35:37Z creation; 2018-04-10T22:21:06Z most recent update" ; - :references = "http://www.argodatamgt.org/Documentation" ; - :comment = "free text" ; - :user_manual_version = "3.10" ; - :Conventions = "Argo-3.1 CF-1.6" ; - :comment_on_dac_decoder_version = "Decoded by SIO: Argo SIO SOLOII V2.4" ; - -data: - - DATA_TYPE = "Argo meta-data " ; - - FORMAT_VERSION = "10.0 " ; - - 
HANDBOOK_VERSION = "1.2 " ; - - DATE_CREATION = "20180308203537" ; - - DATE_UPDATE = "20180410222106" ; - - PLATFORM_NUMBER = "7900685 " ; - - PTT = "n/a " ; - - TRANS_SYSTEM = - "IRIDIUM " ; - - TRANS_SYSTEM_ID = - "n/a " ; - - TRANS_FREQUENCY = - "n/a " ; - - POSITIONING_SYSTEM = - "GPS " ; - - PLATFORM_FAMILY = "FLOAT " ; - - PLATFORM_TYPE = "SOLO_II " ; - - PLATFORM_MAKER = "SIO_IDG " ; - - FIRMWARE_VERSION = "V02.40; SBE602 15Aug17 " ; - - MANUAL_VERSION = "V02.40 " ; - - FLOAT_SERIAL_NO = "8637 " ; - - STANDARD_FORMAT_ID = "202024 " ; - - DAC_FORMAT_ID = "SOLO2IR_TS36 " ; - - WMO_INST_TYPE = "853 " ; - - PROJECT_NAME = "US ARGO PROJECT " ; - - DATA_CENTRE = "AO" ; - - PI_NAME = "DEAN ROEMMICH " ; - - ANOMALY = " " ; - - BATTERY_TYPE = "Lithium " ; - - BATTERY_PACKS = "1 CPU (sn=2511); 3 PUMP (sn=17212 17208 17); " ; - - CONTROLLER_BOARD_TYPE_PRIMARY = "GG32 Rev:6.2 " ; - - CONTROLLER_BOARD_TYPE_SECONDARY = " " ; - - CONTROLLER_BOARD_SERIAL_NO_PRIMARY = "2496 " ; - - CONTROLLER_BOARD_SERIAL_NO_SECONDARY = " " ; - -} diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile_NotAnArgoFile.cdl b/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile_NotAnArgoFile.cdl deleted file mode 100644 index 78245b4..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestResultsFile_NotAnArgoFile.cdl +++ /dev/null @@ -1,29 +0,0 @@ -netcdf \7900685_meta { -dimensions: - DATE_TIME = 14 ; - STRING2 = 2 ; - STRING4 = 4 ; - STRING8 = 8 ; - STRING16 = 16 ; - STRING32 = 32 ; - STRING64 = 64 ; - STRING128 = 128 ; - STRING256 = 256 ; - STRING1024 = 1024 ; - N_PARAM = 3 ; - N_SENSOR = 3 ; - N_CONFIG_PARAM = 20 ; - N_LAUNCH_CONFIG_PARAM = 27 ; - N_POSITIONING_SYSTEM = 1 ; - N_TRANS_SYSTEM = 1 ; - N_MISSIONS = UNLIMITED ; // (4 currently) -variables: - char DATA_TYPE(STRING16) ; - DATA_TYPE:long_name = "Data type" ; - DATA_TYPE:conventions = "Argo reference table 1" ; - DATA_TYPE:_FillValue = " " ; -data: - - DATA_TYPE = "Argo meta-data " ; - -} diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestTechnicalParam.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestTechnicalParam.java deleted file mode 100644 index 6d45bcc..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestTechnicalParam.java +++ /dev/null @@ -1,118 +0,0 @@ -package com.usgdac.legacy; - -import java.io.IOException; -import java.io.PrintStream; -import java.lang.invoke.MethodHandles; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -import fr.coriolis.checker.filetypes.ArgoDataFile; -import fr.coriolis.checker.filetypes.ArgoTechnicalFile; -import fr.coriolis.checker.filetypes.ArgoDataFile.FileType; - -/** - * Tests the ArgoTechnicalFile.validateTechParams() - * - * @author Mark Ignaszewski - * @version $Id: ValidateSubmit.java 372 2015-12-02 19:02:31Z ignaszewski $ - */ -public class TestTechnicalParam { - - // ......................Variable Declarations................ 
- - // ..standard i/o shortcuts - static PrintStream stdout = new PrintStream(System.out); - static PrintStream stderr = new PrintStream(System.err); - - // ..log file - static final Class ThisClass; - static final String ClassName; - - private static final Logger log; - - static { - ThisClass = MethodHandles.lookup().lookupClass(); - ClassName = ThisClass.getSimpleName(); - - System.setProperty("logfile.name", ClassName + "_LOG"); - - log = LogManager.getLogger(ClassName); - } - - public static void main(String args[]) throws IOException { - - String spec_dir = null; - - // .....extract the options.... - for (String file : args) { - if (file.equals("-help")) { - Help(); - System.exit(0); - } - - if (spec_dir == null) { - spec_dir = file; - stdout.println("\n\n==> SPEC-DIR: " + spec_dir); - continue; - } - - stdout.println("\n\n==> FILE: " + file); - - ArgoDataFile argo = null; - - try { - argo = ArgoDataFile.open(file, spec_dir, true); - - } catch (Exception e) { - stdout.println("ArgoDataFile.open exception:\n" + e); - e.printStackTrace(stdout); - - continue; - } - - // ..null file means it did not meet the min criteria to be an argo file - if (argo == null) { - stdout.println("ArgoDataFile.open failed: " + ArgoDataFile.getMessage()); - continue; - } - - if (argo.fileType() != FileType.TECHNICAL) { - stdout.println("Who you trying to kid? This isn't a tech file."); - continue; - } - - ArgoTechnicalFile tech = (ArgoTechnicalFile) argo; - tech.validateTechParams(); - - // .....print diagnostics...... - - int nMessage = 0; - for (String str : tech.formatErrors()) { - stdout.println(file + ": ERROR: " + str); - nMessage++; - } - - for (String str : tech.formatWarnings()) { - stdout.println(file + ": WARNING: " + str); - nMessage++; - } - - if (nMessage == 0) { - stdout.println(file + ": no messages"); - } - - // ..reset - tech.clearFormatErrors(); - tech.clearFormatWarnings(); - - } // ..end for (file) - } - - public static void Help() { - stdout.println("\n" + "Purpose: Tests the 'BATTERY validation' of ArgoMetadataFile\n" + "\n" - + "Usage: java TestBattery spec-dir file-names...\n" + "Options:\n" - + " -help | -H | -U Help -- this message\n" + "\n"); - } - -} diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestWr1.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestWr1.java deleted file mode 100644 index ae4de14..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestWr1.java +++ /dev/null @@ -1,58 +0,0 @@ -package com.usgdac.legacy; -import ucar.nc2.*; -import ucar.ma2.*; -import java.io.*; -import java.util.*; - - -//..creates test.nc with a 2 array of doubles -//..creates a 2-row 2D array and writes to rows 0 & 1 - -public class TestWr1 -{ - - public static void main (String args[]) - throws IOException - { - NetcdfFileWriter out; - - int m_dim = 10; - - //..open netCDF file for writing - out = NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf3, "test.nc"); - out.setFill(true); - - Variable var1 = out.addVariable((Group) null, "var1", DataType.DOUBLE, "N M"); - - //Double fill = new Double(99999.); - Double fill = new Double(ucar.nc2.iosp.netcdf3.N3iosp.NC_FILL_DOUBLE); - - Attribute attr = new Attribute("_FillValue", (Number) fill); - var1.addAttribute(attr); - - out.setFill(true); - out.create(); - - //double dbl[][] = new double[n_dim][m_dim]; - double dbl[][] = new double[2][m_dim]; - - for (int n = 0; n < 2; n++) { - for (int m = 0; m < m_dim; m++) { - // dbl[n][m] = (double) n + ((double) m / 10.); - dbl[n][m] = fill; - } - } - - 
Array arr = Array.factory(dbl); - - try { - out.write(var1, arr); - } catch (Exception e) { - System.out.println("write caused exception: "+e); - } - - out.close(); - } - - -} //..end class diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestWr2.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestWr2.java deleted file mode 100644 index b0be6a7..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestWr2.java +++ /dev/null @@ -1,53 +0,0 @@ -package com.usgdac.legacy; -import ucar.nc2.*; -import ucar.ma2.*; -import java.io.*; -import java.util.*; - -//..creates a 2 row 2D array and -//..adds rows 2 & 3 to test.nc - -public class TestWr2 -{ - - public static void main (String args[]) - throws IOException - { - NetcdfFileWriter out = null; - int m_dim = 10; - - //..open netCDF file for writing - - try { - out = NetcdfFileWriter.openExisting("test.nc"); - } catch (Exception e) { - System.out.println("write caused exception: "+e); - System.exit(1); - } - - Variable var1 = out.findVariable("var1"); - - //double dbl[][] = new double[n_dim][m_dim]; - double dbl[][] = new double[2][m_dim]; - - for (int n = 0; n < 2; n++) { - for (int m = 0; m < m_dim; m++) { - dbl[n][m] = (double) (n+2) + ((double) m / 10.); - } - } - - Array arr = Array.factory(dbl); - - int origin[] = {2, 0}; - - try { - out.write(var1, origin, arr); - } catch (Exception e) { - System.out.println("write caused exception: "+e); - } - - out.close(); - } - - -} //..end class diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestWr3.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestWr3.java deleted file mode 100644 index 0223dee..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestWr3.java +++ /dev/null @@ -1,52 +0,0 @@ -package com.usgdac.legacy; -import ucar.nc2.*; -import ucar.ma2.*; -import java.io.*; -import java.util.*; - -//..creates a 2D 2-row arrays with fewer columns -//..adds it to rows 4 & 5 - -public class TestWr3 -{ - - public static void main (String args[]) - throws IOException - { - NetcdfFileWriter out = null; - - //..open netCDF file for writing - - try { - out = NetcdfFileWriter.openExisting("test.nc"); - } catch (Exception e) { - System.out.println("write caused exception: "+e); - System.exit(1); - } - - Variable var1 = out.findVariable("var1"); - - //double dbl[][] = new double[n_dim][m_dim]; - double dbl[][] = new double[2][5]; - - for (int n = 0; n < 2; n++) { - for (int m = 0; m < 5; m++) { - dbl[n][m] = (double) (n+4) + ((double) m / 10.); - } - } - - Array arr = Array.factory(dbl); - - int origin[] = {4, 0}; - - try { - out.write(var1, origin, arr); - } catch (Exception e) { - System.out.println("write caused exception: "+e); - } - - out.close(); - } - - -} //..end class diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/TestWr4.java b/file_checker_exec/src/test/java/com/usgdac/legacy/TestWr4.java deleted file mode 100644 index a3136f8..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/TestWr4.java +++ /dev/null @@ -1,51 +0,0 @@ -package com.usgdac.legacy; -import ucar.nc2.*; -import ucar.ma2.*; -import java.io.*; - -//..creates a 2D 2-row array with fewer columns -//..adds it to the bottom columns of rows 6 & 7 - -public class TestWr4 -{ - - public static void main (String args[]) - throws IOException - { - NetcdfFileWriter out = null; - - //..open netCDF file for writing - - try { - out = NetcdfFileWriter.openExisting("test.nc"); - } catch (Exception e) { - System.out.println("write caused exception: "+e); 
- System.exit(1); - } - - Variable var1 = out.findVariable("var1"); - - //double dbl[][] = new double[n_dim][m_dim]; - double dbl[][] = new double[2][5]; - - for (int n = 0; n < 2; n++) { - for (int m = 0; m < 5; m++) { - dbl[n][m] = (double) (n+6) + ((double) m / 10.); - } - } - - Array arr = Array.factory(dbl); - - int origin[] = {6, 5}; - - try { - out.write(var1, origin, arr); - } catch (Exception e) { - System.out.println("write caused exception: "+e); - } - - out.close(); - } - - -} //..end class diff --git a/file_checker_exec/src/test/java/com/usgdac/legacy/Testwr.java b/file_checker_exec/src/test/java/com/usgdac/legacy/Testwr.java deleted file mode 100644 index 0eb9ce0..0000000 --- a/file_checker_exec/src/test/java/com/usgdac/legacy/Testwr.java +++ /dev/null @@ -1,39 +0,0 @@ -package com.usgdac.legacy; -import ucar.nc2.*; -import ucar.ma2.*; -import java.io.*; -import java.util.*; - - -public class Testwr -{ - - public static void main (String args[]) - throws IOException - { - NetcdfFileWriter out = null; - - //..open netCDF file for writing - - try { - out = NetcdfFileWriter.openExisting("out.nc"); - } catch (Exception e) { - System.out.println("write caused exception: "+e); - System.exit(1); - } - - Variable var1 = out.findVariable("DATA_TYPE"); - - Array arr = ArrayChar.makeFromString("test string", 16); - - try { - out.write(var1, arr); - } catch (Exception e) { - System.out.println("write caused exception: "+e); - } - - out.close(); - } - - -} //..end class diff --git a/file_checker_exec/src/test/java/fr/coriolis/checker/e2etests/ValidateTechTimeseriesIT.java b/file_checker_exec/src/test/java/fr/coriolis/checker/e2etests/ValidateTechTimeseriesIT.java new file mode 100644 index 0000000..2e9c6e0 --- /dev/null +++ b/file_checker_exec/src/test/java/fr/coriolis/checker/e2etests/ValidateTechTimeseriesIT.java @@ -0,0 +1,35 @@ +package fr.coriolis.checker.e2etests; + +import java.io.IOException; + +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; + +class ValidateTechTimeseriesIT { + private final String TEST_DIR_NAME = "TEST_TECH_0001"; + + @BeforeAll + public static void init() { + TestsUtils.init(ValidateTechTimeseriesIT.class); + + } + + @Tag(TEST_DIR_NAME) + @ParameterizedTest(name = "{0} from dac {1} should have status {2} at phase {3}") + @CsvSource({ "3901682_tech.nc,coriolis,FILE-ACCEPTED,DATA-VALIDATION", + "3901682_tech_No-JULD.nc,coriolis,FILE-REJECTED,FORMAT-VERIFICATION", + "6903283_tech_No-Timeseries.nc,coriolis,FILE-ACCEPTED,DATA-VALIDATION", + "3901682_tech_bad_long_name.nc,coriolis,FILE-REJECTED,FORMAT-VERIFICATION", + "6990728_tech_unitsNotInR14ButInUnitsList.nc,coriolis,FILE-ACCEPTED,DATA-VALIDATION", + "6990728_tech_BadUnits.nc,coriolis,FILE-REJECTED,FORMAT-VERIFICATION" + }) + void fileChecker_shouldAcceptTechTimeseries_WhenConformToSpec(String fileName, String dac, String result, + String phase) throws IOException, InterruptedException { + + TestsUtils.genericFileCheckerE2ETest(fileName, dac, result, phase, TEST_DIR_NAME); + + } + +} diff --git a/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/3901682_tech.nc b/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/3901682_tech.nc new file mode 100644 index 0000000..698dd75 Binary files /dev/null and b/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/3901682_tech.nc differ diff --git 
a/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/3901682_tech_No-JULD.nc b/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/3901682_tech_No-JULD.nc new file mode 100644 index 0000000..8e5feea Binary files /dev/null and b/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/3901682_tech_No-JULD.nc differ diff --git a/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/3901682_tech_bad_long_name.nc b/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/3901682_tech_bad_long_name.nc new file mode 100644 index 0000000..d5cb74b Binary files /dev/null and b/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/3901682_tech_bad_long_name.nc differ diff --git a/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/6903283_tech_No-Timeseries.nc b/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/6903283_tech_No-Timeseries.nc new file mode 100644 index 0000000..77a218a Binary files /dev/null and b/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/6903283_tech_No-Timeseries.nc differ diff --git a/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/6990728_tech_BadUnits.nc b/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/6990728_tech_BadUnits.nc new file mode 100644 index 0000000..e5fc2bc Binary files /dev/null and b/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/6990728_tech_BadUnits.nc differ diff --git a/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/6990728_tech_unitsNotInR14ButInUnitsList.nc b/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/6990728_tech_unitsNotInR14ButInUnitsList.nc new file mode 100644 index 0000000..958285a Binary files /dev/null and b/file_checker_exec/src/test/netcdf-test-files/TEST_TECH_0001/6990728_tech_unitsNotInR14ButInUnitsList.nc differ diff --git a/file_checker_spec/argo-tech_names-spec-v3.1 b/file_checker_spec/argo-tech_names-spec-v3.1 index 8fb5f6a..2a828ce 100644 --- a/file_checker_spec/argo-tech_names-spec-v3.1 +++ b/file_checker_spec/argo-tech_names-spec-v3.1 @@ -532,7 +532,7 @@ CURRENT_BatteryAvgPumpOnStartAscent_mA | Average pump motor current taken at sta CURRENT_BatteryFlntu_mA | battery current measured when Flntu sampled at unspecified time in profile | | T | | | 1/5/2012 | 1/5/2012 | CURRENT_BatteryFlntuParkEnd_mA | battery current measured when Flntu sampled at end of park | | T | | | 1/5/2012 | 1/5/2012 | CURRENT_BatteryFlbbParkEnd_mA | battery current measured when Flbb sampled at end of park | | T | | | 22/08/2016 | 22/08/2016 | -CURRENT_BatteryOptics_COUNTS | battery current while the optial sensor is on | | T | | | 5/29/2012 | 5/29/2012 | +CURRENT_BatteryOptics_COUNT | battery current while the optial sensor is on | | T | | | 5/29/2012 | 5/29/2012 | CURRENT_Battery_mA | battery current measured by the sensors themselves. | | | | | 3/28/2014 | 3/28/2014 | CURRENT_BatteryInitialAtProfileDepth_mA | Battery Current measured at bottom (park or pressure) at initial pump extension completion | | T | | | 1/5/2012 | 1/5/2012 | CURRENT_BatteryMaxPumpOnStartAscent_mA | Maximum pump motor current taken at start of ascent | | | | | 1/5/2012 | 1/5/2012 | @@ -540,7 +540,6 @@ CURRENT_BatteryNoLoad_mA | same as: CURRENT_BATTERY_PARK_NOLOAD_mA? 
| | T | | CURRENT_BatteryPark_mA | measured during park-park phase and is averaged | | T | | | 1/5/2012 | 1/5/2012 | CURRENT_BatteryParkEnd_mA | "battery current measured at end of park, before descent to profile depth" | | | | | 1/5/2012 | 1/5/2012 | CURRENT_BatteryParkNoLoad_mA | "Battery Current measured at park pressure with no load, Same as BOTTOM battery current (older floats)" | | T | | | 1/5/2012 | 1/5/2012 | -CURRENT_BatteryParkNoLoad_volts | same as: CURRENT_BATTERY_PARK_NOLOAD_mA? | | T | | | 1/5/2012 | 1/5/2012 | CURRENT_BatteryPistonPumpOn_mA | Battery Current during pumping to ascend | | T | | | 1/5/2012 | 1/5/2012 | CURRENT_BatteryPumpOn_mA | Battery Current buoyancy pump on | | T | | | 1/5/2012 | 1/5/2012 | CURRENT_BatterySampledbyFlntu_mA | Flntu sampled battery current | | T | | | 1/5/2012 | 1/5/2012 | diff --git a/file_checker_spec/argo-technical-spec-v3.1.cdl b/file_checker_spec/argo-technical-spec-v3.1.cdl index 685994d..a82107d 100644 --- a/file_checker_spec/argo-technical-spec-v3.1.cdl +++ b/file_checker_spec/argo-technical-spec-v3.1.cdl @@ -26,6 +26,7 @@ dimensions: STRING4 = 4; STRING2 = 2; N_TECH_PARAM = UNLIMITED; + N_TECH_MEASUREMENT = UNLIMITED; variables: @@ -68,5 +69,18 @@ variables: CYCLE_NUMBER:long_name = "Float cycle number"; CYCLE_NUMBER:conventions = "0...N, 0 : launch cycle (if exists), 1 : first complete cycle"; CYCLE_NUMBER:_FillValue = 99999; - -} + +// Timeseries of technical data (optional) + double JULD(N_TECH_MEASUREMENT); + JULD:long_name = "Julian day (UTC) of each measurement"; + JULD:standard_name = "time"; + JULD:units = "days since 1950-01-01 00:00:00 UTC"; + JULD:conventions = "Relative julian days with decimal part (as parts of day)"; + JULD:_FillValue = 999999; + JULD:axis = "T"; + int CYCLE_NUMBER_MEAS (N_TECH_MEASUREMENT); + int MEASUREMENT_CODE(N_TECH_MEASUREMENT); + MEASUREMENT_CODE:long_name = "Flag referring to a measurement event in the cycle"; + MEASUREMENT_CODE:conventions = "Argo reference table 15"; + MEASUREMENT_CODE:_FillValue = 99999; + diff --git a/file_checker_spec/argo-technical-spec-v3.1.opt b/file_checker_spec/argo-technical-spec-v3.1.opt new file mode 100644 index 0000000..1f2afcb --- /dev/null +++ b/file_checker_spec/argo-technical-spec-v3.1.opt @@ -0,0 +1,10 @@ +// $Revision: 657 $ +// $Date: 2025-02-07 17:36:02 $ + + +TECH_TIMESERIES:N_TECH_MEASUREMENT + +TECH_TIMESERIES:JULD +TECH_TIMESERIES:CYCLE_NUMBER_MEAS +TECH_TIMESERIES:MEASUREMENT_CODE + diff --git a/file_checker_spec/ref_table-27 b/file_checker_spec/ref_table-27 index 04505cd..896b33a 100644 --- a/file_checker_spec/ref_table-27 +++ b/file_checker_spec/ref_table-27 @@ -66,11 +66,12 @@ SBE61_V5.0.10 | SBE | | CTD_PRES, CTD_TEMP, CTD_CNDC | CTD | 2023-07-21 | https SBE61_V5.0.12 | SBE | | CTD_PRES, CTD_TEMP, CTD_CNDC | CTD | 2023-07-21 | https://github.com/nvs-vocabs/ArgoVocabs/issues/65 CTD_F01 | TSK | TSK model | CTD_PRES, CTD_TEMP, CTD_CNDC | CTD | | RBR | RBR | RBR model | CTD_PRES, CTD_TEMP, CTD_CNDC | CTD | | -RBRoem_V1.16 | RBR | RBR model | CTD_PRES, CTD_TEMP, CTD_CNDC | | | +RBRoem_V1.16 | RBR | RBR model | CTD_PRES, CTD_TEMP, CTD_CNDC | | | deprecated RBR_ARGO | RBR | | CTD_PRES, CTD_TEMP, CTD_CNDC | CTD | | added by MB 15/07/2020 completed by VT 28/07/2020 RBR_ARGO3 | RBR | | CTD_PRES, CTD_TEMP, CTD_CNDC | CTD | | added by MB 15/07/2020 completed by VT 28/07/2020 RBR_ARGO3_DEEP4K | RBR | | CTD_PRES, CTD_TEMP, CTD_CNDC | CTD | | added by MB 15/07/2020 completed by VT 28/07/2020 -RBR_ARGO3_DEEP6K | RBR | | CTD_PRES, CTD_TEMP, CTD_CNDC | CTD | | 
added by MB 15/07/2020 completed by VT 28/07/2020 +RBR_ARGO3_DEEP6K | RBR | | CTD_PRES, CTD_TEMP, CTD_CNDC | CTD | | added by MB 15/07/2020 completed by VT 28/07/2020 +RBR_CTD | RBR | | CTD_PRES, CTD_TEMP, CTD_CNDC | CTD | 2025-09-23 | added by ylubac 23/09/2025 https://github.com/OneArgo/ArgoVocabs/issues/157 //Oxygen sensors | | | | | | RBR_CODA_T_ODO | RBR | | OPTODE_DOXY | OPTODE | | added by VT 28/07/2020 @@ -137,19 +138,19 @@ SEAFET | SBE | pH (SEABIRD) | TRANSISTOR_PH | DURAFET | | updated from DURAFET //Radiometers | | | | | | OCR504_ICSW | SBE | Multispectral radiometer 4 channels with cosine detector to measure irradiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_DOWN_IRR, RADIOMETER_PAR | OCR | 23/12/2021 | added by t. carval for c. schmid https://github.com/nvs-vocabs/R27/issues/11 OCR504_R10W | SBE | Multispectral radiometer 4 channels with a 10° half-angle field of view to measure radiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_UP_RAD | OCR | 27/12/2021 | added by t. carval for c. schmid https://github.com/nvs-vocabs/R27/issues/11 -OCR507_ICSW | SBE | Multispectral radiometer 7 channels with cosine detector to measure irradiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_DOWN_IRR, RADIOMETER_PAR | OCR | 27/12/2021 | added by t. carval for c. schmid https://github.com/nvs-vocabs/R27/issues/11 -OCR507_R10W | SBE | Multispectral radiometer 7 channels with a 10° half-angle field of view to measure radiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_UP_RAD | OCR | 27/12/2021 | added by t. carval for c. schmid https://github.com/nvs-vocabs/R27/issues/11 -OCR507_ICSWR10W | SBE | Multispectral radiometer 7 channels combination of cosine detector to measure irradiance and a 10° half-angle field of view to measure radiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_DOWN_IRR , RADIOMETER_PAR, RADIOMETER_UP_RAD | OCR | 27/12/2021 | added by t. carval for c. schmid https://github.com/nvs-vocabs/R27/issues/11 +OCR507_ICSW | SBE | Multispectral radiometer 7 channels with cosine detector to measure irradiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_DOWN_IRR, RADIOMETER_PAR | OCR | 27/12/2021 | deprecated added by t. carval for c. schmid https://github.com/nvs-vocabs/R27/issues/11 +OCR507_R10W | SBE | Multispectral radiometer 7 channels with a 10° half-angle field of view to measure radiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_UP_RAD | OCR | 27/12/2021 | deprecated added by t. carval for c. schmid https://github.com/nvs-vocabs/R27/issues/11 +OCR507_ICSWR10W | SBE | Multispectral radiometer 7 channels combination of cosine detector to measure irradiance and a 10° half-angle field of view to measure radiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_DOWN_IRR , RADIOMETER_PAR, RADIOMETER_UP_RAD | OCR | 27/12/2021 | deprecated added by t. carval for c. 
schmid https://github.com/nvs-vocabs/R27/issues/11 SATLANTIC_OCR504_ICSW | SATLANTIC | Multispectral radiometer 4 channels (SATLANTIC) with cosine detector to measure irradiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_DOWN_IRR, RADIOMETER_PAR | OCR | | SATLANTIC_OCR504_R10W | SATLANTIC | Multispectral radiometer 4 channels (SATLANTIC) with a 10° half-angle field of view to measure radiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_UP_RAD | | | -SATLANTIC_OCR507_ICSW | SATLANTIC | Multispectral radiometer 7 channels (SATLANTIC) with cosine detector to measure irradiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_DOWN_IRR, RADIOMETER_PAR | OCR | | -SATLANTIC_OCR507_R10W | SATLANTIC | Multispectral radiometer 7 channels (SATLANTIC) with a 10° half-angle field of view to measure radiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_UP_RAD | | | -SATLANTIC_OCR507_ICSWR10W | SATLANTIC | Multispectral radiometer 7 channels (SATLANTIC) combination of cosine detector to measure irradiance and a 10° half-angle field of view to measure radiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_DOWN_IRR , RADIOMETER_PAR, RADIOMETER_UP_RAD | | | +SATLANTIC_OCR507_ICSW | SATLANTIC | Multispectral radiometer 7 channels (SATLANTIC) with cosine detector to measure irradiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_DOWN_IRR, RADIOMETER_PAR | OCR | | deprecated +SATLANTIC_OCR507_R10W | SATLANTIC | Multispectral radiometer 7 channels (SATLANTIC) with a 10° half-angle field of view to measure radiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_UP_RAD | | | deprecated +SATLANTIC_OCR507_ICSWR10W | SATLANTIC | Multispectral radiometer 7 channels (SATLANTIC) combination of cosine detector to measure irradiance and a 10° half-angle field of view to measure radiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_DOWN_IRR , RADIOMETER_PAR, RADIOMETER_UP_RAD | | | deprecated SATLANTIC_PAR | SATLANTIC | | RADIOMETER_PAR | | | RAMSES_ACC | TRIOS | Hyperspectral radiometer 256 channels (TRIOS) with cosine detector to measure irradiance in water (at wavelength specified in Table 3 BGC-Argo parameters list) | RADIOMETER_DOWN_IRR, RADIOMETER_PAR | RAMSES | 17/11/2020 | added by t. carval for h. bittig, pilot stage to be validated by BGC-Argo on https://github.com/nvs-vocabs/ArgoVocabs/issues/1 //Backscatteringmeters and Fluorometers combination | | | | | | -ECO_BB | WETLABS | Wetlabs Eco optical sensor packages with one backscattering meter (at wavelength specified in Table 3 BGC-Argo parameters list) | BACKSCATTERINGMETER_BBP | ECO | 11/5/2016 | added by MB. validated by BioGCArgo +ECO_BB | WETLABS | Wetlabs Eco optical sensor packages with one backscattering meter (at wavelength specified in Table 3 BGC-Argo parameters list) | BACKSCATTERINGMETER_BBP | ECO | 11/5/2016 | deprecated added by MB. validated by BioGCArgo ECO_FL | WETLABS | Wetlabs Eco optical sensor packages with one fluorometer (type specified in SENSOR) | FLUOROMETER_CHLA | ECO | 11/5/2016 | added by MB. validated by BioGCArgo ECO_NTU | WETLABS | Wetlabs Eco optical sensor packages with one backscattering meter measuring turbidity | BACKSCATTERINGMETER_TURBIDITY | ECO | 11/5/2016 | added by MB. 
validated by BioGCArgo ECO_FLBB | WETLABS | Wetlabs Eco optical sensor packages with one fluorometer (type specified in SENSOR) and one backscattering meter (at wavelength specified in Table 3 BGC-Argo parameters list) | FLUOROMETER_CHLA, BACKSCATTERINGMETER_BBP | ECO | 11/5/2016 | added by MB. validated by BioGCArgo @@ -158,7 +159,7 @@ ECO_FLBBFL_AP2 | WETLABS | A three-channel optical sensor fitted with two single ECO_FLBB_2K | WETLABS | Wetlabs Eco optical sensor packages with one fluorometer (type specified in SENSOR) and one backscattering meter (at wavelength specified in Table 3 BGC-Argo parameters list) certified for applications down to 2000m | FLUOROMETER_CHLA, BACKSCATTERINGMETER_BBP | ECO | | ECO_FLBBCD_AP2 | WETLABS | Wetlabs Eco optical sensor packages with one fluorometer (type specified in SENSOR), one CDOM fluorescence channel, and one backscattering meter (wavelength specified in SENSOR) mounted on an Apex float | FLUOROMETER_CHLA,FLUOROMETER_CDOM, BACKSCATTERINGMETER_BBP | ECO | 30/01/2019 | added by MB. validated by BioGCArgo ECO_FLNTU | WETLABS | Wetlabs Eco optical sensor packages with one fluorometer (type specified in SENSOR) and one backscattering meter measuring turbidity | FLUOROMETER_CHLA, BACKSCATTERINGMETER_TURBIDITY | ECO | 11/5/2016 | added by MB. validated by BioGCArgo -ECO_BB2 | WETLABS | Wetlabs Eco optical sensor packages with two backscatteringmeters (at wavelength specified in Table 3 BGC-Argo parameters list) | BACKSCATTERINGMETER_BBP | ECO | 11/5/2016 | added by MB. validated by BioGCArgo +ECO_BB2 | WETLABS | Wetlabs Eco optical sensor packages with two backscatteringmeters (at wavelength specified in Table 3 BGC-Argo parameters list) | BACKSCATTERINGMETER_BBP | ECO | 11/5/2016 | deprecated - added by MB. validated by BioGCArgo ECO_FLBBCD | WETLABS | Wetlabs Eco optical sensor packages with two fluorometers (type specified in SENSOR) and one backscatteringmeter (at wavelength specified in Table 3 BGC-Argo parameters list) | FLUOROMETER_CHLA,FLUOROMETER_CDOM, BACKSCATTERINGMETER_BBP | ECO | 11/5/2016 | added by MB. validated by BioGCArgo ECO_FLBB2 | WETLABS | Wetlabs Eco optical sensor packages with one fluorometer (type specified in SENSOR) and two backscatteringmeters (at wavelength specified in Table 3 BGC-Argo parameters list) | FLUOROMETER_CHLA,BACKSCATTERINGMETER_BBP | ECO | 11/5/2016 | added by MB. validated by BioGCArgo ECO_FLBBFL | SBE | A three-channel optical sensor fitted with two single-wavelength chlorophyll fluorometers and a single-angle scattering meter, depth-rated to 2000 m and designed for float applications. The two fluorometers are configured to detect chlorophyll-a fluorescence at 470 nm (excitation) / 695 nm (emission) wavelengths and chlorophyll-a fluorescence at 435 nm (excitation) / 695 nm (emission) wavelengths respectively, whereas the scattering meter (in-water centroid angle of 124 deg) is configured to measure backscattering at 700 nm wavelength. The instrument is part of the Environmental Characterization Optics (ECO) series tools, manufactured by Sea-Bird Scientific. 
| FLUOROMETER_CHLA,FLUOROMETER_CHLA435, BACKSCATTERINGMETER_BBP | ECO | 2022-11-21 | added by added by TC and validated by BGC-Argo diff --git a/file_checker_spec/ref_table-41 b/file_checker_spec/ref_table-41 index 414374e..e66b2ba 100644 --- a/file_checker_spec/ref_table-41 +++ b/file_checker_spec/ref_table-41 @@ -1,23 +1,21 @@ // $Revision: 0001 $ -// $Date: 2025-13-06 +// $Date: 2025-23-09 //CODNAM - -AOML Hurricane ALAMO Floats Argo ARGENTINA Argo AUSTRALIA -Argo AUSTRALIA eq. +Argo eq. AUSTRALIA Argo AWI -Argo BGC, AOML +Argo BGC - AOML Argo BRAZIL Argo BSH Argo CANADA Argo CANADA Dalhousie University -Argo CANADA eq. +Argo eq. CANADA Argo CANADA ONC Argo CHILE Argo CHINA -Argo Colombia +Argo COLOMBIA Argo COSTA RICA Argo Deep, CHINA Argo DENMARK @@ -42,7 +40,6 @@ Argo eq. NRIFS Argo eq. OIST Argo eq. ORI Argo eq. PMEL -Argo eq. POMME Argo eq. SAGE Argo eq. TNFRI Argo eq. TSK @@ -59,9 +56,9 @@ Argo GERMANY 2025 Argo GO-BGC, SIO Argo GO-BGC, UW Argo GO-BGC, WHOI -Argo GOM-BOEM eq. +Argo eq. GOM-BOEM Argo Greenland Ocean Observations -Argo HU eq. +Argo eq. HU Argo IFM-GEOMAR Argo INDIA Argo INDONESIA @@ -73,7 +70,7 @@ Argo KENYA Argo KIOST Argo LEBANON Argo MAURITIUS -Argo MEX-CO-US +Argo MEXICO-US Argo MEXICO Argo MOROCCO Argo NETHERLANDS @@ -83,55 +80,38 @@ Argo NORWAY Argo PERU Argo PMEL Argo POLAND -Argo Portugal +Argo PORTUGAL Argo ROMANIA Argo RUSSIA Argo SAUDI ARABIA Argo SIO -Argo SIO eq (ASIRI) -Argo SIO .eq. (BGCSOLONOPP) -Argo SIO eq. (OKMC) -Argo SIO eq. (SBE61NOPP) +Argo eq. SIO (ASIRI) +Argo eq. SIO (BGCSOLONOPP) +Argo eq. SIO (OKMC) +Argo eq. SIO (SBE61NOPP) Argo SOUTH AFRICA Argo SPAIN Argo SRI LANKA Argo TWR Engineering Argo UK Argo UK Bio -Argo UK eq. +Argo eq. UK Argo UW -Argo UW-APL eq. -Argo UW eq. -Argo UW-MBARI -Argo UW-SPURS eq. -Argo UW-TPOS eq. -Argo UW-UA eq. +Argo eq. UW, APL +Argo eq. UW +Argo UW, MBARI +Argo eq. UW (SPURS) +Argo eq. UW (TPOS) +Argo eq. UW (UA) Argo WHOI -Argo WHOI eq. -Argo WHOI eq. IR -Argo WHOI-Loop Current eq. -Argo WHOI-MRV eq. -Argo WHOI-SODA eq. -AtlantOS_H2020 -BioArgo UMaine -BulArgo -Coriolis -Coriolis-BIOArgo -Coriolis-CONGAS -Coriolis-DRAKE -Coriolis-EGYPT -Coriolis-FLOPS -Coriolis-FLOSTRAL -Coriolis-FNOB-JCOMMOPS -Coriolis-FRONTALIS -Coriolis-Good Hope -Coriolis-OVIDE -Coriolis-PIRATA -Coriolis-Previmer -Coriolis-PROSAT -Coriolis-remOcean -Coriolis-SPICE -Coriolis-TRACK +Argo eq. WHOI +Argo eq. WHOI (IR) +Argo eq. WHOI (LOOP CURRENT) +Argo eq. WHOI, MRV +Argo eq. WHOI (SODA) +AtlantOS H2020 +Argo Bio UMaine +Argo Bulgaria DEKOSIM E-AIMS Euro-Argo @@ -146,18 +126,17 @@ MOCCA-IT MOCCA-NETH MOCCA-POL NAOS-Canada -NAOS-France -REFINE -SOCLIM -Coriolis-EGEE Argo GREECE -Argo UW-SOCCOM +Argo UW (SOCCOM) Argo BRAZIL Navy -Argo SIO eq. (ARCTIC SOLO) -Euro-Argo RISE -Argo FPAII-GMMC +Argo eq. SIO (ARCTIC SOLO) +Euro-Argo (RISE) Meridian Goodhope -Argo CHINA SOA -Coriolis-CANOA -Argo Ukraine -Argo Sweden +Argo CHINA, SOA +Coriolis (CANOA) +Argo UKRAINE +Argo SWEDEN +Argo FRANCE +Argo eq. 
FRANCE +ITINERIS +Argo AOML \ No newline at end of file diff --git a/run-file-checker-linux.sh b/run-file-checker-linux.sh new file mode 100755 index 0000000..1d28621 --- /dev/null +++ b/run-file-checker-linux.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +# TODO : add when container can run as non root +# export DOCKER_UID=$UID +# export DOCKER_GID=$(id -g $UID) + +docker compose --env-file .env.demo.bodc down +docker compose --env-file .env.demo.bodc up \ No newline at end of file diff --git a/run-file-checker-windows.bat b/run-file-checker-windows.bat new file mode 100644 index 0000000..e00511f --- /dev/null +++ b/run-file-checker-windows.bat @@ -0,0 +1,5 @@ +@echo off +REM Docker Compose script for Windows + +docker compose --env-file .env.demo.bodc down +docker compose --env-file .env.demo.bodc up \ No newline at end of file
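Note on running the demo: both launcher scripts above hard-code the .env.demo.bodc configuration. Below is a minimal, illustrative wrapper (not part of this change set) that takes the env file as an optional argument; the script name, argument handling, and default value are assumptions made for the example, while the docker compose invocations simply mirror the launchers above.

#!/bin/bash
# Illustrative sketch only (not part of this change set): run the file checker
# with a configurable env file, defaulting to the bodc demo configuration
# used by run-file-checker-linux.sh.
ENV_FILE="${1:-.env.demo.bodc}"

# Stop any previous run for this configuration, then start a fresh one.
docker compose --env-file "$ENV_FILE" down
docker compose --env-file "$ENV_FILE" up

Saved as a hypothetical run-file-checker.sh, invoking it with no argument behaves like the Linux launcher above, while passing a different env file path reuses the same compose setup against another input/output configuration.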