diff --git a/.github/workflows/cmake-ctest.yml b/.github/workflows/cmake-ctest.yml index 36b6e4f6..c0b00096 100644 --- a/.github/workflows/cmake-ctest.yml +++ b/.github/workflows/cmake-ctest.yml @@ -8,6 +8,11 @@ on: description: "The hdf5 base name of the binaries" required: true type: string + snap_name: + description: 'The name in the source tarballs' + type: string + required: false + default: hdfsrc file_base: description: "The common base name of the source tarballs" required: true @@ -35,17 +40,29 @@ jobs: - name: Install Dependencies (Windows) run: choco install ninja - - uses: actions/checkout@v3 - - name: Enable Developer Command Prompt - uses: ilammy/msvc-dev-cmd@v1.12.1 + uses: ilammy/msvc-dev-cmd@v1.13.0 + + - name: Set file base name (Windows) + id: set-file-base + run: | + FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") + echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT + if [[ '${{ inputs.use_environ }}' == 'release' ]] + then + SOURCE_NAME_BASE=$(echo "${{ inputs.snap_name }}") + else + SOURCE_NAME_BASE=$(echo "hdfsrc") + fi + echo "SOURCE_BASE=$SOURCE_NAME_BASE" >> $GITHUB_OUTPUT + shell: bash - name: Get hdf5 release if: ${{ (inputs.use_environ == 'snapshots') }} uses: dsaltares/fetch-gh-release-asset@master with: repo: 'HDFGroup/hdf5' - version: 'tags/snapshot' + version: 'tags/snapshot-1.14' file: '${{ inputs.use_hdf }}-win-vs2022_cl.zip' - name: Get hdf5 release @@ -87,21 +104,7 @@ jobs: - name: List files for the binaries (Win) run: | - ls -l ${{ github.workspace }}/HDF_Group/HDF5 - - - name: Set file base name (Windows) - id: set-file-base - run: | - FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") - echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT - if [[ '${{ inputs.use_environ }}' == 'snapshots' ]] - then - SOURCE_NAME_BASE=$(echo "hdfsrc") - else - SOURCE_NAME_BASE=$(echo "$FILE_NAME_BASE") - fi - echo "SOURCE_BASE=$SOURCE_NAME_BASE" >> $GITHUB_OUTPUT - shell: bash + ls -l ${{ github.workspace }}/HDF_Group/HDF5 # Get files created by 
release script - name: Get zip-tarball (Windows) @@ -139,7 +142,7 @@ jobs: mkdir "${{ runner.workspace }}/build" mkdir "${{ runner.workspace }}/build/hdf5_plugins" Copy-Item -Path ${{ runner.workspace }}/hdf5_plugins/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING -Destination ${{ runner.workspace }}/build/hdf5_plugins/ - Copy-Item -Path ${{ runner.workspace }}/hdf5_plugins/hdfsrc/README.md -Destination ${{ runner.workspace }}/build/hdf5_plugins/ + Copy-Item -Path ${{ runner.workspace }}/hdf5_plugins/build/${{ inputs.preset_name }}-MSVC/README.txt -Destination ${{ runner.workspace }}/build/hdf5_plugins/ Copy-Item -Path ${{ runner.workspace }}/hdf5_plugins/build/${{ inputs.preset_name }}-MSVC/* -Destination ${{ runner.workspace }}/build/hdf5_plugins/ -Include *.zip cd "${{ runner.workspace }}/build" 7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip hdf5_plugins @@ -166,16 +169,29 @@ jobs: runs-on: ubuntu-latest steps: - name: Install CMake Dependencies (Linux) - run: sudo apt-get install ninja-build doxygen graphviz + run: | + sudo apt-get update + sudo apt-get install ninja-build - - uses: actions/checkout@v3 + - name: Set file base name (Linux) + id: set-file-base + run: | + FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") + echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT + if [[ '${{ inputs.use_environ }}' == 'release' ]] + then + SOURCE_NAME_BASE=$(echo "${{ inputs.snap_name }}") + else + SOURCE_NAME_BASE=$(echo "hdfsrc") + fi + echo "SOURCE_BASE=$SOURCE_NAME_BASE" >> $GITHUB_OUTPUT - name: Get hdf5 release if: ${{ (inputs.use_environ == 'snapshots') }} uses: dsaltares/fetch-gh-release-asset@master with: repo: 'HDFGroup/hdf5' - version: 'tags/snapshot' + version: 'tags/snapshot-1.14' file: '${{ inputs.use_hdf }}-ubuntu-2204_gcc.tar.gz' - name: Get hdf5 release @@ -188,8 +204,8 @@ jobs: - name: List files for the space (Linux) run: | - ls -l ${{ github.workspace }} - ls ${{ runner.workspace }} + ls -l ${{ github.workspace }} + ls 
${{ runner.workspace }} - name: Uncompress gh binary (Linux) run: tar -zxvf ${{ github.workspace }}/${{ inputs.use_hdf }}-ubuntu-2204_gcc.tar.gz @@ -208,20 +224,7 @@ jobs: - name: List files for the binaries (Linux) run: | - ls -l ${{ github.workspace }}/hdf5/HDF_Group/HDF5 - - - name: Set file base name (Linux) - id: set-file-base - run: | - FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") - echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT - if [[ '${{ inputs.use_environ }}' == 'snapshots' ]] - then - SOURCE_NAME_BASE=$(echo "hdfsrc") - else - SOURCE_NAME_BASE=$(echo "$FILE_NAME_BASE") - fi - echo "SOURCE_BASE=$SOURCE_NAME_BASE" >> $GITHUB_OUTPUT + ls -l ${{ github.workspace }}/hdf5/HDF_Group/HDF5 # Get files created by release script - name: Get tgz-tarball (Linux) @@ -252,7 +255,7 @@ jobs: mkdir "${{ runner.workspace }}/build" mkdir "${{ runner.workspace }}/build/hdf5_plugins" cp ${{ runner.workspace }}/hdf5_plugins/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING ${{ runner.workspace }}/build/hdf5_plugins - cp ${{ runner.workspace }}/hdf5_plugins/hdfsrc/README.md ${{ runner.workspace }}/build/hdf5_plugins + cp ${{ runner.workspace }}/hdf5_plugins/build/${{ inputs.preset_name }}-GNUC/README.txt ${{ runner.workspace }}/build/hdf5_plugins cp ${{ runner.workspace }}/hdf5_plugins/build/${{ inputs.preset_name }}-GNUC/*.tar.gz ${{ runner.workspace }}/build/hdf5_plugins cd "${{ runner.workspace }}/build" tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz hdf5_plugins @@ -275,19 +278,30 @@ jobs: # MacOS w/ Clang + CMake # name: "MacOS Clang CMake" - runs-on: macos-11 + runs-on: macos-13 steps: - name: Install Dependencies (MacOS) - run: brew install ninja doxygen + run: brew install ninja - - uses: actions/checkout@v3 + - name: Set file base name (MacOS) + id: set-file-base + run: | + FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") + echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT + if [[ '${{ inputs.use_environ }}' == 'release' ]] 
+ then + SOURCE_NAME_BASE=$(echo "${{ inputs.snap_name }}") + else + SOURCE_NAME_BASE=$(echo "hdfsrc") + fi + echo "SOURCE_BASE=$SOURCE_NAME_BASE" >> $GITHUB_OUTPUT - name: Get hdf5 release if: ${{ (inputs.use_environ == 'snapshots') }} uses: dsaltares/fetch-gh-release-asset@master with: repo: 'HDFGroup/hdf5' - version: 'tags/snapshot' + version: 'tags/snapshot-1.14' file: '${{ inputs.use_hdf }}-osx12.tar.gz' - name: Get hdf5 release @@ -300,8 +314,8 @@ jobs: - name: List files for the space (MacOS) run: | - ls -l ${{ github.workspace }} - ls ${{ runner.workspace }} + ls -l ${{ github.workspace }} + ls ${{ runner.workspace }} - name: Uncompress gh binary (MacOS) run: tar -zxvf ${{ github.workspace }}/${{ inputs.use_hdf }}-osx12.tar.gz @@ -320,20 +334,7 @@ jobs: - name: List files for the binaries (MacOS) run: | - ls -l ${{ github.workspace }}/hdf5/HDF_Group/HDF5 - - - name: Set file base name (MacOS) - id: set-file-base - run: | - FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") - echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT - if [[ '${{ inputs.use_environ }}' == 'snapshots' ]] - then - SOURCE_NAME_BASE=$(echo "hdfsrc") - else - SOURCE_NAME_BASE=$(echo "$FILE_NAME_BASE") - fi - echo "SOURCE_BASE=$SOURCE_NAME_BASE" >> $GITHUB_OUTPUT + ls -l ${{ github.workspace }}/hdf5/HDF_Group/HDF5 # Get files created by release script - name: Get tgz-tarball (MacOS) @@ -365,7 +366,7 @@ jobs: mkdir "${{ runner.workspace }}/build" mkdir "${{ runner.workspace }}/build/hdf5_plugins" cp ${{ runner.workspace }}/hdf5_plugins/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING ${{ runner.workspace }}/build/hdf5_plugins - cp ${{ runner.workspace }}/hdf5_plugins/hdfsrc/README.md ${{ runner.workspace }}/build/hdf5_plugins + cp ${{ runner.workspace }}/hdf5_plugins/build/${{ inputs.preset_name }}-Clang/README.txt ${{ runner.workspace }}/build/hdf5_plugins cp ${{ runner.workspace }}/hdf5_plugins/build/${{ inputs.preset_name }}-Clang/*.tar.gz ${{ runner.workspace }}/build/hdf5_plugins 
cd "${{ runner.workspace }}/build" tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-osx12.tar.gz hdf5_plugins @@ -383,4 +384,3 @@ jobs: name: tgz-osx12-binary path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-osx12.tar.gz if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` - diff --git a/.github/workflows/daily-build.yml b/.github/workflows/daily-build.yml index 97623b20..d290fe17 100644 --- a/.github/workflows/daily-build.yml +++ b/.github/workflows/daily-build.yml @@ -8,13 +8,8 @@ on: type: string required: false default: check - use_environ: - description: 'Environment to locate files' - type: string - required: false - default: snapshots pull_request: - branches: [ "master" ] + branches: [ "release/1_14_4" ] permissions: contents: read @@ -22,12 +17,12 @@ permissions: # A workflow run is made up of one or more jobs that can run sequentially or # in parallel. jobs: - get-base-names: + get-old-names: runs-on: ubuntu-latest outputs: hdf5-name: ${{ steps.gethdf5base.outputs.HDF5_NAME_BASE }} + plugin-name: ${{ steps.getpluginbase.outputs.PLUGIN_NAME_BASE }} run-ignore: ${{ steps.getinputs.outputs.INPUTS_IGNORE }} - run-environ: ${{ steps.getinputs.outputs.INPUTS_ENVIRON }} steps: - uses: actions/checkout@v4.1.1 @@ -36,44 +31,56 @@ jobs: uses: dsaltares/fetch-gh-release-asset@master with: repo: 'HDFGroup/hdf5' - version: 'tags/snapshot' + version: 'tags/snapshot-1.14' file: 'last-file.txt' - - name: Read base-name file + - name: Read HDF5 base-name file id: gethdf5base run: echo "HDF5_NAME_BASE=$(cat last-file.txt)" >> $GITHUB_OUTPUT - run: echo "hdf5 base name is ${{ steps.gethdf5base.outputs.HDF5_NAME_BASE }}." 
+ - name: Get plugin release base name + uses: dsaltares/fetch-gh-release-asset@master + with: + repo: 'HDFGroup/hdf5_plugins' + version: 'tags/snapshot-1.14' + file: 'last-file.txt' + continue-on-error: true + + - name: Read base-name file + id: getpluginbase + run: echo "PLUGIN_NAME_BASE=$(cat last-file.txt)" >> $GITHUB_OUTPUT + + - run: echo "plugin base name is ${{ steps.getpluginbase.outputs.PLUGIN_NAME_BASE }}." + - name: Read inputs id: getinputs run: | echo "INPUTS_IGNORE=${{ ((github.event.inputs.use_ignore == '' && github.event.inputs.use_ignore) || 'ignore') }}" >> $GITHUB_OUTPUT - echo "INPUTS_ENVIRON=${{ ((github.event.inputs.use_environ == '' && github.event.inputs.use_environ) || 'snapshots') }}" >> $GITHUB_OUTPUT - run: echo "use_ignore is ${{ steps.getinputs.outputs.INPUTS_IGNORE }}." - - run: echo "use_environ is ${{ steps.getinputs.outputs.INPUTS_ENVIRON }}." - call-workflow-tarball: - needs: get-base-names + needs: [get-old-names] uses: ./.github/workflows/tarball.yml with: - use_ignore: ${{ needs.get-base-names.outputs.run-ignore }} - use_environ: ${{ needs.get-base-names.outputs.run-environ }} + use_tag: snapshot-1.14 + use_environ: snapshots call-workflow-ctest: - needs: [get-base-names, call-workflow-tarball] + needs: [get-old-names, call-workflow-tarball] uses: ./.github/workflows/cmake-ctest.yml with: - file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} preset_name: ci-StdShar - use_hdf: ${{ needs.get-base-names.outputs.hdf5-name }} - use_environ: ${{ needs.get-base-names.outputs.run-environ }} - if: ${{ ((needs.get-base-names.outputs.run-environ == 'snapshots') && ((needs.call-workflow-tarball.outputs.has_changes == 'true') || (needs.get-base-names.outputs.run-ignore == 'ignore'))) || (needs.get-base-names.outputs.run-environ == 'release') }} + file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} + use_hdf: ${{ needs.get-old-names.outputs.hdf5-name }} +# use_tag: snapshot-1.14 + use_environ: snapshots + if: ${{ 
((needs.call-workflow-tarball.outputs.has_changes == 'true') || (needs.get-old-names.outputs.run-ignore == 'ignore')) }} call-workflow-release: - needs: [get-base-names, call-workflow-tarball, call-workflow-ctest] + needs: [get-old-names, call-workflow-tarball, call-workflow-ctest] permissions: contents: write # In order to allow tag creation uses: ./.github/workflows/release-files.yml @@ -81,7 +88,18 @@ jobs: file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} file_branch: ${{ needs.call-workflow-tarball.outputs.file_branch }} file_sha: ${{ needs.call-workflow-tarball.outputs.file_sha }} - use_tag: snapshot - use_environ: ${{ needs.get-base-names.outputs.run-environ }} - if: ${{ ((needs.get-base-names.outputs.run-environ == 'snapshots') && ((needs.call-workflow-tarball.outputs.has_changes == 'true') || (needs.get-base-names.outputs.run-ignore == 'ignore'))) || (needs.get-base-names.outputs.run-environ == 'release') }} + use_tag: snapshot-1.14 + use_environ: snapshots + if: ${{ ((needs.call-workflow-tarball.outputs.has_changes == 'true') || (needs.get-old-names.outputs.run-ignore == 'ignore')) }} + + call-workflow-remove: + needs: [get-old-names, call-workflow-tarball, call-workflow-ctest, call-workflow-release] + permissions: + contents: write # In order to allow file deletion + uses: ./.github/workflows/remove-files.yml + with: + file_base: ${{ needs.get-old-names.outputs.plugin-name }} + use_tag: snapshot-1.14 + use_environ: snapshots + if: ${{ ((needs.call-workflow-tarball.outputs.has_changes == 'true') || (needs.get-old-names.outputs.run-ignore == 'ignore')) }} diff --git a/.github/workflows/release-files.yml b/.github/workflows/release-files.yml index c4f724e0..c4d16efb 100644 --- a/.github/workflows/release-files.yml +++ b/.github/workflows/release-files.yml @@ -8,7 +8,7 @@ on: description: 'Release version tag' type: string required: false - default: snapshot + default: snapshot-1.14 use_environ: description: 'Environment to locate files'
type: string @@ -55,7 +55,7 @@ jobs: commit_sha: ${{ inputs.file_sha }} tag: "${{ inputs.use_tag }}" force_push_tag: true - message: "Latest snapshot" + message: "Latest snapshot-1.14" if: ${{ inputs.use_environ == 'snapshots' }} # Print result using the action output. @@ -109,21 +109,20 @@ jobs: - name: Create sha256 sums for files run: | - sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz >> sha256sums.txt - sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}.zip >> sha256sums.txt - sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-osx12.tar.gz >> sha256sums.txt - sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz >> sha256sums.txt - sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip >> sha256sums.txt + sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt + sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}.zip >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt + sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-osx12.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt + sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt + sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt - name: Store snapshot name - if: ${{ (inputs.use_environ == 'snapshots') }} run: | echo "${{ steps.get-file-base.outputs.FILE_BASE }}" > ./last-file.txt - name: PreRelease tag id: create_prerelease if: ${{ (inputs.use_environ == 'snapshots') }} - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 + uses: softprops/action-gh-release@9d7c94cfd0a1f3ed45544c887983e9fa900f0564 # v2.0.4 with: tag_name: "${{ inputs.use_tag }}" prerelease: true @@ -134,13 +133,13 @@ jobs: ${{ steps.get-file-base.outputs.FILE_BASE 
}}-osx12.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip - sha256sums.txt + ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` - name: Release tag id: create_release if: ${{ (inputs.use_environ == 'release') }} - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 + uses: softprops/action-gh-release@9d7c94cfd0a1f3ed45544c887983e9fa900f0564 # v2.0.4 with: tag_name: "${{ inputs.use_tag }}" prerelease: false @@ -150,7 +149,7 @@ jobs: ${{ steps.get-file-base.outputs.FILE_BASE }}-osx12.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip - sha256sums.txt + ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` - name: List files for the space (Linux) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 8e1c6ca8..d883c5dc 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,24 +1,18 @@ -name: hdf5 plugins release build +name: hdf5 1.14 plugins release build -# Controls when the action will run. 
Triggers the workflow on a manual run on: workflow_dispatch: inputs: - use_tag: + hdf_tag: description: 'Release hdf5 version tag' type: string required: false - default: snapshot - use_name: - description: 'Plugin Release Name' - type: string - required: false - default: hdf5_plugins-snapshot - use_environ: - description: 'Environment to locate files' + default: snapshot-1.14 + use_tag: + description: 'Release version tag' type: string required: false - default: snapshots + default: snapshot-1.14 permissions: contents: read @@ -29,83 +23,44 @@ jobs: log-the-inputs: runs-on: ubuntu-latest outputs: + hdf_tag: ${{ steps.get-tag-name.outputs.HDF_TAG }} rel_tag: ${{ steps.get-tag-name.outputs.RELEASE_TAG }} - rel_name: ${{ steps.get-tag-name.outputs.RELEASE_NAME }} - run-environ: ${{ steps.get-tag-name.outputs.INPUTS_ENVIRON }} - steps: - name: Get tag name id: get-tag-name env: + HDFTAG: ${{ inputs.hdf_tag }} TAG: ${{ inputs.use_tag }} - TAG_NAME: ${{ inputs.use_name }} run: | + echo "HDF_TAG=$HDFTAG" >> $GITHUB_OUTPUT echo "RELEASE_TAG=$TAG" >> $GITHUB_OUTPUT - echo "RELEASE_NAME=$TAG_NAME" >> $GITHUB_OUTPUT - echo "INPUTS_ENVIRON=${{ ((github.event.inputs.use_environ == '' && github.event.inputs.use_environ) || 'snapshots') }}" >> $GITHUB_OUTPUT - create-files-ctest: + call-workflow-tarball: needs: log-the-inputs - runs-on: ubuntu-latest - outputs: - file_base: ${{ steps.set-file-base.outputs.FILE_BASE }} - steps: - - name: Set file base name - id: set-file-base - run: | - FILE_NAME_BASE=$(echo "${{ needs.log-the-inputs.outputs.rel_name }}") - echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT - shell: bash - - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - name: Get Sources - uses: actions/checkout@v3 - with: - path: hdfsrc - - - name: Zip Folder - run: | - zip -r ${{ steps.set-file-base.outputs.FILE_BASE }}.zip ./hdfsrc - tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}.tar.gz ./hdfsrc - - - name: List files in the 
repository - run: | - ls -l ${{ github.workspace }} - ls $GITHUB_WORKSPACE - - # Save files created by release script - - name: Save tgz-tarball - uses: actions/upload-artifact@v4 - with: - name: tgz-tarball - path: ${{ steps.set-file-base.outputs.FILE_BASE }}.tar.gz - if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` - - - name: Save zip-tarball - uses: actions/upload-artifact@v4 - with: - name: zip-tarball - path: ${{ steps.set-file-base.outputs.FILE_BASE }}.zip - if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` + uses: ./.github/workflows/tarball.yml + with: + use_tag: 1.14.4 + use_environ: release call-workflow-ctest: - needs: [log-the-inputs, create-files-ctest] + needs: [log-the-inputs, call-workflow-tarball] uses: ./.github/workflows/cmake-ctest.yml with: - file_base: ${{ needs.create-files-ctest.outputs.file_base }} preset_name: ci-StdShar - use_hdf: ${{ needs.log-the-inputs.outputs.rel_tag }} - use_environ: ${{ needs.log-the-inputs.outputs.run-environ }} + file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} + use_hdf: ${{ needs.log-the-inputs.outputs.hdf_tag }} + snap_name: hdf5_plugins-${{ needs.call-workflow-tarball.outputs.source_base }} + use_environ: release call-workflow-release: - needs: [log-the-inputs, create-files-ctest, call-workflow-ctest] + needs: [log-the-inputs, call-workflow-tarball, call-workflow-ctest] permissions: contents: write # In order to allow tag creation uses: ./.github/workflows/release-files.yml with: - file_base: ${{ needs.create-files-ctest.outputs.file_base }} - file_branch: ${{ needs.log-the-inputs.outputs.rel_name }} - file_sha: ${{ needs.log-the-inputs.outputs.rel_tag }} + file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} + file_branch: ${{ needs.call-workflow-tarball.outputs.file_branch }} + file_sha: ${{ needs.call-workflow-tarball.outputs.file_sha }} use_tag: ${{ needs.log-the-inputs.outputs.rel_tag }} - use_environ: ${{ 
needs.log-the-inputs.outputs.run-environ }} + use_environ: release diff --git a/.github/workflows/remove-files.yml b/.github/workflows/remove-files.yml new file mode 100644 index 00000000..ac7c9e04 --- /dev/null +++ b/.github/workflows/remove-files.yml @@ -0,0 +1,54 @@ +name: hdf5 plugins remove-files + +# Controls when the action will run. Triggers the workflow on a schedule +on: + workflow_call: + inputs: + use_tag: + description: 'Release version tag' + type: string + required: false + default: snapshot-1.14 + use_environ: + description: 'Environment to locate files' + type: string + required: true + default: snapshots + file_base: + description: "The common base name of the source tarballs" + required: true + type: string + +# Minimal permissions to be inherited by any job that doesn't declare its own permissions +permissions: + contents: read + +# Previous workflows must pass to get here so tag the commit that created the files +jobs: + PreRelease-delfiles: + runs-on: ubuntu-latest + environment: ${{ inputs.use_environ }} + permissions: + contents: write + steps: + - name: Get file base name + id: get-file-base + run: | + FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") + echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT + + - name: PreRelease delete from tag + id: delete_prerelease + if: ${{ (inputs.use_environ == 'snapshots') }} + uses: mknejp/delete-release-assets@v1 + with: + token: ${{ github.token }} + tag: "${{ inputs.use_tag }}" + assets: | + ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}.zip + ${{ steps.get-file-base.outputs.FILE_BASE }}-osx12.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip + ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt + \ No newline at end of file diff --git a/.github/workflows/tarball.yml b/.github/workflows/tarball.yml index 85f8b90a..eccbd83f 100644 --- 
a/.github/workflows/tarball.yml +++ b/.github/workflows/tarball.yml @@ -4,11 +4,11 @@ name: hdf5 plugins tarball on: workflow_call: inputs: - use_ignore: - description: 'Ignore has changes check' + use_tag: + description: 'Release version tag' type: string required: false - default: check + default: snapshot-1.14 use_environ: description: 'Environment to locate files' type: string @@ -18,6 +18,9 @@ on: has_changes: description: "Whether there were changes the previous day" value: ${{ jobs.check_commits.outputs.has_changes }} + source_base: + description: "The common base name of the source tarballs" + value: ${{ jobs.create_tarball.outputs.source_base }} file_base: description: "The common base name of the source tarballs" value: ${{ jobs.create_tarball.outputs.file_base }} @@ -66,7 +69,7 @@ jobs: with: seconds: 86400 # One day in seconds branch: '${{ steps.get-branch-name.outputs.branch_ref }}' - if: ${{ (inputs.use_environ == 'snapshots' && inputs.use_ignore == 'check') }} + if: ${{ inputs.use_environ == 'snapshots' }} - run: echo "You have ${{ steps.check-new-commits.outputs.new-commits-number }} new commit(s) in ${{ steps.get-branch-name.outputs.BRANCH_REF }} ✅!" 
if: ${{ steps.check-new-commits.outputs.has-new-commits == 'true' }} @@ -77,9 +80,10 @@ jobs: name: Create a source tarball runs-on: ubuntu-latest needs: check_commits - if: ${{ ((inputs.use_environ == 'snapshots') && ((needs.check_commits.outputs.has_changes == 'true') || (inputs.use_ignore == 'ignore'))) || (inputs.use_environ == 'release') }} + if: ${{ ((inputs.use_environ == 'snapshots') && (needs.check_commits.outputs.has_changes == 'true')) || (inputs.use_environ == 'release') }} outputs: file_base: ${{ steps.set-file-base.outputs.FILE_BASE }} + source_base: ${{ steps.version.outputs.SOURCE_TAG }} steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - name: Get Sources @@ -91,16 +95,21 @@ jobs: id: version run: | cd "$GITHUB_WORKSPACE/hdfsrc" - echo "TAG_VERSION=master" >> $GITHUB_OUTPUT + echo "SOURCE_TAG=${{ inputs.use_tag }}" >> $GITHUB_OUTPUT - name: Set file base name id: set-file-base run: | - if [[ '${{ inputs.use_environ }}' == 'snapshots' && '${{ needs.check_commits.outputs.has_changes }}' == 'true' ]] + if [[ '${{ inputs.use_environ }}' == 'snapshots' ]] then FILE_NAME_BASE=$(echo "hdf5_plugins-${{ needs.check_commits.outputs.branch_ref }}-${{ needs.check_commits.outputs.branch_sha }}") else - FILE_NAME_BASE=$(echo "hdf5_plugins-${{ steps.version.outputs.TAG_VERSION }}") + if [[ '${{ inputs.use_tag }}' == 'snapshot-1.14' ]] + then + FILE_NAME_BASE=$(echo "snapshot-1.14") + else + FILE_NAME_BASE=$(echo "hdf5_plugins-${{ steps.version.outputs.SOURCE_TAG }}") + fi fi echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT shell: bash @@ -112,11 +121,21 @@ jobs: - name: Create source file (tgz and zip) id: create-files + if: ${{ (inputs.use_environ == 'snapshots') }} run: | zip -r ${{ steps.set-file-base.outputs.FILE_BASE }}.zip ./hdfsrc tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}.tar.gz ./hdfsrc shell: bash + - name: Create release source file (tgz and zip) + id: create-rel-files + if: ${{ (inputs.use_environ == 
'release') }} + run: | + mv hdfsrc ${{ steps.set-file-base.outputs.FILE_BASE }} + zip -r ${{ steps.set-file-base.outputs.FILE_BASE }}.zip ./${{ steps.set-file-base.outputs.FILE_BASE }} + tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}.tar.gz ./${{ steps.set-file-base.outputs.FILE_BASE }} + shell: bash + - name: List files in the repository run: | ls -l ${{ github.workspace }} diff --git a/Building.txt b/Building.txt index 0610fad6..a668dd70 100644 --- a/Building.txt +++ b/Building.txt @@ -1,4 +1,4 @@ -HDF5 plugins +HDF5 1.14.4 plugins Building these filter/example requires knowledge of the hdf5 and the compression library installation. Out-of-source build process is expected. diff --git a/CMakeLists.txt b/CMakeLists.txt index c8c89a21..7039d805 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -125,14 +125,16 @@ endif () # LZ4 filter if (NOT MINGW) - if (WIN32 AND MSVC_VERSION GREATER_EQUAL 1930) - if (NOT CMAKE_C_COMPILER_ID MATCHES "[Cc]lang" AND NOT CMAKE_C_COMPILER_ID MATCHES "Intel[Ll][Ll][Vv][Mm]") - FILTER_OPTION (LZ4) + if (WIN32) + if (NOT CMAKE_C_COMPILER_ID MATCHES "Intel[Ll][Ll][Vv][Mm]") + if (MSVC_VERSION GREATER_EQUAL 1930 AND NOT CMAKE_C_COMPILER_ID MATCHES "[Cc]lang") + FILTER_OPTION (LZ4) + else () + set (ENABLE_LZ4 OFF CACHE BOOL "" FORCE) + endif () else () set (ENABLE_LZ4 OFF CACHE BOOL "" FORCE) endif () - elseif (WIN32 AND NOT CMAKE_C_COMPILER_ID MATCHES "Intel[Ll][Ll][Vv][Mm]") - FILTER_OPTION (LZ4) else () FILTER_OPTION (LZ4) #set (ENABLE_LZ4 OFF CACHE BOOL "" FORCE) diff --git a/CMakePresets.json b/CMakePresets.json index 50ed4972..423f3754 100644 --- a/CMakePresets.json +++ b/CMakePresets.json @@ -10,8 +10,8 @@ "inherits": "ci-base", "cacheVariables": { "H5PL_VERS_MAJOR": {"type": "STRING", "value": "1"}, - "H5PL_VERS_MINOR": {"type": "STRING", "value": "15"}, - "H5PL_VERS_RELEASE": {"type": "STRING", "value": "0"}, + "H5PL_VERS_MINOR": {"type": "STRING", "value": "14"}, + "H5PL_VERS_RELEASE": {"type": "STRING", "value": "4"}, 
"H5PL_ALLOW_EXTERNAL_SUPPORT": {"type": "STRING", "value": "TGZ"}, "H5PL_COMP_TGZPATH": {"type": "STRING", "value": "${sourceDir}/libs"}, "H5PL_BUILD_TESTING": "ON", @@ -22,7 +22,7 @@ "name": "ci-base-plugins", "hidden": true, "cacheVariables": { - "PLUGIN_TGZ_NAME": {"type": "STRING", "value": "hdf5_plugins-master.tar.gz"}, + "PLUGIN_TGZ_NAME": {"type": "STRING", "value": "hdf5_plugins-1.14.4.tar.gz"}, "PLUGIN_PACKAGE_NAME": {"type": "STRING", "value": "pl"}, "PL_PACKAGE_NAME": "pl", "HDF5_NAMESPACE": {"type": "STRING", "value": "hdf5::"}, diff --git a/config/cmake/cacheinit.cmake b/config/cmake/cacheinit.cmake index befe8b2d..7aa58787 100644 --- a/config/cmake/cacheinit.cmake +++ b/config/cmake/cacheinit.cmake @@ -34,8 +34,8 @@ set_property (CACHE H5PL_ALLOW_EXTERNAL_SUPPORT PROPERTY STRINGS NO GIT TGZ) set (H5PL_GIT_URL "https://github.com/HDFGroup/h5plugin.git" CACHE STRING "Use plugins from HDF repository" FORCE) set (H5PL_GIT_BRANCH "master" CACHE STRING "" FORCE) -set (H5PL_TGZ_ORIGPATH "https://github.com/HDFGroup/hdf5_plugins/releases/download/snapshots" CACHE STRING "Use PLUGINS from original location" FORCE) -set (H5PL_TGZ_NAME "hdf5_plugins-master.tar.gz" CACHE STRING "Use plugins from compressed file" FORCE) +set (H5PL_TGZ_ORIGPATH "https://github.com/HDFGroup/hdf5_plugins/releases/download/snapshot-1.14" CACHE STRING "Use PLUGINS from original location" FORCE) +set (H5PL_TGZ_NAME "hdf5_plugins-1_14_4.tar.gz" CACHE STRING "Use plugins from compressed file" FORCE) set (PL_PACKAGE_NAME "pl" CACHE STRING "Name of plugins package" FORCE) set (H5PL_CPACK_ENABLE ON CACHE BOOL "Enable the CPACK include and components" FORCE) diff --git a/docs/RegisteredFilterPlugins.md b/docs/RegisteredFilterPlugins.md index 39b877b8..bff188fb 100644 --- a/docs/RegisteredFilterPlugins.md +++ b/docs/RegisteredFilterPlugins.md @@ -21,33 +21,677 @@ List of Filters Registered with The HDF Group --------------------------------------------- |Filter |Identifier Name |Short 
Description| |--------|----------------|---------------------| -|305 |LZO |LZO lossless compression used by PyTables| -|307 |BZIP2 |BZIP2 lossless compression used by PyTables| -|32000 |LZF |LZF lossless compression used by H5Py project| -|32001 |BLOSC |Blosc lossless compression used by PyTables| -|32002 |MAFISC |Modified LZMA compression filter, MAFISC (Multidimensional Adaptive Filtering Improved Scientific data Compression)| -|32003 |Snappy |Snappy lossless compression| -|32004 |LZ4 |LZ4 fast lossless compression algorithm| -|32005 |APAX |Samplify’s APAX Numerical Encoding Technology| -|32006 |CBF |All imgCIF/CBF compressions and decompressions, including Canonical, Packed, Packed Version 2, Byte Offset and Nibble Offset| -|32007 |JPEG-XR |Enables images to be compressed/decompressed with JPEG-XR compression| -|32008 |bitshuffle |Extreme version of shuffle filter that shuffles data at bit level instead of byte level| -|32009 |SPDP |SPDP fast lossless compression algorithm for single- and double-precision floating-point data| -|32010 |LPC-Rice |LPC-Rice multi-threaded lossless compression| -|32011 |CCSDS-123 |ESA CCSDS-123 multi-threaded compression filter| -|32012 |JPEG-LS |CharLS JPEG-LS multi-threaded compression filter| -|32013 |zfp |Lossy & lossless compression of floating point and integer datasets to meet rate, accuracy, and/or precision targets.| -|32014 |fpzip |Fast and Efficient Lossy or Lossless Compressor for Floating-Point Data| -|32015 |Zstandard |Real-time compression algorithm with wide range of compression / speed trade-off and fast decoder| -|32016 |B³D |GPU based image compression method developed for light-microscopy applications| -|32017 |SZ |An error-bounded lossy compressor for scientific floating-point data| -|32018 |FCIDECOMP |EUMETSAT CharLS compression filter for use with netCDF| -|32019 |JPEG |Jpeg compression filter| -|32020 |VBZ |Compression filter for raw dna signal data used by Oxford Nanopore| -|32021 |FAPEC | Versatile and 
efficient data compressor supporting many kinds of data and using an outlier-resilient entropy coder| -|32022 |BitGroom |The BitGroom quantization algorithm| -|32023 |Granular |BitRound (GBR) The GBG quantization algorithm is a significant improvement to the BitGroom filter| -|32024 |SZ3 |A modular error-bounded lossy compression framework for scientific datasets| -|32025 |Delta-Rice |Lossless compression algorithm optimized for digitized analog signals based on delta encoding and rice coding| -|32026 |BLOSC |The recent new-generation version of the Blosc compression library| -|32027 |FLAC |FLAC audio compression filter in HDF5| +|305 |LZO |LZO lossless compression used by PyTables| +|307 |BZIP2 |BZIP2 lossless compression used by PyTables| +|32000 |LZF |LZF lossless compression used by H5Py project| +|32001 |BLOSC |Blosc lossless compression used by PyTables| +|32002 |MAFISC |Modified LZMA compression filter, MAFISC (Multidimensional Adaptive Filtering Improved Scientific data Compression)| +|32003 |Snappy |Snappy lossless compression| +|32004 |LZ4 |LZ4 fast lossless compression algorithm| +|32005 |APAX |Samplify’s APAX Numerical Encoding Technology| +|32006 |CBF |All imgCIF/CBF compressions and decompressions, including Canonical, Packed, Packed Version 2, Byte Offset and Nibble Offset| +|32007 |JPEG-XR |Enables images to be compressed/decompressed with JPEG-XR compression| +|32008 |bitshuffle |Extreme version of shuffle filter that shuffles data at bit level instead of byte level| +|32009 |SPDP |SPDP fast lossless compression algorithm for single- and double-precision floating-point data| +|32010 |LPC-Rice |LPC-Rice multi-threaded lossless compression| +|32011 |CCSDS-123 |ESA CCSDS-123 multi-threaded compression filter| +|32012 |JPEG-LS |CharLS JPEG-LS multi-threaded compression filter| +|32013 |zfp |Lossy & lossless compression of floating point and integer datasets to meet rate, accuracy, and/or precision targets.| +|32014 |fpzip |Fast and Efficient Lossy or 
Lossless Compressor for Floating-Point Data| +|32015 |Zstandard |Real-time compression algorithm with wide range of compression / speed trade-off and fast decoder| +|32016 |B³D |GPU based image compression method developed for light-microscopy applications| +|32017 |SZ |An error-bounded lossy compressor for scientific floating-point data| +|32018 |FCIDECOMP |EUMETSAT CharLS compression filter for use with netCDF| +|32019 |JPEG |Jpeg compression filter| +|32020 |VBZ |Compression filter for raw dna signal data used by Oxford Nanopore| +|32021 |FAPEC | Versatile and efficient data compressor supporting many kinds of data and using an outlier-resilient entropy coder| +|32022 |BitGroom |The BitGroom quantization algorithm| +|32023 |Granular |BitRound (GBR) The GBG quantization algorithm is a significant improvement to the BitGroom filter| +|32024 |SZ3 |A modular error-bounded lossy compression framework for scientific datasets| +|32025 |Delta-Rice |Lossless compression algorithm optimized for digitized analog signals based on delta encoding and rice coding| +|32026 |BLOSC |The recent new-generation version of the Blosc compression library| +|32027 |FLAC |FLAC audio compression filter in HDF5| +|32028 |H5Z-SPERR |H5Z-SPERR is the HDF5 filter for SPERR| + + +##