diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index 7e6995d76d9..3dba7d52109 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -14,7 +14,7 @@ Thank you for contributing to the Dataverse Project through the creation of a bu
WARNING: If this is a security issue it should be reported privately to security@dataverse.org
More information on bug issues and contributions can be found in the "Contributing to Dataverse" page:
-https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md#bug-reportsissues
+https://guides.dataverse.org/en/latest/contributor/index.html
Please fill out as much of the template as you can.
Start below this comment section.
@@ -44,7 +44,6 @@ Start below this comment section.
**Any related open or closed issues to this bug report?**
-
**Screenshots:**
No matter the issue, screenshots are always welcome.
@@ -53,3 +52,7 @@ To add a screenshot, please use one of the following formats and/or methods desc
* https://help.github.com/en/articles/file-attachments-on-issues-and-pull-requests
*
+
+
+**Are you thinking about creating a pull request for this issue?**
+Help is always welcome! Is this bug something you or your organization plan to fix?
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
index d6248537418..7365cb4317c 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -1,7 +1,7 @@
---
name: Feature request
about: Suggest an idea or new feature for the Dataverse software!
-title: 'Feature Request/Idea:'
+title: 'Feature Request:'
labels: 'Type: Feature'
assignees: ''
@@ -11,7 +11,7 @@ assignees: ''
Thank you for contributing to the Dataverse Project through the creation of a feature request!
More information on ideas/feature requests and contributions can be found in the "Contributing to Dataverse" page:
-https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md#ideasfeature-requests
+https://guides.dataverse.org/en/latest/contributor/index.html
Please fill out as much of the template as you can.
Start below this comment section.
@@ -34,3 +34,6 @@ Start below this comment section.
**Any open or closed issues related to this feature request?**
+
+**Are you thinking about creating a pull request for this feature?**
+Help is always welcome! Is this feature something you or your organization plan to implement?
diff --git a/.github/ISSUE_TEMPLATE/idea_proposal.md b/.github/ISSUE_TEMPLATE/idea_proposal.md
new file mode 100644
index 00000000000..8cb6c7bfafe
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/idea_proposal.md
@@ -0,0 +1,40 @@
+---
+name: Idea proposal
+about: Propose a new idea for discussion to improve the Dataverse software!
+title: 'Suggestion:'
+labels: 'Type: Suggestion'
+assignees: ''
+
+---
+
+
+
+**Overview of the Suggestion**
+
+
+**What kind of user is the suggestion intended for?**
+(Example user roles: API User, Curator, Depositor, Guest, Superuser, Sysadmin)
+
+
+**What inspired this idea?**
+
+
+**What existing behavior do you want changed?**
+
+
+**Any brand new behavior you want to add to Dataverse?**
+
+
+**Any open or closed issues related to this suggestion?**
+
+
+**Are you thinking about creating a pull request for this issue?**
+Help is always welcome! Is this idea something you or your organization plan to implement?
diff --git a/.github/actions/setup-maven/action.yml b/.github/actions/setup-maven/action.yml
new file mode 100644
index 00000000000..4cf09f34231
--- /dev/null
+++ b/.github/actions/setup-maven/action.yml
@@ -0,0 +1,37 @@
+---
+name: "Setup Maven and Caches"
+description: "Determine Java version and setup Maven, including necessary caches."
+inputs:
+ git-reference:
+ description: 'The git reference (branch/tag) to check out'
+ required: false
+ default: '${{ github.ref }}'
+ pom-paths:
+ description: "List of paths to Maven POM(s) for cache dependency setup"
+ required: false
+ default: 'pom.xml'
+runs:
+ using: composite
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ inputs.git-reference }}
+ - name: Determine Java version by reading the Maven property
+ shell: bash
+ run: |
+        echo "JAVA_VERSION=$(grep '<target.java.version>' ${GITHUB_WORKSPACE}/modules/dataverse-parent/pom.xml | cut -f2 -d'>' | cut -f1 -d'<')" | tee -a ${GITHUB_ENV}
+ - name: Set up JDK ${{ env.JAVA_VERSION }}
+ id: setup-java
+ uses: actions/setup-java@v4
+ with:
+ java-version: ${{ env.JAVA_VERSION }}
+ distribution: 'temurin'
+ cache: 'maven'
+ cache-dependency-path: ${{ inputs.pom-paths }}
+ - name: Download common cache on branch cache miss
+ if: ${{ steps.setup-java.outputs.cache-hit != 'true' }}
+ uses: actions/cache/restore@v4
+ with:
+ key: dataverse-maven-cache
+ path: ~/.m2/repository
diff --git a/.github/workflows/check_property_files.yml b/.github/workflows/check_property_files.yml
new file mode 100644
index 00000000000..505310aab35
--- /dev/null
+++ b/.github/workflows/check_property_files.yml
@@ -0,0 +1,32 @@
+name: "Properties Check"
+on:
+ pull_request:
+ paths:
+ - "src/**/*.properties"
+ - "scripts/api/data/metadatablocks/*"
+jobs:
+ duplicate_keys:
+ name: Duplicate Keys
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Run duplicates detection script
+ shell: bash
+ run: tests/check_duplicate_properties.sh
+
+ metadata_blocks_properties:
+ name: Metadata Blocks Properties
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Setup GraalVM + Native Image
+ uses: graalvm/setup-graalvm@v1
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ java-version: '21'
+ distribution: 'graalvm-community'
+ - name: Setup JBang
+ uses: jbangdev/setup-jbang@main
+ - name: Run metadata block properties verification script
+ shell: bash
+ run: tests/verify_mdb_properties.sh
diff --git a/.github/workflows/container_app_push.yml b/.github/workflows/container_app_push.yml
index b3e247e376c..3b7ce066d73 100644
--- a/.github/workflows/container_app_push.yml
+++ b/.github/workflows/container_app_push.yml
@@ -5,6 +5,12 @@ on:
# We are deliberately *not* running on push events here to avoid double runs.
# Instead, push events will trigger from the base image and maven unit tests via workflow_call.
workflow_call:
+ inputs:
+ base-image-ref:
+ type: string
+        description: "Reference of the base image to build on in fully qualified form [<registry>/]<namespace>/<repo>:<tag>"
+ required: false
+ default: "gdcc/base:unstable"
pull_request:
branches:
- develop
@@ -16,7 +22,6 @@ on:
env:
IMAGE_TAG: unstable
- BASE_IMAGE_TAG: unstable
REGISTRY: "" # Empty means default to Docker Hub
PLATFORMS: "linux/amd64,linux/arm64"
MASTER_BRANCH_TAG: alpha
@@ -33,20 +38,24 @@ jobs:
if: ${{ github.repository_owner == 'IQSS' }}
steps:
- - name: Checkout repository
- uses: actions/checkout@v3
-
- - name: Set up JDK
- uses: actions/setup-java@v3
+ - name: Checkout and Setup Maven
+ uses: IQSS/dataverse/.github/actions/setup-maven@develop
with:
- java-version: "17"
- distribution: temurin
- cache: maven
+ pom-paths: |
+ pom.xml
+ modules/container-configbaker/pom.xml
+ modules/dataverse-parent/pom.xml
+
+      # TODO: Add a filter step here that avoids building the image if this is a PR touching files other than those declared above.
+      #       Use https://github.com/dorny/paths-filter to solve this. This ensures we do not run this twice if this workflow
+      #       is already triggered by the other workflows (base image or Java changes).
+ # To become a part of #10618.
- name: Build app and configbaker container image with local architecture and submodules (profile will skip tests)
run: >
mvn -B -f modules/dataverse-parent
-P ct -pl edu.harvard.iq:dataverse -am
+ $( [[ -n "${{ inputs.base-image-ref }}" ]] && echo "-Dbase.image=${{ inputs.base-image-ref }}" )
install
# TODO: add smoke / integration testing here (add "-Pct -DskipIntegrationTests=false")
@@ -106,11 +115,13 @@ jobs:
if: needs.check-secrets.outputs.available == 'true' &&
( github.event_name != 'push' || ( github.event_name == 'push' && contains(fromJSON('["develop", "master"]'), github.ref_name)))
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-java@v3
+ - name: Checkout and Setup Maven
+ uses: IQSS/dataverse/.github/actions/setup-maven@develop
with:
- java-version: "17"
- distribution: temurin
+ pom-paths: |
+ pom.xml
+ modules/container-configbaker/pom.xml
+ modules/dataverse-parent/pom.xml
# Depending on context, we push to different targets. Login accordingly.
- if: github.event_name != 'pull_request'
@@ -146,11 +157,13 @@ jobs:
run: >
mvn -B -f modules/dataverse-parent
-P ct -pl edu.harvard.iq:dataverse -am
+ $( [[ -n "${{ inputs.base-image-ref }}" ]] && echo "-Dbase.image=${{ inputs.base-image-ref }}" )
install
- name: Deploy multi-arch application and configbaker container image
run: >
mvn
- -Dapp.image.tag=${{ env.IMAGE_TAG }} -Dbase.image.tag=${{ env.BASE_IMAGE_TAG }}
+ -Dapp.image.tag=${{ env.IMAGE_TAG }}
+ $( [[ -n "${{ inputs.base-image-ref }}" ]] && echo "-Dbase.image=${{ inputs.base-image-ref }}" )
${{ env.REGISTRY }} -Ddocker.platforms=${{ env.PLATFORMS }}
-P ct deploy
diff --git a/.github/workflows/container_base_push.yml b/.github/workflows/container_base_push.yml
index b938851f816..c2340576c78 100644
--- a/.github/workflows/container_base_push.yml
+++ b/.github/workflows/container_base_push.yml
@@ -1,99 +1,130 @@
---
-name: Base Container Image
+name: Container Images Releasing
on:
push:
+ tags:
+ - 'v[6-9].**'
branches:
- 'develop'
- - 'master'
+ # "Path filters are not evaluated for pushes of tags" https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#onpushpull_requestpull_request_targetpathspaths-ignore
paths:
- 'modules/container-base/**'
+ - '!modules/container-base/src/backports/**'
+ - '!modules/container-base/README.md'
- 'modules/dataverse-parent/pom.xml'
- '.github/workflows/container_base_push.yml'
- pull_request:
- branches:
- - 'develop'
- - 'master'
- paths:
- - 'modules/container-base/**'
- - 'modules/dataverse-parent/pom.xml'
- - '.github/workflows/container_base_push.yml'
- schedule:
- - cron: '23 3 * * 0' # Run for 'develop' every Sunday at 03:23 UTC
+
+ # These TODOs are left for #10618
+  # TODO: we are missing a workflow_call option here, so we can trigger this flow from PR comments and Maven tests (keep the secrets availability in mind!)
+  # TODO: we are missing a pull_request option here (filter for stuff that would trigger the Maven runs!) so we can trigger preview builds for them when coming from the main repo (keep the secrets availability in mind!)
env:
- IMAGE_TAG: unstable
PLATFORMS: linux/amd64,linux/arm64
+ DEVELOPMENT_BRANCH: develop
jobs:
build:
- name: Build image
+ name: Base Image
runs-on: ubuntu-latest
permissions:
contents: read
packages: read
- strategy:
- matrix:
- jdk: [ '17' ]
# Only run in upstream repo - avoid unnecessary runs in forks
if: ${{ github.repository_owner == 'IQSS' }}
+ outputs:
+ base-image-ref: ${{ steps.finalize.outputs.base-image-ref }}
steps:
- - name: Checkout repository
- uses: actions/checkout@v3
-
- - name: Set up JDK ${{ matrix.jdk }}
- uses: actions/setup-java@v3
+ - name: Checkout and Setup Maven
+ uses: IQSS/dataverse/.github/actions/setup-maven@develop
with:
- java-version: ${{ matrix.jdk }}
- distribution: 'adopt'
- - name: Cache Maven packages
- uses: actions/cache@v3
- with:
- path: ~/.m2
- key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
- restore-keys: ${{ runner.os }}-m2
-
- - name: Build base container image with local architecture
- run: mvn -f modules/container-base -Pct package
+ pom-paths: modules/container-base/pom.xml
- # Run anything below only if this is not a pull request.
- # Accessing, pushing tags etc. to DockerHub will only succeed in upstream because secrets.
-
- - if: ${{ github.event_name == 'push' && github.ref_name == 'develop' }}
- name: Push description to DockerHub
- uses: peter-evans/dockerhub-description@v3
+      # Note: Accessing and pushing tags etc. to DockerHub will only succeed in upstream and
+      # on events in the context of upstream because of secrets. PRs run in the context of forks by default!
+ - name: Log in to the Container registry
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- repository: gdcc/base
- short-description: "Dataverse Base Container image providing Payara application server and optimized configuration"
- readme-filepath: ./modules/container-base/README.md
- - if: ${{ github.event_name != 'pull_request' }}
- name: Log in to the Container registry
- uses: docker/login-action@v2
+      # In case this is a push to develop, we care about build time.
+ # Configure a remote ARM64 build host in addition to the local AMD64 in two steps.
+ - name: Setup SSH agent
+ if: ${{ github.event_name != 'schedule' }}
+ uses: webfactory/ssh-agent@v0.9.0
with:
- registry: ${{ env.REGISTRY }}
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
- - if: ${{ github.event_name != 'pull_request' }}
- name: Set up QEMU for multi-arch builds
- uses: docker/setup-qemu-action@v2
- - name: Re-set image tag based on branch
- if: ${{ github.ref_name == 'master' }}
- run: echo "IMAGE_TAG=alpha" >> $GITHUB_ENV
- - if: ${{ github.event_name != 'pull_request' }}
- name: Deploy multi-arch base container image to Docker Hub
- run: mvn -f modules/container-base -Pct deploy -Dbase.image.tag=${{ env.IMAGE_TAG }} -Ddocker.platforms=${{ env.PLATFORMS }}
+ ssh-private-key: ${{ secrets.BUILDER_ARM64_SSH_PRIVATE_KEY }}
+ - name: Provide the known hosts key and the builder config
+ if: ${{ github.event_name != 'schedule' }}
+ run: |
+ echo "${{ secrets.BUILDER_ARM64_SSH_HOST_KEY }}" > ~/.ssh/known_hosts
+ mkdir -p modules/container-base/target/buildx-state/buildx/instances
+ cat > modules/container-base/target/buildx-state/buildx/instances/maven << EOF
+ { "Name": "maven",
+ "Driver": "docker-container",
+ "Dynamic": false,
+ "Nodes": [{"Name": "maven0",
+ "Endpoint": "unix:///var/run/docker.sock",
+ "Platforms": [{"os": "linux", "architecture": "amd64"}],
+ "DriverOpts": null,
+ "Flags": ["--allow-insecure-entitlement=network.host"],
+ "Files": null},
+ {"Name": "maven1",
+ "Endpoint": "ssh://${{ secrets.BUILDER_ARM64_SSH_CONNECTION }}",
+ "Platforms": [{"os": "linux", "architecture": "arm64"}],
+ "DriverOpts": null,
+ "Flags": ["--allow-insecure-entitlement=network.host"],
+ "Files": null}]}
+ EOF
+
+ # Determine the base image name we are going to use from here on
+ - name: Determine base image name
+ run: |
+ if [[ "${{ github.ref_name }}" = "${{ env.DEVELOPMENT_BRANCH }}" ]]; then
+ echo "BASE_IMAGE=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image -q -DforceStdout )" | tee -a "${GITHUB_ENV}"
+ echo "BASE_IMAGE_UPCOMING=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image -Dbase.image.tag.suffix="" -q -DforceStdout )" | tee -a "${GITHUB_ENV}"
+ else
+ echo "BASE_IMAGE=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image -Dbase.image.tag.suffix="" -q -DforceStdout )" | tee -a "${GITHUB_ENV}"
+ fi
+ - name: Calculate revision number for immutable tag (on release branches only)
+ if: ${{ github.ref_name != env.DEVELOPMENT_BRANCH }}
+ id: revision-tag
+ uses: ./.github/actions/get-image-revision
+ with:
+ image-ref: ${{ env.BASE_IMAGE }}
+ tag-options-prefix: "-Dbase.image.tag.suffix='' -Ddocker.tags.revision="
+ - name: Configure update of "latest" tag for development branch
+ id: develop-tag
+ if: ${{ github.ref_name == env.DEVELOPMENT_BRANCH }}
+ run: |
+ echo "tag-options=-Ddocker.tags.develop=unstable -Ddocker.tags.upcoming=${BASE_IMAGE_UPCOMING#*:}" | tee -a "${GITHUB_OUTPUT}"
+
+ - name: Deploy multi-arch base container image to Docker Hub
+ id: build
+ run: |
+ mvn -f modules/container-base -Pct deploy -Ddocker.noCache -Ddocker.platforms=${{ env.PLATFORMS }} \
+ -Ddocker.imagePropertyConfiguration=override ${{ steps.develop-tag.outputs.tag-options }} ${{ steps.revision-tag.outputs.tag-options }}
+
+ - name: Determine appropriate base image ref for app image
+ id: finalize
+ run: |
+ if [[ "${{ github.ref_name }}" = "${{ env.DEVELOPMENT_BRANCH }}" ]]; then
+ echo "base-image-ref=${BASE_IMAGE_UPCOMING}" | tee -a "$GITHUB_OUTPUT"
+ else
+ echo "base-image-ref=gdcc/base:${{ steps.revision-tag.outputs.revision-tag }}" | tee -a "$GITHUB_OUTPUT"
+ fi
+
push-app-img:
name: "Rebase & Publish App Image"
permissions:
contents: read
packages: write
pull-requests: write
- needs: build
- # We do not release a new base image for pull requests, so do not trigger.
- if: ${{ github.event_name != 'pull_request' }}
- uses: ./.github/workflows/container_app_push.yml
secrets: inherit
+ needs:
+ - build
+ uses: ./.github/workflows/container_app_push.yml
+ with:
+ base-image-ref: ${{ needs.build.outputs.base-image-ref }}
diff --git a/.github/workflows/container_maintenance.yml b/.github/workflows/container_maintenance.yml
new file mode 100644
index 00000000000..986fe25cdf5
--- /dev/null
+++ b/.github/workflows/container_maintenance.yml
@@ -0,0 +1,119 @@
+---
+name: Container Images Scheduled Maintenance
+
+on:
+ # TODO: think about adding a (filtered) push event trigger here in case we change the patches
+ # ---
+ # Allow manual workflow triggers in case we need to repair images on Docker Hub (build and replace)
+ workflow_dispatch:
+ inputs:
+ force_build:
+ type: boolean
+ required: false
+ default: false
+ description: "Build and deploy even if no newer Java images or package updates are found."
+ schedule:
+ - cron: '23 3 * * 0' # Run for 'develop' every Sunday at 03:23 UTC
+
+env:
+ PLATFORMS: linux/amd64,linux/arm64
+ NUM_PAST_RELEASES: 3
+
+jobs:
+ build:
+ name: Base Image Matrix Build
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ packages: read
+ # Only run in upstream repo - avoid unnecessary runs in forks
+ if: ${{ github.repository_owner == 'IQSS' }}
+ outputs:
+ supported_tag_matrix: ${{ steps.execute.outputs.supported_tag_matrix }}
+ rebuilt_base_images: ${{ steps.execute.outputs.rebuilt_base_images }}
+
+ steps:
+ - name: Checkout and Setup Maven
+ uses: IQSS/dataverse/.github/actions/setup-maven@develop
+ with:
+ pom-paths: modules/container-base/pom.xml
+
+      # Note: Accessing and pushing tags etc. to DockerHub will only succeed in upstream and
+      # on events in the context of upstream because of secrets. PRs run in the context of forks by default!
+ - name: Log in to the Container registry
+ uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Set up QEMU for multi-arch builds
+ uses: docker/setup-qemu-action@v3
+ with:
+ platforms: ${{ env.PLATFORMS }}
+
+ # Discover the releases we want to maintain
+ - name: Discover maintained releases
+ id: discover
+ run: |
+ echo "FORCE_BUILD=$( [[ "${{ inputs.force_build }}" = "true" ]] && echo 1 || echo 0 )" | tee -a "$GITHUB_ENV"
+ DEVELOPMENT_BRANCH=$( curl -f -sS https://api.github.com/repos/${{ github.repository }} | jq -r '.default_branch' )
+ echo "DEVELOPMENT_BRANCH=$DEVELOPMENT_BRANCH" | tee -a "$GITHUB_ENV"
+ echo "branches=$( curl -f -sS https://api.github.com/repos/IQSS/dataverse/releases | jq -r " .[0:${{ env.NUM_PAST_RELEASES }}] | .[].tag_name, \"${DEVELOPMENT_BRANCH}\" " | tr "\n" " " )" | tee -a "${GITHUB_OUTPUT}"
+
+ # Execute matrix build for the discovered branches
+ - name: Execute build matrix script
+ id: execute
+ run: |
+ .github/workflows/scripts/maintenance-job.sh ${{ steps.discover.outputs.branches }}
+
+ # TODO: Use the needs.build.outputs.rebuilt_base_images with fromJSON() to create a matrix job.
+ # Must be a single rank matrix (vector), the branch and base image tag information ships as "branch=tag" string
+ # Will be part of working on #10618, app image versioned tags.
+ #push-app-img:
+ # name: "Rebase & Publish App Image"
+ # permissions:
+ # contents: read
+ # packages: write
+ # pull-requests: write
+ # secrets: inherit
+ # needs:
+ # - build
+ # strategy:
+ # fail-fast: false
+ # matrix:
+ # branch: ${{ fromJson(needs.discover.outputs.branches) }}
+ # uses: ./.github/workflows/container_app_push.yml
+ # with:
+ # branch: ${{ matrix.branch }}
+
+ hub-description:
+ name: Push description to DockerHub
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ packages: read
+ needs: build
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ - name: Render README
+ id: render
+ run: |
+ TAGS_JSON='${{ needs.build.outputs.supported_tag_matrix }}'
+ echo "$TAGS_JSON" | jq -r 'keys | sort | reverse | .[]' |
+ while IFS= read -r branch; do
+ echo \
+ "- \`$( echo "$TAGS_JSON" | jq --arg v "$branch" -r '.[$v] | join("`, `")' )\`" \
+ "([Dockerfile](https://github.com/IQSS/dataverse/blob/${branch}/modules/container-base/src/main/docker/Dockerfile)," \
+ "[Patches](https://github.com/IQSS/dataverse/blob/develop/modules/container-base/src/backports/${branch}))" \
+ | tee -a "${GITHUB_WORKSPACE}/tags.md"
+ done
+ sed -i -e "/<\!-- TAG BLOCK HERE -->/r ${GITHUB_WORKSPACE}/tags.md" "./modules/container-base/README.md"
+
+ - name: Push description to DockerHub
+ uses: peter-evans/dockerhub-description@v4
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ repository: gdcc/base
+ short-description: "Dataverse Base Container image providing Payara application server and optimized configuration"
+ readme-filepath: ./modules/container-base/README.md
\ No newline at end of file
diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml
index 4ad4798bc64..a94b17a67ba 100644
--- a/.github/workflows/maven_unit_test.yml
+++ b/.github/workflows/maven_unit_test.yml
@@ -30,6 +30,7 @@ jobs:
continue-on-error: ${{ matrix.experimental }}
runs-on: ubuntu-latest
steps:
+ # TODO: As part of #10618 change to setup-maven custom action
# Basic setup chores
- uses: actions/checkout@v3
- name: Set up JDK ${{ matrix.jdk }}
@@ -95,6 +96,7 @@ jobs:
# status: "Experimental"
continue-on-error: ${{ matrix.experimental }}
steps:
+ # TODO: As part of #10618 change to setup-maven custom action
# Basic setup chores
- uses: actions/checkout@v3
- name: Set up JDK ${{ matrix.jdk }}
@@ -128,6 +130,7 @@ jobs:
needs: integration-test
name: Coverage Report Submission
steps:
+ # TODO: As part of #10618 change to setup-maven custom action
# Basic setup chores
- uses: actions/checkout@v3
- uses: actions/setup-java@v3
@@ -156,6 +159,11 @@ jobs:
# NOTE: this may be extended with adding a report to the build output, leave a comment, send to Sonarcloud, ...
+  # TODO: Add a filter step here that avoids calling the app image release workflow if there are changes to the base image.
+  #       Use https://github.com/dorny/paths-filter to solve this. Will require an additional job or adding to the integration-test job.
+ # This way we ensure that we're not running the app image flow with a non-matching base image.
+ # To become a part of #10618.
+
push-app-img:
name: Publish App Image
permissions:
diff --git a/.github/workflows/scripts/maintenance-job.sh b/.github/workflows/scripts/maintenance-job.sh
new file mode 100755
index 00000000000..370988b9812
--- /dev/null
+++ b/.github/workflows/scripts/maintenance-job.sh
@@ -0,0 +1,180 @@
+#!/bin/bash
+
+# A matrix-like job to maintain a number of releases as well as the latest snapshot of Dataverse.
+
+# PREREQUISITES:
+# - You have Java, Maven, QEMU and Docker all setup and ready to go
+# - You obviously checked out the develop branch, otherwise you'd not be executing this script
+# - You added all the branch names you want to run maintenance for as arguments
+# Optional, but recommended:
+# - You added a DEVELOPMENT_BRANCH env var to your runner/job env with the name of the development branch
+# - You added a FORCE_BUILD=0|1 env var to indicate if the base image build should be forced
+# - You added a PLATFORMS env var with all the target platforms you want to build for
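+#
+# Example invocation (mirroring how the maintenance workflow calls this script; the branch/tag names are illustrative):
+#   PLATFORMS="linux/amd64,linux/arm64" FORCE_BUILD=0 .github/workflows/scripts/maintenance-job.sh v6.4 v6.3 v6.2 develop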
+
+# NOTE:
+# This script consolidates what would be many GitHub Actions steps into a single script.
+# The reason to put all of this in here is the complexity of the GitHub Action and the limitations of
+# matrix support in GitHub Actions, where outputs cannot be aggregated or otherwise used further.
+
+set -euo pipefail
+
+# Get all the inputs
+# If not within a runner, just print to stdout (duplicating the output in case of tee usage, but that's ok for testing)
+GITHUB_OUTPUT=${GITHUB_OUTPUT:-"/proc/self/fd/1"}
+GITHUB_ENV=${GITHUB_ENV:-"/proc/self/fd/1"}
+GITHUB_WORKSPACE=${GITHUB_WORKSPACE:-"$(pwd)"}
+GITHUB_SERVER_URL=${GITHUB_SERVER_URL:-"https://github.com"}
+GITHUB_REPOSITORY=${GITHUB_REPOSITORY:-"IQSS/dataverse"}
+
+MAINTENANCE_WORKSPACE="${GITHUB_WORKSPACE}/maintenance-job"
+
+DEVELOPMENT_BRANCH="${DEVELOPMENT_BRANCH:-"develop"}"
+FORCE_BUILD="${FORCE_BUILD:-"0"}"
+PLATFORMS="${PLATFORMS:-"linux/amd64,linux/arm64"}"
+
+# Setup and validation
+if [[ -z "$*" ]]; then
+ >&2 echo "You must give a list of branch names as arguments"
+ exit 1;
+fi
+
+source "$( dirname "$0" )/utils.sh"
+
+# Delete old stuff if present
+rm -rf "$MAINTENANCE_WORKSPACE"
+mkdir -p "$MAINTENANCE_WORKSPACE"
+
+# Store the image tags we maintain in this array (same order as branches array!)
+# This list will be used to build the support matrix within the Docker Hub image description
+SUPPORTED_ROLLING_TAGS=()
+# Store the tags of base images we are actually rebuilding to base new app images upon
+# Takes the form "branch-name=base-image-ref"
+REBUILT_BASE_IMAGES=()
+
+for BRANCH in "$@"; do
+ echo "::group::Running maintenance for $BRANCH"
+
+ # 0. Determine if this is a development branch and the most current release
+ IS_DEV=0
+ if [[ "$BRANCH" = "$DEVELOPMENT_BRANCH" ]]; then
+ IS_DEV=1
+ fi
+ IS_CURRENT_RELEASE=0
+ if [[ "$BRANCH" = $( curl -f -sS "https://api.github.com/repos/$GITHUB_REPOSITORY/releases" | jq -r '.[0].tag_name' ) ]]; then
+ IS_CURRENT_RELEASE=1
+ fi
+
+ # 1. Let's get the maintained sources
+ git clone -c advice.detachedHead=false --depth 1 --branch "$BRANCH" "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}" "$MAINTENANCE_WORKSPACE/$BRANCH"
+ # Switch context
+ cd "$MAINTENANCE_WORKSPACE/$BRANCH"
+
+ # 2. Now let's apply the patches (we have them checked out in $GITHUB_WORKSPACE, not necessarily in this local checkout)
+ echo "Checking for patches..."
+ if [[ -d ${GITHUB_WORKSPACE}/modules/container-base/src/backports/$BRANCH ]]; then
+ echo "Applying patches now."
+ find "${GITHUB_WORKSPACE}/modules/container-base/src/backports/$BRANCH" -type f -name '*.patch' -print0 | xargs -0 -n1 patch -p1 -s -i
+ fi
+
+  # 3. Determine the base image ref (<namespace>/<repo>:<tag>)
+ BASE_IMAGE_REF=""
+  # For the dev branch we want the full flexible stack tag, to detect stack upgrades requiring a new build
+ if (( IS_DEV )); then
+ BASE_IMAGE_REF=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image -q -DforceStdout )
+ else
+ BASE_IMAGE_REF=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image -Dbase.image.tag.suffix="" -q -DforceStdout )
+ fi
+ echo "Determined BASE_IMAGE_REF=$BASE_IMAGE_REF from Maven"
+
+ # 4. Check for Temurin image updates
+ JAVA_IMAGE_REF=$( mvn help:evaluate -Pct -f modules/container-base -Dexpression=java.image -q -DforceStdout )
+ echo "Determined JAVA_IMAGE_REF=$JAVA_IMAGE_REF from Maven"
+ NEWER_JAVA_IMAGE=0
+ if check_newer_parent "$JAVA_IMAGE_REF" "$BASE_IMAGE_REF"; then
+ NEWER_JAVA_IMAGE=1
+ fi
+
+ # 5. Check for package updates in base image
+ PKGS="$( grep "ARG PKGS" modules/container-base/src/main/docker/Dockerfile | cut -f2 -d= | tr -d '"' )"
+  echo "Determined installed packages=\"$PKGS\" from the Dockerfile"
+ NEWER_PKGS=0
+ # Don't bother with package checks if the java image is newer already
+ if ! (( NEWER_JAVA_IMAGE )); then
+ if check_newer_pkgs "$BASE_IMAGE_REF" "$PKGS"; then
+ NEWER_PKGS=1
+ fi
+ fi
+
+ # 6. Get current immutable revision tag if not on the dev branch
+ REV=$( current_revision "$BASE_IMAGE_REF" )
+ CURRENT_REV_TAG="${BASE_IMAGE_REF#*:}-r$REV"
+ NEXT_REV_TAG="${BASE_IMAGE_REF#*:}-r$(( REV + 1 ))"
+
+ # 7. Let's put together what tags we want added to this build run
+ TAG_OPTIONS=""
+ if ! (( IS_DEV )); then
+ TAG_OPTIONS="-Dbase.image=$BASE_IMAGE_REF -Ddocker.tags.revision=$NEXT_REV_TAG"
+ # In case of the current release, add the "latest" tag as well.
+ if (( IS_CURRENT_RELEASE )); then
+ TAG_OPTIONS="$TAG_OPTIONS -Ddocker.tags.latest=latest"
+ fi
+ else
+ UPCOMING_TAG=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image.tag -Dbase.image.tag.suffix="" -q -DforceStdout )
+ TAG_OPTIONS="-Ddocker.tags.develop=unstable -Ddocker.tags.upcoming=$UPCOMING_TAG"
+
+ # For the dev branch we only have rolling tags and can add them now already
+ SUPPORTED_ROLLING_TAGS+=("[\"unstable\", \"$UPCOMING_TAG\", \"${BASE_IMAGE_REF#*:}\"]")
+ fi
+ echo "Determined these additional Maven tag options: $TAG_OPTIONS"
+
+ # 8. Let's build the base image if necessary
+ NEWER_IMAGE=0
+ if (( NEWER_JAVA_IMAGE + NEWER_PKGS + FORCE_BUILD > 0 )); then
+ mvn -Pct -f modules/container-base deploy -Ddocker.noCache -Ddocker.platforms="${PLATFORMS}" \
+ -Ddocker.imagePropertyConfiguration=override $TAG_OPTIONS
+ NEWER_IMAGE=1
+ # Save the information about the immutable or rolling tag we just built
+ if ! (( IS_DEV )); then
+ REBUILT_BASE_IMAGES+=("$BRANCH=${BASE_IMAGE_REF%:*}:$NEXT_REV_TAG")
+ else
+ REBUILT_BASE_IMAGES+=("$BRANCH=$BASE_IMAGE_REF")
+ fi
+ else
+ echo "No rebuild necessary, we're done here."
+ fi
+
+ # 9. Add list of rolling and immutable tags for release builds
+ if ! (( IS_DEV )); then
+ RELEASE_TAGS_LIST="["
+ if (( IS_CURRENT_RELEASE )); then
+ RELEASE_TAGS_LIST+="\"latest\", "
+ fi
+ RELEASE_TAGS_LIST+="\"${BASE_IMAGE_REF#*:}\", "
+ if (( NEWER_IMAGE )); then
+ RELEASE_TAGS_LIST+="\"$NEXT_REV_TAG\"]"
+ else
+ RELEASE_TAGS_LIST+="\"$CURRENT_REV_TAG\"]"
+ fi
+ SUPPORTED_ROLLING_TAGS+=("${RELEASE_TAGS_LIST}")
+ fi
+
+ echo "::endgroup::"
+done
+
+# Build the output describing which base images have actually been rebuilt, as JSON
+REBUILT_IMAGES="["
+for IMAGE in "${REBUILT_BASE_IMAGES[@]}"; do
+ REBUILT_IMAGES+=" \"$IMAGE\" "
+done
+REBUILT_IMAGES+="]"
+echo "rebuilt_base_images=${REBUILT_IMAGES// /, }" | tee -a "${GITHUB_OUTPUT}"
+
+# Build the supported rolling tags matrix as JSON
+SUPPORTED_TAGS="{"
+for (( i=0; i < ${#SUPPORTED_ROLLING_TAGS[@]} ; i++ )); do
+ j=$((i+1))
+ SUPPORTED_TAGS+="\"${!j}\": ${SUPPORTED_ROLLING_TAGS[$i]}"
+ (( i < ${#SUPPORTED_ROLLING_TAGS[@]}-1 )) && SUPPORTED_TAGS+=", "
+done
+SUPPORTED_TAGS+="}"
+echo "supported_tag_matrix=$SUPPORTED_TAGS" | tee -a "$GITHUB_OUTPUT"
diff --git a/.github/workflows/scripts/utils.sh b/.github/workflows/scripts/utils.sh
new file mode 100644
index 00000000000..987b58d8bb5
--- /dev/null
+++ b/.github/workflows/scripts/utils.sh
@@ -0,0 +1,108 @@
+#!/bin/bash
+
+set -euo pipefail
+
+function check_newer_parent() {
+ PARENT_IMAGE="$1"
+ # Get namespace, default to "library" if not found
+ PARENT_IMAGE_NS="${PARENT_IMAGE%/*}"
+ if [[ "$PARENT_IMAGE_NS" = "${PARENT_IMAGE}" ]]; then
+ PARENT_IMAGE_NS="library"
+ fi
+ PARENT_IMAGE_REPO="${PARENT_IMAGE%:*}"
+ PARENT_IMAGE_TAG="${PARENT_IMAGE#*:}"
+
+ PARENT_IMAGE_LAST_UPDATE="$( curl -sS "https://hub.docker.com/v2/namespaces/${PARENT_IMAGE_NS}/repositories/${PARENT_IMAGE_REPO}/tags/${PARENT_IMAGE_TAG}" | jq -r .last_updated )"
+ if [[ "$PARENT_IMAGE_LAST_UPDATE" = "null" ]]; then
+ echo "::error title='Invalid PARENT Image'::Could not find ${PARENT_IMAGE} in the registry"
+ exit 1
+ fi
+
+ DERIVED_IMAGE="$2"
+ # Get namespace, default to "library" if not found
+ DERIVED_IMAGE_NS="${DERIVED_IMAGE%/*}"
+ if [[ "${DERIVED_IMAGE_NS}" = "${DERIVED_IMAGE}" ]]; then
+ DERIVED_IMAGE_NS="library"
+ fi
+ DERIVED_IMAGE_REPO="$( echo "${DERIVED_IMAGE%:*}" | cut -f2 -d/ )"
+ DERIVED_IMAGE_TAG="${DERIVED_IMAGE#*:}"
+
+ DERIVED_IMAGE_LAST_UPDATE="$( curl -sS "https://hub.docker.com/v2/namespaces/${DERIVED_IMAGE_NS}/repositories/${DERIVED_IMAGE_REPO}/tags/${DERIVED_IMAGE_TAG}" | jq -r .last_updated )"
+ if [[ "$DERIVED_IMAGE_LAST_UPDATE" = "null" || "$DERIVED_IMAGE_LAST_UPDATE" < "$PARENT_IMAGE_LAST_UPDATE" ]]; then
+ echo "Parent image $PARENT_IMAGE has a newer release ($PARENT_IMAGE_LAST_UPDATE), which is more recent than $DERIVED_IMAGE ($DERIVED_IMAGE_LAST_UPDATE)"
+ return 0
+ else
+ echo "Parent image $PARENT_IMAGE ($PARENT_IMAGE_LAST_UPDATE) is older than $DERIVED_IMAGE ($DERIVED_IMAGE_LAST_UPDATE)"
+ return 1
+ fi
+}
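+# Example (illustrative image refs): check_newer_parent "eclipse-temurin:17-jre" "gdcc/base:unstable"
+# succeeds (returns 0) when the parent tag on Docker Hub was pushed more recently than the derived image.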
+
+function check_newer_pkgs() {
+ IMAGE="$1"
+ PKGS="$2"
+
+  # Guard the pipeline with "if" so that "set -e" does not abort the script when grep finds no match
+  if docker run --rm -u 0 "${IMAGE}" sh -c "apt update >/dev/null 2>&1 && apt install -s ${PKGS}" | tee /proc/self/fd/2 | grep -q "0 upgraded"; then
+ echo "Base image $IMAGE has no updates for our custom installed packages"
+ return 1
+ else
+ echo "Base image $IMAGE needs updates for our custom installed packages"
+ return 0
+ fi
+
+ # TODO: In a future version of this script, we might want to include checking for other security updates,
+ # not just updates to the packages we installed.
+ # grep security /etc/apt/sources.list > /tmp/security.list
+ # apt-get update -oDir::Etc::Sourcelist=/tmp/security.list
+ # apt-get dist-upgrade -y -oDir::Etc::Sourcelist=/tmp/security.list -oDir::Etc::SourceParts=/bin/false -s
+
+}
+
+function current_revision() {
+ IMAGE="$1"
+ IMAGE_NS_REPO="${IMAGE%:*}"
+ IMAGE_TAG="${IMAGE#*:}"
+
+ if [[ "$IMAGE_TAG" = "$IMAGE_NS_REPO" ]]; then
+    >&2 echo "You must provide an image reference in the format [<namespace>/]<repository>:<tag>"
+ exit 1
+ fi
+
+ case "$IMAGE_NS_REPO" in
+ */*) :;; # namespace/repository syntax, leave as is
+ *) IMAGE_NS_REPO="library/$IMAGE_NS_REPO";; # bare repository name (docker official image); must convert to namespace/repository syntax
+ esac
+
+ # Without such a token we may run into rate limits
+ # OB 2024-09-16: for some reason using this token stopped working. Let's go without and see if we really fall into rate limits.
+ # token=$( curl -s "https://auth.docker.io/token?service=registry.docker.io&scope=repository:$IMAGE_NS_REPO:pull" )
+
+ ALL_TAGS="$(
+ i=0
+ while [ $? == 0 ]; do
+ i=$((i+1))
+ # OB 2024-09-16: for some reason using this token stopped working. Let's go without and see if we really fall into rate limits.
+ # RESULT=$( curl -s -H "Authorization: Bearer $token" "https://registry.hub.docker.com/v2/repositories/$IMAGE_NS_REPO/tags/?page=$i&page_size=100" )
+ RESULT=$( curl -s "https://registry.hub.docker.com/v2/repositories/$IMAGE_NS_REPO/tags/?page=$i&page_size=100" )
+ if [[ $( echo "$RESULT" | jq '.message' ) != "null" ]]; then
+ # If we run into an error on the first attempt, that means we have a problem.
+ if [[ "$i" == "1" ]]; then
+ >&2 echo "Error when retrieving tag data: $( echo "$RESULT" | jq '.message' )"
+ exit 2
+ # Otherwise it will just mean we reached the last page already
+ else
+ break
+ fi
+ else
+ echo "$RESULT" | jq -r '."results"[]["name"]'
+ # DEBUG:
+ #echo "$RESULT" | >&2 jq -r '."results"[]["name"]'
+ fi
+ done
+ )"
+
+  # Note: if a former tag could not be found, it may simply not exist yet. Start a new series with rev 0
+ echo "$ALL_TAGS" | grep "${IMAGE_TAG}-r" | sed -e "s#${IMAGE_TAG}-r##" | sort -h | tail -n1 || echo "-1"
+}
diff --git a/.github/workflows/shellspec.yml b/.github/workflows/shellspec.yml
index 227a74fa00f..3320d9d08a4 100644
--- a/.github/workflows/shellspec.yml
+++ b/.github/workflows/shellspec.yml
@@ -24,28 +24,11 @@ jobs:
run: |
cd tests/shell
shellspec
- shellspec-centos7:
- name: "CentOS 7"
+ shellspec-rocky9:
+ name: "RockyLinux 9"
runs-on: ubuntu-latest
container:
- image: centos:7
- steps:
- - uses: actions/checkout@v2
- - name: Install shellspec
- run: |
- curl -fsSL https://github.com/shellspec/shellspec/releases/download/${{ env.SHELLSPEC_VERSION }}/shellspec-dist.tar.gz | tar -xz -C /usr/share
- ln -s /usr/share/shellspec/shellspec /usr/bin/shellspec
- - name: Install dependencies
- run: yum install -y ed
- - name: Run shellspec
- run: |
- cd tests/shell
- shellspec
- shellspec-rocky8:
- name: "RockyLinux 8"
- runs-on: ubuntu-latest
- container:
- image: rockylinux/rockylinux:8
+ image: rockylinux/rockylinux:9
steps:
- uses: actions/checkout@v2
- name: Install shellspec
diff --git a/conf/solr/schema.xml b/conf/solr/schema.xml
index 5dde750573d..2aed50e9998 100644
--- a/conf/solr/schema.xml
+++ b/conf/solr/schema.xml
@@ -142,6 +142,7 @@
+
@@ -205,6 +206,7 @@
+
@@ -350,6 +352,7 @@
+
@@ -426,6 +429,7 @@
+
@@ -590,6 +594,7 @@
+
diff --git a/doc/release-notes/6.4-release-notes.md b/doc/release-notes/6.4-release-notes.md
new file mode 100644
index 00000000000..979fd16bf9e
--- /dev/null
+++ b/doc/release-notes/6.4-release-notes.md
@@ -0,0 +1,526 @@
+# Dataverse 6.4
+
+Please note: To read these instructions in full, please go to https://github.com/IQSS/dataverse/releases/tag/v6.4 rather than the list of releases, which will cut them off.
+
+This release brings new features, enhancements, and bug fixes to Dataverse. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project.
+
+## Release Highlights
+
+New features in Dataverse 6.4:
+
+- Enhanced DataCite Metadata, including "Relation Type"
+- All ISO 639-3 languages are now supported
+- There is now a button for "Unlink Dataset"
+- Users will have DOIs/PIDs reserved for their files as part of file upload instead of at publication time
+- Datasets can now have types such as "software" or "workflow"
+- Croissant support
+- RO-Crate support
+- and more! Please see below.
+
+New client library:
+
+- Rust
+
+This release also fixes two important bugs described below and in [a post](https://groups.google.com/g/dataverse-community/c/evn5C-pyrS8/m/JrH9vp47DwAJ) on the mailing list:
+
+- "Update Current Version" can cause metadata loss
+- Publishing breaks designated dataset thumbnail, messes up collection page
+
+Additional details on the above as well as many more features and bug fixes included in the release are described below. Read on!
+
+## Features Added
+
+### Enhanced DataCite Metadata, Including "Relation Type"
+
+Within the "Related Publication" field, a new subfield has been added called "Relation Type" that allows for the most common [values](https://datacite-metadata-schema.readthedocs.io/en/4.5/appendices/appendix-1/relationType/) recommended by DataCite: isCitedBy, Cites, IsSupplementTo, IsSupplementedBy, IsReferencedBy, and References. For existing datasets where no "Relation Type" has been specified, "IsSupplementTo" is assumed.
+
+Dataverse now supports the [DataCite v4.5 schema](http://schema.datacite.org/meta/kernel-4/). Additional metadata is now being sent to DataCite, including metadata about related publications and files in the dataset, and the representation of PIDs (ORCID, ROR, DOIs, etc.), license/terms, geospatial, and other metadata has been improved. The enhanced metadata will automatically be sent to DataCite when datasets are created and published. Additionally, after publication, you can inspect what was sent by looking at the DataCite XML export.
+
+The additions are in rough alignment with the OpenAIRE XML export, but there are some minor differences beyond the new "Relation Type" subfield, including an update to the DataCite 4.5 schema. For details see #10632, #10615 and the [design document](https://docs.google.com/document/d/1JzDo9UOIy9dVvaHvtIbOI8tFU6bWdfDfuQvWWpC0tkA/edit?usp=sharing) referenced there.
+
+Multiple backward incompatible changes and bug fixes have been made to API calls (three of four of which were not documented) related to updating PID target URLs and metadata at the provider service:
+- [Update Target URL for a Published Dataset at the PID provider](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-target-url-for-a-published-dataset-at-the-pid-provider)
+- [Update Target URL for all Published Datasets at the PID provider](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-target-url-for-all-published-datasets-at-the-pid-provider)
+- [Update Metadata for a Published Dataset at the PID provider](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-metadata-for-a-published-dataset-at-the-pid-provider)
+- [Update Metadata for all Published Datasets at the PID provider](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-metadata-for-all-published-datasets-at-the-pid-provider)
+
+### Full List of ISO 639-3 Languages Now Supported
+
+The controlled vocabulary values list for the metadata field "Language" in the citation block has now been extended to include roughly 7920 ISO 639-3 values.
+
+Some of the language entries in the pre-6.4 list correspond to "macro languages" in ISO-639-3 and admins/users may wish to update to use the corresponding individual language entries from ISO-639-3. As these cases are expected to be rare (they do not involve major world languages), finding them is not covered in the release notes. Anyone who desires help in this area is encouraged to reach out to the Dataverse community via any of the standard communication channels.
+
+ISO 639-3 codes were downloaded from [sil.org](https://iso639-3.sil.org/code_tables/download_tables#Complete%20Code%20Tables:~:text=iso%2D639%2D3_Code_Tables_20240415.zip) and the file used for merging with the existing citation.tsv was "iso-639-3.tab". See also #8578 and #10762.
+
+### Unlink Dataset Button
+
+A new "Unlink Dataset" button has been added to the dataset page to allow a user to unlink a dataset from a collection. To unlink a dataset the user must have permission to link the dataset. Additionally, the [existing API](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#unlink-a-dataset) for unlinking datasets has been updated to no longer require superuser access as the "Publish Dataset" permission is now enough. See also #10583 and #10689.
+
+### Pre-Publish File DOI Reservation
+
+Dataverse installations using DataCite as a persistent identifier (PID) provider (or other providers that support reserving PIDs) will be able to reserve PIDs for files when they are uploaded (rather than at publication time). Note that reserving file DOIs can slow uploads with large numbers of files so administrators may need to adjust timeouts (specifically any Apache "``ProxyPass / ajp://localhost:8009/ timeout=``" setting in the recommended Dataverse configuration).
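+
+For example, a raised timeout in the recommended Apache configuration might look like this (the value is illustrative; tune it to your installation):
+
+```
+ProxyPass / ajp://localhost:8009/ timeout=600
+```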
+
+### Initial Support for Dataset Types
+
+Out of the box, all datasets now have the type "dataset" but superusers can add additional types. At this time the type of a dataset can only be set at creation time via API. The types "dataset", "software", and "workflow" (just those three, for now) will be sent to DataCite (as `resourceTypeGeneral`) when the dataset is published.
+
+For details see [the guides](https://guides.dataverse.org/en/6.4/user/dataset-management.html#dataset-types), #10517 and #10694. Please note that this feature is highly experimental and is expected to [evolve](https://github.com/IQSS/dataverse-pm/issues/307).
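+
+A minimal sketch of creating a typed dataset via the API, assuming (per the guide linked above) that the type is passed as a top-level `"datasetType"` key in the usual dataset-creation JSON:
+
+```shell
+# dataset-with-type.json is a standard dataset JSON with "datasetType": "software" added at the top level
+curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$SERVER_URL/api/dataverses/root/datasets" \
+  -H "Content-Type: application/json" --upload-file dataset-with-type.json
+```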
+
+### Croissant Support (Metadata Export)
+
+A new metadata export format called [Croissant](https://github.com/mlcommons/croissant) is now available as an external metadata exporter. It is oriented toward making datasets consumable by machine learning.
+
+For more about the Croissant exporter, including installation instructions, see its repository. See also #10341, #10533, and [discussion](https://groups.google.com/g/dataverse-community/c/JI8HPgGarr8/m/DqEIkiwlAgAJ) on the mailing list.
+
+Please note: the Croissant exporter works best with Dataverse 6.2 and higher (where it updates the content of `<head>` as [described](https://guides.dataverse.org/en/6.4/admin/discoverability.html#schema-org-head) in the guides) but can be used with 6.0 and higher (to get the export functionality).
+
+### RO-Crate Support (Metadata Export)
+
+Dataverse now supports [RO-Crate](https://www.researchobject.org/ro-crate/) as a metadata export format. This functionality is not available out of the box, but you can enable one or more RO-Crate exporters from the [list of external exporters](https://guides.dataverse.org/en/6.4/installation/advanced.html#inventory-of-external-exporters). See also #10744 and #10796.
+
+### Rust API Client Library
+
+A Dataverse API client library for the Rust programming language is now available at https://github.com/gdcc/rust-dataverse and has been added to the [list of client libraries](https://guides.dataverse.org/en/6.4/api/client-libraries.html) in the API Guide. See also #10758.
+
+### Collection Thumbnail Logo for Featured Collections
+
+Collections can now have a thumbnail logo that is displayed when the collection is configured as a featured collection. If present, this thumbnail logo is shown. Otherwise, the collection logo is shown. Configuration is done under the "Theme" for a collection as explained in [the guides](https://guides.dataverse.org/en/6.4/user/dataverse-management.html#theme). See also #10291 and #10433.
+
+### Saved Searches Can Be Deleted
+
+Saved searches can now be deleted via API. See the [Saved Search](https://guides.dataverse.org/en/6.4/api/native-api.html#saved-search) section of the API Guide, #9317 and #10198.
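+
+A sketch of the new call, assuming the admin endpoint documented in the Saved Search section linked above (the ID is a placeholder):
+
+```shell
+# Delete the saved search with database ID 9
+curl -X DELETE "$SERVER_URL/api/admin/savedsearches/9"
+```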
+
+### Notification Email Improvement
+
+When notification emails are sent the part of the closing that says "contact us for support at" will now show the support email address (`dataverse.mail.support-email`), when configured, instead of the default system email address. Using the system email address here was particularly problematic when it was a "noreply" address. See also #10287 and #10504.
+
+### Ability to Disable Automatic Thumbnail Selection
+
+It is now possible to turn off the feature that automatically selects one of the image datafiles to serve as the thumbnail of the parent dataset. An admin can turn it off by enabling the [feature flag](https://guides.dataverse.org/en/6.4/installation/config.html#feature-flags) `dataverse.feature.disable-dataset-thumbnail-autoselect`. When the feature is disabled, a user can still manually pick a thumbnail image, or upload a dedicated thumbnail image. See also #10820.
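+
+One way to enable the flag, as a sketch (feature flags can be set as JVM options; here via Payara's asadmin):
+
+```shell
+./asadmin create-jvm-options '-Ddataverse.feature.disable-dataset-thumbnail-autoselect=true'
+```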
+
+### More Flexible PermaLinks
+
+The configuration setting `dataverse.pid.*.permalink.base-url`, which is used for PermaLinks, has been updated to support greater flexibility. Previously, the string `/citation?persistentId=` was automatically appended to the configured base URL. With this update, the base URL will now be used exactly as configured, without any automatic additions. See also #10775.
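+
+A sketch of the effect, assuming a PermaLink provider with the id `perma1` (set via JVM option or the matching environment variable):
+
+```shell
+# The configured value is now used verbatim as the URL prefix; nothing is appended automatically
+export DATAVERSE_PID_PERMA1_PERMALINK_BASE_URL="https://example.org/citation?persistentId="
+```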
+
+### Globus Async Framework
+
+A new alternative implementation of Globus polling during upload data transfers has been added in this release. This experimental framework does not rely on the instance staying up continuously for the duration of the transfer and saves the state information about Globus upload requests in the database. See `globus-use-experimental-async-framework` under [Feature Flags](https://guides.dataverse.org/en/6.4/installation/config.html#feature-flags) and [dataverse.files.globus-monitoring-server](https://guides.dataverse.org/en/6.4/installation/config.html#dataverse-files-globus-monitoring-server) in the Installation Guide. See also #10623 and #10781.
+
+### CVoc (Controlled Vocabulary): Allow ORCID and ROR to Be Used Together in Author Field
+
+Changes in Dataverse and updates to the ORCID and ROR external vocabulary scripts support deploying these for the citation block author field (and others). See also #10711, #10712, and the external vocabulary scripts repository.
+
+### Development on Windows
+
+New instructions have been added for developers on Windows trying to run a Dataverse development environment using Windows Subsystem for Linux (WSL). See [the guides](https://guides.dataverse.org/en/6.4/developers/windows.html), #10606, and #10608.
+
+### Experimental Crossref PID (DOI) Provider
+
+Crossref can now be used as a PID (DOI) provider, but this feature is experimental. Please provide feedback through the usual channels. See also the [guides](https://guides.dataverse.org/en/6.4/installation/config.html#crossref-specific-settings), #8581, and #10806.
+
+### Improved JSON Schema Validation for Datasets
+
+JSON Schema validation has been enhanced with checks for required and allowed child objects as well as type checking for field types including `primitive`, `compound` and `controlledVocabulary`. More user-friendly error messages help pinpoint the issues in the dataset JSON. See [Retrieve a Dataset JSON Schema for a Collection](https://guides.dataverse.org/en/6.4/api/native-api.html#retrieve-a-dataset-json-schema-for-a-collection) in the API Guide, #10169, and #10543.
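+
+A sketch of retrieving and using the schema, with endpoint paths per the API Guide section linked above (the collection alias and file name are placeholders):
+
+```shell
+# Retrieve the dataset JSON Schema for the "root" collection
+curl -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/dataverses/root/datasetSchema"
+# Validate a dataset JSON file against it
+curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$SERVER_URL/api/dataverses/root/validateDatasetJson" \
+  -H "Content-Type: application/json" --upload-file dataset.json
+```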
+
+### Counter Processor 1.05 Support (Make Data Count)
+
+Counter Processor 1.05 is now supported for use with Make Data Count. If you are running Counter Processor, you should reinstall/reconfigure it as described in the latest guides. Note that Counter Processor 1.05 requires Python 3, so you will need to follow the full Counter Processor install. Also note that if you configure the new version the same way, it will reprocess the days in the current month when it is first run. This is normal and will not affect the metrics in Dataverse. See also #10479.
+
+### Version Tags for Container Base Images
+
+With this release we introduce a detailed maintenance workflow for our container images. As output of the [Containerization Working Group](https://ct.gdcc.io), the community takes another step towards production-ready containers available directly from the core project.
+
+The maintenance workflow regularly updates the [Container Base Image](https://guides.dataverse.org/en/6.4/container/base-image.html), which contains the operating system, Java, Payara, and tools and libraries required by the Dataverse application. Shipping these rolling releases as well as immutable revisions is the foundation for secure and reliable [Dataverse Application Container](https://guides.dataverse.org/en/6.4/container/app-image.html) images. See also #10478 and #10827.
+
+## Bugs Fixed
+
+### Update Current Version
+
+A significant bug in the superuser-only [Update Current Version](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#make-metadata-updates-without-changing-dataset-version) publication option was fixed. If the "Update Current Version" option was used when changes were made to the dataset terms (rather than to dataset metadata), or if the PID provider service was down or returned an error, the update would fail, rendering the dataset unusable and requiring restoration from a backup. The fix in this release allows the update to succeed in both of these cases and redesigns the functionality such that any unknown issues should not make the dataset unusable (i.e. the error would be reported, the last-published version would remain as it was, and the changes would still be in the draft version).
+
+If you do not plan to upgrade to Dataverse 6.4 right away, you are encouraged to alert your superusers to this issue (see [this post](https://groups.google.com/g/dataverse-community/c/evn5C-pyrS8/m/JrH9vp47DwAJ)). Here are some workarounds for pre-6.4 versions:
+
+* Change the "dataset.updateRelease" entry in the Bundle.properties file (or local language version) to "Do Not Use" or similar (this doesn't disable the button but alerts superusers to the issue), or
+* Edit the dataset.xhtml file to remove the lines below, delete the contents of the generated and osgi-cache directories in the Dataverse Payara domain, and restart the Payara server. This will remove the "Update Current Version" option from the UI.
+
+```
+
+
+
+```
+
+Again, the workarounds above are only for pre-6.4 versions. The bug has been fixed in Dataverse 6.4. See also #10797.
+
+### Broken Thumbnails
+
+Dataverse 6.3 introduced a bug where publishing would break the dataset thumbnail, which in turn broke the rendering of the parent collection (dataverse) page.
+
+This bug has been fixed but any existing broken thumbnails must be fixed manually. See "clearThumbnailFailureFlag" in the upgrade instructions below.
+
+Additionally, it is now possible to turn off the feature that automatically selects one of the image datafiles to serve as the thumbnail of the parent dataset. An admin can turn it off by raising the feature flag `-Ddataverse.feature.disable-dataset-thumbnail-autoselect=true`. When the feature is disabled, a user can still manually pick a thumbnail image, or upload a dedicated thumbnail image.
+
+See also #10819, #10820, and [the post](https://groups.google.com/g/dataverse-community/c/evn5C-pyrS8/m/JrH9vp47DwAJ) on the mailing list.
+
+### No License, No Terms of Use
+
+When datasets have neither a license nor custom terms of use, the dataset page will now indicate this. Also, these datasets will no longer be indexed as having custom terms. See also #8796, #10513, and #10614.
+
+### CC0 License Bug Fix
+
+At a high level, some datasets have been mislabeled as "Custom License" when they should have been "CC0 1.0". This has been corrected.
+
+In Dataverse 5.10, datasets with only "CC0 Waiver" in the "termsofuse" field were converted to "Custom License" (instead of the CC0 1.0 license) through a SQL migration script (see #10634). On deployment of Dataverse 6.4, a new SQL migration script will be run automatically to correct this, changing these datasets to CC0. You can review the script in #10634; it only affects datasets meeting all of the following criteria:
+
+- The existing "Terms of Use" must be equal to "This dataset is made available under a Creative Commons CC0 license with the following additional/modified terms and conditions: CC0 Waiver" (this was set in #10634).
+- The following terms fields must be empty: Confidentiality Declaration, Special Permissions, Restrictions, Citation Requirements, Depositor Requirements, Conditions, and Disclaimer.
+- The license ID must not be assigned.
+
+The script will set the license ID to that of the CC0 1.0 license and remove the contents of the "termsofuse" field. See also #9081 and #10634.
+
+### Remap oai_dc Export and Harvesting Format Fields: dc:type and dc:date
+
+The `oai_dc` export and harvesting format has had the following fields remapped:
+
+- dc:type was mapped to the field "Kind of Data". Now it is hard-coded to the word "Dataset".
+- dc:date was mapped to the field "Production Date" when available and otherwise to "Publication Date". Now it is mapped to the field "Publication Date" or to the field used for the citation date, if set (see [Set Citation Date Field Type for a Dataset](https://guides.dataverse.org/en/6.4/api/native-api.html#set-citation-date-field-type-for-a-dataset)).
+
+In order for these changes to be reflected in existing datasets, a [reexport all](https://guides.dataverse.org/en/6.4/admin/metadataexport.html#batch-exports-through-the-api) should be run (mentioned below). See #8129 and #10737.
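+
+A sketch of triggering the reexport via the admin API described in the linked guide:
+
+```shell
+# Trigger a reexport of all dataset metadata (adjust the host as needed)
+curl "http://localhost:8080/api/admin/metadata/reExportAll"
+```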
+
+### Zip File No Longer Misdetected as Shapefile (Hidden Directories)
+
+When detecting files types, Dataverse would previously detect a zip file as a shapefile if it contained [markers of a shapefile](https://guides.dataverse.org/en/6.4/developers/geospatial.html) in hidden directories. These hidden directories are now ignored when deciding if a zip file is a shapefile or not. See also #8945 and #10627.
+
+### External Controlled Vocabulary
+
+This release fixes a bug (introduced in v6.3) in the external controlled vocabulary mechanism that could cause indexing to fail (with a NullPointerException) when a script is configured for one child field and no other child fields were managed. See also #10869 and #10870.
+
+### Valid JSON in Error Response
+
+When any `ApiBlockingFilter` policy applies to a request, the JSON in the body of the error response is now valid JSON. See also #10085.
+
+### Docker Container Base Image Security and Compatibility
+
+- Switch "wait-for" to "wait4x", aligned with the Configbaker Image
+- Update "jattach" to v2.2
+- Install AMD64 / ARM64 versions of tools as necessary
+- Run base image as unprivileged user by default instead of `root` - this was an oversight from OpenShift changes
+- Linux User, Payara Admin and Domain Master passwords:
+  - Print hints about the default, publicly known passwords in place for these accounts
+ - Enable replacing these passwords at container boot time
+- Enable building with updated Temurin JRE images based on Ubuntu 24.04 LTS
+- Fix entrypoint script troubles with pre- and postboot script files
+- Unify location of files at CONFIG_DIR=/opt/payara/config, avoid writing to other places
+
+See also #10508, #10672 and #10722.
+
+### Cleanup of Temp Directories
+
+In this release we addressed an issue where copies of files uploaded via the UI were left behind in one specific temp directory (`.../domain1/uploads` by default). We would like to remind all installation admins that it is strongly recommended to have automated (and aggressive) cleanup mechanisms in place for all the temp directories used by Dataverse. For example, at Harvard/IQSS we use the following configuration for the PrimeFaces uploads directory mentioned above (note that, even with this fix in place, PrimeFaces will still leave a large number of small log files in that location).
+
+Instead of the default location (`.../domain1/uploads`) we use a directory on a dedicated partition, outside of the filesystem where Dataverse is installed, via the following JVM option:
+
+```
+-Ddataverse.files.uploads=/uploads/web
+```
+
+and we have a dedicated cronjob that runs every 30 minutes and deletes everything older than 2 hours in that directory:
+
+```
+15,45 * * * * /bin/find /uploads/web/ -mmin +119 -type f -name "upload*" -exec rm -f {} \; > /dev/null 2>&1
+```
+
+### Trailing Commas in Author Name Now Permitted
+
+When an author name ended in a comma (e.g. `Smith,` or `Smith, `), the dataset page was broken after publishing (a "500" error page was presented to the user). The underlying issue causing the JSON-LD Schema.org output on the page to break was fixed. See #10343 and #10776.
+
+## API Updates
+
+### Search API: affiliation, parentDataverseName, image_url, etc.
+
+The Search API (`/api/search`) response now includes additional fields, depending on the type.
+
+For collections (dataverses):
+
+- "affiliation"
+- "parentDataverseName"
+- "parentDataverseIdentifier"
+- "image_url" (optional)
+
+```javascript
+"items": [
+ {
+ "name": "Darwin's Finches",
+ ...
+ "affiliation": "Dataverse.org",
+ "parentDataverseName": "Root",
+ "parentDataverseIdentifier": "root",
+ "image_url":"/api/access/dvCardImage/{identifier}"
+(etc, etc)
+```
+
+For datasets:
+
+- "image_url" (optional)
+
+```javascript
+"items": [
+ {
+ ...
+ "image_url": "http://localhost:8080/api/datasets/2/logo"
+ ...
+(etc, etc)
+```
+
+For files:
+
+- "releaseOrCreateDate"
+- "image_url" (optional)
+
+```javascript
+"items": [
+ {
+ "name": "test.png",
+ ...
+ "releaseOrCreateDate": "2016-05-10T12:53:39Z",
+ "image_url":"/api/access/datafile/42?imageThumb=true"
+(etc, etc)
+```
+
+These examples are also shown in the [Search API](https://guides.dataverse.org/en/6.4/api/search.html) section of the API Guide.
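+
+For reference, a request along these lines (unauthenticated, against published content) returns items of the kinds shown above:
+
+```shell
+curl "http://localhost:8080/api/search?q=finch&type=dataverse"
+```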
+
+The image_url field was already part of the SolrSearchResult JSON (and incorrectly appeared in the Search API documentation), but it wasn't returned by the API because it was appended only after the Solr query was executed, in the SearchIncludeFragment of JSF (the old/current UI framework). Now the field is set in SearchServiceBean, ensuring that it is always returned by the API when an image is available.
+
+The Solr schema.xml file has been updated to include a new field called "dvParentAlias" for supporting the new response field "parentDataverseIdentifier". See upgrade instructions below.
+
+See also #10810 and #10811.
+
+### Search API: publicationStatuses
+
+The Search API (`/api/search`) will now include publicationStatuses in the JSON response as long as the list is not empty.
+
+Example:
+
+```javascript
+"items": [
+ {
+ "name": "Darwin's Finches",
+ ...
+ "publicationStatuses": [
+ "Unpublished",
+ "Draft"
+ ],
+(etc, etc)
+```
+
+See also #10733 and #10738.
+
+### Search Facet Information Exposed
+
+A new endpoint `/api/datasetfields/facetables` lists all facetable dataset fields defined in the installation, as described in [the guides](https://guides.dataverse.org/en/6.4/api/native-api.html#list-all-facetable-dataset-fields).
+
+A new optional query parameter "returnDetails" has been added to the `/api/dataverses/{identifier}/facets/` endpoint to include detailed information about each DataverseFacet, as described in [the guides](https://guides.dataverse.org/en/6.4/api/native-api.html#list-facets-configured-for-a-dataverse-collection). See also #10726 and #10727.
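+
+For example, using the root collection:
+
+```shell
+curl "http://localhost:8080/api/datasetfields/facetables"
+curl "http://localhost:8080/api/dataverses/root/facets?returnDetails=true"
+```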
+
+### User Permissions on Collections
+
+A new endpoint at `/api/dataverses/{identifier}/userPermissions` for obtaining the user permissions on a collection (dataverse) has been added. See also [the guides](https://guides.dataverse.org/en/6.4/api/native-api.html#get-user-permissions-on-a-dataverse), #10749 and #10751.
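+
+For example, to check your own permissions on the root collection:
+
+```shell
+curl -H "X-Dataverse-key:$API_TOKEN" "http://localhost:8080/api/dataverses/root/userPermissions"
+```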
+
+### addDataverse Extended
+
+The addDataverse (`/api/dataverses/{identifier}`) API endpoint has been extended to allow adding metadata blocks, input levels and facet IDs at creation time, as the Dataverse page in create mode does in JSF. See also [the guides](https://guides.dataverse.org/en/6.4/api/native-api.html#create-a-dataverse-collection), #10633 and #10644.
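+
+For example, a collection can now be created in one call with a payload like the `dataverse-complete-optional-params.json` example added in this PR:
+
+```shell
+curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-Type: application/json" \
+  -X POST "http://localhost:8080/api/dataverses/root" --upload-file dataverse-complete-optional-params.json
+```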
+
+### Metadata Blocks and Display on Create
+
+The `/api/dataverses/{identifier}/metadatablocks` endpoint has been fixed so that fields marked as `displayOnCreate=true` are no longer returned when they have an input level with `include=false` and the query parameters `returnDatasetFieldTypes=true` and `onlyDisplayedOnCreate=true` are set. See also #10741 and #10767.
+
+The fields "depositor" and "dateOfDeposit" in the citation.tsv metadata block file have been updated to have the property "displayOnCreate" set to TRUE. In practice, only the API is affected because the UI has special logic that already shows these fields when datasets are created. See also and #10850 and #10884.
+
+### Feature Flags Can Be Listed
+
+It is now possible to list all feature flags and see if they are enabled or not. See also [the guides](https://guides.dataverse.org/en/6.4/api/native-api.html#list-all-feature-flags) and #10732.
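+
+For example:
+
+```shell
+curl "http://localhost:8080/api/admin/featureFlags"
+```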
+
+## Settings Added
+
+The following settings have been added:
+
+- dataverse.feature.disable-dataset-thumbnail-autoselect
+- dataverse.feature.globus-use-experimental-async-framework
+- dataverse.files.globus-monitoring-server
+- dataverse.pid.*.crossref.url
+- dataverse.pid.*.crossref.rest-api-url
+- dataverse.pid.*.crossref.username
+- dataverse.pid.*.crossref.password
+- dataverse.pid.*.crossref.depositor
+- dataverse.pid.*.crossref.depositor-email
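+
+As with other JVM options/MicroProfile settings, these can be set via `asadmin`; a sketch enabling one of the new feature flags (substitute the setting and value you need):
+
+```shell
+# Assuming $PAYARA points at your Payara installation, as in the upgrade instructions below.
+$PAYARA/bin/asadmin create-jvm-options '-Ddataverse.feature.disable-dataset-thumbnail-autoselect=true'
+```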
+
+## Backward Incompatible Changes
+
+- The oai_dc export format has changed. See the "Remap oai_dc" section above.
+- Several APIs related to DataCite have changed. See "More and Better Data Sent to DataCite" above.
+
+## Complete List of Changes
+
+For the complete list of code changes in this release, see the [6.4 milestone](https://github.com/IQSS/dataverse/issues?q=milestone%3A6.4+is%3Aclosed) in GitHub.
+
+## Getting Help
+
+For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/g/dataverse-community) or email support@dataverse.org.
+
+## Installation
+
+If this is a new installation, please follow our [Installation Guide](https://guides.dataverse.org/en/latest/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/latest/installation/intro.html#getting-help) if you need it!
+
+Once you are in production, we would be delighted to update our [map of Dataverse installations](https://dataverse.org/installations) around the world to include yours! Please [create an issue](https://github.com/IQSS/dataverse-installations/issues) or email us at support@dataverse.org to join the club!
+
+You are also very welcome to join the [Global Dataverse Community Consortium](https://www.gdcc.io/) (GDCC).
+
+## Upgrade Instructions
+
+Upgrading requires a maintenance window and downtime. Please plan accordingly, create backups of your database, etc.
+
+These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.3.
+
+0\. These instructions assume that you are upgrading from the immediate previous version. If you are running an earlier version, the only supported way to upgrade is to progress through the upgrades to all the releases in between before attempting the upgrade to this version.
+
+If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user.
+
+In the following commands, we assume that Payara 6 is installed in `/usr/local/payara6`. If not, adjust as needed.
+
+```shell
+export PAYARA=/usr/local/payara6
+```
+
+(or `setenv PAYARA /usr/local/payara6` if you are using a `csh`-like shell)
+
+1\. Undeploy the previous version
+
+```shell
+$PAYARA/bin/asadmin undeploy dataverse-6.3
+```
+
+2\. Stop and start Payara
+
+```shell
+service payara stop
+service payara start
+```
+
+3\. Deploy this version
+
+```shell
+$PAYARA/bin/asadmin deploy dataverse-6.4.war
+```
+
+Note: if you have any trouble deploying, stop Payara, remove the following directories, start Payara, and try to deploy again.
+
+```shell
+service payara stop
+rm -rf $PAYARA/glassfish/domains/domain1/generated
+rm -rf $PAYARA/glassfish/domains/domain1/osgi-cache
+rm -rf $PAYARA/glassfish/domains/domain1/lib/databases
+```
+
+4\. For installations with internationalization:
+
+Please remember to update translations via [Dataverse language packs](https://github.com/GlobalDataverseCommunityConsortium/dataverse-language-packs).
+
+5\. Restart Payara
+
+```shell
+service payara stop
+service payara start
+```
+
+6\. Update metadata blocks
+
+These changes reflect incremental improvements made to the handling of core metadata fields.
+
+```shell
+wget https://raw.githubusercontent.com/IQSS/dataverse/v6.4/scripts/api/data/metadatablocks/citation.tsv
+
+curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file citation.tsv
+```
+
+7\. Update the Solr schema.xml file. Start with the standard v6.4 schema.xml; then, if your installation uses any custom or experimental metadata blocks, update it to include the extra fields (step 7a).
+
+Stop Solr (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.4/installation/prerequisites.html#solr-init-script)).
+
+```shell
+service solr stop
+```
+
+Replace schema.xml
+
+```shell
+wget https://raw.githubusercontent.com/IQSS/dataverse/v6.4/conf/solr/schema.xml
+cp schema.xml /usr/local/solr/solr-9.4.1/server/solr/collection1/conf
+```
+
+Start Solr (but if you use any custom metadata blocks, perform step 7a first).
+
+```shell
+service solr start
+```
+
+7a\. For installations with custom or experimental metadata blocks:
+
+Before starting Solr, update the schema to include all the extra metadata fields that your installation uses. We do this by collecting the output of the Dataverse schema API and feeding it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed to reflect the names of the directories, if different):
+
+```shell
+wget https://raw.githubusercontent.com/IQSS/dataverse/v6.4/conf/solr/update-fields.sh
+chmod +x update-fields.sh
+curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-9.4.1/server/solr/collection1/conf/schema.xml
+```
+
+Now start Solr.
+
+8\. Reindex Solr
+
+Below is the simplest way to reindex Solr:
+
+```shell
+curl http://localhost:8080/api/admin/index
+```
+
+The API above rebuilds the existing index "in place". If you want to be absolutely sure that your index is up to date and consistent, you may consider wiping it clean and reindexing everything from scratch (see [the guides](https://guides.dataverse.org/en/latest/admin/solr-search-index.html)). Just note that, depending on the size of your database, a full reindex may take a while, and users will see incomplete search results during that window.
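+
+For the from-scratch variant, the index is cleared first and then rebuilt; per the guides linked above, the calls look like this:
+
+```shell
+curl http://localhost:8080/api/admin/index/clear
+curl http://localhost:8080/api/admin/index
+```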
+
+9\. Run reExportAll to update dataset metadata exports
+
+This step is necessary because of changes described above for the `Datacite` and `oai_dc` export formats.
+
+Below is the simplest way to reexport all dataset metadata. For more advanced usage, please see [the guides](http://guides.dataverse.org/en/6.4/admin/metadataexport.html#batch-exports-through-the-api).
+
+```shell
+curl http://localhost:8080/api/admin/metadata/reExportAll
+```
+
+10\. Pushing updated metadata to DataCite
+
+(If you don't use DataCite, you can skip this.)
+
+Above you updated the citation metadata block and Solr with the new "relationType" field. With these two changes, the "Relation Type" fields will be available and creation/publication of datasets will result in the expanded XML being sent to DataCite. You've also already run "reExportAll" to update the `Datacite` metadata export format.
+
+Entries at DataCite for published datasets can be updated by a superuser using an API call (newly [documented](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-metadata-for-all-published-datasets-at-the-pid-provider)):
+
+`curl -X POST -H 'X-Dataverse-key:$API_TOKEN' http://localhost:8080/api/datasets/modifyRegistrationPIDMetadataAll`
+
+This will loop through all published datasets (and released files with PIDs). As long as the loop completes, the call will return a 200/OK response. Any PIDs for which the update fails can be found using the following command:
+
+`grep 'Failure for id' server.log`
+
+Failures may occur if PIDs were never registered, or if they were never made findable. Any such cases can be fixed manually in DataCite Fabrica or, respectively, via the [Reserve a PID](https://guides.dataverse.org/en/6.4/api/native-api.html#reserve-a-pid) API call and the newly documented `/api/datasets/{id}/modifyRegistration` call. See https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#send-dataset-metadata-to-pid-provider. Please reach out with any questions.
+
+PIDs can also be updated by a superuser on a per-dataset basis using
+
+`curl -X POST -H 'X-Dataverse-key:$API_TOKEN' http://localhost:8080/api/datasets/{id}/modifyRegistrationMetadata`
+
+### Additional Upgrade Steps
+
+11\. If there are broken thumbnails
+
+To restore any broken thumbnails caused by the bug described above, call the `http://localhost:8080/api/admin/clearThumbnailFailureFlag` API, which will attempt to clear the flag on all files (regardless of whether the flag was set by this bug or by some other problem with the file), or call `http://localhost:8080/api/admin/clearThumbnailFailureFlag/$FILE_ID` to clear the flag for an individual file. Calling the former (batch) API is recommended.
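+
+For example (a sketch; see the admin guide for these endpoints and their exact semantics):
+
+```shell
+# Batch: attempt to clear the failure flag on all files
+curl -X DELETE http://localhost:8080/api/admin/clearThumbnailFailureFlag
+# Or clear it for a single file
+curl -X DELETE http://localhost:8080/api/admin/clearThumbnailFailureFlag/$FILE_ID
+```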
+
+12\. PermaLinks with custom base-url
+
+If you currently use PermaLinks with a custom `base-url`: You must manually append `/citation?persistentId=` to the base URL to maintain functionality.
+
+If you use PermaLinks without a configured `base-url`, no changes are required.
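+
+For example, if your PermaLink provider id is `perma1` (a hypothetical id) and your base URL was `https://example.org`, the corresponding JVM option would change along these lines (note the escaped colons required by `asadmin`):
+
+```shell
+$PAYARA/bin/asadmin delete-jvm-options '-Ddataverse.pid.perma1.permalink.base-url=https\://example.org'
+$PAYARA/bin/asadmin create-jvm-options '-Ddataverse.pid.perma1.permalink.base-url=https\://example.org/citation?persistentId='
+```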
diff --git a/doc/sphinx-guides/source/_static/api/dataset-create-software.json b/doc/sphinx-guides/source/_static/api/dataset-create-software.json
new file mode 100644
index 00000000000..4c649bff0aa
--- /dev/null
+++ b/doc/sphinx-guides/source/_static/api/dataset-create-software.json
@@ -0,0 +1,82 @@
+{
+ "datasetType": "software",
+ "datasetVersion": {
+ "license": {
+ "name": "CC0 1.0",
+ "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+ },
+ "metadataBlocks": {
+ "citation": {
+ "fields": [
+ {
+ "value": "pyDataverse",
+ "typeClass": "primitive",
+ "multiple": false,
+ "typeName": "title"
+ },
+ {
+ "value": [
+ {
+ "authorName": {
+ "value": "Range, Jan",
+ "typeClass": "primitive",
+ "multiple": false,
+ "typeName": "authorName"
+ },
+ "authorAffiliation": {
+ "value": "University of Stuttgart",
+ "typeClass": "primitive",
+ "multiple": false,
+ "typeName": "authorAffiliation"
+ }
+ }
+ ],
+ "typeClass": "compound",
+ "multiple": true,
+ "typeName": "author"
+ },
+ {
+ "value": [
+ { "datasetContactEmail" : {
+ "typeClass": "primitive",
+ "multiple": false,
+ "typeName": "datasetContactEmail",
+ "value" : "jan@mailinator.com"
+ },
+ "datasetContactName" : {
+ "typeClass": "primitive",
+ "multiple": false,
+ "typeName": "datasetContactName",
+ "value": "Range, Jan"
+ }
+ }],
+ "typeClass": "compound",
+ "multiple": true,
+ "typeName": "datasetContact"
+ },
+ {
+ "value": [ {
+ "dsDescriptionValue":{
+ "value": "A Python module for Dataverse.",
+ "multiple":false,
+ "typeClass": "primitive",
+ "typeName": "dsDescriptionValue"
+ }}],
+ "typeClass": "compound",
+ "multiple": true,
+ "typeName": "dsDescription"
+ },
+ {
+ "value": [
+ "Computer and Information Science"
+ ],
+ "typeClass": "controlledVocabulary",
+ "multiple": true,
+ "typeName": "subject"
+ }
+ ],
+ "displayName": "Citation Metadata"
+ }
+ }
+ }
+}
diff --git a/doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld b/doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld
new file mode 100644
index 00000000000..6f072967dc8
--- /dev/null
+++ b/doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld
@@ -0,0 +1,16 @@
+{
+ "http://purl.org/dc/terms/title": "Darwin's Finches",
+ "http://purl.org/dc/terms/subject": "Medicine, Health and Life Sciences",
+ "http://purl.org/dc/terms/creator": {
+ "https://dataverse.org/schema/citation/authorName": "Finch, Fiona",
+ "https://dataverse.org/schema/citation/authorAffiliation": "Birds Inc."
+ },
+ "https://dataverse.org/schema/citation/datasetContact": {
+ "https://dataverse.org/schema/citation/datasetContactEmail": "finch@mailinator.com",
+ "https://dataverse.org/schema/citation/datasetContactName": "Finch, Fiona"
+ },
+ "https://dataverse.org/schema/citation/dsDescription": {
+ "https://dataverse.org/schema/citation/dsDescriptionValue": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds."
+ },
+ "https://dataverse.org/schema/core#datasetType": "software"
+}
diff --git a/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json b/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json
new file mode 100644
index 00000000000..fef32aa1e2c
--- /dev/null
+++ b/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json
@@ -0,0 +1,65 @@
+{
+ "name": "Scientific Research",
+ "alias": "science",
+ "dataverseContacts": [
+ {
+ "contactEmail": "pi@example.edu"
+ },
+ {
+ "contactEmail": "student@example.edu"
+ }
+ ],
+ "affiliation": "Scientific Research University",
+ "description": "We do all the science.",
+ "dataverseType": "LABORATORY",
+ "metadataBlocks": {
+ "metadataBlockNames": [
+ "citation", "geospatial"
+ ],
+ "inputLevels": [
+ {
+ "datasetFieldTypeName": "geographicCoverage",
+ "include": true,
+ "required": true
+ },
+ {
+ "datasetFieldTypeName": "country",
+ "include": true,
+ "required": true
+ },
+ {
+ "datasetFieldTypeName": "geographicUnit",
+ "include": false,
+ "required": false
+ },
+ {
+ "datasetFieldTypeName": "geographicBoundingBox",
+ "include": false,
+ "required": false
+ },
+ {
+ "datasetFieldTypeName": "westLongitude",
+ "include": false,
+ "required": false
+ },
+ {
+ "datasetFieldTypeName": "eastLongitude",
+ "include": false,
+ "required": false
+ },
+ {
+ "datasetFieldTypeName": "northLatitude",
+ "include": false,
+ "required": false
+ },
+ {
+ "datasetFieldTypeName": "southLatitude",
+ "include": false,
+ "required": false
+ }
+ ],
+ "facetIds": [
+ "authorName", "authorAffiliation"
+ ]
+ }
+}
diff --git a/doc/sphinx-guides/source/_static/util/counter_daily.sh b/doc/sphinx-guides/source/_static/util/counter_daily.sh
index 674972b18f2..5095a83b7e2 100644
--- a/doc/sphinx-guides/source/_static/util/counter_daily.sh
+++ b/doc/sphinx-guides/source/_static/util/counter_daily.sh
@@ -1,6 +1,6 @@
#! /bin/bash
-COUNTER_PROCESSOR_DIRECTORY="/usr/local/counter-processor-0.1.04"
+COUNTER_PROCESSOR_DIRECTORY="/usr/local/counter-processor-1.05"
MDC_LOG_DIRECTORY="/usr/local/payara6/glassfish/domains/domain1/logs/mdc"
# counter_daily.sh
diff --git a/doc/sphinx-guides/source/_templates/navbar.html b/doc/sphinx-guides/source/_templates/navbar.html
index c7b81dcb937..d88306be8ae 100644
--- a/doc/sphinx-guides/source/_templates/navbar.html
+++ b/doc/sphinx-guides/source/_templates/navbar.html
@@ -25,7 +25,6 @@
About
-
+
-
-
+
+
diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml
index 936d354e9d7..6de0f00e94e 100644
--- a/src/main/webapp/dataset.xhtml
+++ b/src/main/webapp/dataset.xhtml
@@ -86,7 +86,7 @@
-
+
@@ -522,6 +522,16 @@
+
+
+
+ #{bundle['dataset.unlinkBtn']}
+
+
+
@@ -585,13 +595,13 @@
-
+
- )
-
+
@@ -631,6 +641,7 @@
or !empty DatasetPage.datasetVersionUI.keywordDisplay
or !empty DatasetPage.datasetVersionUI.subject.value
or !empty DatasetPage.datasetVersionUI.relPublicationCitation
+ or !empty DatasetPage.datasetVersionUI.relPublicationUrl
or !empty DatasetPage.datasetVersionUI.notes.value) and !empty DatasetPage.datasetSummaryFields}">