diff --git a/doc/release-notes/8533_semantic-api-updates.md b/doc/release-notes/8533_semantic-api-updates.md
new file mode 100644
index 00000000000..a0391ca0dc3
--- /dev/null
+++ b/doc/release-notes/8533_semantic-api-updates.md
@@ -0,0 +1,15 @@
+
+## Notes for Developers and Integrators
+
+This release includes an update to the experimental semantic API and the underlying assignment of URIs to metadata block terms that are not explicitly mapped to terms in community vocabularies. The change affects the output of the OAI_ORE metadata export, the OAI_ORE file in archival Bags, and the input/output allowed for those terms in the semantic API. For those updating integration code or existing files intended for input into this release of Dataverse: URIs of the form
+    https://dataverse.org/schema/<block name>/<parent field name>#<child field title>, and
+    https://dataverse.org/schema/<block name>/<field title>
+    are both replaced with URIs of the form
+    https://dataverse.org/schema/<block name>/<field name>
+    For example, https://dataverse.org/schema/citation/author#Name becomes https://dataverse.org/schema/citation/authorName.
+
+## Additional Release Steps
+
+The upgrade should include a re-export of metadata files (only the OAI_ORE export is affected).
+
+For this PR and other changes coming from DataCommons, it will also be advisable for people archiving Bags to re-archive. More detail on the overall set of changes will be provided in those TBD PRs.
diff --git a/doc/release-notes/8561-license-info-in-submit-for-review-pop-up.md b/doc/release-notes/8561-license-info-in-submit-for-review-pop-up.md
new file mode 100644
index 00000000000..c31fd93b907
--- /dev/null
+++ b/doc/release-notes/8561-license-info-in-submit-for-review-pop-up.md
@@ -0,0 +1 @@
+Information about the license selection (or custom terms) is now available in the confirmation popup when contributors click "Submit for Review". Previously, this was only available in the confirmation popup for the "Publish" button, which contributors do not see.
diff --git a/doc/sphinx-guides/source/_static/api/dataset-create.jsonld b/doc/sphinx-guides/source/_static/api/dataset-create.jsonld
index 16861ff64ad..a010e30bf7b 100644
--- a/doc/sphinx-guides/source/_static/api/dataset-create.jsonld
+++ b/doc/sphinx-guides/source/_static/api/dataset-create.jsonld
@@ -2,14 +2,14 @@
   "http://purl.org/dc/terms/title": "Darwin's Finches",
   "http://purl.org/dc/terms/subject": "Medicine, Health and Life Sciences",
   "http://purl.org/dc/terms/creator": {
-    "https://dataverse.org/schema/citation/author#Name": "Finch, Fiona",
-    "https://dataverse.org/schema/citation/author#Affiliation": "Birds Inc."
+    "https://dataverse.org/schema/citation/authorName": "Finch, Fiona",
+    "https://dataverse.org/schema/citation/authorAffiliation": "Birds Inc."
   },
-  "https://dataverse.org/schema/citation/Contact": {
-    "https://dataverse.org/schema/citation/datasetContact#E-mail": "finch@mailinator.com",
-    "https://dataverse.org/schema/citation/datasetContact#Name": "Finch, Fiona"
+  "https://dataverse.org/schema/citation/datasetContact": {
+    "https://dataverse.org/schema/citation/datasetContactEmail": "finch@mailinator.com",
+    "https://dataverse.org/schema/citation/datasetContactName": "Finch, Fiona"
   },
-  "https://dataverse.org/schema/citation/Description": {
-    "https://dataverse.org/schema/citation/dsDescription#Text": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds."
+  "https://dataverse.org/schema/citation/dsDescription": {
+    "https://dataverse.org/schema/citation/dsDescriptionValue": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds."
} } \ No newline at end of file diff --git a/doc/sphinx-guides/source/_static/api/dataset-migrate.jsonld b/doc/sphinx-guides/source/_static/api/dataset-migrate.jsonld index f79dbd30d8f..8f43d1dd6e9 100644 --- a/doc/sphinx-guides/source/_static/api/dataset-migrate.jsonld +++ b/doc/sphinx-guides/source/_static/api/dataset-migrate.jsonld @@ -1,25 +1,25 @@ { -"citation:Depositor": "Admin, Dataverse", -"Title": "Test Dataset", -"Subject": "Computer and Information Science", -"Creator": { - "author:Name": "Admin, Dataverse", - "author:Affiliation": "GDCC" +"citation:depositor": "Admin, Dataverse", +"title": "Test Dataset", +"subject": "Computer and Information Science", +"author": { + "citation:authorName": "Admin, Dataverse", + "citation:authorAffiliation": "GDCC" }, -"Deposit Date": "2020-10-08", -"citation:Distributor": { - "distributor:Name": "Demo Dataverse Repository", - "distributor:Affiliation": "Dataverse Community", - "distributor:Abbreviation": "GDCC", - "distributor:URL": "https://dataverse.org/global-dataverse-community-consortium" +"dateOfDeposit": "2020-10-08", +"citation:distributor": { + "citation:distributorName": "Demo Dataverse Repository", + "citation:distributorAffiliation": "Dataverse Community", + "citation:distributorAbbreviation": "GDCC", + "citation:distributorURL": "https://dataverse.org/global-dataverse-community-consortium" }, -"citation:Contact": { -"datasetContact:Name": "Admin, Dataverse", -"datasetContact:Affiliation": "GDCC", -"datasetContact:E-mail": "admin@demo.dataverse.org" +"citation:datasetContact": { +"citation:datasetContactName": "Admin, Dataverse", +"citation:datasetContactAffiliation": "GDCC", +"citation:datasetContactEmail": "admin@demo.dataverse.org" }, -"citation:Description": { - "dsDescription:Text": "A short description" +"citation:dsDescription": { + "citation:dsDescriptionValue": "A short description" }, "@id": "doi:10.33564/FK27U7YBV", "schema:version": "1.0", @@ -29,15 +29,11 @@ "dvcore:fileRequestAccess": false }, "@context": { - "Creator": "http://purl.org/dc/terms/creator", - "Deposit Date": "http://purl.org/dc/terms/dateSubmitted", - "Subject": "http://purl.org/dc/terms/subject", - "Title": "http://purl.org/dc/terms/title", - "author": "https://dataverse.org/schema/citation/author#", + "author": "http://purl.org/dc/terms/creator", + "dateOfDeposit": "http://purl.org/dc/terms/dateSubmitted", + "subject": "http://purl.org/dc/terms/subject", + "title": "http://purl.org/dc/terms/title", "citation": "https://dataverse.org/schema/citation/", - "datasetContact": "https://dataverse.org/schema/citation/datasetContact#", - "distributor": "https://dataverse.org/schema/citation/distributor#", - "dsDescription": "https://dataverse.org/schema/citation/dsDescription#", "dvcore": "https://dataverse.org/schema/core#", "schema": "http://schema.org/" }} diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst index b7d0086e221..e59d3d4bc3b 100644 --- a/doc/sphinx-guides/source/admin/metadatacustomization.rst +++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst @@ -76,310 +76,310 @@ Each of the three main sections own sets of properties: #metadataBlock properties ~~~~~~~~~~~~~~~~~~~~~~~~~ -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| **Property** | **Purpose** | **Allowed values and restrictions** | 
-+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| name | A user-definable string used to identify a | \• No spaces or punctuation, except underscore. | -| | #metadataBlock | | -| | | \• By convention, should start with a letter, and use | -| | | lower camel case [3]_ | -| | | | -| | | \• Must not collide with a field of the same name in | -| | | the same or any other #datasetField definition, | -| | | including metadata blocks defined elsewhere. [4]_ | -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| dataverseAlias | If specified, this metadata block will be available | Free text. For an example, see custom_hbgdki.tsv. | -| | only to the Dataverse collection designated here by | | -| | its alias and to children of that Dataverse collection. | | -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| displayName | Acts as a brief label for display related to this | Should be relatively brief. The limit is 256 character, | -| | #metadataBlock. | but very long names might cause display problems. | -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| blockURI | Associates the properties in a block with an external | The citation #metadataBlock has the blockURI | -| | URI. | https://dataverse.org/schema/citation/ which assigns a | -| | Properties will be assigned the global assigned the | global URI to terms such as | -| | global identifier blockURI in the OAI_ORE | https://dataverse.org/schema/citation/subtitle | -| | metadata and archival Bags | | -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ ++----------------+---------------------------------------------------------+---------------------------------------------------------+ +| **Property** | **Purpose** | **Allowed values and restrictions** | ++----------------+---------------------------------------------------------+---------------------------------------------------------+ +| name | A user-definable string used to identify a | \• No spaces or punctuation, except underscore. | +| | #metadataBlock | | +| | | \• By convention, should start with a letter, and use | +| | | lower camel case [3]_ | +| | | | +| | | \• Must not collide with a field of the same name in | +| | | the same or any other #datasetField definition, | +| | | including metadata blocks defined elsewhere. [4]_ | ++----------------+---------------------------------------------------------+---------------------------------------------------------+ +| dataverseAlias | If specified, this metadata block will be available | Free text. For an example, see custom_hbgdki.tsv. | +| | only to the Dataverse collection designated here by | | +| | its alias and to children of that Dataverse collection. | | ++----------------+---------------------------------------------------------+---------------------------------------------------------+ +| displayName | Acts as a brief label for display related to this | Should be relatively brief. 
The limit is 256 characters, |
+|                | #metadataBlock.                                         | but very long names might cause display problems.      |
++----------------+---------------------------------------------------------+---------------------------------------------------------+
+| blockURI       | Associates the properties in a block with an external  | The citation #metadataBlock has the blockURI           |
+|                | URI.                                                    | https://dataverse.org/schema/citation/ which assigns a |
+|                | Properties will be assigned the                         | default global URI to terms such as                    |
+|                | global identifier blockURI<name> in the OAI_ORE         | https://dataverse.org/schema/citation/subtitle         |
+|                | metadata and archival Bags                              |                                                        |
++----------------+---------------------------------------------------------+---------------------------------------------------------+
 
 #datasetField (field) properties
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+
-| **Property**                                            | **Purpose**                                             | **Allowed values and restrictions**                     |
-+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+
-| name                                                    | A user-definable string used to identify a              | \• (from DatasetFieldType.java) The internal DDI-like   |
-|                                                         | #datasetField. Maps directly to field name used by      | name, no spaces, etc.                                   |
-|                                                         | Solr.                                                   |                                                         |
-|                                                         |                                                         | \• (from Solr) Field names should consist of            |
-|                                                         |                                                         | alphanumeric or underscore characters only and not start|
-|                                                         |                                                         | with a digit. This is not currently strictly enforced,  |
-|                                                         |                                                         | but other field names will not have first class         |
-|                                                         |                                                         | support from all components and back compatibility      |
-|                                                         |                                                         | is not guaranteed.                                      |
-|                                                         |                                                         | Names with both leading and trailing underscores        |
-|                                                         |                                                         | (e.g. \_version_) are reserved.                         |
-|                                                         |                                                         |                                                         |
-|                                                         |                                                         | \• Must not collide with a field of                     |
-|                                                         |                                                         | the same same name in another #metadataBlock            |
-|                                                         |                                                         | definition or any name already included as a            |
-|                                                         |                                                         | field in the Solr index.                                |
-+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+
-| title                                                   | Acts as a brief label for display                       | Should be relatively brief.                             |
-|                                                         | related to this #datasetField.                          |                                                         |
-+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+
-| description                                             | Used to provide a description of the                    | Free text                                               |
-|                                                         | field.                                                  |                                                         |
-+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+
-| watermark                                               | A string to initially display in a field                | Free text                                               |
-|                                                         | as a prompt for what the user should enter.             |                                                         |
-+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+
-| fieldType                                               | Defines the type of content that the                    | | \• none                                               |
-|                                                         | field, if not empty, is meant to contain. 
| | \• date | -| | | | \• email | -| | | | \• text | -| | | | \• textbox | -| | | | \• url | -| | | | \• int | -| | | | \• float | -| | | | \• See below for | -| | | | fieldtype definitions | -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| displayOrder | Controls the sequence in which the fields | Non-negative integer. | -| | are displayed, both for input and | | -| | presentation. | | -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| displayFormat | Controls how the content is displayed | See below for displayFormat | -| | for presentation (not entry). The value of | variables | -| | this field may contain one or more | | -| | special variables (enumerated below). | | -| | HTML tags, likely in conjunction with one | | -| | or more of these values, may be used | | -| | to control the display of content in | | -| | the web UI. | | -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| advancedSearchField | Specify whether this field is available in | TRUE (available) or | -| | advanced search. | FALSE (not available) | -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| allowControlledVocabulary | Specify whether the possible values of | TRUE (controlled) or FALSE (not | -| | this field are determined by values | controlled) | -| | in the #controlledVocabulary section. | | -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| allowmultiples | Specify whether this field is repeatable. | TRUE (repeatable) or FALSE (not | -| | | repeatable) | -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| facetable | Specify whether the field is facetable | TRUE (controlled) or FALSE (not | -| | (i.e., if the expected values for | controlled) | -| | this field are themselves useful | | -| | search terms for this field). If a field is | | -| | "facetable" (able to be faceted on), it | | -| | appears under "Browse/Search | | -| | Facets" when you edit | | -| | "General Information" for a Dataverse | | -| | collection. | | -| | Setting this value to TRUE generally makes | | -| | sense for enumerated or controlled | | -| | vocabulary fields, fields representing | | -| | identifiers (IDs, names, email | | -| | addresses), and other fields that are | | -| | likely to share values across | | -| | entries. It is less likely to make sense | | -| | for fields containing descriptions, | | -| | floating point numbers, and other | | -| | values that are likely to be unique. 
| | -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| displayoncreate [5]_ | Designate fields that should display during | TRUE (display during creation) or FALSE | -| | the creation of a new dataset, even before | (don’t display during creation) | -| | the dataset is saved. | | -| | Fields not so designated will not | | -| | be displayed until the dataset has been | | -| | saved. | | -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| required | For primitive fields, specify whether or not the | For primitive fields, TRUE | -| | field is required. | (required) or FALSE (optional). | -| | | | -| | For compound fields, also specify if one or more | For compound fields: | -| | subfields are required or conditionally required. At | | -| | least one instance of a required field must be | \• To make one or more | -| | present. More than one instance of a field may be | subfields optional, the parent | -| | allowed, depending on the value of allowmultiples. | field and subfield(s) must be | -| | | FALSE (optional). | -| | | | -| | | \• To make one or more subfields | -| | | required, the parent field and | -| | | the required subfield(s) must be | -| | | TRUE (required). | -| | | | -| | | \• To make one or more subfields | -| | | conditionally required, make the | -| | | parent field FALSE (optional) | -| | | and make TRUE (required) any | -| | | subfield or subfields that are | -| | | required if any other subfields | -| | | are filled. | -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| parent | For subfields, specify the name of the parent or | \• Must not result in a cyclical reference. | -| | containing field. | | -| | | \• Must reference an existing field in the same | -| | | #metadataBlock. | -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| metadatablock_id | Specify the name of the #metadataBlock that contains | \• Must reference an existing #metadataBlock. | -| | this field. | | -| | | \• As a best practice, the value should reference the | -| | | #metadataBlock in the current | -| | | definition (it is technically | -| | | possible to reference another | -| | | existing metadata block.) | -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| termURI | Specify a global URI identifying this term in an | For example, the existing citation | -| | external community vocabulary. | #metadataBlock defines the property | -| | | names 'title' as http://purl.org/dc/terms/title | -| | This value overrides the default created by appending | - i.e. 
indicating that it can |
-|                                                         | the property name to the blockURI defined for the      | be interpreted as the Dublin Core term 'title'          |
-|                                                         | #metadataBlock                                          |                                                         |
-+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+
++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+
+| **Property**              | **Purpose**                                            | **Allowed values and restrictions**                      |                       |
++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+
+| name                      | A user-definable string used to identify a             | \• (from DatasetFieldType.java) The internal DDI-like    |                       |
+|                           | #datasetField. Maps directly to field name used by     | name, no spaces, etc.                                    |                       |
+|                           | Solr.                                                  |                                                          |                       |
+|                           |                                                        | \• (from Solr) Field names should consist of             |                       |
+|                           |                                                        | alphanumeric or underscore characters only and not start |                       |
+|                           |                                                        | with a digit. This is not currently strictly enforced,   |                       |
+|                           |                                                        | but other field names will not have first class          |                       |
+|                           |                                                        | support from all components and backwards compatibility  |                       |
+|                           |                                                        | is not guaranteed.                                       |                       |
+|                           |                                                        | Names with both leading and trailing underscores         |                       |
+|                           |                                                        | (e.g. \_version_) are reserved.                          |                       |
+|                           |                                                        |                                                          |                       |
+|                           |                                                        | \• Must not collide with a field of                      |                       |
+|                           |                                                        | the same name in another #metadataBlock                  |                       |
+|                           |                                                        | definition or any name already included as a             |                       |
+|                           |                                                        | field in the Solr index.                                 |                       |
++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+
+| title                     | Acts as a brief label for display                      | Should be relatively brief.                              |                       |
+|                           | related to this #datasetField.                         |                                                          |                       |
++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+
+| description               | Used to provide a description of the                   | Free text                                                |                       |
+|                           | field.                                                 |                                                          |                       |
++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+
+| watermark                 | A string to initially display in a field               | Free text                                                |                       |
+|                           | as a prompt for what the user should enter.            |                                                          |                       |
++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+
+| fieldType                 | Defines the type of content that the                   |                                                          | \• none               |
+|                           | field, if not empty, is meant to contain.              |                                                          | \• date               |
+|                           |                                                        |                                                          | \• email              |
+|                           |                                                        |                                                          | \• text               |
+|                           |                                                        |                                                          | \• textbox            |
+|                           |                                                        |                                                          | \• url                |
+|                           |                                                        |                                                          | \• int                |
+|                           |                                                        |                                                          | \• float              |
+|                           |                                                        |                                                          | \• See below for      |
+|                           |                                                        |                                                          | fieldtype definitions |
++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+
+| displayOrder              | Controls the sequence in which the fields              | Non-negative integer.                                    |                       |
+|                           | are displayed, both for input and                      |                                                          |                       |
+|                           | presentation.                                          |                                                          |                       |
++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+
+| displayFormat             | Controls how the content is displayed                  | See below for displayFormat                              |                       |
+|                           | for presentation (not entry). The value of             | variables                                                |                       |
+|                           | this field may contain one or more                     |                                                          |                       |
+|                           | special variables (enumerated below).                  |                                                          |                       |
+|                           | HTML tags, likely in conjunction with one              |                                                          |                       |
+|                           | or more of these values, may be used                   |                                                          |                       |
+|                           | to control the display of content in                   |                                                          |                       |
+|                           | the web UI.                                            |                                                          |                       |
++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+
+| advancedSearchField       | Specify whether this field is available in             | TRUE (available) or                                      |                       |
+|                           | advanced search.                                       | FALSE (not available)                                    |                       |
++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+
+| allowControlledVocabulary | Specify whether the possible values of                 | TRUE (controlled) or FALSE (not                          |                       |
+|                           | this field are determined by values                    | controlled)                                              |                       |
+|                           | in the #controlledVocabulary section.                  |                                                          |                       |
++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+
+| allowmultiples            | Specify whether this field is repeatable.              | TRUE (repeatable) or FALSE (not                          |                       |
+|                           |                                                        | repeatable)                                              |                       |
++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+
+| facetable                 | Specify whether the field is facetable                 | TRUE (facetable) or FALSE (not                           |                       |
+|                           | (i.e., if the expected values for                      | facetable)                                               |                       |
+|                           | this field are themselves useful                       |                                                          |                       |
+|                           | search terms for this field). If a field is            |                                                          |                       |
+|                           | "facetable" (able to be faceted on), it                |                                                          |                       |
+|                           | appears under "Browse/Search                           |                                                          |                       |
+|                           | Facets" when you edit                                  |                                                          |                       |
+|                           | "General Information" for a Dataverse                  |                                                          |                       |
+|                           | collection.                                            |                                                          |                       |
+|                           | Setting this value to TRUE generally makes             |                                                          |                       |
+|                           | sense for enumerated or controlled                     |                                                          |                       |
+|                           | vocabulary fields, fields representing                 |                                                          |                       |
+|                           | identifiers (IDs, names, email                         |                                                          |                       |
+|                           | addresses), and other fields that are                  |                                                          |                       |
+|                           | likely to share values across                          |                                                          |                       |
+|                           | entries. It is less likely to make sense               |                                                          |                       |
+|                           | for fields containing descriptions,                    |                                                          |                       |
+|                           | floating point numbers, and other                      |                                                          |                       |
+|                           | values that are likely to be unique.                   |                                                          |                       |
++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+
+| displayoncreate [5]_      | Designate fields that should display during            | TRUE (display during creation) or FALSE                  |                       |
+|                           | the creation of a new dataset, even before             | (don’t display during creation)                          |                       |
+|                           | the dataset is saved.                                  |                                                          |                       |
+|                           | Fields not so designated will not                      |                                                          |                       |
+|                           | be displayed until the dataset has been                |                                                          |                       |
+|                           | saved.                                                 |                                                          |                       |
++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+
+| required                  | For primitive fields, specify whether or not the       | For primitive fields, TRUE                               |                       |
+|                           | field is required.                                     | (required) or FALSE (optional). 
| | +| | | | | +| | For compound fields, also specify if one or more | For compound fields: | | +| | subfields are required or conditionally required. At | | | +| | least one instance of a required field must be | \• To make one or more | | +| | present. More than one instance of a field may be | subfields optional, the parent | | +| | allowed, depending on the value of allowmultiples. | field and subfield(s) must be | | +| | | FALSE (optional). | | +| | | | | +| | | \• To make one or more subfields | | +| | | required, the parent field and | | +| | | the required subfield(s) must be | | +| | | TRUE (required). | | +| | | | | +| | | \• To make one or more subfields | | +| | | conditionally required, make the | | +| | | parent field FALSE (optional) | | +| | | and make TRUE (required) any | | +| | | subfield or subfields that are | | +| | | required if any other subfields | | +| | | are filled. | | ++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+ +| parent | For subfields, specify the name of the parent or | \• Must not result in a cyclical reference. | | +| | containing field. | | | +| | | \• Must reference an existing field in the same | | +| | | #metadataBlock. | | ++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+ +| metadatablock_id | Specify the name of the #metadataBlock that contains | \• Must reference an existing #metadataBlock. | | +| | this field. | | | +| | | \• As a best practice, the value should reference the | | +| | | #metadataBlock in the current | | +| | | definition (it is technically | | +| | | possible to reference another | | +| | | existing metadata block.) | | ++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+ +| termURI | Specify a global URI identifying this term in an | For example, the existing citation | | +| | external community vocabulary. | #metadataBlock defines the property | | +| | | named 'title' as http://purl.org/dc/terms/title | | +| | This value overrides the default (created by appending | - i.e. indicating that it can | | +| | the property name to the blockURI defined for the | be interpreted as the Dublin Core term 'title' | | +| | #metadataBlock) | | | ++---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+ #controlledVocabulary (enumerated) properties ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| **Property** | **Purpose** | **Allowed values and restrictions** | -+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+ -| DatasetField | Specifies the #datasetField to which | Must reference an existing | -| | #datasetField to which this entry applies. | #datasetField. | -| | | As a best practice, the value should | -| | | reference a #datasetField in the | -| | | current metadata block definition. 
(It |
-|                                                         |                                                         | is technically possible to reference                    |
-|                                                         |                                                         | an existing #datasetField from                          |
-|                                                         |                                                         | another metadata block.)                                |
-+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+
-| Value                                                   | A short display string, representing                    | Free text                                               |
-|                                                         | an enumerated value for this field. If                  |                                                         |
-|                                                         | the identifier property is empty,                       |                                                         |
-|                                                         | this value is used as the identifier.                   |                                                         |
-+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+
-| identifier                                              | A string used to encode the selected                    | Free text                                               |
-|                                                         | enumerated value of a field. If this                    |                                                         |
-|                                                         | property is empty, the value of the                     |                                                         |
-|                                                         | “Value” field is used as the identifier.                |                                                         |
-+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+
-| displayOrder                                            | Control the order in which the enumerated               | Non-negative integer.                                   |
-|                                                         | values are displayed for selection.                     |                                                         |
-+---------------------------------------------------------+---------------------------------------------------------+---------------------------------------------------------+
++--------------+--------------------------------------------+-----------------------------------------+
+| **Property** | **Purpose**                                | **Allowed values and restrictions**     |
++--------------+--------------------------------------------+-----------------------------------------+
+| DatasetField | Specifies the #datasetField to which       | Must reference an existing              |
+|              | this entry applies.                        | #datasetField.                          |
+|              |                                            | As a best practice, the value should    |
+|              |                                            | reference a #datasetField in the        |
+|              |                                            | current metadata block definition. (It  |
+|              |                                            | is technically possible to reference    |
+|              |                                            | an existing #datasetField from          |
+|              |                                            | another metadata block.)                |
++--------------+--------------------------------------------+-----------------------------------------+
+| Value        | A short display string, representing       | Free text                               |
+|              | an enumerated value for this field. If     |                                         |
+|              | the identifier property is empty,          |                                         |
+|              | this value is used as the identifier.      |                                         |
++--------------+--------------------------------------------+-----------------------------------------+
+| identifier   | A string used to encode the selected       | Free text                               |
+|              | enumerated value of a field. If this       |                                         |
+|              | property is empty, the value of the        |                                         |
+|              | “Value” field is used as the identifier.   |                                         |
++--------------+--------------------------------------------+-----------------------------------------+
+| displayOrder | Control the order in which the enumerated  | Non-negative integer.                   |
+|              | values are displayed for selection.        |                                         |
++--------------+--------------------------------------------+-----------------------------------------+
 
 FieldType definitions
 ~~~~~~~~~~~~~~~~~~~~~
 
-+---------------------------------------------------------+---------------------------------------------------------+
-| **Fieldtype**                                           | **Definition**                                          |
-+---------------------------------------------------------+---------------------------------------------------------+
-| none                                                    | Used for compound fields, in which                      |
-|                                                         | case the parent field would have                        |
-|                                                         | no value and display no data                            |
-|                                                         | entry control. 
| -+---------------------------------------------------------+---------------------------------------------------------+ -| date | A date, expressed in one of three | -| | resolutions of the form | -| | YYYY-MM-DD, YYYY-MM, or YYYY. | -+---------------------------------------------------------+---------------------------------------------------------+ -| email | A valid email address. Not | -| | indexed for privacy reasons. | -+---------------------------------------------------------+---------------------------------------------------------+ -| text | Any text other than newlines may | -| | be entered into this field. | -+---------------------------------------------------------+---------------------------------------------------------+ -| textbox | Any text may be entered. For | -| | input, the Dataverse Software | -| | presents a | -| | multi-line area that accepts | -| | newlines. While any HTML is | -| | permitted, only a subset of HTML | -| | tags will be rendered in the UI. | -| | See the | -| | :ref:`supported-html-fields` | -| | section of the Dataset + File | -| | Management page in the User Guide. | -+---------------------------------------------------------+---------------------------------------------------------+ -| url | If not empty, field must contain | -| | a valid URL. | -+---------------------------------------------------------+---------------------------------------------------------+ -| int | An integer value destined for a | -| | numeric field. | -+---------------------------------------------------------+---------------------------------------------------------+ -| float | A floating point number destined | -| | for a numeric field. | -+---------------------------------------------------------+---------------------------------------------------------+ ++---------------+------------------------------------+ +| **Fieldtype** | **Definition** | ++---------------+------------------------------------+ +| none | Used for compound fields, in which | +| | case the parent field would have | +| | no value and display no data | +| | entry control. | ++---------------+------------------------------------+ +| date | A date, expressed in one of three | +| | resolutions of the form | +| | YYYY-MM-DD, YYYY-MM, or YYYY. | ++---------------+------------------------------------+ +| email | A valid email address. Not | +| | indexed for privacy reasons. | ++---------------+------------------------------------+ +| text | Any text other than newlines may | +| | be entered into this field. | ++---------------+------------------------------------+ +| textbox | Any text may be entered. For | +| | input, the Dataverse Software | +| | presents a | +| | multi-line area that accepts | +| | newlines. While any HTML is | +| | permitted, only a subset of HTML | +| | tags will be rendered in the UI. | +| | See the | +| | :ref:`supported-html-fields` | +| | section of the Dataset + File | +| | Management page in the User Guide. | ++---------------+------------------------------------+ +| url | If not empty, field must contain | +| | a valid URL. | ++---------------+------------------------------------+ +| int | An integer value destined for a | +| | numeric field. | ++---------------+------------------------------------+ +| float | A floating point number destined | +| | for a numeric field. | ++---------------+------------------------------------+ displayFormat variables ~~~~~~~~~~~~~~~~~~~~~~~ These are common ways to use the displayFormat to control how values are displayed in the UI. 
This list is not exhaustive.
 
-+---------------------------------------------------------+---------------------------------------------------------+
-| **Variable**                                            | **Description**                                         |
-+---------------------------------------------------------+---------------------------------------------------------+
-| (blank)                                                 | The displayFormat is left blank                         |
-|                                                         | for primitive fields (e.g.                              |
-|                                                         | subtitle) and fields that do not                        |
-|                                                         | take values (e.g. author), since                        |
-|                                                         | displayFormats do not work for                          |
-|                                                         | these fields.                                           |
-+---------------------------------------------------------+---------------------------------------------------------+
-| #VALUE                                                  | The value of the field (instance level).                |
-+---------------------------------------------------------+---------------------------------------------------------+
-| #NAME                                                   | The name of the field (class level).                    |
-+---------------------------------------------------------+---------------------------------------------------------+
-| #EMAIL                                                  | For displaying emails.                                  |
-+---------------------------------------------------------+---------------------------------------------------------+
-| #VALUE                                                  | For displaying the value as a                           |
-|                                                         | link (if the value entered is a                         |
-|                                                         | link).                                                  |
-+---------------------------------------------------------+---------------------------------------------------------+
-| #VALUE                                                  | For displaying the value as a                           |
-|                                                         | link, with the value included in                        |
-|                                                         | the URL (e.g. if URL is                                 |
-|                                                         | \http://emsearch.rutgers.edu/atla\                      |
-|                                                         | \s/#VALUE_summary.html,                                 |
-|                                                         | and the value entered is 1001,                          |
-|                                                         | the field is displayed as                               |
-|                                                         | `1001 <http://emsearch.rutgers.edu/atlas/1001_summary.html>`__ |
-|                                                         | (hyperlinked to                                         |
-|                                                         | http://emsearch.rutgers.edu/atlas/1001_summary.html)).  |
-+---------------------------------------------------------+---------------------------------------------------------+
-| <img src="#VALUE"/>                                     | For displaying the image at the                         |
-|                                                         | entered image URL (used to                              |
-|                                                         | display images in the producer                          |
-|                                                         | and distributor logos metadata                          |
-|                                                         | fields).                                                |
-+---------------------------------------------------------+---------------------------------------------------------+
-| #VALUE:                                                 | Appends and/or prepends                                 |
-|                                                         | characters to the value of the                          |
-| \- #VALUE:                                              | field. e.g. if the displayFormat                        |
-|                                                         | for the distributorAffiliation is                       |
-| (#VALUE)                                                | (#VALUE) (wrapped with parens)                          |
-|                                                         | and the value entered                                   |
-|                                                         | is University of North                                  |
-|                                                         | Carolina, the field is displayed                        |
-|                                                         | in the UI as (University of                             |
-|                                                         | North Carolina).                                        |
-+---------------------------------------------------------+---------------------------------------------------------+
-| ;                                                       | Displays the character (e.g.                            |
-|                                                         | semicolon, comma) between the                           |
-| :                                                       | values of fields within                                 |
-|                                                         | compound fields. For example,                           |
-| ,                                                       | if the displayFormat for the                            |
-|                                                         | compound field “series” is a                            |
-|                                                         | colon, and if the value                                 |
-|                                                         | entered for seriesName is                               |
-|                                                         | IMPs and for                                            |
-|                                                         | seriesInformation is A                                  |
-|                                                         | collection of NMR data, the                             |
-|                                                         | compound field is displayed in                          |
-|                                                         | the UI as IMPs: A                                       |
-|                                                         | collection of NMR data.                                 |
-+---------------------------------------------------------+---------------------------------------------------------+
++---------------------------------+--------------------------------------------------------+
+| **Variable**                    | **Description**                                        |
++---------------------------------+--------------------------------------------------------+
+| (blank)                         | The displayFormat is left blank                        |
+|                                 | for primitive fields (e.g.                             |
+|                                 | subtitle) and fields that do not                       |
+|                                 | take values (e.g. author), since                       |
+|                                 | displayFormats do not work for                         |
+|                                 | these fields.                                          |
++---------------------------------+--------------------------------------------------------+
+| #VALUE                          | The value of the field (instance level).               |
++---------------------------------+--------------------------------------------------------+
+| #NAME                           | The name of the field (class level).                   |
++---------------------------------+--------------------------------------------------------+
+| #EMAIL                          | For displaying emails.                                 |
++---------------------------------+--------------------------------------------------------+
+| #VALUE                          | For displaying the value as a                          |
+|                                 | link (if the value entered is a                        |
+|                                 | link).                                                 |
++---------------------------------+--------------------------------------------------------+
+| #VALUE                          | For displaying the value as a                          |
+|                                 | link, with the value included in                       |
+|                                 | the URL (e.g. if URL is                                |
+|                                 | \http://emsearch.rutgers.edu/atla\                     |
+|                                 | \s/#VALUE_summary.html,                                |
+|                                 | and the value entered is 1001,                         |
+|                                 | the field is displayed as                              |
+|                                 | `1001 <http://emsearch.rutgers.edu/atlas/1001_summary.html>`__ |
+|                                 | (hyperlinked to                                        |
+|                                 | http://emsearch.rutgers.edu/atlas/1001_summary.html)). |
++---------------------------------+--------------------------------------------------------+
+| <img src="#VALUE"/>             | For displaying the image at the                        |
+|                                 | entered image URL (used to                             |
+|                                 | display images in the producer                         |
+|                                 | and distributor logos metadata                         |
+|                                 | fields).                                               |
++---------------------------------+--------------------------------------------------------+
+| #VALUE:                         | Appends and/or prepends                                |
+|                                 | characters to the value of the                         |
+| \- #VALUE:                      | field. e.g. if the displayFormat                       |
+|                                 | for the distributorAffiliation is                      |
+| (#VALUE)                        | (#VALUE) (wrapped with parens)                         |
+|                                 | and the value entered                                  |
+|                                 | is University of North                                 |
+|                                 | Carolina, the field is displayed                       |
+|                                 | in the UI as (University of                            |
+|                                 | North Carolina).                                       |
++---------------------------------+--------------------------------------------------------+
+| ;                               | Displays the character (e.g.                           |
+|                                 | semicolon, comma) between the                          |
+| :                               | values of fields within                                |
+|                                 | compound fields. For example,                          |
+| ,                               | if the displayFormat for the                           |
+|                                 | compound field “series” is a                           |
+|                                 | colon, and if the value                                |
+|                                 | entered for seriesName is                              |
+|                                 | IMPs and for                                           |
+|                                 | seriesInformation is A                                 |
+|                                 | collection of NMR data, the                            |
+|                                 | compound field is displayed in                         |
+|                                 | the UI as IMPs: A                                      |
+|                                 | collection of NMR data.                                |
++---------------------------------+--------------------------------------------------------+
 
 Metadata Block Setup
 --------------------
diff --git a/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst b/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst
index 2f4cb177ccb..7abae535276 100644
--- a/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst
+++ b/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst
@@ -48,11 +48,11 @@ To add json-ld formatted metadata for a Dataset, specify the Dataset ID (DATASET
 
 Example: Change the Dataset title
 
-    curl -X PUT -H X-Dataverse-key:$API_TOKEN -H 'Content-Type: application/ld+json' -d '{"Title": "Submit menu test", "@context":{"Title": "http://purl.org/dc/terms/title"}}' "$SERVER_URL/api/datasets/$DATASET_ID/metadata?replace=true"
+    curl -X PUT -H X-Dataverse-key:$API_TOKEN -H 'Content-Type: application/ld+json' -d '{"title": "Submit menu test", "@context":{"title": "http://purl.org/dc/terms/title"}}' "$SERVER_URL/api/datasets/$DATASET_ID/metadata?replace=true"
 
 Example 2: Add a description using the DATASET PID
 
-    curl -X PUT -H X-Dataverse-key:$API_TOKEN -H 'Content-Type: application/ld+json' -d '{"citation:Description": {"dsDescription:Text": "New description"}, "@context":{"citation": "https://dataverse.org/schema/citation/","dsDescription": "https://dataverse.org/schema/citation/dsDescription#"}}' "$SERVER_URL/api/datasets/:persistentId/metadata?persistentId=$DATASET_PID"
+    curl -X PUT -H X-Dataverse-key:$API_TOKEN -H 'Content-Type: application/ld+json' -d '{"citation:dsDescription": {"citation:dsDescriptionValue": "New description"}, "@context":{"citation": "https://dataverse.org/schema/citation/"}}' "$SERVER_URL/api/datasets/:persistentId/metadata?persistentId=$DATASET_PID"
 
 You should expect a 200 ("OK") response indicating whether a draft Dataset version was created or an existing draft was updated.
 
diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst
index 60425dfeed1..10b770bd09b 100755
--- a/doc/sphinx-guides/source/user/dataset-management.rst
+++ b/doc/sphinx-guides/source/user/dataset-management.rst
@@ -509,7 +509,7 @@ will be removed as soon as you Publish.
 Submit for Review
 =================
 
-If you have a Contributor role (can edit metadata, upload files, and edit files, edit Terms, Guestbook, and Submit datasets for review) in a Dataverse collection you can submit your dataset for review when you have finished uploading your files and filling in all of the relevant metadata fields. To Submit for Review, go to your dataset and click on the "Submit for Review" button, which is located next to the "Edit" button on the upper-right. Once Submitted for Review: the Admin or Curator for this Dataverse collection will be notified to review this dataset before they decide to either "Publish" the dataset or "Return to Author". If the dataset is published the contributor will be notified that it is now published. If the dataset is returned to the author, the contributor of this dataset will be notified that they need to make modifications before it can be submitted for review again.
+If you have a Contributor role (can edit metadata, upload and edit files, edit Terms and Guestbook, and submit datasets for review) in a Dataverse collection, you can submit your dataset for review when you have finished uploading your files and filling in all of the relevant metadata fields. To submit your dataset for review, go to your dataset and click the "Submit for Review" button, which is located next to the "Edit" button on the upper-right. In the confirmation popup, you can review your selection of license (or custom terms, if available). Once you have confirmed the submission, the Admin or Curator for this Dataverse collection will be notified to review this dataset before they decide to either publish the dataset or click "Return to Author". If the dataset is published, the contributor will be notified that it is now published. If the dataset is returned to the author, the contributor of this dataset will be notified that they need to make modifications before it can be submitted for review again.
 
 .. _privateurl:
 
diff --git a/scripts/search/tests/data/dataset-finch1.jsonld b/scripts/search/tests/data/dataset-finch1.jsonld
index 4a20b3e08ea..9e4b10033e9 100644
--- a/scripts/search/tests/data/dataset-finch1.jsonld
+++ b/scripts/search/tests/data/dataset-finch1.jsonld
@@ -3,15 +3,15 @@
   "http://purl.org/dc/terms/title": "Darwin's Finches",
   "http://purl.org/dc/terms/subject": "Medicine, Health and Life Sciences",
   "http://purl.org/dc/terms/creator": {
-    "https://dataverse.org/schema/citation/author#Name": "Finch, Fiona",
-    "https://dataverse.org/schema/citation/author#Affiliation": "Birds Inc."
+    "https://dataverse.org/schema/citation/authorName": "Finch, Fiona",
+    "https://dataverse.org/schema/citation/authorAffiliation": "Birds Inc."
   },
-  "https://dataverse.org/schema/citation/Contact": {
-    "https://dataverse.org/schema/citation/datasetContact#E-mail": "finch@mailinator.com",
-    "https://dataverse.org/schema/citation/datasetContact#Name": "Finch, Fiona"
+  "https://dataverse.org/schema/citation/datasetContact": {
+    "https://dataverse.org/schema/citation/datasetContactEmail": "finch@mailinator.com",
+    "https://dataverse.org/schema/citation/datasetContactName": "Finch, Fiona"
   },
-  "https://dataverse.org/schema/citation/Description": {
-    "https://dataverse.org/schema/citation/dsDescription#Text": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds."
+ "https://dataverse.org/schema/citation/dsDescription": { + "https://dataverse.org/schema/citation/dsDescriptionValue": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds." }, "@type": [ "http://www.openarchives.org/ore/terms/Aggregation", diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java index d25d8428902..df126514308 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.search.SolrField; import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.json.JsonLDTerm; import java.util.Collection; @@ -54,7 +55,7 @@ public void setId(Long id) { /** * The internal, DDI-like name, no spaces, etc. */ - @Column(name = "name", columnDefinition = "TEXT", nullable = false) + @Column(name = "name", columnDefinition = "TEXT", nullable = false, unique=true) private String name; /** @@ -309,6 +310,14 @@ public void setMetadataBlock(MetadataBlock metadataBlock) { public String getUri() { return uri; } + + public JsonLDTerm getJsonLDTerm() { + if(uri!=null) { + return new JsonLDTerm(name,uri); + } else { + return new JsonLDTerm(metadataBlock.getJsonLDNamespace(), name); + } + } public void setUri(String uri) { this.uri=uri; diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index a900eefb4ab..d752c46d9a0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2081,6 +2081,7 @@ private String init(boolean initFull) { } displayLockInfo(dataset); + displayPublishMessage(); for (FileMetadata fmd : workingVersion.getFileMetadatas()) { if (fmd.getDataFile().isTabularData()) { @@ -2136,6 +2137,14 @@ private void displayWorkflowComments() { } } } + + private void displayPublishMessage(){ + if (workingVersion.isDraft() && workingVersion.getId() != null && canUpdateDataset() + && !dataset.isLockedFor(DatasetLock.Reason.finalizePublication) + && (canPublishDataset() || !dataset.isLockedFor(DatasetLock.Reason.InReview) )){ + JsfHelper.addWarningMessage(datasetService.getReminderString(dataset, canPublishDataset())); + } + } private void displayLockInfo(Dataset dataset) { // Various info messages, when the dataset is locked (for various reasons): @@ -2194,6 +2203,10 @@ private void displayLockInfo(Dataset dataset) { JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.locked.pidNotReserved.message"), BundleUtil.getStringFromBundle("dataset.locked.pidNotReserved.message.details")); } + + //if necessary refresh publish message also + + displayPublishMessage(); } @@ -3642,7 +3655,7 @@ public String save() { } if (addFilesSuccess && dataset.getFiles().size() > 0) { if (nNewFiles == dataset.getFiles().size()) { - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.createSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.createSuccess")); } else { String partialSuccessMessage = BundleUtil.getStringFromBundle("dataset.message.createSuccess.partialSuccessSavingFiles"); partialSuccessMessage = partialSuccessMessage.replace("{0}", "" + dataset.getFiles().size() + ""); @@ -3653,25 +3666,25 @@ public 
String save() {
                 JsfHelper.addWarningMessage(BundleUtil.getStringFromBundle("dataset.message.createSuccess.failedToSaveFiles"));
                 }
             } else {
-                JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.createSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset())));
+                JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.createSuccess"));
             }
         }
         if (editMode.equals(EditMode.METADATA)) {
-            JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.metadataSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset())));
+            JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.metadataSuccess"));
         }
         if (editMode.equals(EditMode.LICENSE)) {
-            JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.termsSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset())));
+            JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.termsSuccess"));
         }
         if (editMode.equals(EditMode.FILE)) {
-            JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.filesSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset())));
+            JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.filesSuccess"));
         }
     } else {
         // must have been a bulk file update or delete:
         if (bulkFileDeleteInProgress) {
-            JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.bulkFileDeleteSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset())));
+            JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.bulkFileDeleteSuccess"));
         } else {
-            JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.bulkFileUpdateSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset())));
+            JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.bulkFileUpdateSuccess"));
         }
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
index f5a4acdffb8..b9b54fb6216 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -71,47 +71,47 @@ public class DatasetServiceBean implements java.io.Serializable {
 
     @EJB
     SettingsServiceBean settingsService;
-    
+
     @EJB
     DatasetVersionServiceBean versionService;
-    
+
    @EJB
     DvObjectServiceBean dvObjectService;
-    
+
     @EJB
     AuthenticationServiceBean authentication;
-    
+
     @EJB
-    DataFileServiceBean fileService; 
-    
+    DataFileServiceBean fileService;
+
     @EJB
     PermissionServiceBean permissionService;
-    
+
     @EJB
     OAIRecordServiceBean recordService;
-    
+
     @EJB
     EjbDataverseEngine commandEngine;
-    
+
     @EJB
     SystemConfig systemConfig;
 
     private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss");
-    
+
     @PersistenceContext(unitName = "VDCNet-ejbPU")
     protected EntityManager em;
 
     public Dataset find(Object pk) {
         return em.find(Dataset.class, pk);
    }
-    
+
     public List<Dataset> findByOwnerId(Long ownerId) {
         return findByOwnerId(ownerId, false);
     }
-    
+
     public List<Dataset> findPublishedByOwnerId(Long ownerId) {
         return findByOwnerId(ownerId, true);
-    } 
+    }
 
     private List<Dataset> findByOwnerId(Long ownerId, boolean onlyPublished) {
         List<Dataset> retList = new ArrayList<>();
@@ -128,11 +128,11 @@ private List<Dataset> findByOwnerId(Long ownerId, boolean onlyPublished) {
             return retList;
         }
     }
-    
+
     public List<Long> findIdsByOwnerId(Long ownerId) {
         return findIdsByOwnerId(ownerId, false);
     }
-    
+
     private List<Long> findIdsByOwnerId(Long ownerId, boolean onlyPublished) {
         List<Long> retList = new ArrayList<>();
         if (!onlyPublished) {
@@ -165,7 +165,7 @@ public List<Dataset> filterByPidQuery(String filterQuery) {
         List<Dataset> ret = new ArrayList<>();
         if (ds != null) ret.add(ds);
-        
+
         /*
         List<Dataset> ret = em.createNamedQuery("Dataset.filterByPid", Dataset.class)
             .setParameter("affiliation", "%" + filterQuery.toLowerCase() + "%").getResultList();
@@ -177,23 +177,23 @@ public List<Dataset> filterByPidQuery(String filterQuery) {
         }
         return ret;
     }
-    
+
     public List<Dataset> findAll() {
         return em.createQuery("select object(o) from Dataset as o order by o.id", Dataset.class).getResultList();
     }
-    
+
     public List<Long> findIdStale() {
         return em.createNamedQuery("Dataset.findIdStale").getResultList();
     }
-    
+
     public List<Long> findIdStalePermission() {
         return em.createNamedQuery("Dataset.findIdStalePermission").getResultList();
     }
-    
+
     public List<Long> findAllLocalDatasetIds() {
         return em.createQuery("SELECT o.id FROM Dataset o WHERE o.harvestedFrom IS null ORDER BY o.id", Long.class).getResultList();
     }
-    
+
     public List<Long> findAllUnindexed() {
         return em.createQuery("SELECT o.id FROM Dataset o WHERE o.indexTime IS null ORDER BY o.id DESC", Long.class).getResultList();
     }
@@ -210,7 +210,7 @@ public List<Dataset> findAllUnpublished() {
      * @param skipIndexed
      * @return a list of datasets
     * @see DataverseServiceBean#findAllOrSubset(long, long, boolean)
-     */    
+     */
     public List<Dataset> findAllOrSubset(long numPartitions, long partitionId, boolean skipIndexed) {
         if (numPartitions < 1) {
             long saneNumPartitions = 1;
@@ -224,7 +224,7 @@ public List<Dataset> findAllOrSubset(long numPartitions, long partitionId, boolean
         typedQuery.setParameter("partitionId", partitionId);
         return typedQuery.getResultList();
     }
-    
+
     /**
     * For docs, see the equivalent method on the DataverseServiceBean.
     * @param numPartitions
@@ -232,7 +232,7 @@ public List<Dataset> findAllOrSubset(long numPartitions, long partitionId, boolean
     * @param skipIndexed
     * @return a list of datasets
     * @see DataverseServiceBean#findAllOrSubset(long, long, boolean)
-     */    
+     */
     public List<Long> findAllOrSubsetOrderByFilesOwned(boolean skipIndexed) {
         /*
          Disregards deleted or replaced files when determining 'size' of dataset.
@@ -254,7 +254,7 @@ public List<Long> findAllOrSubsetOrderByFilesOwned(boolean skipIndexed) {
         List<Long> retVal = new ArrayList<Long>();
 
         for (Object[] result : queryResults) {
-            Long dsId; 
+            Long dsId;
             if (result[0] != null) {
                 try {
                     dsId = Long.parseLong(result[0].toString()) ;
@@ -269,7 +269,7 @@ public List<Long> findAllOrSubsetOrderByFilesOwned(boolean skipIndexed) {
             }
         }
         return retVal;
     }
-    
+
     /**
      * Merges the passed dataset to the persistence context.
     * @param ds the dataset whose new state we want to persist.
@@ -278,7 +278,7 @@ public List<Long> findAllOrSubsetOrderByFilesOwned(boolean skipIndexed) {
     public Dataset merge( Dataset ds ) {
         return em.merge(ds);
     }
-    
+
     public Dataset findByGlobalId(String globalId) {
         Dataset retVal = (Dataset) dvObjectService.findByGlobalId(globalId, "Dataset");
         if (retVal != null){
@@ -286,18 +286,18 @@ public Dataset findByGlobalId(String globalId) {
         } else {
             //try to find with alternative PID
             return (Dataset) dvObjectService.findByGlobalId(globalId, "Dataset", true);
-        } 
+        }
     }
-    
+
     /**
     * Instantiate dataset, and its components (DatasetVersions and FileMetadatas)
     * this method is used for object validation; if there are any invalid values
     * in the dataset components, a ConstraintViolationException will be thrown,
     * which can be further parsed to detect the specific offending values.
* @param id the id of the dataset - * @throws javax.validation.ConstraintViolationException + * @throws javax.validation.ConstraintViolationException */ - + @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) public void instantiateDatasetInNewTransaction(Long id, boolean includeVariables) { Dataset dataset = find(id); @@ -319,7 +319,7 @@ public void instantiateDatasetInNewTransaction(Long id, boolean includeVariables public String generateDatasetIdentifier(Dataset dataset, GlobalIdServiceBean idServiceBean) { String identifierType = settingsService.getValueForKey(SettingsServiceBean.Key.IdentifierGenerationStyle, "randomString"); String shoulder = settingsService.getValueForKey(SettingsServiceBean.Key.Shoulder, ""); - + switch (identifierType) { case "randomString": return generateIdentifierAsRandomString(dataset, idServiceBean, shoulder); @@ -330,31 +330,31 @@ public String generateDatasetIdentifier(Dataset dataset, GlobalIdServiceBean idS return generateIdentifierAsRandomString(dataset, idServiceBean, shoulder); } } - + private String generateIdentifierAsRandomString(Dataset dataset, GlobalIdServiceBean idServiceBean, String shoulder) { String identifier = null; do { - identifier = shoulder + RandomStringUtils.randomAlphanumeric(6).toUpperCase(); + identifier = shoulder + RandomStringUtils.randomAlphanumeric(6).toUpperCase(); } while (!isIdentifierLocallyUnique(identifier, dataset)); - + return identifier; } private String generateIdentifierFromStoredProcedure(Dataset dataset, GlobalIdServiceBean idServiceBean, String shoulder) { - - String identifier; + + String identifier; do { StoredProcedureQuery query = this.em.createNamedStoredProcedureQuery("Dataset.generateIdentifierFromStoredProcedure"); query.execute(); String identifierFromStoredProcedure = (String) query.getOutputParameterValue(1); - // some diagnostics here maybe - is it possible to determine that it's failing + // some diagnostics here maybe - is it possible to determine that it's failing // because the stored procedure hasn't been created in the database? if (identifierFromStoredProcedure == null) { - return null; + return null; } identifier = shoulder + identifierFromStoredProcedure; } while (!isIdentifierLocallyUnique(identifier, dataset)); - + return identifier; } @@ -369,7 +369,7 @@ private String generateIdentifierFromStoredProcedure(Dataset dataset, GlobalIdSe */ public boolean isIdentifierUnique(String userIdentifier, Dataset dataset, GlobalIdServiceBean persistentIdSvc) { if ( ! isIdentifierLocallyUnique(userIdentifier, dataset) ) return false; // duplication found in local database - + // not in local DB, look in the persistent identifier service try { return ! 
persistentIdSvc.alreadyExists(dataset); @@ -379,11 +379,11 @@ public boolean isIdentifierUnique(String userIdentifier, Dataset dataset, Global return true; } - + public boolean isIdentifierLocallyUnique(Dataset dataset) { return isIdentifierLocallyUnique(dataset.getIdentifier(), dataset); } - + public boolean isIdentifierLocallyUnique(String identifier, Dataset dataset) { return em.createNamedQuery("Dataset.findByIdentifierAuthorityProtocol") .setParameter("identifier", identifier) @@ -391,7 +391,7 @@ public boolean isIdentifierLocallyUnique(String identifier, Dataset dataset) { .setParameter("protocol", dataset.getProtocol()) .getResultList().isEmpty(); } - + public Long getMaximumExistingDatafileIdentifier(Dataset dataset) { //Cannot rely on the largest table id having the greatest identifier counter long zeroFiles = new Long(0); @@ -414,7 +414,7 @@ public Long getMaximumExistingDatafileIdentifier(Dataset dataset) { testVal = new Long(identifier) ; if (testVal > retVal){ retVal = testVal; - } + } } } } @@ -425,7 +425,7 @@ public DatasetVersion storeVersion( DatasetVersion dsv ) { em.persist(dsv); return dsv; } - + public DatasetVersionUser getDatasetVersionUser(DatasetVersion version, User user) { @@ -449,22 +449,22 @@ public boolean checkDatasetLock(Long datasetId) { List<DatasetLock> lock = lockCounter.getResultList(); return lock.size()>0; } - + public List<DatasetLock> getDatasetLocksByUser( AuthenticatedUser user) { return listLocks(null, user); } - + @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) public DatasetLock addDatasetLock(Dataset dataset, DatasetLock lock) { lock.setDataset(dataset); dataset.addLock(lock); lock.setStartTime( new Date() ); em.persist(lock); - //em.merge(dataset); + //em.merge(dataset); return lock; } - + @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) /*?*/ public DatasetLock addDatasetLock(Long datasetId, DatasetLock.Reason reason, Long userId, String info) { @@ -477,11 +477,11 @@ public DatasetLock addDatasetLock(Long datasetId, DatasetLock.Reason reason, Lon // Check if the dataset is already locked for this reason: // (to prevent multiple, duplicate locks on the dataset!)
- DatasetLock lock = dataset.getLockFor(reason); + DatasetLock lock = dataset.getLockFor(reason); if (lock != null) { return lock; } - + // Create new: lock = new DatasetLock(reason, user); lock.setDataset(dataset); @@ -521,22 +521,22 @@ public void removeDatasetLocks(Dataset dataset, DatasetLock.Reason aReason) { }); } } - + @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) public void updateDatasetLock(DatasetLock datasetLock) { em.merge(datasetLock); } - + /* * Lists all dataset locks, optionally filtered by lock type or user, or both - * @param lockType + * @param lockType * @param user - * @return a list of DatasetLocks + * @return a list of DatasetLocks */ public List<DatasetLock> listLocks(DatasetLock.Reason lockType, AuthenticatedUser user) { - TypedQuery<DatasetLock> query; - + TypedQuery<DatasetLock> query; + if (lockType == null && user == null) { query = em.createNamedQuery("DatasetLock.findAll", DatasetLock.class); } else if (user == null) { @@ -556,21 +556,21 @@ public List<DatasetLock> listLocks(DatasetLock.Reason lockType, AuthenticatedUse return null; } } - + /* getTitleFromLatestVersion methods use native query to return a dataset title - + There are two versions: 1) The version with datasetId param only will return the title regardless of version state 2) The version with the param 'includeDraft' boolean will return the most recently published title if the param is set to false If no Title found return empty string - protects against calling with include draft = false with no published version */ - + public String getTitleFromLatestVersion(Long datasetId){ return getTitleFromLatestVersion(datasetId, true); } - + public String getTitleFromLatestVersion(Long datasetId, boolean includeDraft){ String whereDraft = ""; @@ -578,7 +578,7 @@ public String getTitleFromLatestVersion(Long datasetId, boolean includeDraft){ if (!includeDraft) { whereDraft = " and v.versionstate !='DRAFT' "; } - + try { return (String) em.createNativeQuery("select dfv.value from dataset d " + " join datasetversion v on d.id = v.dataset_id " @@ -596,7 +596,7 @@ public String getTitleFromLatestVersion(Long datasetId, boolean includeDraft){ } } - + public Dataset getDatasetByHarvestInfo(Dataverse dataverse, String harvestIdentifier) { String queryStr = "SELECT d FROM Dataset d, DvObject o WHERE d.id = o.id AND o.owner.id = " + dataverse.getId() + " and d.harvestIdentifier = '" + harvestIdentifier + "'"; Query query = em.createQuery(queryStr); @@ -611,45 +611,45 @@ public Dataset getDatasetByHarvestInfo(Dataverse dataverse, String harvestIdenti return dataset; } - + public Long getDatasetVersionCardImage(Long versionId, User user) { if (versionId == null) { return null; } - - - + + + return null; } - + /** * Used to identify and properly display Harvested objects on the dataverse page.
- * + * * @param datasetIds - * @return + * @return */ public Map<Long, String> getArchiveDescriptionsForHarvestedDatasets(Set<Long> datasetIds){ if (datasetIds == null || datasetIds.size() < 1) { return null; } - + String datasetIdStr = Strings.join(datasetIds, ", "); - + String qstr = "SELECT d.id, h.archiveDescription FROM harvestingClient h, dataset d WHERE d.harvestingClient_id = h.id AND d.id IN (" + datasetIdStr + ")"; List<Object[]> searchResults; - + try { searchResults = em.createNativeQuery(qstr).getResultList(); } catch (Exception ex) { searchResults = null; } - + if (searchResults == null) { return null; } - + Map<Long, String> ret = new HashMap<>(); - + for (Object[] result : searchResults) { Long dsId; if (result[0] != null) { @@ -661,78 +661,78 @@ public Map<Long, String> getArchiveDescriptionsForHarvestedDatasets(Set<Long> da if (dsId == null) { continue; } - + ret.put(dsId, (String)result[1]); } } - - return ret; + + return ret; } - - - - public boolean isDatasetCardImageAvailable(DatasetVersion datasetVersion, User user) { + + + + public boolean isDatasetCardImageAvailable(DatasetVersion datasetVersion, User user) { if (datasetVersion == null) { - return false; + return false; } - - // First, check if this dataset has a designated thumbnail image: - + + // First, check if this dataset has a designated thumbnail image: + if (datasetVersion.getDataset() != null) { DataFile dataFile = datasetVersion.getDataset().getThumbnailFile(); if (dataFile != null) { return ImageThumbConverter.isThumbnailAvailable(dataFile, 48); } } - + // If not, we'll try to use one of the files in this dataset version: // (the first file with an available thumbnail, really) - + List<FileMetadata> fileMetadatas = datasetVersion.getFileMetadatas(); for (FileMetadata fileMetadata : fileMetadatas) { DataFile dataFile = fileMetadata.getDataFile(); - - // TODO: use permissionsWrapper here - ? - // (we are looking up these download permissions on individual files, - // true, and those are unique... but the wrapper may be able to save + + // TODO: use permissionsWrapper here - ? + // (we are looking up these download permissions on individual files, + // true, and those are unique... but the wrapper may be able to save // us some queries when it determines the download permission on the // dataset as a whole? -- L.A. 4.2.1 - + if (fileService.isThumbnailAvailable(dataFile) && permissionService.userOn(user, dataFile).has(Permission.DownloadFile)) { //, user)) { return true; } - + } - + return false; } - - - // reExportAll *forces* a reexport on all published datasets; whether they - // have the "last export" time stamp set or not. - @Asynchronous + + + // reExportAll *forces* a reexport on all published datasets; whether they - // have the "last export" time stamp set or not. + @Asynchronous public void reExportAllAsync() { exportAllDatasets(true); } - + public void reExportAll() { exportAllDatasets(true); } - - + + // exportAll() will try to export the yet unexported datasets (it will honor // and trust the "last export" time stamp). - + @Asynchronous public void exportAllAsync() { exportAllDatasets(false); } - + public void exportAll() { exportAllDatasets(false); } - + public void exportAllDatasets(boolean forceReExport) { Integer countAll = 0; Integer countSuccess = 0; @@ -760,20 +760,20 @@ public void exportAllDatasets(boolean forceReExport) { exportLogger.info("Starting an export all job"); for (Long datasetId : findAllLocalDatasetIds()) { - // Potentially, there's a godzillion datasets in this Dataverse.
- // This is why we go through the list of ids here, and instantiate - // only one dataset at a time. + // Potentially, there's a godzillion datasets in this Dataverse. + // This is why we go through the list of ids here, and instantiate + // only one dataset at a time. Dataset dataset = this.find(datasetId); if (dataset != null) { // Accurate "is published?" test - ? - // Answer: Yes, it is! We can't trust dataset.isReleased() alone; because it is a dvobject method + // Answer: Yes, it is! We can't trust dataset.isReleased() alone; because it is a dvobject method // that returns (publicationDate != null). And "publicationDate" is essentially - // "the first publication date"; that stays the same as versions get - // published and/or deaccessioned. But in combination with !isDeaccessioned() + // "the first publication date"; that stays the same as versions get + // published and/or deaccessioned. But in combination with !isDeaccessioned() // it is indeed an accurate test. if (dataset.isReleased() && dataset.getReleasedVersion() != null && !dataset.isDeaccessioned()) { - // can't trust dataset.getPublicationDate(), no. + // can't trust dataset.getPublicationDate(), no. Date publicationDate = dataset.getReleasedVersion().getReleaseTime(); // we know this dataset has a non-null released version! Maybe not - SEK 8/19 (We do now! :) if (forceReExport || (publicationDate != null && (dataset.getLastExportTime() == null @@ -795,34 +795,44 @@ public void exportAllDatasets(boolean forceReExport) { exportLogger.info("Datasets exported successfully: " + countSuccess.toString()); exportLogger.info("Datasets failures: " + countError.toString()); exportLogger.info("Finished export-all job."); - + if (fileHandlerSuceeded) { fileHandler.close(); } } - //get a string to add to save success message - //depends on dataset state and user privleges public String getReminderString(Dataset dataset, boolean canPublishDataset) { + return getReminderString( dataset, canPublishDataset, false); + } + //get a string to add to save success message + //depends on page (dataset/file) and user privileges + public String getReminderString(Dataset dataset, boolean canPublishDataset, boolean filePage) { + String reminderString; - if(!dataset.isReleased() ){ - //messages for draft state.
- if (canPublishDataset){ - reminderString = BundleUtil.getStringFromBundle("dataset.message.publish.remind.draft"); + if (canPublishDataset) { + reminderString = BundleUtil.getStringFromBundle("dataset.message.publish.warning"); + } else { + reminderString = BundleUtil.getStringFromBundle("dataset.message.submit.warning"); + } + + if (canPublishDataset) { + if (!filePage) { + reminderString = reminderString + " " + BundleUtil.getStringFromBundle("dataset.message.publish.remind.draft"); } else { - reminderString = BundleUtil.getStringFromBundle("dataset.message.submit.remind.draft"); - } - } else{ - //messages for new version - post-publish - if (canPublishDataset){ - reminderString = BundleUtil.getStringFromBundle("dataset.message.publish.remind.version"); + reminderString = reminderString + " " + BundleUtil.getStringFromBundle("dataset.message.publish.remind.draft.filePage"); + reminderString = reminderString.replace("{0}", "" + (dataset.getGlobalId().asString().concat("&version=DRAFT"))); + } + } else { + if (!filePage) { + reminderString = reminderString + " " + BundleUtil.getStringFromBundle("dataset.message.submit.remind.draft"); } else { - reminderString = BundleUtil.getStringFromBundle("dataset.message.submit.remind.version"); - } - } + reminderString = reminderString + " " + BundleUtil.getStringFromBundle("dataset.message.submit.remind.draft.filePage"); + reminderString = reminderString.replace("{0}", "" + (dataset.getGlobalId().asString().concat("&version=DRAFT"))); + } + } if (reminderString != null) { return reminderString; @@ -831,7 +841,7 @@ public String getReminderString(Dataset dataset, boolean canPublishDataset) { return ""; } } - + public void updateLastExportTimeStamp(Long datasetId) { Date now = new Date(); em.createNativeQuery("UPDATE Dataset SET lastExportTime='"+now.toString()+"' WHERE id="+datasetId).executeUpdate(); @@ -876,22 +886,22 @@ public Dataset removeDatasetThumbnail(Dataset dataset) { dataset.setUseGenericThumbnail(true); return merge(dataset); } - + // persist assigned thumbnail in a single one-field-update query: // (the point is to avoid doing an em.merge() on an entire dataset object...) public void assignDatasetThumbnailByNativeQuery(Long datasetId, Long dataFileId) { try { em.createNativeQuery("UPDATE dataset SET thumbnailfile_id=" + dataFileId + " WHERE id=" + datasetId).executeUpdate(); } catch (Exception ex) { - // it's ok to just ignore... + // it's ok to just ignore... } } - + public void assignDatasetThumbnailByNativeQuery(Dataset dataset, DataFile dataFile) { try { em.createNativeQuery("UPDATE dataset SET thumbnailfile_id=" + dataFile.getId() + " WHERE id=" + dataset.getId()).executeUpdate(); } catch (Exception ex) { - // it's ok to just ignore... + // it's ok to just ignore... } } @@ -899,17 +909,17 @@ public WorkflowComment addWorkflowComment(WorkflowComment workflowComment) { em.persist(workflowComment); return workflowComment; } - + public void markWorkflowCommentAsRead(WorkflowComment workflowComment) { workflowComment.setToBeShown(false); em.merge(workflowComment); } - - + + /** - * This method used to throw CommandException, which was pretty pointless - * seeing how it's called asynchronously. As of v5.0 any CommanExceptiom - * thrown by the FinalizeDatasetPublicationCommand below will be caught + * This method used to throw CommandException, which was pretty pointless + * seeing how it's called asynchronously. 
As of v5.0 any CommandException + thrown by the FinalizeDatasetPublicationCommand below will be caught + * and we'll log it as a warning - which is the best we can do at this point. * Any failure notifications to users should be sent from inside the command. */ @@ -917,11 +927,11 @@ public void markWorkflowCommentAsRead(WorkflowComment workflowComment) { @TransactionAttribute(TransactionAttributeType.SUPPORTS) public void callFinalizePublishCommandAsynchronously(Long datasetId, CommandContext ctxt, DataverseRequest request, boolean isPidPrePublished) { - // Since we are calling the next command asynchronously anyway - sleep here - // for a few seconds, just in case, to make sure the database update of - // the dataset initiated by the PublishDatasetCommand has finished, - // to avoid any concurrency/optimistic lock issues. - // Aug. 2020/v5.0: It MAY be working consistently without any + // Since we are calling the next command asynchronously anyway - sleep here + // for a few seconds, just in case, to make sure the database update of + // the dataset initiated by the PublishDatasetCommand has finished, + // to avoid any concurrency/optimistic lock issues. + // Aug. 2020/v5.0: It MAY be working consistently without any // sleep here, after the call the method has been moved to the onSuccess() // portion of the PublishDatasetCommand. I'm going to leave the 1 second // sleep below, for just in case reasons: -- L.A. @@ -938,21 +948,21 @@ public void callFinalizePublishCommandAsynchronously(Long datasetId, CommandCont logger.warning("CommandException caught when executing the asynchronous portion of the Dataset Publication Command."); } } - + /* - Experimental asynchronous method for requesting persistent identifiers for - datafiles. We decided not to run this method on upload/create (so files - will not have persistent ids while in draft; when the draft is published, - we will force obtaining persistent ids for all the files in the version. - - If we go back to trying to register global ids on create, care will need to - be taken to make sure the asynchronous changes below are not conflicting with - the changes from file ingest (which may be happening in parallel, also - asynchronously). We would also need to lock the dataset (similarly to how + Experimental asynchronous method for requesting persistent identifiers for + datafiles. We decided not to run this method on upload/create (so files + will not have persistent ids while in draft; when the draft is published, + we will force obtaining persistent ids for all the files in the version. + + If we go back to trying to register global ids on create, care will need to + be taken to make sure the asynchronous changes below are not conflicting with + the changes from file ingest (which may be happening in parallel, also + asynchronously). We would also need to lock the dataset (similarly to how tabular ingest locks the dataset), to prevent the user from publishing the - version before all the identifiers get assigned - otherwise more conflicts + version before all the identifiers get assigned - otherwise more conflicts are likely. (It sounds like it would make sense to treat these two tasks - - persistent identifiers for files and ingest - as one post-upload job, so that + persistent identifiers for files and ingest - as one post-upload job, so that they can be run in sequence). -- L.A. Mar.
2018 */ @Asynchronous @@ -1005,58 +1015,58 @@ public void obtainPersistentIdentifiersForDatafiles(Dataset dataset) { datafile.setIdentifierRegistered(true); datafile.setGlobalIdCreateTime(new Date()); } - + DataFile merged = em.merge(datafile); - merged = null; + merged = null; } } } - + public long findStorageSize(Dataset dataset) throws IOException { return findStorageSize(dataset, false, GetDatasetStorageSizeCommand.Mode.STORAGE, null); } - - + + public long findStorageSize(Dataset dataset, boolean countCachedExtras) throws IOException { return findStorageSize(dataset, countCachedExtras, GetDatasetStorageSizeCommand.Mode.STORAGE, null); } - + /** - * Returns the total byte size of the files in this dataset - * + * Returns the total byte size of the files in this dataset + * * @param dataset * @param countCachedExtras boolean indicating if the cached disposable extras should also be counted * @param mode String indicating whether we are getting the result for storage (entire dataset) or download version based * @param version optional param for dataset version - * @return total size - * @throws IOException if it can't access the objects via StorageIO - * (in practice, this can only happen when called with countCachedExtras=true; when run in the - * default mode, the method doesn't need to access the storage system, as the + * @return total size + * @throws IOException if it can't access the objects via StorageIO + * (in practice, this can only happen when called with countCachedExtras=true; when run in the + * default mode, the method doesn't need to access the storage system, as the * sizes of the main files are recorded in the database) */ public long findStorageSize(Dataset dataset, boolean countCachedExtras, GetDatasetStorageSizeCommand.Mode mode, DatasetVersion version) throws IOException { - long total = 0L; - + long total = 0L; + if (dataset.isHarvested()) { return 0L; } List<DataFile> filesToTally = new ArrayList(); - + if (version == null || (mode != null && mode.equals("storage"))){ filesToTally = dataset.getFiles(); } else { List<FileMetadata> fmds = version.getFileMetadatas(); for (FileMetadata fmd : fmds){ filesToTally.add(fmd.getDataFile()); - } + } } - - + + //CACHED EXTRAS FOR DOWNLOAD? - - + + for (DataFile datafile : filesToTally) { total += datafile.getFilesize(); @@ -1075,13 +1085,13 @@ public long findStorageSize(Dataset dataset, boolean countCachedExtras, GetDatas } } } - + // and finally, if (countCachedExtras) { // count the sizes of the files cached for the dataset itself // (i.e., the metadata exports): StorageIO<Dataset> datasetSIO = DataAccess.getStorageIO(dataset); - + for (String[] exportProvider : ExportService.getInstance().getExportersLabels()) { String exportLabel = "export_" + exportProvider[1] + ".cached"; try { @@ -1091,31 +1101,31 @@ public long findStorageSize(Dataset dataset, boolean countCachedExtras, GetDatas } } } - - return total; + + return total; } - + /** - * An optimized method for deleting a harvested dataset. - + * An optimized method for deleting a harvested dataset.
+ * * @param dataset * @param request DataverseRequest (for initializing the DestroyDatasetCommand) * @param hdLogger logger object (in practice, this will be a separate log file created for a specific harvesting job) */ @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) public void deleteHarvestedDataset(Dataset dataset, DataverseRequest request, Logger hdLogger) { - // Purge all the SOLR documents associated with this client from the - // index server: + // Purge all the SOLR documents associated with this client from the + // index server: indexService.deleteHarvestedDocuments(dataset); - + try { - // files from harvested datasets are removed unceremoniously, - // directly in the database. no need to bother calling the + // files from harvested datasets are removed unceremoniously, + // directly in the database. no need to bother calling the // DeleteFileCommand on them. for (DataFile harvestedFile : dataset.getFiles()) { DataFile merged = em.merge(harvestedFile); em.remove(merged); - harvestedFile = null; + harvestedFile = null; } dataset.setFiles(null); Dataset merged = em.merge(dataset); @@ -1123,6 +1133,6 @@ public void deleteHarvestedDataset(Dataset dataset, DataverseRequest request, Lo hdLogger.info("Successfully destroyed the dataset"); } catch (Exception ex) { hdLogger.warning("Failed to destroy the dataset"); - } + } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index c4d3f51c86a..66935c89e0b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -1205,7 +1205,7 @@ public String save() { } else { int nFilesTotal = workingVersion.getFileMetadatas().size(); if (nNewFiles == 0 || nFilesTotal == nExpectedFilesTotal) { - JsfHelper.addSuccessMessage(getBundleString("dataset.message.filesSuccess").concat(" ").concat(datasetService.getReminderString(dataset, canPublishDataset()))); + JsfHelper.addSuccessMessage(getBundleString("dataset.message.filesSuccess")); } else if (nFilesTotal == nOldFiles) { JsfHelper.addErrorMessage(getBundleString("dataset.message.addFiles.Failure")); } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index ca8a5d74b40..3fa6d4fdfff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -109,6 +109,9 @@ public class FilePage implements java.io.Serializable { @EJB AuthenticationServiceBean authService; + @EJB + DatasetServiceBean datasetService; + @EJB SystemConfig systemConfig; @@ -248,10 +251,18 @@ public String init() { if(!hasValidTermsOfAccess && canUpdateDataset() ){ JsfHelper.addWarningMessage(BundleUtil.getStringFromBundle("dataset.message.editMetadata.invalid.TOUA.message")); } - + + displayPublishMessage(); return null; } + private void displayPublishMessage(){ + if (fileMetadata.getDatasetVersion().isDraft() && canUpdateDataset() + && (canPublishDataset() || !fileMetadata.getDatasetVersion().getDataset().isLockedFor(DatasetLock.Reason.InReview))){ + JsfHelper.addWarningMessage(datasetService.getReminderString(fileMetadata.getDatasetVersion().getDataset(), canPublishDataset(), true)); + } + } + private boolean canViewUnpublishedDataset() { return permissionsWrapper.canViewUnpublishedDataset( dvRequestService.getDataverseRequest(), fileMetadata.getDatasetVersion().getDataset()); } diff --git 
a/src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java b/src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java index 1a1a87b1b87..039915c7201 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java +++ b/src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java @@ -1,6 +1,8 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.json.JsonLDNamespace; import java.io.Serializable; import java.util.List; @@ -66,9 +68,26 @@ public void setName(String name) { public String getNamespaceUri() { return namespaceUri; } + public void setNamespaceUri(String namespaceUri) { this.namespaceUri = namespaceUri; } + + private String getAssignedNamespaceUri() { + String nsUri = getNamespaceUri(); + // Standard blocks will have a namespaceUri + if (nsUri == null) { + // Locally created/edited blocks, legacy blocks may not have a defined + // namespaceUri, so generate one that indicates that this is a locally defined + // term + nsUri = SystemConfig.getDataverseSiteUrlStatic() + "/schema/" + name + "#"; + } + return nsUri; + } + + public JsonLDNamespace getJsonLDNamespace() { + return JsonLDNamespace.defineNamespace(name, getAssignedNamespaceUri()); + } @OneToMany(mappedBy = "metadataBlock", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) @OrderBy("displayOrder") diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 6ea63e2b51f..bd27405fae5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -8,8 +8,6 @@ import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider; import edu.harvard.iq.dataverse.authorization.providers.oauth2.AbstractOAuth2AuthenticationProvider; - -import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.validation.PasswordValidatorUtil; import java.io.FileInputStream; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java index 7ae14655e81..9d404190cc4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java @@ -99,7 +99,7 @@ public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) throws Except if (excludeEmail && DatasetFieldType.FieldType.EMAIL.equals(dfType.getFieldType())) { continue; } - JsonLDTerm fieldName = getTermFor(dfType); + JsonLDTerm fieldName = dfType.getJsonLDTerm(); if (fieldName.inNamespace()) { localContext.putIfAbsent(fieldName.getNamespace().getPrefix(), fieldName.getNamespace().getUrl()); } else { @@ -145,7 +145,7 @@ public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) throws Except if (!dsf.isEmpty()) { // Add context entry // ToDo - also needs to recurse here? 
- JsonLDTerm subFieldName = getTermFor(dfType, dsft); + JsonLDTerm subFieldName = dsft.getJsonLDTerm(); if (subFieldName.inNamespace()) { localContext.putIfAbsent(subFieldName.getNamespace().getPrefix(), subFieldName.getNamespace().getUrl()); @@ -364,11 +364,11 @@ public JsonLDTerm getContactTerm() { } public JsonLDTerm getContactNameTerm() { - return getTermFor(DatasetFieldConstant.datasetContact, DatasetFieldConstant.datasetContactName); + return getTermFor(DatasetFieldConstant.datasetContactName); } public JsonLDTerm getContactEmailTerm() { - return getTermFor(DatasetFieldConstant.datasetContact, DatasetFieldConstant.datasetContactEmail); + return getTermFor(DatasetFieldConstant.datasetContactEmail); } public JsonLDTerm getDescriptionTerm() { @@ -376,61 +376,15 @@ public JsonLDTerm getDescriptionTerm() { } public JsonLDTerm getDescriptionTextTerm() { - return getTermFor(DatasetFieldConstant.description, DatasetFieldConstant.descriptionText); + return getTermFor(DatasetFieldConstant.descriptionText); } private JsonLDTerm getTermFor(String fieldTypeName) { + //Could call datasetFieldService.findByName(fieldTypeName) - is that faster/preferable? for (DatasetField dsf : version.getDatasetFields()) { DatasetFieldType dsft = dsf.getDatasetFieldType(); if (dsft.getName().equals(fieldTypeName)) { - return getTermFor(dsft); - } - } - return null; - } - - private JsonLDTerm getTermFor(DatasetFieldType dsft) { - if (dsft.getUri() != null) { - return new JsonLDTerm(dsft.getTitle(), dsft.getUri()); - } else { - String namespaceUri = dsft.getMetadataBlock().getNamespaceUri(); - if (namespaceUri == null) { - namespaceUri = SystemConfig.getDataverseSiteUrlStatic() + "/schema/" + dsft.getMetadataBlock().getName() - + "#"; - } - JsonLDNamespace blockNamespace = JsonLDNamespace.defineNamespace(dsft.getMetadataBlock().getName(), namespaceUri); - return new JsonLDTerm(blockNamespace, dsft.getTitle()); - } - } - - private JsonLDTerm getTermFor(DatasetFieldType dfType, DatasetFieldType dsft) { - if (dsft.getUri() != null) { - return new JsonLDTerm(dsft.getTitle(), dsft.getUri()); - } else { - // Use metadatablock URI or custom URI for this field based on the path - String subFieldNamespaceUri = dfType.getMetadataBlock().getNamespaceUri(); - if (subFieldNamespaceUri == null) { - subFieldNamespaceUri = SystemConfig.getDataverseSiteUrlStatic() + "/schema/" - + dfType.getMetadataBlock().getName() + "/"; - } - subFieldNamespaceUri = subFieldNamespaceUri + dfType.getName() + "#"; - JsonLDNamespace fieldNamespace = JsonLDNamespace.defineNamespace(dfType.getName(), subFieldNamespaceUri); - return new JsonLDTerm(fieldNamespace, dsft.getTitle()); - } - } - - private JsonLDTerm getTermFor(String type, String subType) { - for (DatasetField dsf : version.getDatasetFields()) { - DatasetFieldType dsft = dsf.getDatasetFieldType(); - if (dsft.getName().equals(type)) { - for (DatasetFieldCompoundValue dscv : dsf.getDatasetFieldCompoundValues()) { - for (DatasetField subField : dscv.getChildDatasetFields()) { - DatasetFieldType subFieldType = subField.getDatasetFieldType(); - if (subFieldType.getName().equals(subType)) { - return getTermFor(dsft, subFieldType); - } - } - } + return dsft.getJsonLDTerm(); } } return null; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java index 62cd54387b8..465360f84cc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java +++
b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java @@ -74,7 +74,8 @@ public static JsonObject getContext(Map<String, String> contextMap) { } public static Dataset updateDatasetMDFromJsonLD(Dataset ds, String jsonLDBody, - MetadataBlockServiceBean metadataBlockSvc, DatasetFieldServiceBean datasetFieldSvc, boolean append, boolean migrating, LicenseServiceBean licenseSvc) { + MetadataBlockServiceBean metadataBlockSvc, DatasetFieldServiceBean datasetFieldSvc, boolean append, + boolean migrating, LicenseServiceBean licenseSvc) { DatasetVersion dsv = new DatasetVersion(); @@ -491,27 +492,13 @@ private static JsonArray getValues(JsonValue val, boolean allowMultiples, String static Map<String, String> localContext = new TreeMap<String, String>(); static Map<String, DatasetFieldType> dsftMap = new TreeMap<String, DatasetFieldType>(); + //A map of DatasetFieldTypes by decontextualized URL private static void populateFieldTypeMap(MetadataBlockServiceBean metadataBlockSvc) { if (dsftMap.isEmpty()) { - List<MetadataBlock> mdbList = metadataBlockSvc.listMetadataBlocks(); - for (MetadataBlock mdb : mdbList) { - boolean blockHasUri = mdb.getNamespaceUri() != null; for (DatasetFieldType dsft : mdb.getDatasetFieldTypes()) { - if (dsft.getUri() != null) { - dsftMap.put(dsft.getUri(), dsft); - } - if (blockHasUri) { - if (dsft.getParentDatasetFieldType() != null) { - // ToDo - why not getName for child type? Would have to fix in ORE generation - // code and handle legacy bags - dsftMap.put(mdb.getNamespaceUri() + dsft.getParentDatasetFieldType().getName() + "#" - + dsft.getTitle(), dsft); - } else { - dsftMap.put(mdb.getNamespaceUri() + dsft.getTitle(), dsft); - } - } + dsftMap.put(dsft.getJsonLDTerm().getUrl(), dsft); } } logger.fine("DSFT Map: " + String.join(", ", dsftMap.keySet())); @@ -522,15 +509,12 @@ public static void populateContext(MetadataBlockServiceBean metadataBlockSvc) { if (localContext.isEmpty()) { List<MetadataBlock> mdbList = metadataBlockSvc.listMetadataBlocks(); - for (MetadataBlock mdb : mdbList) { - boolean blockHasUri = mdb.getNamespaceUri() != null; - if (blockHasUri) { - JsonLDNamespace.defineNamespace(mdb.getName(), mdb.getNamespaceUri()); - } + //Ensures the mdb's namespace is in the list checked by JsonLDNamespace.isInNamespace() below + mdb.getJsonLDNamespace(); for (DatasetFieldType dsft : mdb.getDatasetFieldTypes()) { if ((dsft.getUri() != null) && !JsonLDNamespace.isInNamespace(dsft.getUri())) { - //Add term if uri exists and it's not in one of the namespaces already defined + // Add term if uri exists and it's not in one of the namespaces already defined localContext.putIfAbsent(dsft.getName(), dsft.getUri()); } } diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index efd8bd522ec..c59492bf8ca 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1524,20 +1524,22 @@ dataset.message.createSuccess=This dataset has been created. dataset.message.createSuccess.failedToSaveFiles=Partial Success: The dataset has been created. But the file(s) could not be saved. Please try uploading the file(s) again. dataset.message.createSuccess.partialSuccessSavingFiles=Partial Success: The dataset has been created. But only {0} out of {1} files have been saved. Please try uploading the missing file(s) again. dataset.message.linkSuccess= {0} has been successfully linked to {1}. -dataset.message.metadataSuccess=The metadata for this dataset has been updated. +dataset.message.metadataSuccess=The metadata for this dataset have been updated.
dataset.message.termsSuccess=The terms for this dataset have been updated. -dataset.message.filesSuccess=The files for this dataset have been updated. +dataset.message.filesSuccess=One or more files have been updated. dataset.message.addFiles.Failure=Failed to add files to the dataset. Please try uploading the file(s) again. dataset.message.addFiles.partialSuccess=Partial success: only {0} files out of {1} have been saved. Please try uploading the missing file(s) again. -dataset.message.publish.remind.draft=If it's ready for sharing, please publish it. -dataset.message.submit.remind.draft=If it's ready for sharing, please submit it for review. -dataset.message.publish.remind.version=If it's ready for sharing, please publish it so that others can see these changes. -dataset.message.submit.remind.version=If it's ready for sharing, please submit it for review so that others can see these changes. +dataset.message.publish.warning=This draft version needs to be published. +dataset.message.submit.warning=This draft version needs to be submitted for review. +dataset.message.publish.remind.draft=When ready for sharing, please publish it so that others can see these changes. +dataset.message.submit.remind.draft=When ready for sharing, please submit it for review. +dataset.message.publish.remind.draft.filePage=When ready for sharing, please go to the <a href="/dataset.xhtml?persistentId={0}">dataset page</a> to publish it so that others can see these changes. +dataset.message.submit.remind.draft.filePage=When ready for sharing, please go to the <a href="/dataset.xhtml?persistentId={0}">dataset page</a> to submit it for review. dataset.message.publishSuccess=This dataset has been published. dataset.message.only.authenticatedUsers=Only authenticated users may release Datasets. dataset.message.deleteSuccess=This dataset has been deleted. dataset.message.bulkFileUpdateSuccess=The selected files have been updated. -dataset.message.bulkFileDeleteSuccess=The selected files have been deleted. +dataset.message.bulkFileDeleteSuccess=One or more files have been deleted. datasetVersion.message.deleteSuccess=This dataset draft has been deleted. datasetVersion.message.deaccessionSuccess=The selected version(s) have been deaccessioned. dataset.message.deaccessionSuccess=This dataset has been deaccessioned. diff --git a/src/main/resources/db/migration/V5.10.1.0.1__8533-semantic-updates.sql b/src/main/resources/db/migration/V5.10.1.0.1__8533-semantic-updates.sql new file mode 100644 index 00000000000..7186adbee3e --- /dev/null +++ b/src/main/resources/db/migration/V5.10.1.0.1__8533-semantic-updates.sql @@ -0,0 +1,11 @@ +DO $$ +BEGIN + + BEGIN + ALTER TABLE datasetfieldtype ADD CONSTRAINT datasetfieldtype_name_key UNIQUE(name); + EXCEPTION + WHEN duplicate_object THEN RAISE NOTICE 'Table unique constraint datasetfieldtype_name_key already exists'; + END; + +END $$; + diff --git a/src/main/webapp/dataset-widgets.xhtml b/src/main/webapp/dataset-widgets.xhtml index a57f144b97a..93072952a36 100644 --- a/src/main/webapp/dataset-widgets.xhtml +++ b/src/main/webapp/dataset-widgets.xhtml @@ -72,7 +72,8 @@ data-toggle="tooltip" data-placement="auto right" data-original-title="#{bundle['dataset.thumbnailsAndWidget.thumbnailImage.uploadNew.title']}">