diff --git a/build/Dockerfile b/build/Dockerfile index 213120138c..a61316436c 100644 --- a/build/Dockerfile +++ b/build/Dockerfile @@ -1,12 +1,17 @@ -FROM ubuntu:20.04 +FROM ubuntu:20.04 as base-environment ARG saxonversion ARG hugoversion +ARG calabashversion ENV TZ=US/Eastern RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone -RUN apt-get update && apt-get install -y wget apt-utils libxml2-utils jq maven nodejs npm build-essential python3-pip git && apt-get clean +RUN apt-get update && apt-get dist-upgrade -y + +FROM base-environment as oscal-dependencies + +RUN apt-get install -y apt-utils build-essential git jq libxml2-utils maven nodejs npm python3-pip unzip wget && apt-get clean RUN npm install -g npm n RUN n latest RUN npm install --loglevel verbose -g ajv-cli@"^4.0.x" ajv-formats@"^1.5.x" json-diff markdown-link-check yaml-convert@"^1.0.x" yargs @@ -15,15 +20,28 @@ RUN pip3 install lxml #RUN useradd --create-home --home-dir /home/user user #USER user -RUN mvn org.apache.maven.plugins:maven-dependency-plugin:2.10:get -DartifactId=Saxon-HE -DgroupId=net.sf.saxon -Dversion=${saxonversion} +ENV SAXON_VERSION ${saxonversion} + +RUN mvn org.apache.maven.plugins:maven-dependency-plugin:2.10:get -DartifactId=Saxon-HE -DgroupId=net.sf.saxon -Dversion=${SAXON_VERSION} -RUN wget https://github.com/gohugoio/hugo/releases/download/v${hugoversion}/hugo_extended_${hugoversion}_Linux-64bit.deb -RUN dpkg -i hugo_extended_${hugoversion}_Linux-64bit.deb +ENV HUGO_VERSION ${hugoversion} +RUN wget https://github.com/gohugoio/hugo/releases/download/v${HUGO_VERSION}/hugo_extended_${HUGO_VERSION}_Linux-64bit.deb +RUN dpkg -i hugo_extended_${HUGO_VERSION}_Linux-64bit.deb + +# calabash +ENV CALABASH_VERSION ${calabashversion} +ENV CALABASH_HOME /dependencies/calabash +RUN wget https://github.com/ndw/xmlcalabash1/releases/download/${CALABASH_VERSION}/xmlcalabash-${CALABASH_VERSION}.zip +RUN mkdir -p "${CALABASH_HOME}" +RUN unzip -d "${CALABASH_HOME}" "xmlcalabash-${CALABASH_VERSION}.zip" +RUN f=`ls -d "${CALABASH_HOME}"/*| xargs` && mv "${CALABASH_HOME}"/*/* "${CALABASH_HOME}" && rmdir "${f}" #RUN chown -R user:user /home/user -ENV SAXON_VERSION ${saxonversion} +FROM oscal-dependencies as oscal-base VOLUME ["/oscal"] WORKDIR /oscal +FROM oscal-base as cli + ENTRYPOINT ["/bin/bash"] diff --git a/build/README.md b/build/README.md index c30f890fd3..bb3d54b49b 100644 --- a/build/README.md +++ b/build/README.md @@ -6,7 +6,7 @@ This subdirectory contains a set of build scripts used to create OSCAL-related a If using Docker: -- [Docker 19.03+](https://docs.docker.com/install/) +- [Docker 20.10+](https://docs.docker.com/install/) If not using Docker: @@ -26,7 +26,7 @@ A Docker container configuration is provided that establishes the runtime enviro You can build the Docker container for the build environment using Docker Compose as follows from the OSCAL `/build` directory: ``` - docker-compose build + docker compose build ``` 3. Run the Docker container @@ -36,14 +36,14 @@ A Docker container configuration is provided that establishes the runtime enviro You can run the Docker container for the build environment using Docker Compose as follows: ``` - docker-compose run cli + docker compose run cli ``` On Windows environments, you may need to execute in a pty that allows for using an interactive shell. In such a case you can run the Docker container as follows: ``` - winpty docker-compose run cli + winpty docker compose run cli ``` This should launch an interactive shell. 
@@ -58,12 +58,16 @@ The following steps are known to work on [Ubuntu](https://ubuntu.com/) (tested i - SAXON_VERSION - Defines which version of Saxon-HE to use - HUGO_VERSION - Defines which version of Hugo to use + - CALABASH_VERSION - Defines which version of XML Calabash to use + - CALABASH_HOME - Defines where calabash will be installed The following is an example of how to configure the environment. ```bash export SAXON_VERSION="9.9.1-3" - export HUGO_VERSION="0.74.3" + export HUGO_VERSION="0.83.1" + export CALABASH_VERSION="1.2.5-100" + export CALABASH_HOME="$HOME/calabash" ``` You may want to add this export to your `~/.bashrc` to persist the configuration. @@ -117,6 +121,16 @@ The following steps are known to work on [Ubuntu](https://ubuntu.com/) (tested i mvn org.apache.maven.plugins:maven-dependency-plugin:2.10:get -DartifactId=Saxon-HE -DgroupId=net.sf.saxon -Dversion=${SAXON_VERSION} ``` +1. Install Calabash + + To install Calabash, run the following: + + ```bash + wget https://github.com/ndw/xmlcalabash1/releases/download/${CALABASH_VERSION}/xmlcalabash-${CALABASH_VERSION}.zip + mkdir -p "${CALABASH_HOME}" + unzip -d "${CALABASH_HOME}" "xmlcalabash-${CALABASH_VERSION}.zip" + mv "${CALABASH_HOME}"/*/* "${CALABASH_HOME}" + Your environment should be setup. ## Running the Build Scripts diff --git a/build/ci-cd/README.md b/build/ci-cd/README.md index aa2198dffa..965b2d8f1e 100644 --- a/build/ci-cd/README.md +++ b/build/ci-cd/README.md @@ -150,18 +150,15 @@ export SAXON_VERSION=9.9.1-3 mvn org.apache.maven.plugins:maven-dependency-plugin:2.10:get -DartifactId=Saxon-HE -DgroupId=net.sf.saxon -Dversion=$SAXON_VERSION ``` -mkdir $HOME/oscal-oss +You will also need a copy of the ISO Schematron skeleton. +``` +mkdir $HOME/oscal-oss export SCHEMATRON_HOME=$HOME/oscal-oss/git-schematron git clone --depth 1 --no-checkout https://github.com/Schematron/schematron.git "$SCHEMATRON_HOME" cd "$SCHEMATRON_HOME" git checkout master -- trunk/schematron/code - - -export OSCAL_TOOLS_DIR=$HOME/oscal-oss/oscal_tools -git clone --depth 1 https://github.com/usnistgov/oscal-tools.git "${OSCAL_TOOLS_DIR}" -cd $OSCAL_TOOLS_DIR/json-cli -mvn clean install +``` Finally, export instructions in the preceding must also be copied into .bashrc so they persist in your environment. @@ -171,7 +168,6 @@ prettyson sudo npm install -g prettyjson ``` - jq ``` diff --git a/build/ci-cd/generate-specification-documentation.sh b/build/ci-cd/generate-specification-documentation.sh index a7366414c9..7129d413d5 100755 --- a/build/ci-cd/generate-specification-documentation.sh +++ b/build/ci-cd/generate-specification-documentation.sh @@ -57,7 +57,7 @@ if [ "$VERBOSE" = "true" ]; then fi SPEC_SOURCE="${OSCALDIR}/src/specifications/profile-resolution/profile-resolution-specml.xml" -SPEC_OUTPUT="$WORKING_DIR/docs/content/documentation/processing/profile-resolution.html" +SPEC_OUTPUT="$WORKING_DIR/docs/content/concepts/processing/profile-resolution.html" result=$(xsl_transform "$OSCALDIR/src/specifications/profile-resolution/specml-html-hugo-uswds.xsl" "${SPEC_SOURCE}" "${SPEC_OUTPUT}" 2>&1) cmd_exitcode=$? diff --git a/build/docker-compose.yml b/build/docker-compose.yml index 7d0c349053..0a8104b412 100644 --- a/build/docker-compose.yml +++ b/build/docker-compose.yml @@ -5,11 +5,14 @@ services: tty: true build: context: . 
+ target: cli args: saxonversion: 9.9.1-3 hugoversion: 0.83.1 + calabashversion: 1.2.5-100 volumes: - "../:/oscal" - environment: - - SAXON_VERSION=9.9.1-3 - - JSON_CLI_VERSION=0.0.1-SNAPSHOT +# environment: +# - SAXON_VERSION=9.9.1-3 +# - JSON_CLI_VERSION=0.0.1-SNAPSHOT +# - CALABASH_VERSION=1.2.5-100 diff --git a/docs/Dockerfile b/docs/Dockerfile deleted file mode 100644 index 071d3b67ef..0000000000 --- a/docs/Dockerfile +++ /dev/null @@ -1,12 +0,0 @@ -FROM ubuntu:20.04 -ARG HUGO_VERSION=0.83.1 - -RUN mkdir /hugo && \ - cd /hugo && \ - apt-get update && \ - apt-get install -y apt-utils curl && \ - curl -L -O https://github.com/gohugoio/hugo/releases/download/v${HUGO_VERSION}/hugo_extended_${HUGO_VERSION}_Linux-64bit.deb && \ - apt-get install ./hugo_extended_${HUGO_VERSION}_Linux-64bit.deb -WORKDIR /docs -COPY . . -ENTRYPOINT hugo server --enableGitInfo=false -v --debug --minify --bind 0.0.0.0 diff --git a/docs/README.md b/docs/README.md index 99a04e6639..782b25e83e 100644 --- a/docs/README.md +++ b/docs/README.md @@ -9,7 +9,7 @@ The website is built using the [Hugo](https://gohugo.io/) static site generator If using Docker: - [Saxon-HE for Java](http://saxon.sourceforge.net/#F9.9HE) -- [Docker 19.03+](https://docs.docker.com/install/) +- [Docker 20.10+](https://docs.docker.com/install/) If not using Docker: @@ -35,7 +35,7 @@ Instructions for installing the Hugo CLI on your OS can be found [here](https:// The website's visual styling is also backed by the U.S. Web Design System (USWDS) via an open source Hugo theme at https://github.com/usnistgov/hugo-uswds. -The USWDS framework, a Jekyll customization we are using, is documented here: https://designsystem.digital.gov/. +The USWDS framework is documented here: https://designsystem.digital.gov/. ### Building the site with LiveReload @@ -86,8 +86,8 @@ The website can also be developed and built using the included Docker resources. Assuming you've [installed Docker](https://docs.docker.com/install/) and [Docker Compose](https://docs.docker.com/compose/install/) for your system, you can build and serve the site using Docker Compose as follows: ``` -docker-compose build -docker-compose up +docker compose build +docker compose up ``` Once the site is running, it can be accessed at http://localhost:1313/OSCAL. Whenever you make any changes to the content with the Hugo server running, you'll notice that the site automatically updates itself to reflect those changes. diff --git a/docs/content/concepts/processing/profile-resolution.html b/docs/content/concepts/processing/profile-resolution.html index 160c1b10e0..7740861da0 100644 --- a/docs/content/concepts/processing/profile-resolution.html +++ b/docs/content/concepts/processing/profile-resolution.html @@ -1,12 +1,7 @@ --- title: OSCAL Profile Resolution description: Transforming a profile into the tailored catalog it represents -toc: - enabled: true -aliases: - - /documentation/processing/profile-resolution/ --- -
These specifications describe how to render an OSCAL profile document in the form of an OSCAL catalog. We call this profile resolution.
These DRAFT specifications take the form of a tag set mapping between the OSCAL profile and OSCAL catalog XML document models. The same mapping @@ -28,19 +23,20 @@ the intended or expected results. Conformance to these specifications requires that a processor's result corresponds to the target as described here, given a particular source.
The term directive is used in these specifications to refer to an element or
- combination of elements in source data, which is designed to effect a particular outcome in
+ combination of elements in source data, which is designed to affect a particular outcome in
the target catalog. For the most part, directives are in the source profile document – for
- example, a set
element in a profile is a directive to set a parameter value in
- the target catalog.
In contrast to profiles, catalogs do not contain directives but instead, representations of
+ example, a set-parameter
element in a profile is a directive to set a parameter
+ value in the target catalog.
In contrast to profiles, catalogs do not contain directives but instead, representations of
security controls and their parts, optionally gathered in groups, on which directives are
assumed to operate. There is one exception: the insert
element in control
- catalogs can be considered a directive that a parameter value be inserted into a particular
- location in running text (data content), when a catalog is rendered. Since these semantics
- respect catalog rendering, however, not profile resolution, they are out of scope for this
- document. For purposes of profile resolution, catalogs may be considered to be passive
- resources from which contents are replicated (copied): the operation of the profile, in
- contrast - its OSCAL semantics
as opposed to the domain-specific semantics of the
- information it handles – requires that it express what is to be done and how.
Original order refers to the order of elements and objects as given in the
+ catalogs can be considered a directive that a parameter value (or other value, as
+ appropriate) be inserted into a particular location in running text (data content), when a
+ catalog is rendered. Since these semantics respect catalog rendering, however, not profile
+ resolution, they are out of scope for this document. For purposes of profile resolution,
+ catalogs may be considered to be passive resources from which contents are replicated
+ (copied): the operation of the profile, in contrast - its OSCAL semantics
as opposed
+ to the domain-specific semantics of the information it handles – requires that it express
+ what is to be done and how.
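For illustration, a minimal non-normative sketch of the two sides of this relationship, using hypothetical identifiers; the insert attributes shown (type and id-ref) follow the current catalog model and may differ in earlier drafts:
{{< highlight xml>}}
<!-- Directive in the source profile: set a parameter value -->
<modify>
  <set-parameter param-id="ac-1_prm_1">
    <value>at least every 180 days</value>
  </set-parameter>
</modify>

<!-- Passive content in the source catalog: the control the directive operates on -->
<control id="ac-1">
  <param id="ac-1_prm_1">
    <label>frequency</label>
  </param>
  <part name="statement" id="ac-1_smt">
    <p>Review and update the policy <insert type="param" id-ref="ac-1_prm_1"/>.</p>
  </part>
</control>
{{< /highlight >}}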
Original order refers to the order of elements and objects as given in the
source. Canonical order refers to the correct
order of elements or
objects as defined by an OSCAL format such as the catalog or profile models. That is, the
respective schemas of these models enforce order to the extent it is meaningful; where it is
@@ -79,16 +75,25 @@
element
@attribute (punctuated @
) indicates an attribute named
attribute
elem/@attr indicates an attribute named attr on an element named elem
elmn[@att='val'] indicates an element elmn that has an
- attribute @att with value val
.
Examples given to illustrate targets for particular source (example) inputs are given, like - the source, in XML notation. Sometimes, however, the expected outputs are not fully defined - by the inputs given. In these cases, a description of the dynamic (variable) contents in - curly braces { }.
So for example where a property is added to target metadata
, an example could
+ attribute @att with value val
.
Examples given to illustrate targets for particular source (example) inputs are given, like + the source, in XML notation. Sometimes, however, the expected outputs are not defined by the + inputs given and not literally present. In these cases, a description of the dynamic + (variable) contents is given in curly braces { }.
So for example where a property is added to target metadata
, an example could
show
This indicates the last-modified
element should be produced with contents
generated appropriately, namely with an actual time stamp in this case, and not the string
{ timestamp }
. In display, these values should also be shown with special
- formatting as shown here.
Examples also indicate unspecified content using a simple …
(elision) character,
- typically for more verbose chunks of target data whose production is specified elsewhere in
- the document.
A profile in OSCAL represents a selection and configuration of a set of + formatting as shown here.
Such highlighting is also used to indicate the beginning and end of nominal or implicit + structures described in these specifications (mainly selection), which are not + represented by tagging in the final results of resolution.
XML examples also indicate unspecified content using a simple …
(elision)
+ character, typically for more verbose chunks of target data whose production is specified
+ elsewhere in the document.
Finally, although examples are syntactically faithful to OSCAL, they are not necessarily
+ always formally valid in every respect. For example, OSCAL defines permissible, recognized
+ values for property names (prop/@name
) and permissible values for properties with
+ certain names (controlled by OSCAL), and those rules may not be observed here. (OSCAL does
+ not permit a prop[@name='status']
to have a value of pending
, for
+ example, as sometimes shown in this documentation). Examples are given for purposes of
+ illustrating profile resolution semantics only, and should not be taken as normative for any
+ actual use.
A profile in OSCAL represents a selection and configuration of a set of
controls. In the normal case, the set of controls available to a profile is
provided by a catalog. For example, the three NIST SP 800-53 profiles representing the
impact baselines HIGH, MODERATE and LOW: each of these calls on a catalog representing the
@@ -118,7 +123,7 @@
controls are included; their ordering (insofar as the target format represents ordering);
and the structure (groups) in which they are embedded. This specification is designed to
make this possible, by defining outputs with sufficient rigor that the same
is
- meaningful and testable in this context.
A profile is a representation of a delta, a here to there
. As such, a profile might
+ meaningful and testable in this context.
A profile is a representation of a delta, a here to there
. As such, a profile might
be conceived of as a recipe or instruction set – a sequence of steps or procedures – whereby
to create an OSCAL catalog (set of controls describing security requirements) from an OSCAL
catalog (set of controls describing security requirements). The primary use case for this is
@@ -146,13 +151,13 @@
the system in the context of the actual intentions appropriate to that system, can we
properly assess its security. Profiles make this possible.
In order for this to work however we need a process that can perform the necessary operations, to apply the delta to an unmodified catalog, to produce the local view of the - catalog as modified. This is profile resolution.
In an XML context, document validation is available as a means of determining whether an + catalog as modified. This is profile resolution.
In an XML context, document validation is available as a means of determining whether an
arbitrary XML document entity conforms to structural and naming constraints such as must be
assumed by processors respecting application semantics. These specifications assume that XML
inputs for profile resolution will be schema-valid with respect to the OSCAL
profile model, and hence will be tagged in the namespace
http://csrc.nist.gov/ns/oscal/1.0
.
Similarly, the definition of the output format or results, is constrained by the OSCAL - catalog model as defined by its schema.
As described above, original order refers to the order of elements and objects + catalog model as defined by its schema.
As described above, original order refers to the order of elements and objects
as given in the source, whereas canonical order refers to the correct
order of elements or objects as defined by an OSCAL format such as the catalog or profile
models. That is, the respective schemas of these models enforce order to the extent it is
@@ -178,7 +183,7 @@
metadata
, param
, control
, group
,
back-matter
. Within this sequence, members may be missing, as for example not
all catalogs will have parameters or groups. Any members that are present, however, must be
- given in this order.
In addition to those described and enforced by the respective schemas, there are additional + given in this order.
In addition to those described and enforced by the respective schemas, there are additional
constraints over the data that this process relies on. The most important of these is
ID-distinctiveness. Every id
flag (in XML, represented by @id
attributes) on an element in the model, whether it be attached to a control, group or an
@@ -198,14 +203,14 @@
namely keep
(also the default), use-first
and merge
.
Because ID distinctiveness within the scope of processing, is critical, OSCAL users should take care that different catalogs have distinct sets of IDs. This applies to all structures within the catalogs, not only controls but also groups, parameters, - citations and resources.
A central problem in profile resolution is determining when two controls are both + citations and resources.
A central problem in profile resolution is determining when two controls are both
representations of the same
control, even when those representations have dissimilar
contents or come to a profile through different import pathways.
For purposes of profile resolution, control identity (or parameter, group identity etc.) can be determined by any of several ways, depending on the identity designation - mode:
strict - control ID only - ordinary mode for well-controlled inputs
diagnostic - control ID + provenance identifier
maintenance - control ID + provenance + class (version etc)
none - no identification (each input control is distinct) - for tracing
Brian Ruf suggested we provide a resolution/disambiguation/debugging mode that + mode:
strict - control ID only - ordinary mode for well-controlled inputs
diagnostic - control ID + provenance identifier
maintenance - control ID + provenance + class (version etc)
none - no identification (each input control is distinct) - for tracing
Dev team member suggests we provide a resolution/disambiguation/debugging mode that produces valid OSCAL by producing new IDs for everything, thereby disambiguating by brute force. This is not a bad idea. This proposal offers that feature as 'no identity - designation', i.e. nothing is found to be identical with anything.
It is frequently possible by static analysis to detect many conditions even in schema-valid + designation', i.e. nothing is found to be identical with anything.
It is frequently possible by static analysis to detect many conditions even in schema-valid profile documents, that will result in problematic catalogs in resolution. The most common issue will be clashing controls, that is more than one control in underlying catalogs with a given identifier (ID), making references to such controls ambiguous. The identity @@ -213,14 +218,14 @@ handling, for purposes of development or diagnosis; but the quality of the results can only be guaranteed determinatively by the quality of the inputs, and full analysis in support of profile development and diagnostics – to produce correct inputs that accurately reflect the - intent – is outside the scope of these specifications.
In an XML-based profile resolution, XML comments are one straightforward way for a + intent – is outside the scope of these specifications.
In an XML-based profile resolution, XML comments are one straightforward way for a processor to record events or conditions without affecting the output's nominal semantics. To support this, while two processors are obliged to return the same catalog XML for the same profile XML inputs, they are not required to match one another's comments, whitespace usage, attribute order, or processing instructions, only each other's elements, attributes and data content.
One consequence of this is that processes intended to compare two profile resolutions may have to accommodate differences in comments, considering them as insignificant along with - other differences in serialization.
Inasmuch as the target of profile resolution is a catalog, the resulting document is expected + other differences in serialization.
Inasmuch as the target of profile resolution is a catalog, the resulting document is expected to be a catalog document as specified by OSCAL, conforming to the applicable schema, XSD in the case of OSCAL XML or JSON Schema for OSCAL object serialization formats.
These two approaches to validation do not constrain their respective data models – although OSCAL-equivalent – in exactly the same way. In particular, element structures in the XML may @@ -230,23 +235,23 @@ understood that what is described is both (a) the order in XML, and (b) the canonical order in OSCAL, which while it is not exposed in a JSON object serialization, will always available to an OSCAL processor by reference to the appropriate - metaschema (in this case, the Metaschema instance that defines the catalog model). [See: Terminology]
The output of a profile resolution should take the form of a catalog. Catalog results (the + metaschema (in this case, the Metaschema instance that defines the catalog model). [See: Terminology]
The output of a profile resolution should take the form of a catalog. Catalog results (the output of a conformant profile resolution) will be valid to the OSCAL catalog schema.
At its base, an OSCAL profile
source produces a catalog
:
A valid catalog must have metadata
in addition to controls and control groups.
Additionally it may have back-matter
. How to produce and populate the
- metadata
and back-matter
is described below
+ metadata
and back-matter
is described below
link me, as is the construction
- of the catalog/@id
in the target.
Although the target of profile resolution takes the form of a catalog, it may not be valid
+ of the catalog/@id
in the target.
Although the target of profile resolution takes the form of a catalog, it may not be valid
to all constraints that define the OSCAL catalog format, whether enforced by its schema or
by other means such as Schematron. Specifically, while a correctly implemented profile
resolution may produce a result catalog that is structurally valid, there is a mode of
operation, namely the keep-all combination rule
, which can permit replication of
controls or control contents, when the control imports are redundant or contain redundant
- call
or match
directives. Such replication can result in violations
- of ID uniqueness constraints, in particular. [See: Formal validation of source and result]
If the merge behavior is set to combine[@method='keep']
, or not given (as this
+ include-controls
directives. Such replication can result in violations of ID
+ uniqueness constraints, in particular. [See: Formal validation of source and result]
If the merge behavior is set to combine[@method='keep']
, or not given (as this
setting is the default), a profile with multiple calls on controls will result in multiple
copies of controls with clashing IDs. These should raise validation errors in the results,
since IDs must be unique on elements in document scope. Accordingly, this is an appropriate
@@ -254,39 +259,48 @@
removed; it is also an appropriate rule to apply when a profile has been tested and found to
be free of any such collisions. Other combination rules (method='use-first'
or
method='merge'
) are provided for special exigencies or to permit certain kinds
- of optimization.
Because document IDs are sometimes used to distinguish data points in processing context, a + of optimization.
Because document IDs are sometimes used to distinguish data points in processing context, a resolved profile may not present the same ID as any of the catalogs it is importing, or the same ID as its source profile.
It is permitted to produce the profile’s ID in resolution via a static mapping from the ID
of the source. For example, the resolution of profile with ID profile-X
might
have ID profile-X-RESOLVED
.
BR feels this is underspecified and I agree. He also thinks that resolutions should always have a (generated) unique IDs, since upstream catalogs can change between - resolutions.
Metadata in the target is derived directly from the source metadata, with modifications.
- All elements inside metadata
in the source profile are copied in their original
- order into the catalog. Additionally, new elements are introduced as described here.
Because of options in producing metadata and especially the requirement for a timestamp, + resolutions.
Metadata in the target is derived directly from the source metadata, with modifications.
+ With the exceptions given, elements inside metadata
in the source profile are
+ copied in their original order into the catalog. Additionally, new elements are introduced
+ as described here.
Because of options in producing metadata and especially the requirement for a timestamp,
developers and users should note that two different resolutions of the same profile will
- not, ordinarily, be identical inside metadata
.
This has been worked, with changes suggested by DW, since BR's - review
A prop
element with name
- resolution-timestamp
is added to the resolution target metadata with a valid
- timestamp indicating the time of the resolution runtime. Conforming with canonical order
- of metadata
in the target, It must be placed into metadata structure after
- any title
, published
, last-modified
,
- version
, oscal-version
, or doc-id
elements, and before
- any prop
, link
, role
, party
, or
+ not, ordinarily, be identical inside metadata
.
Reworked in context of Issue #580
The catalog document resulting from profile resolution may have a new timestamp,
+ reflecting its time of resolution (when the process was invoked, or executed, or when
+ its outputs were serialized). This is captured in the required element
+ metadata/last-modified
.
Processors have the option of assigning this value sensibly in operational context. In
+ general, the earliest "actual" (i.e., not predated) time warranted by the facts, is to
+ be preferred. For example, a profile resolver that periodically refreshes a catalog of
+ baselines, may determine that a profile is unchanged (despite changes in underlying
+ catalogs), and does not require a new last-modified
assignment – whereas,
+ even though its source profile had not changed, profile resolution results might
+ nonetheless change from one resolution to another, if underlying controls (in an
+ upstream source catalog) have changed: in this case, a new last-modified
is
+ called for on the result catalog.
Optionally, a prop
element with name
+ source-profile-last-modified
may be added to the resolution target metadata with
+ a valid timestamp capturing the stated last-modified
value from the source
+ document metadata
. Conforming with canonical order of metadata
in
+ the target, it must be placed into metadata structure after any title
,
+ published
, last-modified
, version
,
+ oscal-version
, or document-id
elements, and before any
+ prop
, link
, role
, party
, or
responsible-party
elements. The target is schema valid, and the new
prop
is given before copies of prop
elements already
present.
The presence of this property with a valid timestamp may be taken as an indicator that
- an OSCAL catalog
has been produced by resolution of a profile
.
Note that due to this provision, two different resolutions of the same profile produced - at different times will not be bit-equivalent, since their timestamps will be - different.
Any provision for systems that cannot provide a valid timestamp?
Optionally, a processor may add a link to the result
+ an OSCAL
catalog
has been produced by resolution of a profile
.
Any provision for systems that cannot provide a valid timestamp?
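As an illustrative, non-normative sketch of the placement described above, with hypothetical timestamp values and the property expressed with a value flag as in the current metadata model:
{{< highlight xml>}}
<metadata>
  <title>Example Baseline (RESOLVED)</title>
  <last-modified>2021-06-08T12:00:00Z</last-modified>
  <version>1.0</version>
  <oscal-version>1.0.0</oscal-version>
  <!-- optional: records the source profile's own last-modified value -->
  <prop name="source-profile-last-modified" value="2021-05-01T09:30:00Z"/>
  <!-- any prop, link, role, party or responsible-party elements copied from the source follow here -->
</metadata>
{{< /highlight >}}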
Optionally, a processor may add a link to the result
metadata
. Conforming with canonical order of metadata
in the target,
it must be placed into metadata structure after any title
,
published
, last-modified
, version
,
@@ -305,18 +319,17 @@
{{}}{{< highlight xml>}}
The construction of the body of the target catalog is described in the next section. [See: Phases of profile resolution]
The body of the target catalog
, after its metadata, is structured
- as follows, depending on its merge directive. (Details on merging are given below [See: Merge phase].)
If no merge
was given, or if merge
is given without
+{{}}
The construction of the body of the target catalog is described in the next section. [See: Phases of profile resolution]
The body of the target catalog
, after its metadata, is structured
+ as follows, depending on its merge directive. (Details on merging are given below [See: Merge phase].)
If no merge
was given, or if merge
is given without
custom
or as-is
, controls are flat (unstructured) in the target
catalog. This mode of processing is referred to as no merge
. Note that the same
merge
element in the source is also the location for the combination
@@ -335,7 +348,7 @@
show any groups or structures in the form of elements not described here. Depending on the
merge directive, all groups will be given either by the catalog source(s)
(merge/as-is
) or the profile (merge/custom
), or there will be no
- groups at all (no merge directive).
The back matter in target catalogs is assembled from the back matter of their source + groups at all (no merge directive).
The back matter in target catalogs is assembled from the back matter of their source
profiles in combination with the back matter of source catalogs. In both catalogs and
profiles, back-matter
is comprised of (multiple uses of either)
citation
or resource
. Either of these elements may be
@@ -355,7 +368,7 @@
eligible citation
elements followed by all eligible resource
elements,
in the same order as given in their sources.
Additionally, combination rules (see next section) will affect whether and how duplicate or
competing citation
or resource
elements are handled, to remove or
- combine multiple occurrences of the same referenced object.
Considered as a document
or integrated data set (object), a profile has three
+ combine multiple occurrences of the same referenced object.
Considered as a document
or integrated data set (object), a profile has three
sections, each of which corresponds to a conceptual step in resolution. To resolve a catalog
fully is to perform all these steps.
While the steps are described in this specification as occurring in sequence, a profile
processor is not obliged to perform them in the order described. Conformance to these
@@ -380,7 +393,7 @@
target. A profile can be valid and useful without making any modifications, only selections,
so the profile/modify
element (node) is optional in the profile model.
As described in the previous section, when resolved, an OSCAL profile takes the form of an OSCAL catalog. The merge semantics described below will produce outputs conforming to this - organization.
A profile begins by importing catalogs, all of whose controls are made available. The + organization.
A profile begins by importing catalogs, all of whose controls are made available. The
catalogs it references may be available as static resources, or they may be produced
dynamically on request; accordingly a profile may also import profiles, to be resolved as
catalogs, instead of or in addition to catalogs as such. Imports are given in sequence after
@@ -391,8 +404,8 @@
{{}}{{< highlight xml>}}
The resource indicated can be either an OSCAL catalog or an OSCAL profile. A catalog provides controls in their native form. An imported profile is resolved on import, using the @@ -400,32 +413,32 @@ profiles is not defined by this specification.)
No profile may import itself either directly or indirectly. An import directive that indicates either the profile itself, or a profile into which it is being (currently) imported, must be ignored. Optionally, a processor may issue a warning.
In an import directive, the reference to the resource to be imported appears on an
- @href
flag. It takes either of two forms, external or internal:
An external reference appears as an absolute or relative URL
Indicating a file available via [prototol?]
{{< highlight xml>}}@href
flag. It takes either of two forms, external or internal:An external reference appears as an absolute or relative URL
Indicating a file available via [prototol?]
{{< highlight xml>}}A catalog or profile to be imported can also be indicated using an internal link via a +{{}}
A catalog or profile to be imported can also be indicated using an internal link via a
URI fragment identifier (starting with #
). If an import href resolves to a
resource
elsewhere in the profile (typically in back matter), that resource
can be retrieved to provide the source catalog.
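A non-normative sketch of the two forms, with hypothetical locations; whether the fragment matches a resource id or uuid flag depends on the model revision in use:
{{< highlight xml>}}
<!-- External reference: absolute or relative URL -->
<import href="https://example.org/catalogs/example_catalog.xml">
  <include-all/>
</import>

<!-- Internal reference: fragment identifier pointing at a back-matter resource -->
<import href="#example-catalog">
  <include-controls>
    <with-id>ac-1</with-id>
  </include-controls>
</import>

<back-matter>
  <resource id="example-catalog">
    <rlink href="example_catalog.xml"/>
  </resource>
</back-matter>
{{< /highlight >}}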
A reference from an import to a resource that does not reference a catalog or profile, or a cross-reference to something other than a resource, is inoperative for purposes of importing. It may be signaled as an error [or warning] by a processor. Allow - processors to import other stuff as they like?
The rules of XSLT document()
apply to the traversal of any URI: that is, it
+ processors to import other stuff as they like?
The rules of XSLT document()
apply to the traversal of any URI: that is, it
is assumed that all calls to a given (resolved) URI reference, will return the same
result.
If documents called for import are determined to be unavailable, the processor may drop
them along with a warning, both or either out of band (e.g. through STDERR
)
@@ -433,13 +446,13 @@
not obliged, when requested for a resource, to use the given URI in an http request to
produce it. Processors may offer fallback behaviors for URI requests that fail; conversely
a processor may be able to avoid a URI request in favor of a better way to provide
- resources identified by URI.
An import that does not reference either an OSCAL catalog, or an OSCAL profile that can + resources identified by URI.
An import that does not reference either an OSCAL catalog, or an OSCAL profile that can be resolved as a catalog, is dropped.
In this context, an OSCAL profile that makes direct or indirect reference (via its own
imports) to the importing catalog, is also dropped as unavailable. See below under
- Circular Imports
.
When a profile imports a profile, the subordinate profile is resolved into a catalog
+ Circular Imports
.
When a profile imports a profile, the subordinate profile is resolved into a catalog using the same rules. This presents a possibility of circular imports, when a profile is directed to import itself either directly or indirectly.
A circular import is defined as a directive to import a resource, which has
already been called higher in the import hierarchy. For example, if Profile A imports
@@ -469,17 +482,17 @@
circular_profile.xml
:
In turn this file invokes home_profile.xml
:
No control selection is produced for the circular import:
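A non-normative sketch of the arrangement, using the file names given above (metadata omitted for brevity; identifier flags hypothetical):
{{< highlight xml>}}
<!-- home_profile.xml -->
<profile xmlns="http://csrc.nist.gov/ns/oscal/1.0" id="home_profile">
  <import href="circular_profile.xml">
    <include-all/>
  </import>
</profile>

<!-- circular_profile.xml: imports home_profile.xml, closing the circle -->
<profile xmlns="http://csrc.nist.gov/ns/oscal/1.0" id="circular_profile">
  <import href="home_profile.xml">
    <include-all/>
  </import>
</profile>

<!-- In resolution of home_profile.xml, the circular import contributes no control selection -->
{{< /highlight >}}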
{{< highlight xml>}}In the selection phase, each import directive is processed to produce a set of controls +{{}}
In the selection phase, each import directive is processed to produce a set of controls
and (when they are referenced) parameters. These controls and parameters are represented
in the form of a control selection. Note that this occurs in the selection phase even if
the same catalog (resource) is imported multiple times: each distinct import collects
controls into a selection
:
The control selections are combined and collapsed in the next phase of processing, @@ -514,87 +527,67 @@ underlying profiles), and no merge behavior has been indicated to resolve the clash.
Typical cases of multiple imports of the same (underlying) resource will be when a profile A imports another profile B, which calls catalog Z, and then profile A calls catalog Z again to acquire other controls not included in profile B, or in an original - form unmodified by profile B.
Imports can specify controls by inclusion and exclusion, either or both in - combination.
Using inclusion, all or some controls may be selected from a catalog.
Select individual controls or controls by groups using control-id
- selectors corresponding to their IDs.
Controls may also be selected using match patterns against their IDs. This is useful + form unmodified by profile B.
Imports can specify controls by inclusion and exclusion, either or both in + combination.
Using inclusion, all or some controls may be selected from a catalog.
Select individual controls or controls by groups using with-id
selectors
+ inside include-controls
, corresponding to their IDs.
Controls may also be selected using match patterns against their IDs. This is useful because related controls (either in a hierarchy, or together in a group) frequently - have related IDs as well.
{{< highlight xml>}}The match pattern is evaluated as a regular expression using XPath regular - expression syntax. [XXXX]
Select all controls from a catalog by using an include/all
rule:
In OSCAL, controls may contain controls. For example, in SP 800-53 many controls are + expression syntax. [XXXX]
Select all controls from a catalog by using an include-all
directive:
In OSCAL, controls may contain controls. For example, in SP 800-53 many controls are supplemented with control enhancements; in OSCAL these are represented as child - controls within parent controls. So parent AC-2 has children AC-2(1) through AC-2(13), - for example.
Child controls can be included by the same mechanism as controls, i.e. by means of an + controls within parent controls. So parent AC-2 (in a given catalog) has children + AC-2(1) through AC-2(13), for example.
Child controls can be included by the same mechanism as controls, i.e. by means of an ID call. Alternatively a match can frequently be applied (at least given most ID - assignment schemes) to match controls and child controls together.
Additionally, a @with-child-controls
directive on a call
or
- match
can indicate that child controls (that is, direct children not all
- descendants) should be included with the applicable call(s) or match(es).
Furthermore, all[@with-child-controls='no']
may select all controls placed
- directly within a group, excluding all controls appearing inside other controls.
Calls and matches may be combined in a single import. Their effect is cumulative; any
- control that is selected by both calling and matching from a given include
- directive, or by matching more than one pattern, is included once (unless it is also
- marked for exclusion, see [See: Excluding controls]).
Exclusions work the same way as inclusions, except with the opposite effect - the - indicated control(s) do not appear in the target catalog.
Additionally, there is no such thing as exclude/all
, which is invalid and
+ assignment schemes) to match controls and child controls together.
Additionally, a @with-child-controls
directive on an
+ include-controls
indicates that all descendant controls should be
+ included with the applicable call(s) or match(es). Its operation is recursive in this
+ respect: @with-child-controls='no'
applies not only to controls included
+ specifically, but also to those selected by means of the flag applying to their parent
+ (containing) controls.
Furthermore, include-all[@with-child-controls='no']
may select all
+ controls placed directly within a group, excluding all controls appearing inside other
+ controls.
Calls and matches may be combined in a single import. Their effect is cumulative; any
+ control that is selected by both calling and matching from a given
+ include-controls
directive, or by matching more than one pattern, is
+ included once (unless it is also marked for exclusion, see [See: Excluding controls]).
Exclusions work the same way as inclusions, except with the opposite effect - the + indicated control(s) do not appear in the target catalog.
Additionally, there is no such thing as exclude-all
, which is invalid and
should be considered inoperable.
Any control designated to be both included and excluded, is excluded. This holds
irrespective of the specificity of the selection for inclusion or exclusion. For
example, if AC-1 is included by id ac-1
and excluded by matching
ac.*
, it is excluded. Selection for exclusion prevails.
One possible exception: what about when a control is excluded, but it has
- descendant controls that are included? Under include/all
, the descendants
- maybe would be excluded. If the subcontrol is included by call
or
- match
perhaps the 'shells' of its ancestors should be included despite
- their nominal exclusion. See section on "implicit inclusions under as-is",
- below.
all
An empty import statement should be considered the same as an import with an
- include/all
. An empy import
directive
is functionally equivalent to this:
{{< highlight xml>}}The same thing occurs if an exclude
directive is given without an
- include
directive.
Example:
{{< highlight xml>}}is the same as this:
{{< highlight xml>}}Target: all controls from the catalog are included except AC-1.
Because controls may be organized in the subsequence merge phase, order of controls
+ descendant controls that are included? Under include-all
, the descendants
+ maybe would be excluded. If the subcontrol is included by include-controls
+ perhaps the 'shells' of its ancestors should be included despite their nominal
+ exclusion. See section on "implicit inclusions under as-is", below.
Because controls may be organized in the subsequence merge phase, order of controls
returned in the selection step is unimportant in most circumstances. If no merge
directive is applied, however, or the no merge
combination rule is used (see
below), controls are returned for an import in the order of their appearance in their
catalog of origin.
BR suggests we revisit as an alternative would be to sort in call order. I - think the given proposal is simplier given cases of match and "all".
Unlike controls, parameters are copied implicitly. Whether a parameter in the source + think the given proposal is simpler given cases of match and "all".
Unlike controls, parameters are copied implicitly. Whether a parameter in the source
catalog is copied into the target depends on two factors: its location, and whether it is
referenced by an insert
directive in the target catalog.
insert
directives may occur anywhere in a catalog's data content including not
only in controls, but in other parameters. Thus a parameter may have to be included by
virtue of an insert
directive, making reference to it, that occurs in another
parameter, included by virtue of a different insert
directive found in an
- included control.
Any parameter (param
element) that appears in a control being selected,
+ included control.
Any parameter (param
element) that appears in a control being selected,
appears in the target, unless the parameter element was indicated by a remove directive
- (see below [See: Removing contents from controls]).
BR thinks such nonsense should be prevented somehow perhaps warnings or + (see below [See: Removing contents from controls]).
BR thinks such nonsense should be prevented somehow perhaps warnings or errors. My feeling is, that's what validation is all about.
A1 aaaaa aaaaaaaaaa
There are two situations where parameters must be considered separately from controls, +{{}}
There are two situations where parameters must be considered separately from controls,
in which case they are considered to be loose
, appearing in the target directly
inside catalog
or group
. The first is when they have no control
parent in the source catalog (or rendered profile): they are inherently loose
.
@@ -624,7 +617,7 @@
are copied into the resulting grouping structure directly preceding the first control
appearing that contains an insert
directive referencing that parameter.
Parameters that are not referenced by an insert
elsewhere in the target, do not appear
in the target. While it is not formally invalid to include unreferenced (unused) parameters,
- profile resolution requires that none be included except as a special option.
merge/combine
- handling control collisionsColliding controls (or clashing) describes the condition that + profile resolution requires that none be included except as a special option.
merge/combine
- handling control collisionsColliding controls (or clashing) describes the condition that occurs when multiple invocations of controls with the same ID are given, and so a profile resolution will result in duplicative and/or contradictory information. Even given a rule against multiple imports of the same resource, it may frequently occur that in profiles @@ -639,7 +632,7 @@ problems.
Apart from handling problems and exceptions, however, for different purposes different
profiles might wish to apply different rules. OSCAL offers three such rules: the
directives for each are given as a value on the @method
attribute of a profile
- merge/combine
element.
merge/combine
givenIf no merge
directive is given in the profile, or if a merge
is
+ merge/combine
element.
merge/combine
givenIf no merge
directive is given in the profile, or if a merge
is
given without a combine
, control or parameter clashes are treated as if
merge/combine[@method='keep']
were given. A profile with no
merge
directive:
No mergecombination rule
When a merge is indicated by merge/combine[@method='keep']
, or not given,
+{{}}
No mergecombination rule
When a merge is indicated by In the target (showing control selections): In this case, downstream errors should be expected: the two Processors may optionally produce warnings when clashing controls are produced due to
- redundant invocations.merge/combine[@method='keep']
, or not given,
the no merge
combination rule is used. Clashing controls or parameters are not
merged. (They are kept.) Their groups are merged only if another merge
directive such as as-is
is given. (The combination rules affect only the
@@ -668,16 +661,16 @@
id="merge-keep_profile">
a1
controls
clash with each other, as do the two b1
controls. To remove the duplicates,
either repair the source catalog to avoid the redundant invocations (usually a better
solution) or (when that is unavoidable) use a different merge combination rule.
{{< highlight xml>}}Use first
merge combination rule
Use firstmerge combination rule
When the use first
combination rule is applied, the first reference to a given
@@ -707,23 +700,23 @@
imported profile presents a representation of a control also given in the catalog
import, the profile’s representation (perhaps modified) is taken. However, if the import
directive targeting the catalog appears first, the catalog's (unmodified) representation
- of the control is included.
The same logic applies to parameters in the source catalogs, as modified by parameter
- set
directives.
The same logic applies to parameters in the source catalogs, as modified by
+ set-parameter
directives.
In the target (showing control selections):
{{< highlight xml>}}The use first
combination rule also affects back matter,
- q.v.
Merge controlsmerge combination rule
Merge controlsmerge combination rule
When the merge controls merge combination rule is applied, the processor @@ -751,19 +744,19 @@ original order of the controls in their source catalog, then sorted into canonical order for control contents.
Example:
{{< highlight xml>}}merging with
{{< highlight xml>}}produces
{{< highlight xml>}}Note that groups are not merged by this rule; for that purpose, merge/as-is
or merge/custom
should be used (in addition to the combine directive).
Since the effect of this merge rule is to drop data silently (when it is considered to @@ -771,9 +764,9 @@ catalog and profile inputs, whose potential clashes or collisions are known in advance.
BR wants SP800-53 examples and points out prop[@name='status'] is not in it. But this is not a guide to SP800-53 it is a spec for any profile - resolution.
When profiles import from more than one resource (catalog or profile), not only is + resolution.
When profiles import from more than one resource (catalog or profile), not only is there a potential for clashing controls, but also citation and resource elements - appearing in the back matter, may clash with other citations and resources, yielding + appearing in the back matter may clash with other citations and resources, yielding validation errors in the result when an ID appears more than once.
This situation is not remedied by the no merge
combination rule. Under this
rule, any citation or resource that is referenced from an imported catalog or profile,
is included in the resolution target. This may result in replicated contents, where the
@@ -788,7 +781,7 @@
Maintenance and possibly revision of upstream data sets is necessary to prevent such
ambiguities.
Examples
How does the merge/combine[@method='merge']
(merge controls
)
rule apply to back matter? Maybe the same as use-first. Or maybe use an approach
- similar to controls.
The overall organization of the target document at the top level has been described [See: Target catalog structure]. This section describes how a profile may dictate the body of + similar to controls.
The overall organization of the target document at the top level has been described [See: Target catalog structure]. This section describes how a profile may dictate the body of
the target catalog
, apart from its metadata
or
back-matter
.
The merge
element, in addition to proposing a strategy by which to
disambiguate clashing controls, also includes directives that can be used to organize the
@@ -796,24 +789,24 @@
are used to introduce structure into the target catalog, as-is
and
custom
. If neither is given, no structure is provided: parameters and
controls in the target appear in sequence, with no structure or grouping apart from what
- is internal to the controls
Profiles that have neither of these directives are resolved as unstructured catalogs, + is internal to the controls
Profiles that have neither of these directives are resolved as unstructured catalogs, with no groupings of controls.
Unstructured catalog output is produced by emitting the contents of the sequence of
control selections produced in the selection phase, seeing to it that (for validity in
XML) they are sorted so that all loose param
elements (that is, that do not
appear inside controls) appear before all control
elements.
as is
An as-is
directive is used to reproduce the structure of a source catalog in
+
as is
An as-is
directive is used to reproduce the structure of a source catalog in
the target catalog. When more than one catalog is referenced by a profile, the
as-is
directive produces a catalog that combines the grouping structures of
all the source catalogs.
(consider case of multiple imports of single resource – we need to track - catalog identity here)
as-is
Under as is, a resolved profile’s structure is expected to replicate the + catalog identity here)
as-is
Under as is, a resolved profile’s structure is expected to replicate the structure of source catalogs.
This is achieved by propagating, with all controls that are included, all groups that
they appear within, along with the groups’ IDs, titles and other contents, including
- any parameters to be referenced in the target ([See: Loose parameters] ) or
+ any parameters to be referenced in the target ([See: Loose parameters] ) or
part
element children; but not including any controls not included in one
or another control selection, directly or indirectly. Controls may be included into a
group indirectly when merging as is, by virtue of containing controls
- that have been included [See: Merging as is
]
Groups that do not include controls that have been included (either directly or as
+ that have been included [See: Merging as is
]
Groups that do not include controls that have been included (either directly or as descendants of contained groups), are not propagated to the target.
This is slightly different from groups, which may in valid inputs have other contents – besides titles, controls, or groups – that profile authors wish to see copied into the customized catalog.
Example:
{{< highlight xml>}}In this case the profile does not designate either control xyz-1
or
control xyz-1.2
for inclusion
The unmentioned elements are included, however, with their titles, because controls
that they contain are included. But control xyz-1.3
does not appear in
@@ -873,56 +866,75 @@
no merge
process.
Processors may optionally provide services for detection of nesting and structural issues related to the importing and merging of controls, including but not limited to validation / detection utilities (for imports regarded as incorrect, incomplete or - otherwise subject to improvement); runtime warnings; or user interfaces.
One title only. Other contents? Elements and controls are subject to the merge - combination rules. Other kinds of contents of groups …
custom
The merge/custom
directive provides a way to provide a target catalog with a
- custom structure. It combines call
or match
directives, which work
- as they do in selection, into groups, which provide the target with its structure.
The controls to be included must be selected in the profile's import. A call or match + otherwise subject to improvement); runtime warnings; or user interfaces.
One title only. Other contents? Elements and controls are subject to the merge + combination rules. Other kinds of contents of groups …
custom
The merge/custom
directive provides a way to provide a target catalog with a
+ custom structure. It combines include-controls
directives, which indicate
+ only the controls to be included, into groups, which provide the target with its
+ structure.
The controls to be included must be selected in the profile's import. A call or match
appearing inside custom
, if it refers to controls not selected, is
inoperative.
The structures given inside the custom assembly indicate the structure of the target catalog. They produce this structure by mapping one for one. Unlike as-is merging, there is no implicit inclusion of controls by virtue of including descendant controls. Instead, all controls are included in the target in the locations indicated by the custom structure.
A group element given in a custom structure results in an analogous group. Its attributes (flags) are copied to the target catalog to appear on the group in the target. All element contents inside the group, including title, param, prop and part elements, are likewise copied into the target, appearing in the same order as in the source.
Within custom, include-controls directives reference controls in much the same way as they do when given in import: a control is identified by @id using with-id, or by glob match on matching/@pattern.
A with-id results in including, at that point inside the new grouping, all controls with the @id given by the with-id. They should be given in the same order as they appear in the control selection(s). Note that merge combination rules apply, so depending on settings, conflicts between controls with the same @id may be resolved. [See: merge/combine - handling control collisions]
A matching directive results in including, at that point inside the new grouping, all controls whose @id matches, as an XPath regular expression, the pattern given in the matching/@pattern. This may result in several or many controls; again they are given in the same order as they have in the control selection(s). Again, among clashing controls (that have the same ID), merge combination rules apply. [See: merge/combine - handling control collisions]
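As a non-normative sketch of a custom structure (the group IDs, titles, and control IDs are invented, and the nesting of insert-controls around include-controls is an assumption about the schema in use):
{{< highlight xml>}}
<!-- Hypothetical merge/custom structure: one group holds control ac-1 by literal ID,
     a second group gathers every control whose ID matches the glob ac-2* ;
     all of these controls must also be selected in the profile's import. -->
<merge>
  <custom>
    <group id="policy">
      <title>Policy</title>
      <insert-controls>
        <include-controls>
          <with-id>ac-1</with-id>
        </include-controls>
      </insert-controls>
    </group>
    <group id="account-management">
      <title>Account Management</title>
      <insert-controls>
        <include-controls>
          <matching pattern="ac-2*"/>
        </include-controls>
      </insert-controls>
    </group>
  </custom>
</merge>
{{< /highlight >}}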
Within merge, the include-controls directives that place controls can also indicate the order in which the selected controls are to be emitted in the result catalog, using an @order attribute. Three values must be supported:
ascending will sort all included controls, whether selected by with-id (literal ID) or matching (matching ID to glob expression), into ascending alphanumeric order. (collation guidance?)
descending will sort all included controls, whether selected by with-id (literal ID) or matching (matching ID to glob expression), into descending alphanumeric order.
keep indicates that controls should be inserted in the order of their appearance, first among import selections, then in the order they appear in their (imported) catalog source.
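A minimal, hypothetical sketch of ordered insertion, following the wording above in attaching @order to include-controls (in some schema versions the attribute may instead sit on the enclosing directive); the pattern is invented:
{{< highlight xml>}}
<!-- Hypothetical: every control whose ID matches ac-* is emitted in ascending
     alphanumeric order of its ID. -->
<insert-controls>
  <include-controls order="ascending">
    <matching pattern="ac-*"/>
  </include-controls>
</insert-controls>
{{< /highlight >}}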
[example]
In this example, control a2 does not appear in the target catalog: a custom structure was indicated (by the presence of merge/custom in the source profile), but the control is never referenced from inside an insert-controls directive in the custom structure.
When a custom structure references a control that is not included in any control selection (in the import phase), the reference is inoperative. A processor may signal a warning in this case.
Similarly, no error occurs if a control is selected (appears in a control selection, or even more than one), but it is never referenced from the custom
@@ -951,25 +963,27 @@
id="merge-custom_profile">
In this case, no group
container appears in the target for the controls
- b2
and c2
; because they appear unwrapped before the
+ b2
and c2
; because they appear unwrapped before the
group
in the profile source, they appear the same way in the target
catalog. Control c2
, however, appears in a subgroup group2
inside the group with ID group1
, again reflecting the source
- organization. Note also that id
flags are copied.
Again, no provision is made to prevent duplicate or colliding controls from
+ organization. Note also that @id
flags are copied.
Again, no provision is made to prevent duplicate or colliding controls from
appearing. Optionally, a processor may signal warnings when it determines that
controls from selections are designated to appear in the results more than once, due
- to multiple colliding occurrences of call
or match
inside
- custom
.
with-child-controls inside a custom catalog structure
Effect of setting to 'yes' or 'no' in this context?
(Aka patching) Explicit modification of control content
There are two ways a control may need to be modified. Commonly, controls might be amended – new material might be added. (For example, the priority marker properties P1-P3 in the SP800-53 LOW, MODERATE and HIGH baselines.) Less commonly, materials might be removed or edited.
OSCAL does not provide for changing a control, but editing can be achieved by
@@ -1006,31 +1018,31 @@
levels as well. Features make it easy both to trim (filter) and to supplement catalogs.
In addition to any modification or adjustment to controls, this section of a profile is used to set parameter values or other properties of parameters declared in catalogs and referenced from their text. Parameters are propagated with the profile's setting into the target catalog for its use.
Modification of parameter settings is indicated using set-parameter.
Parameters are not always given within controls. A profile resolution target must include copies (with or without modifications or settings) of all parameters defined within the source catalog which are referenced from insert directives anywhere inside included controls. Accordingly, any parameter referenced by an insert in any source catalog is copied from catalog source to target [See: parameter propagation rules for merge/as-is, merge/custom] without any configuration required in the profile. However, a profile should not reproduce parameters from source catalogs which are not referenced from insert directives.
When parameters are propagated, they may be modified by set-parameter directives given in the profile. If more than one set-parameter directive is given for the same parameter, all are applied, in the sequence given in the profile. [XXX even when merge/combine/@method='use-first'?]
A set-parameter/label replaces the param/label on the affected parameter.
A set-parameter/value or a set-parameter/select replaces any param/value or param/select on the affected parameter.
Other elements given in a set-parameter are added to the affected parameter, after elements of the same name.
The prescribed order of elements in the affected parameter is retained: label; usage; constraint; guideline; value or select (a choice); and link.
[example]
Note that select replaces value:
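The listing referred to above is not reproduced here. As a hypothetical sketch (the parameter ID, label, and choices are invented, and the directive is assumed to sit in the profile's modification section), a set-parameter that supplies a select in place of a catalog-assigned value might look like:
{{< highlight xml>}}
<!-- Hypothetical: the source catalog defines parameter ac-1_prm_1 with a single value;
     this set-parameter replaces that value with a select offering two choices. -->
<set-parameter param-id="ac-1_prm_1">
  <label>review frequency</label>
  <select>
    <choice>annually</choice>
    <choice>every three years</choice>
  </select>
</set-parameter>
{{< /highlight >}}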
A set-parameter directive whose @param-id does not correspond to any parameter in the resolved catalog is inoperative. Optionally, a processor may issue a warning where such directives are found.
A control can be altered by an alter directive on a control. The @control-id flag on the alter indicates the control to which the alteration is applied.
Contents may be added to controls using an add directive inside an alter directive. There are two forms of alteration: with implicit and explicit bindings.
An add directive with no id-ref flag is taken to apply to the control as a whole. Its position flag may be either of two values: starting and ending. The contents of the add directive are then added to the control contents in the target, either after its title when position is
@@ -1059,35 +1071,35 @@
permits these values.
An addition operating on a control with implicit binding and position starting
[example: a control with an existing prop ("Only for adult use.") to which a new part ("Pending scheduled testing.") is added]
Position is starting but the new part is added after the existing prop, because prop elements must always occur first.
An addition operating on a control with implicit binding and position ending
[example: a control with an existing prop ("Only for adult use.") to which a new prop ("Pending scheduled testing.") is added]
The add/@position is ending, so the new prop appears after the existing prop.
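A hypothetical sketch of an implicit-binding addition (the control ID and property name are invented; whether prop carries its value as element content or as an attribute depends on the schema version):
{{< highlight xml>}}
<!-- Hypothetical implicit binding: with no element reference, the add applies to
     control a1 as a whole; position="ending" places the new prop after the
     control's existing contents. -->
<alter control-id="a1">
  <add position="ending">
    <prop name="status">Pending scheduled testing.</prop>
  </add>
</alter>
{{< /highlight >}}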
An explicit binding on an addition permits inserting new contents anywhere in a control, not only at the top level. It is given by a @ref-id flag on the add directive. The value of the @ref-id must correspond to the value of an @id flag on an element inside the control, and not the control
@@ -1106,7 +1118,7 @@
schema.
An addition operating on a control with explicit binding and position after
[example: source control with parts "Collect recycling for pickup." and "Sweep surfaces free of dust."]
Note that the add directive identifies the element with @id a1.b1 as its target.
[example: the result, in which "Unavailable on weekends." appears at the indicated position]
A better result can be obtained (a better target may be defined) by using two add directives, to insert the new prop separately before any part elements in the target.
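A hypothetical sketch of an explicit-binding addition (IDs and content invented; flag names follow the description above):
{{< highlight xml>}}
<!-- Hypothetical explicit binding: ref-id points at the element with id a1.b1
     inside control a1; position="after" places the new content immediately
     after that element. -->
<alter control-id="a1">
  <add position="after" ref-id="a1.b1">
    <prop name="availability">Unavailable on weekends.</prop>
  </add>
</alter>
{{< /highlight >}}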
add directives modifying controls inside controls
OSCAL supports control extensions inside controls in the form of control elements inside control elements. Because the semantics of the add directive target any (element) contents of controls, they can be used to target these control extensions for modification as well as other contents. Because such a control can already be modified using implicit bindings, it is
@@ -1142,33 +1154,35 @@
properly by the resolution processor. XXX can we guarantee valid results here and do we have to specify a sort/order? However, it is not an error to target control elements in this way, manipulating them in the same way as other targets may be manipulated.
Contents inside controls can be removed from them in catalog targets. In combination with adding new contents, this feature can be used to edit controls as well as amend them.
A remove directive inside an alter directive identifies an element or set of elements inside a control to be removed. It does this using any of five flags. Flags are additive; that is, if more than one flag is given, all must match (a hypothetical sketch follows this list):
@id-ref, like add/@id-ref, matches an element by its @id value. Because @id values are unique, the remove directive will remove only a single element. Ordinarily this would not be combined with other identifiers for removal.
@name-ref keys to the @name attribute on any element inside the control. Any element inside the control with the assigned name is removed (typically providing there is also a match on @ns).
@ns-ref keys to the @ns attribute on any element inside the control. Any element inside the control with the assigned ns is removed (typically providing there is also a match on @name).
@class-ref keys to the @class attribute on any element inside the control. All elements with matching class are removed.
@item-name keys to the element or property name; for example, <remove item-name='prop'/> has the effect of removing all prop elements from inside the control. (NB: explain how this maps into JSON when items are grouped)
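A hypothetical sketch of an additive removal using two of the flags described above (the control ID, name, and namespace are invented):
{{< highlight xml>}}
<!-- Hypothetical additive removal: only elements inside control a1 that match
     both the given name and the given ns are removed. -->
<alter control-id="a1">
  <remove name-ref="priority" ns-ref="http://example.com/ns/oscal-extensions"/>
</alter>
{{< /highlight >}}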
Unlike an add directive, a remove may not be bound implicitly to the control; its binding, to contents inside the control, must be explicit. To remove a control, simply avoid selecting it into the profile, or exclude it specifically using import/exclude-controls.
As with add, a remove that targets any element outside the control is inoperative. Similarly, a remove directive that indicates that all prop elements should be removed from the target catalog applies only to prop elements within the control to which its alter directive is bound.
Finally, some alterations are made automatically. For example, link elements in source data representing cross-references in a catalog may no longer be effective in catalogs produced from profiles which have not included the elements (controls or other) to which the links refer. Either: expand the links to refer back to the source catalogs; or remove the links.
Note: we could also expand links to point back to the (resolved) source, particularly if it is described in back-matter as a resource...
In addition to selecting, merging and modifying, certain operations are conducted in profile resolution that occur irrespective of phases, that is, in any profile resolution. These include rewriting the profile metadata into the resolved baseline (catalog), and filtering and rewriting back matter.
back-matter in the result is produced by combining all elements within back-matter in all source catalogs, with the back-matter in the input profile. Since catalog resolution is defined to operate recursively, when profiles import
@@ -1197,7 +1211,7 @@
resolved catalog, by means of an ID reference taking the form of the reference's id value prepended with #, with the following exception:
Any resource given in either an imported catalog (baseline), or in the back-matter of the source profile, is kept (appears in the result) if it is marked in source with a prop element whose @name is given as keep and whose value is
@@ -1206,7 +1220,7 @@
any member has the effect of excluding from the group, any that do not. In other words, only the resources marked as always keep with that identity are selected from, and the last of these (only) is propagated.
Marking a resource with a prop with name='keep' and value always thus has a couple of uses:
Placing such a property on a resource in a catalog has the effect of ensuring it will always appear in the baseline produced by any profile importing that catalog, even if
@@ -1218,39 +1232,32 @@
in the resulting baseline catalog, and that its version is used, unless when imported into another profile, in which case it may be overridden by another one (also marked as always keep).
Check to be sure 'keep' is permitted and reserved by the schema here
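Subject to the schema check noted above, a hypothetical sketch of a resource marked for unconditional propagation (the UUID, title, link, and the exact form of the prop are assumptions):
{{< highlight xml>}}
<!-- Hypothetical: this resource is propagated into any resolved baseline produced
     from a profile importing this catalog, whether or not the selected controls
     reference it. -->
<back-matter>
  <resource uuid="11111111-2222-4333-8444-555555555555">
    <title>Agency tailoring guidance</title>
    <prop name="keep">always</prop>
    <rlink href="https://example.org/tailoring-guidance.pdf"/>
  </resource>
</back-matter>
{{< /highlight >}}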
A null profile is a profile that imports a catalog and selects all its controls without modification. A truly null profile – that is, with only an import and nothing else – does not return its imported catalog unchanged. But the changes it makes can be useful. This makes a null profile, or its near-null variants as described in this section, potentially useful for catalog management, design and use.
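A hypothetical (near-)null profile might look like the following sketch; the catalog href is invented, metadata is omitted, and the "select everything" directive is shown as include-all, which may be spelled differently depending on the schema version:
{{< highlight xml>}}
<!-- Hypothetical null profile: import everything, modify nothing explicitly.
     Resolution still normalizes the result, for example dropping loose parameters
     that no included control references. -->
<profile xmlns="http://csrc.nist.gov/ns/oscal/1.0" id="null-profile">
  <!-- metadata omitted for brevity -->
  <import href="source-catalog.xml">
    <include-all/>
  </import>
</profile>
{{< /highlight >}}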
In the target, the catalog group structure and all hierarchy is removed; its controls are all returned in sequence. Any loose parameters in the catalog not referenced in a control are dropped.
The same, except that only top-level controls are included, not descendants.
unit test this
Using merge as-is='true', the grouping hierarchy of a source catalog can be reflected in the target. This enables a near-null profile to define a normalization or cleanup pass on a catalog, as it will have the result of removing dangling references and orphan parameters, without otherwise affecting controls.
unit test this too, especially with more than two levels of control hierarchy as well as groups
unit test this
For conformance, an OSCAL processor must deliver results of profile processing, in a basic configuration, exactly as described in these specifications with no elaboration. Conformance does not preclude providing additional features, however, including elaborated outputs, in an optional mode. Such features could include offering warning or process exception handling (appropriate to workflow) not described here; outputs provided with comments or extra structure to support tracing or analytics; or gateway and draconian modes that would provide user assistance or refuse to deliver results for inputs considered erroneous in a workflow.
rendering - showing both unresolved (raw) and resolved profiles
editing / integrity checking
rewriting/normalization
diffing
deriving a profile as the delta of two catalogs (base and tailored)