diff --git a/build/Dockerfile b/build/Dockerfile index 213120138c..a61316436c 100644 --- a/build/Dockerfile +++ b/build/Dockerfile @@ -1,12 +1,17 @@ -FROM ubuntu:20.04 +FROM ubuntu:20.04 as base-environment ARG saxonversion ARG hugoversion +ARG calabashversion ENV TZ=US/Eastern RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone -RUN apt-get update && apt-get install -y wget apt-utils libxml2-utils jq maven nodejs npm build-essential python3-pip git && apt-get clean +RUN apt-get update && apt-get dist-upgrade -y + +FROM base-environment as oscal-dependencies + +RUN apt-get install -y apt-utils build-essential git jq libxml2-utils maven nodejs npm python3-pip unzip wget && apt-get clean RUN npm install -g npm n RUN n latest RUN npm install --loglevel verbose -g ajv-cli@"^4.0.x" ajv-formats@"^1.5.x" json-diff markdown-link-check yaml-convert@"^1.0.x" yargs @@ -15,15 +20,28 @@ RUN pip3 install lxml #RUN useradd --create-home --home-dir /home/user user #USER user -RUN mvn org.apache.maven.plugins:maven-dependency-plugin:2.10:get -DartifactId=Saxon-HE -DgroupId=net.sf.saxon -Dversion=${saxonversion} +ENV SAXON_VERSION ${saxonversion} + +RUN mvn org.apache.maven.plugins:maven-dependency-plugin:2.10:get -DartifactId=Saxon-HE -DgroupId=net.sf.saxon -Dversion=${SAXON_VERSION} -RUN wget https://github.com/gohugoio/hugo/releases/download/v${hugoversion}/hugo_extended_${hugoversion}_Linux-64bit.deb -RUN dpkg -i hugo_extended_${hugoversion}_Linux-64bit.deb +ENV HUGO_VERSION ${hugoversion} +RUN wget https://github.com/gohugoio/hugo/releases/download/v${HUGO_VERSION}/hugo_extended_${HUGO_VERSION}_Linux-64bit.deb +RUN dpkg -i hugo_extended_${HUGO_VERSION}_Linux-64bit.deb + +# calabash +ENV CALABASH_VERSION ${calabashversion} +ENV CALABASH_HOME /dependencies/calabash +RUN wget https://github.com/ndw/xmlcalabash1/releases/download/${CALABASH_VERSION}/xmlcalabash-${CALABASH_VERSION}.zip +RUN mkdir -p "${CALABASH_HOME}" +RUN unzip -d "${CALABASH_HOME}" 
"xmlcalabash-${CALABASH_VERSION}.zip" +RUN f=`ls -d "${CALABASH_HOME}"/*| xargs` && mv "${CALABASH_HOME}"/*/* "${CALABASH_HOME}" && rmdir "${f}" #RUN chown -R user:user /home/user -ENV SAXON_VERSION ${saxonversion} +FROM oscal-dependencies as oscal-base VOLUME ["/oscal"] WORKDIR /oscal +FROM oscal-base as cli + ENTRYPOINT ["/bin/bash"] diff --git a/build/README.md b/build/README.md index c30f890fd3..bb3d54b49b 100644 --- a/build/README.md +++ b/build/README.md @@ -6,7 +6,7 @@ This subdirectory contains a set of build scripts used to create OSCAL-related a If using Docker: -- [Docker 19.03+](https://docs.docker.com/install/) +- [Docker 20.10+](https://docs.docker.com/install/) If not using Docker: @@ -26,7 +26,7 @@ A Docker container configuration is provided that establishes the runtime enviro You can build the Docker container for the build environment using Docker Compose as follows from the OSCAL `/build` directory: ``` - docker-compose build + docker compose build ``` 3. Run the Docker container @@ -36,14 +36,14 @@ A Docker container configuration is provided that establishes the runtime enviro You can run the Docker container for the build environment using Docker Compose as follows: ``` - docker-compose run cli + docker compose run cli ``` On Windows environments, you may need to execute in a pty that allows for using an interactive shell. In such a case you can run the Docker container as follows: ``` - winpty docker-compose run cli + winpty docker compose run cli ``` This should launch an interactive shell. @@ -58,12 +58,16 @@ The following steps are known to work on [Ubuntu](https://ubuntu.com/) (tested i - SAXON_VERSION - Defines which version of Saxon-HE to use - HUGO_VERSION - Defines which version of Hugo to use + - CALABASH_VERSION - Defines which version of XML Calabash to use + - CALABASH_HOME - Defines where calabash will be installed The following is an example of how to configure the environment. 
```bash export SAXON_VERSION="9.9.1-3" - export HUGO_VERSION="0.74.3" + export HUGO_VERSION="0.83.1" + export CALABASH_VERSION="1.2.5-100" + export CALABASH_HOME="$HOME/calabash" ``` You may want to add this export to your `~/.bashrc` to persist the configuration. @@ -117,6 +121,16 @@ The following steps are known to work on [Ubuntu](https://ubuntu.com/) (tested i mvn org.apache.maven.plugins:maven-dependency-plugin:2.10:get -DartifactId=Saxon-HE -DgroupId=net.sf.saxon -Dversion=${SAXON_VERSION} ``` +1. Install Calabash + + To install Calabash, run the following: + + ```bash + wget https://github.com/ndw/xmlcalabash1/releases/download/${CALABASH_VERSION}/xmlcalabash-${CALABASH_VERSION}.zip + mkdir -p "${CALABASH_HOME}" + unzip -d "${CALABASH_HOME}" "xmlcalabash-${CALABASH_VERSION}.zip" + mv "${CALABASH_HOME}"/*/* "${CALABASH_HOME}" + Your environment should be setup. ## Running the Build Scripts diff --git a/build/ci-cd/README.md b/build/ci-cd/README.md index aa2198dffa..965b2d8f1e 100644 --- a/build/ci-cd/README.md +++ b/build/ci-cd/README.md @@ -150,18 +150,15 @@ export SAXON_VERSION=9.9.1-3 mvn org.apache.maven.plugins:maven-dependency-plugin:2.10:get -DartifactId=Saxon-HE -DgroupId=net.sf.saxon -Dversion=$SAXON_VERSION ``` -mkdir $HOME/oscal-oss +You will also need a copy of the ISO Schematron skeleton. +``` +mkdir $HOME/oscal-oss export SCHEMATRON_HOME=$HOME/oscal-oss/git-schematron git clone --depth 1 --no-checkout https://github.com/Schematron/schematron.git "$SCHEMATRON_HOME" cd "$SCHEMATRON_HOME" git checkout master -- trunk/schematron/code - - -export OSCAL_TOOLS_DIR=$HOME/oscal-oss/oscal_tools -git clone --depth 1 https://github.com/usnistgov/oscal-tools.git "${OSCAL_TOOLS_DIR}" -cd $OSCAL_TOOLS_DIR/json-cli -mvn clean install +``` Finally, export instructions in the preceding must also be copied into .bashrc so they persist in your environment. 
@@ -171,7 +168,6 @@ prettyson sudo npm install -g prettyjson ``` - jq ``` diff --git a/build/ci-cd/generate-specification-documentation.sh b/build/ci-cd/generate-specification-documentation.sh index a7366414c9..7129d413d5 100755 --- a/build/ci-cd/generate-specification-documentation.sh +++ b/build/ci-cd/generate-specification-documentation.sh @@ -57,7 +57,7 @@ if [ "$VERBOSE" = "true" ]; then fi SPEC_SOURCE="${OSCALDIR}/src/specifications/profile-resolution/profile-resolution-specml.xml" -SPEC_OUTPUT="$WORKING_DIR/docs/content/documentation/processing/profile-resolution.html" +SPEC_OUTPUT="$WORKING_DIR/docs/content/concepts/processing/profile-resolution.html" result=$(xsl_transform "$OSCALDIR/src/specifications/profile-resolution/specml-html-hugo-uswds.xsl" "${SPEC_SOURCE}" "${SPEC_OUTPUT}" 2>&1) cmd_exitcode=$? diff --git a/build/docker-compose.yml b/build/docker-compose.yml index 7d0c349053..0a8104b412 100644 --- a/build/docker-compose.yml +++ b/build/docker-compose.yml @@ -5,11 +5,14 @@ services: tty: true build: context: . + target: cli args: saxonversion: 9.9.1-3 hugoversion: 0.83.1 + calabashversion: 1.2.5-100 volumes: - "../:/oscal" - environment: - - SAXON_VERSION=9.9.1-3 - - JSON_CLI_VERSION=0.0.1-SNAPSHOT +# environment: +# - SAXON_VERSION=9.9.1-3 +# - JSON_CLI_VERSION=0.0.1-SNAPSHOT +# - CALABASH_VERSION=1.2.5-100 diff --git a/docs/Dockerfile b/docs/Dockerfile deleted file mode 100644 index 071d3b67ef..0000000000 --- a/docs/Dockerfile +++ /dev/null @@ -1,12 +0,0 @@ -FROM ubuntu:20.04 -ARG HUGO_VERSION=0.83.1 - -RUN mkdir /hugo && \ - cd /hugo && \ - apt-get update && \ - apt-get install -y apt-utils curl && \ - curl -L -O https://github.com/gohugoio/hugo/releases/download/v${HUGO_VERSION}/hugo_extended_${HUGO_VERSION}_Linux-64bit.deb && \ - apt-get install ./hugo_extended_${HUGO_VERSION}_Linux-64bit.deb -WORKDIR /docs -COPY . . 
-ENTRYPOINT hugo server --enableGitInfo=false -v --debug --minify --bind 0.0.0.0 diff --git a/docs/README.md b/docs/README.md index 99a04e6639..782b25e83e 100644 --- a/docs/README.md +++ b/docs/README.md @@ -9,7 +9,7 @@ The website is built using the [Hugo](https://gohugo.io/) static site generator If using Docker: - [Saxon-HE for Java](http://saxon.sourceforge.net/#F9.9HE) -- [Docker 19.03+](https://docs.docker.com/install/) +- [Docker 20.10+](https://docs.docker.com/install/) If not using Docker: @@ -35,7 +35,7 @@ Instructions for installing the Hugo CLI on your OS can be found [here](https:// The website's visual styling is also backed by the U.S. Web Design System (USWDS) via an open source Hugo theme at https://github.com/usnistgov/hugo-uswds. -The USWDS framework, a Jekyll customization we are using, is documented here: https://designsystem.digital.gov/. +The USWDS framework is documented here: https://designsystem.digital.gov/. ### Building the site with LiveReload @@ -86,8 +86,8 @@ The website can also be developed and built using the included Docker resources. Assuming you've [installed Docker](https://docs.docker.com/install/) and [Docker Compose](https://docs.docker.com/compose/install/) for your system, you can build and serve the site using Docker Compose as follows: ``` -docker-compose build -docker-compose up +docker compose build +docker compose up ``` Once the site is running, it can be accessed at http://localhost:1313/OSCAL. Whenever you make any changes to the content with the Hugo server running, you'll notice that the site automatically updates itself to reflect those changes. 
diff --git a/docs/content/concepts/processing/profile-resolution.html b/docs/content/concepts/processing/profile-resolution.html index 160c1b10e0..7740861da0 100644 --- a/docs/content/concepts/processing/profile-resolution.html +++ b/docs/content/concepts/processing/profile-resolution.html @@ -1,12 +1,7 @@ --- title: OSCAL Profile Resolution description: Transforming a profile into the tailored catalog it represents -toc: - enabled: true -aliases: - - /documentation/processing/profile-resolution/ --- -

These specifications describe how to render an OSCAL profile document in the form of an OSCAL catalog. We call this profile resolution.

Reading these specifications

Terminology

These DRAFT specifications take the form of a tag set mapping between the OSCAL profile and OSCAL catalog XML document models. The same mapping @@ -28,19 +23,20 @@ the intended or expected results. Conformance to these specifications requires that a processor's result corresponds to the target as described here, given a particular source.

The term directive is used in these specifications to refer to an element or - combination of elements in source data, which is designed to effect a particular outcome in + combination of elements in source data, which is designed to affect a particular outcome in the target catalog. For the most part, directives are in the source profile document – for - example, a set element in a profile is a directive to set a parameter value in - the target catalog.

In contrast to profiles, catalogs do not contain directives but instead, representations of + example, a set-parameter element in a profile is a directive to set a parameter + value in the target catalog.

In contrast to profiles, catalogs do not contain directives but instead, representations of security controls and their parts, optionally gathered in groups, on which directives are assumed to operate. There is one exception: the insert element in control - catalogs can be considered a directive that a parameter value be inserted into a particular - location in running text (data content), when a catalog is rendered. Since these semantics - respect catalog rendering, however, not profile resolution, they are out of scope for this - document. For purposes of profile resolution, catalogs may be considered to be passive - resources from which contents are replicated (copied): the operation of the profile, in - contrast - its OSCAL semantics as opposed to the domain-specific semantics of the - information it handles – requires that it express what is to be done and how.

Original order refers to the order of elements and objects as given in the + catalogs can be considered a directive that a parameter value (or other value, as + appropriate) be inserted into a particular location in running text (data content), when a + catalog is rendered. Since these semantics respect catalog rendering, however, not profile + resolution, they are out of scope for this document. For purposes of profile resolution, + catalogs may be considered to be passive resources from which contents are replicated + (copied): the operation of the profile, in contrast - its OSCAL semantics as opposed + to the domain-specific semantics of the information it handles – requires that it express + what is to be done and how.

Original order refers to the order of elements and objects as given in the source. Canonical order refers to the correct order of elements or objects as defined by an OSCAL format such as the catalog or profile models. That is, the respective schemas of these models enforce order to the extent it is meaningful; where it is @@ -79,16 +75,25 @@ element

  • @attribute (punctuated @) indicates an attribute named attribute

  • elem/@attr indicates an attribute named attr on an element named elem

  • elmn[@att='val'] indicates an element elmn that has an - attribute @att with value val.

  • Variable contents in XML examples

    Examples given to illustrate targets for particular source (example) inputs are given, like - the source, in XML notation. Sometimes, however, the expected outputs are not fully defined - by the inputs given. In these cases, a description of the dynamic (variable) contents in - curly braces { }.

    So for example where a property is added to target metadata, an example could + attribute @att with value val.

    XML examples

    Examples given to illustrate targets for particular source (example) inputs are given, like + the source, in XML notation. Sometimes, however, the expected outputs are not defined by the + inputs given and not literally present. In these cases, a description of the dynamic + (variable) contents is given in curly braces { }.

    So for example where a property is added to target metadata, an example could show

    {{< highlight xml>}}{ timestamp }{{}}

    This indicates the last-modified element should be produced with contents generated appropriately, namely with an actual time stamp in this case, and not the string { timestamp }. In display, these values should also be shown with special - formatting as shown here.

    Examples also indicate unspecified content using a simple (elision) character, - typically for more verbose chunks of target data whose production is specified elsewhere in - the document.

    Operational context

    Processing context and scope

    A profile in OSCAL represents a selection and configuration of a set of + formatting as shown here.

    Such highlighting is also used to indicate the beginning and end of nominal or implicit + structures described in these specifications (mainly selection), which are not + represented by tagging in the final results of resolution.

    XML examples also indicate unspecified content using a simple (elision) + character, typically for more verbose chunks of target data whose production is specified + elsewhere in the document.

    Finally, although examples are syntactically faithful to OSCAL, they are not necessarily + always formally valid in every respect. For example, OSCAL defines permissible, recognized + values for property names (prop/@name) and permissible values for properties with + certain names (controlled by OSCAL), and those rules may not be observed here. (OSCAL does + not permit a prop[@name='status'] to have a value of pending, for + example, as sometimes shown in this documentation). Examples are given for purposes of + illustrating profile resolution semantics only, and should not be taken as normative for any + actual use.

    Operational context

    Processing context and scope

    A profile in OSCAL represents a selection and configuration of a set of controls. In the normal case, the set of controls available to a profile is provided by a catalog. For example, the three NIST SP 800-53 profiles representing the impact baselines HIGH, MODERATE and LOW: each of these calls on a catalog representing the @@ -118,7 +123,7 @@ controls are included; their ordering (insofar as the target format represents ordering); and the structure (groups) in which they are embedded. This specification is designed to make this possible, by defining outputs with sufficient rigor that the same is - meaningful and testable in this context.

    Why resolve a profile as a catalog

    A profile is a representation of a delta, a here to there. As such, a profile might + meaningful and testable in this context.

    Why resolve a profile as a catalog

    A profile is a representation of a delta, a here to there. As such, a profile might be conceived of as a recipe or instruction set – a sequence of steps or procedures – whereby to create an OSCAL catalog (set of controls describing security requirements) from an OSCAL catalog (set of controls describing security requirements). The primary use case for this is @@ -146,13 +151,13 @@ the system in the context of the actual intentions appropriate to that system, can we properly assess its security. Profiles make this possible.

    In order for this to work however we need a process that can perform the necessary operations, to apply the delta to an unmodified catalog, to produce the local view of the - catalog as modified. This is profile resolution.

    Formal validation of source and result

    In an XML context, document validation is available as a means of determining whether an + catalog as modified. This is profile resolution.

    Formal validation of source and result

    In an XML context, document validation is available as a means of determining whether an arbitrary XML document entity conforms to structural and naming constraints such as must be assumed by processors respecting application semantics. These specifications assume that XML inputs for profile resolution will be schema-valid with respect to the OSCAL profile model, and hence will be tagged in the namespace http://csrc.nist.gov/ns/oscal/1.0.

    Similarly, the definition of the output format or results, is constrained by the OSCAL - catalog model as defined by its schema.

    Order of objects in serialization

    As described above, original order refers to the order of elements and objects + catalog model as defined by its schema.

    Order of objects in serialization

    As described above, original order refers to the order of elements and objects as given in the source, whereas canonical order refers to the correct order of elements or objects as defined by an OSCAL format such as the catalog or profile models. That is, the respective schemas of these models enforce order to the extent it is @@ -178,7 +183,7 @@ metadata, param, control, group, back-matter. Within this sequence, members may be missing, as for example not all catalogs will have parameters or groups. Any members that are present, however, must be - given in this order.

    ID uniqueness constraint

    In addition to those described and enforced by the respective schemas, there are additional + given in this order.

    ID uniqueness constraint

    In addition to those described and enforced by the respective schemas, there are additional constraints over the data that this process relies on. The most important of these is ID-distinctiveness. Every id flag (in XML, represented by @id attributes) on an element in the model, whether it be attached to a control, group or an @@ -198,14 +203,14 @@ namely keep (also the default), use-first and merge.

    Because ID distinctiveness within the scope of processing, is critical, OSCAL users should take care that different catalogs have distinct sets of IDs. This applies to all structures within the catalogs, not only controls but also groups, parameters, - citations and resources.

    Defining control identity

    A central problem in profile resolution is determining when two controls are both + citations and resources.

    Defining control identity

    A central problem in profile resolution is determining when two controls are both representations of the same control, even when those representations have dissimilar contents or come to a profile through different import pathways.

    For purposes of profile resolution, control identity (or parameter, group identity etc.) can be determined by any of several ways, depending on the identity designation - mode:

    Brian Ruf suggested we provide a resolution/disambiguation/debugging mode that + mode:

    Dev team member suggests we provide a resolution/disambiguation/debugging mode that produces valid OSCAL by producing new IDs for everything, thereby disambiguating by brute force. This is not a bad idea. This proposal offers that feature as 'no identity - designation', i.e. nothing is found to be identical with anything.

    Detecting issues in profiles

    It is frequently possible by static analysis to detect many conditions even in schema-valid + designation', i.e. nothing is found to be identical with anything.

    Detecting issues in profiles

    It is frequently possible by static analysis to detect many conditions even in schema-valid profile documents, that will result in problematic catalogs in resolution. The most common issue will be clashing controls, that is more than one control in underlying catalogs with a given identifier (ID), making references to such controls ambiguous. The identity @@ -213,14 +218,14 @@ handling, for purposes of development or diagnosis; but the quality of the results can only be guaranteed determinatively by the quality of the inputs, and full analysis in support of profile development and diagnostics – to produce correct inputs that accurately reflect the - intent – is outside the scope of these specifications.

    Comments in result documents

    In an XML-based profile resolution, XML comments are one straightforward way for a + intent – is outside the scope of these specifications.

    Comments in result documents

    In an XML-based profile resolution, XML comments are one straightforward way for a processor to record events or conditions without affecting the output's nominal semantics. To support this, while two processors are obliged to return the same catalog XML for the same profile XML inputs, they are not required to match one another's comments, whitespace usage, attribute order, or processing instructions, only each other's elements, attributes and data content.

    One consequence of this is that processes intended to compare two profile resolutions may have to accommodate differences in comments, considering them as insignificant along with - other differences in serialization.

    Target catalog structure

    Inasmuch as the target of profile resolution is a catalog, the resulting document is expected + other differences in serialization.

    Target catalog structure

    Inasmuch as the target of profile resolution is a catalog, the resulting document is expected to be a catalog document as specified by OSCAL, conforming to the applicable schema, XSD in the case of OSCAL XML or JSON Schema for OSCAL object serialization formats.

    These two approaches to validation do not constrain their respective data models – although OSCAL-equivalent – in exactly the same way. In particular, element structures in the XML may @@ -230,23 +235,23 @@ understood that what is described is both (a) the order in XML, and (b) the canonical order in OSCAL, which while it is not exposed in a JSON object serialization, will always available to an OSCAL processor by reference to the appropriate - metaschema (in this case, the Metaschema instance that defines the catalog model). [See: Terminology]

    Form and organization of resolution target

    The output of a profile resolution should take the form of a catalog. Catalog results (the + metaschema (in this case, the Metaschema instance that defines the catalog model). [See: Terminology]

    Form and organization of resolution target

    The output of a profile resolution should take the form of a catalog. Catalog results (the output of a conformant profile resolution) will be valid to the OSCAL catalog schema.

    At its base, an OSCAL profile source produces a catalog:

    {{< highlight xml>}}...{{}}{{< highlight xml>}} [Required metadata, as described below] { controls or control groups, as described below } { back matter as described below } {{}}

    A valid catalog must have metadata in addition to controls and control groups. Additionally it may have back-matter. How to produce and populate the - metadata and back-matter is described below + metadata and back-matter is described below link me, as is the construction - of the catalog/@id in the target.

    Validation of resolution result

    Although the target of profile resolution takes the form of a catalog, it may not be valid + of the catalog/@id in the target.

    Validation of resolution result

    Although the target of profile resolution takes the form of a catalog, it may not be valid to all constraints that define the OSCAL catalog format, whether enforced by its schema or by other means such as Schematron. Specifically, while a correctly implemented profile resolution may produce a result catalog that is structurally valid, there is a mode of operation, namely the keep-all combination rule, which can permit replication of controls or control contents, when the control imports are redundant or contain redundant - call or match directives. Such replication can result in violations - of ID uniqueness constraints, in particular. [See: Formal validation of source and result]

    If the merge behavior is set to combine[@method='keep'], or not given (as this + include-controls directives. Such replication can result in violations of ID + uniqueness constraints, in particular. [See: Formal validation of source and result]

    If the merge behavior is set to combine[@method='keep'], or not given (as this setting is the default), a profile with multiple calls on controls will result in multiple copies of controls with clashing IDs. These should raise validation errors in the results, since IDs must be unique on elements in document scope. Accordingly, this is an appropriate @@ -254,39 +259,48 @@ removed; it is also an appropriate rule to apply when a profile has been tested and found to be free of any such collisions. Other combination rules (method='use-first' or method='merge') are provided for special exigencies or to permit certain kinds - of optimization.

    top-level @id

    Because document IDs are sometimes used to distinguish data points in processing context, a + of optimization.

    top-level @id

    Because document IDs are sometimes used to distinguish data points in processing context, a resolved profile may not present the same ID as any of the catalogs it is importing, or the same ID as its source profile.

    It is permitted to produce the profile’s ID in resolution via a static mapping from the ID of the source. For example, the resolution of profile with ID profile-X might have ID profile-X-RESOLVED.

    BR feels this is underspecified and I agree. He also thinks that resolutions should always have a (generated) unique IDs, since upstream catalogs can change between - resolutions.

    Instance metadata

    Metadata in the target is derived directly from the source metadata, with modifications. - All elements inside metadata in the source profile are copied in their original - order into the catalog. Additionally, new elements are introduced as described here.

    Because of options in producing metadata and especially the requirement for a timestamp, + resolutions.

    Instance metadata

    Metadata in the target is derived directly from the source metadata, with modifications. + With the exceptions given, elements inside metadata in the source profile are + copied in their original order into the catalog. Additionally, new elements are introduced + as described here.

    Because of options in producing metadata and especially the requirement for a timestamp, developers and users should note that two different resolutions of the same profile will - not, ordinarily, be identical inside metadata.

    This has been worked, with changes suggested by DW, since BR's - review

    Metadata resolution timestamp

    A prop element with name - resolution-timestamp is added to the resolution target metadata with a valid - timestamp indicating the time of the resolution runtime. Conforming with canonical order - of metadata in the target, It must be placed into metadata structure after - any title, published, last-modified, - version, oscal-version, or doc-id elements, and before - any prop, link, role, party, or + not, ordinarily, be identical inside metadata.

    Reworked in context of Issue #580

    Metadata resolution timestamp

    The catalog document resulting from profile resolution may have a new timestamp, + reflecting its time of resolution (when the process was invoked, or executed, or when + its outputs were serialized). This is captured in the required element + metadata/last-modified.

    Processors have the option of assigning this value sensibly in operational context. In + general, the earliest "actual" (i.e., not predated) time warranted by the facts is to + be preferred. For example, a profile resolver that periodically refreshes a catalog of + baselines may determine that a profile is unchanged (despite changes in underlying + catalogs), and does not require a new last-modified assignment – whereas, + even though its source profile had not changed, profile resolution results might + nonetheless change from one resolution to another, if underlying controls (in an + upstream source catalog) have changed: in this case, a new last-modified is + called for on the result catalog.

    Optionally, a prop element with name + source-profile-last-modified may be added to the resolution target metadata with + a valid timestamp capturing the stated last-modified value from the source + document metadata. Conforming with canonical order of metadata in + the target, it must be placed into metadata structure after any title, + published, last-modified, version, + oscal-version, or document-id elements, and before any + prop, link, role, party, or + responsible-party elements. The target is schema valid, and the new prop is given before copies of prop elements already present.

    {{< highlight xml>}} Example Profile 2019-06-30T10:54:16.372-05:00 - NEW + {{}}{{< highlight xml>}} Example Profile - 2019-06-30T10:54:16.372-05:00 - { timestamp of profile resolution runtime } - NEW + { timestamp of profile resolution runtime } + {{}}

    The presence of this property with a valid timestamp may be taken as an indicator that - an OSCAL catalog has been produced by resolution of a profile.

    Note that due to this provision, two different resolutions of the same profile produced - at different times will not be bit-equivalent, since their timestamps will be - different.

    Any provision for systems that cannot provide a valid timestamp?

    Metadata link to source profile

    Optionally, a processor may add a link to the result + an OSCAL catalog has been produced by resolution of a profile.

    Any provision for systems that cannot provide a valid timestamp?

    Metadata link to source profile

    Optionally, a processor may add a link to the result metadata. Conforming with canonical order of metadata in the target, It must be placed into metadata structure after any title, published, last-modified, version, @@ -305,18 +319,17 @@ {{}}{{< highlight xml>}} Test profile - RESOLUTION RESULT - 2019-12-03T11:36:32.284-05:00 + { timestamp of profile resolution runtime } 1.0 1.0-MR2 - { timestamp of profile resolution runtime } Test profile Standing Committee -{{}}

    Body of the target

    The construction of the body of the target catalog is described in the next section. [See: Phases of profile resolution]

    The body of the target catalog, after its metadata, is structured as follows, depending on its merge directive. (Details on merging are given below [See: Merge phase].)

    Body of the target

    The construction of the body of the target catalog is described in the next section. [See: Phases of profile resolution]

    The body of the target catalog, after its metadata, is structured as follows, depending on its merge directive. (Details on merging are given below [See: Merge phase].)

  • Marking a resource with a prop with name='keep' and value always thus has a couple of uses:

  • {{< highlight xml>}} + keep).

    Check to be sure 'keep' is permitted and reserved by the schema here

    {{< highlight xml>}} FedRAMP Applicable Laws and Regulations - always - fedramp-citations - -/resource> {{}}

    Null profile

    A null profile is a profile that imports a catalog and selects all its controls + + + +/resource> {{}}

    Null profile

    A null profile is a profile that imports a catalog and selects all its controls without modification. A truly null profile – that is, with only an import and nothing else – does not return its imported catalog unchanged. But the changes it makes can be useful. This makes a null profile or its near-null variants, as described in this section, potentially useful for catalog management, design and use.

    Importing and nothing else

    {{< highlight xml>}} - ... - -{{}}

    In the target, the catalog group structure and all hierarchy is removed; its controls are all returned in sequence. Any loose parameters in the catalog not referenced in a control are dropped.

    This is the same as

    {{< highlight xml>}} + section.

    Importing and nothing else

    {{< highlight xml>}} ... - - - + - - - -{{}}

    all/with-child-controls='no'

    The same, except that only top-level controls are included, not descendants.

    unit test this

    including merge directives

    Using merge as-is='true', the grouping hierarchy of a source catalog can be +{{}}

    In the target, the catalog group structure and all hierarchy is removed; its controls are all returned in sequence. Any loose parameters in the catalog not referenced in a control are dropped.

    include-all[@with-child-controls='no']

    The same, except that only top-level controls are included, not descendants.

    unit test this

    including merge directives

    Using merge as-is='true', the grouping hierarchy of a source catalog can be reflected in the target.

    This enables a near-null profile to define a normalization or cleanup pass on a catalog, as it will have the result of removing dangling references and orphan parameters, without otherwise affecting controls.

    unit test this too, especially with more than two levels of control hierarchy as well as groups

    unit test this

    Options

    For conformance, an OSCAL processor must deliver results of profile processing, in a basic configuration, exactly as described in these specifications with no elaboration.

    unit test this

    Options

    For conformance, an OSCAL processor must deliver results of profile processing, in a basic configuration, exactly as described in these specifications with no elaboration.

    Conformance does not preclude providing additional features, however, including elaborated outputs, in an optional mode. Such features could include offering warning or process exception handling (appropriate to workflow) not described here; outputs provided with comments or extra structure to support tracing or analytics; or gateway or Draconian modes that would provide user assistance or refuse to deliver results for inputs considered erroneous in a workflow.

    Profile tools and utilities

    • rendering - showing both unresolved (raw) and resolved profiles

    • editing / integrity checking

    • rewriting/normalization

    • diffing

    • deriving a profile as the delta of two catalogs (base and tailored)

    comments or extra structure to support tracing or analytics; or (alternative) gateway and draconian modes that would provide user assistance or refuse to deliver results for inputs considered erroneous in a workflow.

    Profile tools and utilities

    • rendering - showing both unresolved (raw) and resolved profiles

    • editing / integrity checking

    • rewriting/normalization

    • diffing

    • deriving a profile as the delta of two catalogs (base and tailored)

    \ No newline at end of file diff --git a/docs/docker-compose.yml b/docs/docker-compose.yml index b30e592b52..c724880e89 100644 --- a/docs/docker-compose.yml +++ b/docs/docker-compose.yml @@ -2,9 +2,11 @@ version: "3.7" services: docs: - build: - context: . + extends: + file: ../build/docker-compose.yml + service: cli ports: - "1313:1313" volumes: - "./:/docs" + entrypoint: /docs/run-server.sh