diff --git a/.changeset/short-jokes-sleep.md b/.changeset/short-jokes-sleep.md new file mode 100644 index 000000000..ed2c79243 --- /dev/null +++ b/.changeset/short-jokes-sleep.md @@ -0,0 +1,46 @@ +--- +'style-dictionary': major +--- + +BREAKING: preprocessors must now also be explicitly applied on global or platform level, rather than only registering it. This is more consistent with how the other hooks work and allows applying it on a platform level rather than only on the global level. + +`preprocessors` property that contains the registered preprocessors has been moved under a wrapping property called `hooks`. + +Before: + +```js +export default { + // register it inline or by SD.registerPreprocessor + // applies automatically, globally + preprocessors: { + foo: (dictionary) => { + // preprocess it + return dictionary; + } + } +} +``` + +After: + +```js +export default { + // register it inline or by SD.registerPreprocessor + hooks: { + preprocessors: { + foo: (dictionary) => { + // preprocess it + return dictionary; + } + } + }, + // apply it globally + preprocessors: ['foo'], + platforms: { + css: { + // or apply it per platform + preprocessors: ['foo'] + } + } +} +``` diff --git a/.changeset/sweet-toes-fly.md b/.changeset/sweet-toes-fly.md index 93701febe..db454cf5f 100644 --- a/.changeset/sweet-toes-fly.md +++ b/.changeset/sweet-toes-fly.md @@ -1,5 +1,5 @@ --- -'style-dictionary': minor +'style-dictionary': major --- BREAKING: expose getReferences and usesReference utilities as standalone utils rather than requiring them to be bound to dictionary object. This makes it easier to use. diff --git a/.changeset/thick-scissors-admire.md b/.changeset/thick-scissors-admire.md new file mode 100644 index 000000000..19281042a --- /dev/null +++ b/.changeset/thick-scissors-admire.md @@ -0,0 +1,5 @@ +--- +'style-dictionary': minor +--- + +Allow expanding tokens on a global or platform-specific level. 
Supports conditionally expanding per token type, or using a function to determine this per individual token. diff --git a/README.md b/README.md index cedf41a5b..21fc6daff 100644 --- a/README.md +++ b/README.md @@ -166,7 +166,7 @@ This tells the style dictionary build system how and what to build. The default } ``` -| Attribute | Type | Description | +| Property | Type | Description | | :----------------------------------- | :---------------- | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | source | Array | An array of file path [globs](https://github.com/isaacs/node-glob) to design token files. Style Dictionary will do a deep merge of all of the token files, allowing you to organize your files files however you want. | | include | Array | An array of file path [globs](https://github.com/isaacs/node-glob) to design token files that contain default styles. The Style Dictionary uses this as a base collection of tokens. The tokens found using the "source" attribute will overwrite tokens found using include. 
| diff --git a/__integration__/async.test.js b/__integration__/async.test.js index bdf8d12ec..6ff25387c 100644 --- a/__integration__/async.test.js +++ b/__integration__/async.test.js @@ -91,6 +91,7 @@ describe('integration', async function () { const sd = new SDExtension({ source: [`__integration__/tokens/**/[!_]*.json?(c)`], + preprocessors: ['foo-processor'], platforms: { css: { transforms: [ diff --git a/__tests__/StyleDictionary.test.js b/__tests__/StyleDictionary.test.js index 7144921af..4cc5d2c59 100644 --- a/__tests__/StyleDictionary.test.js +++ b/__tests__/StyleDictionary.test.js @@ -23,6 +23,18 @@ function traverseObj(obj, fn) { } } +function stripMeta(obj) { + Object.keys(obj).forEach((key) => { + if (['attributes', 'name', 'original', 'path'].includes(key)) { + delete obj[key]; + } + if (typeof obj[key] === 'object') { + stripMeta(obj[key]); + } + }); + return obj; +} + const test_props = { size: { padding: { @@ -33,6 +45,70 @@ const test_props = { // extend method is called by StyleDictionary constructor, therefore we're basically testing both things here describe('StyleDictionary class + extend method', () => { + it('should accept a string as a path to a JSON5 file', async () => { + const StyleDictionaryExtended = new StyleDictionary('__tests__/__configs/test.json5'); + await StyleDictionaryExtended.hasInitialized; + expect(StyleDictionaryExtended).to.have.nested.property('platforms.web'); + }); + + it('should accept a string as a path to a JSONC file', async () => { + const StyleDictionaryExtended = new StyleDictionary('__tests__/__configs/test.jsonc'); + await StyleDictionaryExtended.hasInitialized; + expect(StyleDictionaryExtended).to.have.nested.property('platforms.web'); + }); + + it('should allow for chained extends and not mutate the original', async () => { + const StyleDictionary1 = new StyleDictionary({ + foo: 'bar', + bar: 'other', + }); + const StyleDictionary2 = await StyleDictionary1.extend({ + foo: 'baz', + }); + const StyleDictionary3 = 
await ( + await StyleDictionary2.extend({ + foo: 'bar', + }) + ).extend({ + foo: 'boo', + }); + expect(StyleDictionary1.foo).to.equal('bar'); + expect(StyleDictionary2.foo).to.equal('baz'); + expect(StyleDictionary3.foo).to.equal('boo'); + // check that the bar prop from SD1 is not lost in SD3 + expect(StyleDictionary3.bar).to.equal('other'); + expect(StyleDictionary).not.to.have.property('foo'); + }); + + it(`should not pollute the prototype`, async () => { + const obj = {}; + // method 1 + new StyleDictionary(JSON.parse('{"__proto__":{"polluted":"yes"}}')); + // method 2, which executes a deepmerge under the hood + // this verifies that this deepmerge util is also protected against prototype pollution + const sd = new StyleDictionary(); + await sd.hasInitialized; + await sd.extend(JSON.parse('{"__proto__":{"polluted":"yes"}}')); + + // FIXME: method 3, by putting it into a design token, currently not tested + // for these we use our own deepExtend utility, which must be prototype pollution protected + // however, we don't actually test this here.. + + expect(obj.polluted).to.be.undefined; + }); + + it('should not merge tokens together but rather override on collision', async () => { + const sd = new StyleDictionary({ + source: [ + '__tests__/__json_files/token-collision-1.json', + '__tests__/__json_files/token-collision-2.json', + ], + }); + await sd.hasInitialized; + expect(sd.tokens.test.value).to.equal('#ff0000'); + expect(sd.tokens.test.$extensions).to.be.undefined; + }); + describe('method signature', () => { it('should accept a string as a path to a JSON file', () => { const StyleDictionaryExtended = new StyleDictionary('__tests__/__configs/test.json'); @@ -190,182 +266,304 @@ describe('StyleDictionary class + extend method', () => { }); }); - // This is to allow style dictionaries to depend on other style dictionaries and - // override tokens. 
Useful for skinning - it('should not throw a collision error if a source file collides with an include', async () => { - const StyleDictionaryExtended = new StyleDictionary({ - include: ['__tests__/__tokens/paddings.json'], - source: ['__tests__/__tokens/paddings.json'], - log: 'error', + describe('collisions', () => { + it('should not throw a collision error if a source file collides with an include', async () => { + const StyleDictionaryExtended = new StyleDictionary({ + include: ['__tests__/__tokens/paddings.json'], + source: ['__tests__/__tokens/paddings.json'], + log: 'error', + }); + const output = fileToJSON('__tests__/__tokens/paddings.json'); + traverseObj(output, (obj) => { + if (Object.hasOwn(obj, 'value') && !obj.filePath) { + obj.filePath = '__tests__/__tokens/paddings.json'; + obj.isSource = true; + } + }); + await StyleDictionaryExtended.hasInitialized; + expect(StyleDictionaryExtended.tokens).to.eql(output); }); - const output = fileToJSON('__tests__/__tokens/paddings.json'); - traverseObj(output, (obj) => { - if (Object.hasOwn(obj, 'value') && !obj.filePath) { - obj.filePath = '__tests__/__tokens/paddings.json'; - obj.isSource = true; + + it('should throw an error if the collision is in source files and log is set to error', async () => { + const sd = new StyleDictionary( + { + source: ['__tests__/__tokens/paddings.json', '__tests__/__tokens/_paddings.json'], + log: { warnings: 'error', verbosity: 'verbose' }, + }, + { init: false }, + ); + let error; + try { + await sd.init(); + } catch (e) { + error = e; } + await expect(error.message).to.matchSnapshot(); }); - await StyleDictionaryExtended.hasInitialized; - expect(StyleDictionaryExtended.tokens).to.eql(output); - }); - - it('should throw an error if the collision is in source files and log is set to error', async () => { - const sd = new StyleDictionary( - { - source: ['__tests__/__tokens/paddings.json', '__tests__/__tokens/_paddings.json'], - log: { warnings: 'error', verbosity: 'verbose' }, - 
}, - { init: false }, - ); - let error; - try { - await sd.init(); - } catch (e) { - error = e; - } - await expect(error.message).to.matchSnapshot(); - }); - it('should throw a brief error if the collision is in source files and log is set to error and verbosity default', async () => { - const sd = new StyleDictionary( - { - source: ['__tests__/__tokens/paddings.json', '__tests__/__tokens/_paddings.json'], - log: { warnings: 'error' }, - }, - { init: false }, - ); - let error; - try { - await sd.init(); - } catch (e) { - error = e; - } - await expect(error.message).to.matchSnapshot(); - }); - - it('should throw a warning if the collision is in source files and log is set to warn', async () => { - const sd = new StyleDictionary( - { - source: ['__tests__/__tokens/paddings.json', '__tests__/__tokens/paddings.json'], - log: 'warn', - }, - { init: false }, - ); - await expect(sd.init()).to.eventually.be.fulfilled; - }); - - it('should accept a string as a path to a JSON5 file', async () => { - const StyleDictionaryExtended = new StyleDictionary('__tests__/__configs/test.json5'); - await StyleDictionaryExtended.hasInitialized; - expect(StyleDictionaryExtended).to.have.nested.property('platforms.web'); - }); - - it('should accept a string as a path to a JSONC file', async () => { - const StyleDictionaryExtended = new StyleDictionary('__tests__/__configs/test.jsonc'); - await StyleDictionaryExtended.hasInitialized; - expect(StyleDictionaryExtended).to.have.nested.property('platforms.web'); - }); - - it('should allow for chained extends and not mutate the original', async () => { - const StyleDictionary1 = new StyleDictionary({ - foo: 'bar', - bar: 'other', - }); - const StyleDictionary2 = await StyleDictionary1.extend({ - foo: 'baz', + it('should throw a brief error if the collision is in source files and log is set to error and verbosity default', async () => { + const sd = new StyleDictionary( + { + source: ['__tests__/__tokens/paddings.json', 
'__tests__/__tokens/_paddings.json'], + log: { warnings: 'error' }, + }, + { init: false }, + ); + let error; + try { + await sd.init(); + } catch (e) { + error = e; + } + await expect(error.message).to.matchSnapshot(); }); - const StyleDictionary3 = await ( - await StyleDictionary2.extend({ - foo: 'bar', - }) - ).extend({ - foo: 'boo', + + it('should throw a warning if the collision is in source files and log is set to warn', async () => { + const sd = new StyleDictionary( + { + source: ['__tests__/__tokens/paddings.json', '__tests__/__tokens/paddings.json'], + log: 'warn', + }, + { init: false }, + ); + await expect(sd.init()).to.eventually.be.fulfilled; }); - expect(StyleDictionary1.foo).to.equal('bar'); - expect(StyleDictionary2.foo).to.equal('baz'); - expect(StyleDictionary3.foo).to.equal('boo'); - // check that the bar prop from SD1 is not lost in SD3 - expect(StyleDictionary3.bar).to.equal('other'); - expect(StyleDictionary).not.to.have.property('foo'); }); - it(`should not pollute the prototype`, async () => { - const obj = {}; - // method 1 - new StyleDictionary(JSON.parse('{"__proto__":{"polluted":"yes"}}')); - // method 2, which executes a deepmerge under the hood - // this verifies that this deepmerge util is also protected against prototype pollution - const sd = new StyleDictionary(); - await sd.hasInitialized; - await sd.extend(JSON.parse('{"__proto__":{"polluted":"yes"}}')); + describe('expand object value tokens', () => { + it('should not expand object value tokens by default', async () => { + const input = { + border: { + type: 'border', + value: { + width: '2px', + style: 'solid', + color: '#000', + }, + }, + }; + const sd = new StyleDictionary({ + tokens: input, + }); + await sd.hasInitialized; + expect(sd.tokens).to.eql(input); + }); - // FIXME: method 3, by putting it into a design token, currently not tested - // for these we use our own deepExtend utility, which must be prototype pollution protected - // however, we don't actually test this 
here.. + it('should allow expanding tokens globally', async () => { + const input = { + border: { + type: 'border', + value: { + width: '2px', + style: 'solid', + color: '#000', + }, + }, + }; + const sd = new StyleDictionary({ + tokens: input, + expand: true, + }); + await sd.hasInitialized; + expect(sd.tokens).to.eql({ + border: { + color: { + type: 'color', + value: '#000', + }, + style: { + type: 'strokeStyle', + value: 'solid', + }, + width: { + type: 'dimension', + value: '2px', + }, + }, + }); + }); - expect(obj.polluted).to.be.undefined; - }); + it('should allow expanding tokens on a per platform basis', async () => { + const input = { + border: { + type: 'border', + value: { + width: '2px', + style: 'solid', + color: '#000', + }, + }, + }; + const sd = new StyleDictionary({ + tokens: input, + platforms: { + css: { + expand: true, + }, + js: {}, + }, + }); + await sd.hasInitialized; + const cssTokens = await sd.exportPlatform('css'); + const jsTokens = await sd.exportPlatform('js'); + expect(stripMeta(cssTokens)).to.eql({ + border: { + color: { + type: 'color', + value: '#000', + }, + style: { + type: 'strokeStyle', + value: 'solid', + }, + width: { + type: 'dimension', + value: '2px', + }, + }, + }); + expect(stripMeta(jsTokens)).to.eql(input); + }); - it('should allow using $type value on a token group, children inherit, local overrides take precedence', async () => { - const sd = new StyleDictionary({ - tokens: { - dimensions: { - $type: 'dimension', - sm: { - $value: '5', + it('should allow combining global expand with per platform expand', async () => { + const input = { + border: { + type: 'border', + value: { + width: '2px', + style: 'solid', + color: '#000', + }, + }, + borderTwo: { + type: 'border', + value: { + width: '1px', + style: 'dashed', + color: '#ccc', }, - md: { - $value: '10', + }, + }; + const sd = new StyleDictionary({ + tokens: input, + expand: { + include: (token) => { + return token.value.width === '2px'; }, - nested: { - deep: { - 
lg: { - $value: '15', - }, + }, + platforms: { + css: {}, + js: { + expand: { + typesMap: true, }, }, - nope: { - $value: '20', - $type: 'spacing', + }, + }); + await sd.hasInitialized; + const cssTokens = await sd.exportPlatform('css'); + const jsTokens = await sd.exportPlatform('js'); + + expect(stripMeta(cssTokens)).to.eql({ + border: { + color: { + type: 'color', + value: '#000', + }, + style: { + type: 'strokeStyle', + value: 'solid', + }, + width: { + type: 'dimension', + value: '2px', }, }, - }, - platforms: { - css: { - transformGroup: 'css', + borderTwo: input.borderTwo, + }); + expect(stripMeta(jsTokens)).to.eql({ + border: { + color: { + type: 'color', + value: '#000', + }, + style: { + type: 'strokeStyle', + value: 'solid', + }, + width: { + type: 'dimension', + value: '2px', + }, }, - }, + borderTwo: { + color: { + type: 'color', + value: '#ccc', + }, + style: { + type: 'strokeStyle', + value: 'dashed', + }, + width: { + type: 'dimension', + value: '1px', + }, + }, + }); }); - await sd.hasInitialized; - - expect(sd.tokens.dimensions.sm.$type).to.equal('dimension'); - expect(sd.tokens.dimensions.md.$type).to.equal('dimension'); - expect(sd.tokens.dimensions.nested.deep.lg.$type).to.equal('dimension'); - expect(sd.tokens.dimensions.nope.$type).to.equal('spacing'); }); - it('should detect usage of DTCG draft spec tokens', async () => { - const sd = new StyleDictionary({ - tokens: { - datalist: { - key: { color: { $value: '#222' } }, - value: { color: { $value: '#000' } }, + describe('DTCG integration', () => { + it('should allow using $type value on a token group, children inherit, local overrides take precedence', async () => { + const sd = new StyleDictionary({ + tokens: { + dimensions: { + $type: 'dimension', + sm: { + $value: '5', + }, + md: { + $value: '10', + }, + nested: { + deep: { + lg: { + $value: '15', + }, + }, + }, + nope: { + $value: '20', + $type: 'spacing', + }, + }, + }, + platforms: { + css: { + transformGroup: 'css', + }, }, - }, + }); 
+ await sd.hasInitialized; + + expect(sd.tokens.dimensions.sm.$type).to.equal('dimension'); + expect(sd.tokens.dimensions.md.$type).to.equal('dimension'); + expect(sd.tokens.dimensions.nested.deep.lg.$type).to.equal('dimension'); + expect(sd.tokens.dimensions.nope.$type).to.equal('spacing'); }); - await sd.hasInitialized; - expect(sd.usesDtcg).to.be.true; - }); - it('should not merge tokens together but rather override on collision', async () => { - const sd = new StyleDictionary({ - source: [ - '__tests__/__json_files/token-collision-1.json', - '__tests__/__json_files/token-collision-2.json', - ], + it('should detect usage of DTCG draft spec tokens', async () => { + const sd = new StyleDictionary({ + tokens: { + datalist: { + key: { color: { $value: '#222' } }, + value: { color: { $value: '#000' } }, + }, + }, + }); + await sd.hasInitialized; + expect(sd.usesDtcg).to.be.true; }); - await sd.hasInitialized; - expect(sd.tokens.test.value).to.equal('#ff0000'); - expect(sd.tokens.test.$extensions).to.be.undefined; }); }); diff --git a/__tests__/__snapshots__/StyleDictionary.test.snap.js b/__tests__/__snapshots__/StyleDictionary.test.snap.js index ed78a050d..57523ee90 100644 --- a/__tests__/__snapshots__/StyleDictionary.test.snap.js +++ b/__tests__/__snapshots__/StyleDictionary.test.snap.js @@ -1,6 +1,6 @@ /* @web/test-runner snapshot v1 */ export const snapshots = {}; -snapshots["StyleDictionary class + extend method should throw an error if the collision is in source files and log is set to error"] = +snapshots["StyleDictionary class + extend method collisions should throw an error if the collision is in source files and log is set to error"] = ` Token collisions detected (7): @@ -13,11 +13,11 @@ Collision detected at: size.padding.xl! Original value: 20, New value: 20 Collision detected at: size.padding.xxl! 
Original value: 30, New value: 30 `; -/* end snapshot StyleDictionary class + extend method should throw an error if the collision is in source files and log is set to error */ +/* end snapshot StyleDictionary class + extend method collisions should throw an error if the collision is in source files and log is set to error */ -snapshots["StyleDictionary class + extend method should throw a brief error if the collision is in source files and log is set to error and verbosity default"] = +snapshots["StyleDictionary class + extend method collisions should throw a brief error if the collision is in source files and log is set to error and verbosity default"] = ` Token collisions detected (7): Use log.verbosity "verbose" or use CLI option --verbose for more details.`; -/* end snapshot StyleDictionary class + extend method should throw a brief error if the collision is in source files and log is set to error and verbosity default */ +/* end snapshot StyleDictionary class + extend method collisions should throw a brief error if the collision is in source files and log is set to error and verbosity default */ diff --git a/__tests__/register/preprocessor.test.js b/__tests__/register/preprocessor.test.js index 63020c1ca..fbecb92c5 100644 --- a/__tests__/register/preprocessor.test.js +++ b/__tests__/register/preprocessor.test.js @@ -20,12 +20,13 @@ registerSuite({ }, registerMethod: 'registerPreprocessor', prop: 'preprocessors', + hooks: true, }); describe('register/transformGroup', async () => { let StyleDictionaryExtended; beforeEach(async () => { - StyleDictionary.preprocessors = {}; + StyleDictionary.hooks.preprocessors = {}; StyleDictionaryExtended = new StyleDictionary({}); await StyleDictionaryExtended.hasInitialized; }); @@ -35,8 +36,8 @@ describe('register/transformGroup', async () => { name: 'example-preprocessor', preprocessor: (dict) => dict, }); - expect(StyleDictionary.preprocessors['example-preprocessor']).to.not.be.undefined; - 
expect(StyleDictionaryExtended.preprocessors['example-preprocessor']).to.not.be.undefined; + expect(StyleDictionary.hooks.preprocessors['example-preprocessor']).to.not.be.undefined; + expect(StyleDictionaryExtended.hooks.preprocessors['example-preprocessor']).to.not.be.undefined; }); it('should throw if the preprocessor name is not a string', () => { @@ -76,6 +77,7 @@ describe('register/transformGroup', async () => { }); StyleDictionaryExtended = new StyleDictionary({ + preprocessors: ['strip-descriptions'], tokens: { foo: { value: '4px', @@ -120,6 +122,7 @@ describe('register/transformGroup', async () => { }); StyleDictionaryExtended = new StyleDictionary({ + preprocessors: ['strip-descriptions'], tokens: { foo: { value: '4px', diff --git a/__tests__/register/register.suite.js b/__tests__/register/register.suite.js index 841f88477..74134fcf5 100644 --- a/__tests__/register/register.suite.js +++ b/__tests__/register/register.suite.js @@ -17,7 +17,11 @@ export function registerSuite(opts) { describe('Register Test Suite', () => { const reset = () => { - StyleDictionary[prop] = defaultPropVal; + if (opts.hooks) { + StyleDictionary.hooks[prop] = defaultPropVal; + } else { + StyleDictionary[prop] = defaultPropVal; + } }; beforeEach(() => { reset(); @@ -33,10 +37,15 @@ export function registerSuite(opts) { const sd1 = new StyleDictionary(); const sd2 = new StyleDictionary(); const sd3 = await sd2.extend(); - - expect(sd1[prop][configFoo.name]).to.not.be.undefined; - expect(sd2[prop][configFoo.name]).to.not.be.undefined; - expect(sd3[prop][configFoo.name]).to.not.be.undefined; + if (opts.hooks) { + expect(sd1.hooks[prop][configFoo.name]).to.not.be.undefined; + expect(sd2.hooks[prop][configFoo.name]).to.not.be.undefined; + expect(sd3.hooks[prop][configFoo.name]).to.not.be.undefined; + } else { + expect(sd1[prop][configFoo.name]).to.not.be.undefined; + expect(sd2[prop][configFoo.name]).to.not.be.undefined; + expect(sd3[prop][configFoo.name]).to.not.be.undefined; + } }); 
it(`should allow registering ${prop} on instance, affecting only that instance`, async () => { @@ -45,10 +54,15 @@ export function registerSuite(opts) { const sd3 = await sd2.extend(); sd2[registerMethod](configFoo); - - expect(sd1[prop][configFoo.name]).to.be.undefined; - expect(sd2[prop][configFoo.name]).to.not.be.undefined; - expect(sd3[prop][configFoo.name]).to.be.undefined; + if (opts.hooks) { + expect(sd1.hooks[prop][configFoo.name]).to.be.undefined; + expect(sd2.hooks[prop][configFoo.name]).to.not.be.undefined; + expect(sd3.hooks[prop][configFoo.name]).to.be.undefined; + } else { + expect(sd1[prop][configFoo.name]).to.be.undefined; + expect(sd2[prop][configFoo.name]).to.not.be.undefined; + expect(sd3[prop][configFoo.name]).to.be.undefined; + } }); it(`should combine class and instance registrations for ${prop} on the instance`, async () => { @@ -59,14 +73,25 @@ export function registerSuite(opts) { sd2[registerMethod](configBar); const sd3 = await sd2.extend(); - expect(sd1[prop][configFoo.name]).to.not.be.undefined; - expect(sd2[prop][configFoo.name]).to.not.be.undefined; - expect(sd3[prop][configFoo.name]).to.not.be.undefined; - // should not be registered on sd1, because we registered only on sd2 - expect(sd1[prop][configBar.name]).to.be.undefined; - expect(sd2[prop][configBar.name]).to.not.be.undefined; - // should be registered because sd3 extends sd2 - expect(sd3[prop][configBar.name]).to.not.be.undefined; + if (opts.hooks) { + expect(sd1.hooks[prop][configFoo.name]).to.not.be.undefined; + expect(sd2.hooks[prop][configFoo.name]).to.not.be.undefined; + expect(sd3.hooks[prop][configFoo.name]).to.not.be.undefined; + // should not be registered on sd1, because we registered only on sd2 + expect(sd1.hooks[prop][configBar.name]).to.be.undefined; + expect(sd2.hooks[prop][configBar.name]).to.not.be.undefined; + // should be registered because sd3 extends sd2 + expect(sd3.hooks[prop][configBar.name]).to.not.be.undefined; + } else { + 
expect(sd1[prop][configFoo.name]).to.not.be.undefined; + expect(sd2[prop][configFoo.name]).to.not.be.undefined; + expect(sd3[prop][configFoo.name]).to.not.be.undefined; + // should not be registered on sd1, because we registered only on sd2 + expect(sd1[prop][configBar.name]).to.be.undefined; + expect(sd2[prop][configBar.name]).to.not.be.undefined; + // should be registered because sd3 extends sd2 + expect(sd3[prop][configBar.name]).to.not.be.undefined; + } }); }); }); diff --git a/__tests__/utils/expandObjectTokens.test.js b/__tests__/utils/expandObjectTokens.test.js new file mode 100644 index 000000000..88c96b421 --- /dev/null +++ b/__tests__/utils/expandObjectTokens.test.js @@ -0,0 +1,391 @@ +/* + * Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with + * the License. A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR + * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions + * and limitations under the License. 
+ */ +import { expect } from 'chai'; +import { getTypeFromMap, expandToken, expandTokens } from '../../lib/utils/expandObjectTokens.js'; + +const input = { + border: { + type: 'border', + value: { + width: '2px', + style: 'solid', + color: '#000', + }, + }, + typography: { + type: 'typography', + value: { + fontWeight: '800', + fontSize: '16px', + fontFamily: 'Arial Black', + }, + }, +}; + +const borderOutput = { + color: { + type: 'color', + value: '#000', + }, + style: { + type: 'strokeStyle', + value: 'solid', + }, + width: { + type: 'dimension', + value: '2px', + }, +}; + +const typographyOutput = { + fontWeight: { + type: 'fontWeight', + value: '800', + }, + fontSize: { + type: 'dimension', + value: '16px', + }, + fontFamily: { + type: 'fontFamily', + value: 'Arial Black', + }, +}; + +describe('utils', () => { + describe('expandObjectTokens', () => { + describe('expandObjectTokens', () => { + describe('getTypeFromMap', () => { + it('should return the type input as output by default', () => { + const output = getTypeFromMap('width', 'foo', {}); + expect(output).to.equal('width'); + }); + + it('should return mapped type, when overriding the base DTCG map', () => { + const output = getTypeFromMap('width', 'border', { + border: { width: 'foo' }, + }); + expect(output).to.equal('foo'); + }); + + it('should return mapped type keyed by the composition type', () => { + const output = getTypeFromMap('width', 'foo', { + foo: { width: 'foo' }, + }); + expect(output).to.equal('foo'); + }); + + it('should prioritise the mapped type keyed by composition type when also available on the top-level', () => { + const output = getTypeFromMap('width', 'foo', { + foo: { width: 'foo' }, + width: 'bar', + }); + expect(output).to.equal('foo'); + }); + }); + + describe('expandToken', () => { + it('should expand a single object value token into multiple tokens', () => { + const expanded = expandToken(input.border, { expand: true, usesDtcg: false }); + 
expect(expanded).to.eql(borderOutput); + }); + + it('should handle DTCG spec tokens expansion', () => { + const expanded = expandToken( + { + $type: 'border', + $value: { + width: '2px', + style: 'solid', + color: '#000', + }, + }, + { expand: true, usesDtcg: true }, + ); + expect(expanded).to.eql({ + color: { + $type: 'color', + $value: '#000', + }, + style: { + $type: 'strokeStyle', + $value: 'solid', + }, + width: { + $type: 'dimension', + $value: '2px', + }, + }); + }); + + it('should handle the expansion of array of objects values', () => { + const expanded = expandToken( + { + type: 'shadow', + value: [ + { + offsetX: '2px', + offsetY: '4px', + blur: '2px', + spread: '0', + color: '#000', + }, + { + offsetX: '10px', + offsetY: '12px', + blur: '4px', + spread: '3px', + color: '#ccc', + }, + ], + }, + { + expand: true, + usesDtcg: false, + }, + ); + + expect(expanded).to.eql({ + 1: { + offsetX: { + type: 'dimension', + value: '2px', + }, + offsetY: { + type: 'dimension', + value: '4px', + }, + blur: { + type: 'dimension', + value: '2px', + }, + spread: { + type: 'dimension', + value: '0', + }, + color: { + type: 'color', + value: '#000', + }, + }, + 2: { + offsetX: { + type: 'dimension', + value: '10px', + }, + offsetY: { + type: 'dimension', + value: '12px', + }, + blur: { + type: 'dimension', + value: '4px', + }, + spread: { + type: 'dimension', + value: '3px', + }, + color: { + type: 'color', + value: '#ccc', + }, + }, + }); + }); + }); + + describe('expandTokens', () => { + it('should not expand tokens when expand is false', () => { + const expanded = expandTokens(input, { + expand: false, + usesDtcg: false, + }); + + expect(expanded).to.eql(input); + }); + + it('should expand tokens when expand is set to true', () => { + const expanded = expandTokens( + { + objectValues: { + nested: input.border, + double: { + nested: input.typography, + }, + }, + }, + { + expand: true, + usesDtcg: false, + }, + ); + + expect(expanded).to.eql({ + objectValues: { + nested: 
borderOutput, + double: { + nested: typographyOutput, + }, + }, + }); + }); + + it('should allow conditionally expanding tokens by type using include', () => { + const expanded = expandTokens(input, { + expand: { + include: ['typography'], + }, + usesDtcg: false, + }); + + expect(expanded).to.eql({ + border: input.border, + typography: typographyOutput, + }); + }); + + it('should allow conditionally expanding tokens by type using exclude', () => { + const expanded = expandTokens(input, { + expand: { + exclude: ['typography'], + }, + usesDtcg: false, + }); + + expect(expanded).to.eql({ + border: borderOutput, + typography: input.typography, + }); + }); + + it('should allow conditionally expanding tokens by condition function', () => { + const expanded = expandTokens(input, { + expand: (token) => token.value.fontWeight === '800', + usesDtcg: false, + }); + + expect(expanded).to.eql({ + border: input.border, + typography: typographyOutput, + }); + + const expandedInclude = expandTokens(input, { + expand: { include: (token) => token.value.fontWeight === '800' }, + usesDtcg: false, + }); + + expect(expandedInclude).to.eql({ + border: input.border, + typography: typographyOutput, + }); + + const expandedExclude = expandTokens(input, { + expand: { exclude: (token) => token.value.fontWeight === '800' }, + usesDtcg: false, + }); + + expect(expandedExclude).to.eql({ + border: borderOutput, + typography: input.typography, + }); + }); + + it('should also expand tokens that are references to other tokens', () => { + const refInput = { + border: input.border, + borderRef: { + type: 'border', + value: '{border}', + }, + }; + const expanded = expandTokens(refInput, { + expand: true, + usesDtcg: false, + }); + + expect(expanded).to.eql({ + border: borderOutput, + borderRef: borderOutput, + }); + }); + + it('should support DTCG format', () => { + const input = { + border: { + $type: 'border', + $value: { + width: '2px', + style: 'solid', + color: '#000', + }, + }, + borderRef: { + 
$type: 'border', + $value: '{border}', + }, + }; + const expanded = expandTokens(input, { + expand: true, + usesDtcg: true, + }); + + expect(expanded).to.eql({ + border: { + color: { + $type: 'color', + $value: '#000', + }, + style: { + $type: 'strokeStyle', + $value: 'solid', + }, + width: { + $type: 'dimension', + $value: '2px', + }, + }, + borderRef: { + color: { + $type: 'color', + $value: '#000', + }, + style: { + $type: 'strokeStyle', + $value: 'solid', + }, + width: { + $type: 'dimension', + $value: '2px', + }, + }, + }); + }); + + it('should throw an error when include and exclude are combined', () => { + const badFn = () => + expandTokens(input, { + expand: { + include: ['typography'], + exclude: ['border'], + }, + usesDtcg: false, + }); + + expect(badFn).to.throw( + 'expand.include should not be combined with expand.exclude, use one or the other.', + ); + }); + }); + }); + }); +}); diff --git a/__tests__/utils/preprocess.test.js b/__tests__/utils/preprocess.test.js index 3b8ab3a52..f64ffff6a 100644 --- a/__tests__/utils/preprocess.test.js +++ b/__tests__/utils/preprocess.test.js @@ -11,7 +11,7 @@ * and limitations under the License. 
*/ import { expect } from 'chai'; -import { typeDtcgDelegate, preprocess } from '../../lib/utils/preprocess.js'; +import { preprocess } from '../../lib/utils/preprocess.js'; describe('utils', () => { describe('preprocess', () => { @@ -22,10 +22,13 @@ describe('utils', () => { value: '5px', }, }, + ['preprocessorA'], { - preprocessorA: (tokens) => { - tokens.bar = tokens.foo; - return tokens; + preprocessorA: { + preprocessor: (tokens) => { + tokens.bar = tokens.foo; + return tokens; + }, }, }, ); @@ -41,19 +44,26 @@ describe('utils', () => { value: '5px', }, }, + ['preprocessorA', 'preprocessorB', 'preprocessorC'], { - preprocessorA: (tokens) => { - tokens.bar = tokens.foo; - return tokens; + preprocessorA: { + preprocessor: (tokens) => { + tokens.bar = tokens.foo; + return tokens; + }, }, - preprocessorB: async (tokens) => { - await new Promise((resolve) => setTimeout(resolve, 100)); - tokens.baz = tokens.bar; - return tokens; + preprocessorB: { + preprocessor: async (tokens) => { + await new Promise((resolve) => setTimeout(resolve, 100)); + tokens.baz = tokens.bar; + return tokens; + }, }, - preprocessorC: (tokens) => { - tokens.qux = tokens.baz; - return tokens; + preprocessorC: { + preprocessor: (tokens) => { + tokens.qux = tokens.baz; + return tokens; + }, }, }, ); @@ -62,84 +72,4 @@ describe('utils', () => { }); }); }); - - describe('typeDtcgDelegate', () => { - it('should correctly let tokens inherit the $type property while respecting local overrides', () => { - const tokens = { - dimension: { - $type: 'dimension', - scale: { - $value: '2', - $type: 'math', - }, - xs: { - $value: '4', - }, - nested: { - deep: { - deeper: { - $value: '12', - }, - }, - deep2: { - $type: 'math', - deeper: { - $type: 'other', - evenDeeper: { - $value: '12', - $type: 'math', - }, - evenDeeper2: { - $value: '12', - }, - }, - }, - }, - sm: { - $value: '8', - }, - }, - }; - - expect(typeDtcgDelegate(tokens)).to.eql({ - dimension: { - $type: 'dimension', - scale: { - $value: '2', - 
$type: 'math', - }, - xs: { - $value: '4', - $type: 'dimension', - }, - nested: { - deep: { - deeper: { - $value: '12', - $type: 'dimension', - }, - }, - deep2: { - $type: 'math', - deeper: { - $type: 'other', - evenDeeper: { - $value: '12', - $type: 'math', - }, - evenDeeper2: { - $value: '12', - $type: 'other', - }, - }, - }, - }, - sm: { - $value: '8', - $type: 'dimension', - }, - }, - }); - }); - }); }); diff --git a/__tests__/utils/typeDtcgDelegate.test.js b/__tests__/utils/typeDtcgDelegate.test.js new file mode 100644 index 000000000..7b3c35d3e --- /dev/null +++ b/__tests__/utils/typeDtcgDelegate.test.js @@ -0,0 +1,96 @@ +/* + * Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with + * the License. A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR + * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions + * and limitations under the License. 
+ */ +import { expect } from 'chai'; +import { typeDtcgDelegate } from '../../lib/utils/typeDtcgDelegate.js'; + +describe('utils', () => { + describe('typeDtcgDelegate', () => { + it('should correctly let tokens inherit the $type property while respecting local overrides', () => { + const tokens = { + dimension: { + $type: 'dimension', + scale: { + $value: '2', + $type: 'math', + }, + xs: { + $value: '4', + }, + nested: { + deep: { + deeper: { + $value: '12', + }, + }, + deep2: { + $type: 'math', + deeper: { + $type: 'other', + evenDeeper: { + $value: '12', + $type: 'math', + }, + evenDeeper2: { + $value: '12', + }, + }, + }, + }, + sm: { + $value: '8', + }, + }, + }; + + expect(typeDtcgDelegate(tokens)).to.eql({ + dimension: { + $type: 'dimension', + scale: { + $value: '2', + $type: 'math', + }, + xs: { + $value: '4', + $type: 'dimension', + }, + nested: { + deep: { + deeper: { + $value: '12', + $type: 'dimension', + }, + }, + deep2: { + $type: 'math', + deeper: { + $type: 'other', + evenDeeper: { + $value: '12', + $type: 'math', + }, + evenDeeper2: { + $value: '12', + $type: 'other', + }, + }, + }, + }, + sm: { + $value: '8', + $type: 'dimension', + }, + }, + }); + }); + }); +}); diff --git a/docs/src/content/docs/info/tokens.md b/docs/src/content/docs/info/tokens.md index cbb40e2e6..5c2163c0b 100644 --- a/docs/src/content/docs/info/tokens.md +++ b/docs/src/content/docs/info/tokens.md @@ -61,7 +61,7 @@ Using DTCG format that would look like: For any design tokens you wish to output, the "value" attribute is required. This provides the data that will be used throughout the build process (and ultimately used for styling in your deliverables). You can optionally include any custom attributes you would like (e.g. "comment" with a string or "metadata" as an object with its own attributes). 
-| Attribute | Type | Description | +| Property | Type | Description | | :--------- | :----------------- | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | value | Any | The value of the design token. This can be any type of data, a hex string, an integer, a file path to a file, even an object or array. | | comment | String (optional) | The comment attribute will show up in a code comment in output files if the format supports it. | @@ -75,13 +75,13 @@ You can add any attributes or data you want in a design token and Style Dictiona Style Dictionary adds some default metadata on each design token that helps with transforms and formats. Here is what Style Dictionary adds onto each design token: -| Attribute | Type | Description | -| :-------- | :------------ | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| name | String | A default name of the design token that is set to the key of the design token. This is only added if you do not provide one. | -| path | Array[String] | The object path of the design token. `color: { background: { primary: { value: "#fff" } } }` will have a path of `['color','background', 'primary']`. | -| original | Object | A pristine copy of the original design token object. This is to make sure transforms and formats always have the unmodified version of the original design token. | -| filePath | String | The file path of the file the token is defined in. This file path is derived from the `source` or `include` file path arrays defined in the [configuration](/reference/config). 
| -| isSource | Boolean | If the token is from a file defined in the `source` array as opposed to `include` in the [configuration](/reference/config). | +| Property | Type | Description | +| :------- | :------------ | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| name | String | A default name of the design token that is set to the key of the design token. This is only added if you do not provide one. | +| path | Array[String] | The object path of the design token. `color: { background: { primary: { value: "#fff" } } }` will have a path of `['color','background', 'primary']`. | +| original | Object | A pristine copy of the original design token object. This is to make sure transforms and formats always have the unmodified version of the original design token. | +| filePath | String | The file path of the file the token is defined in. This file path is derived from the `source` or `include` file path arrays defined in the [configuration](/reference/config). | +| isSource | Boolean | If the token is from a file defined in the `source` array as opposed to `include` in the [configuration](/reference/config). | Given this configuration: diff --git a/docs/src/content/docs/reference/Hooks/preprocessors.md b/docs/src/content/docs/reference/Hooks/preprocessors.md index 4f1469233..7fdc5c492 100644 --- a/docs/src/content/docs/reference/Hooks/preprocessors.md +++ b/docs/src/content/docs/reference/Hooks/preprocessors.md @@ -5,6 +5,8 @@ title: Preprocessors Starting in version 4.0, you can define custom preprocessors to process the dictionary object as a whole, after it all token files have been parsed and combined into one. This is useful if you want to do more complex transformations on the dictionary as a whole, when all other ways are not powerful enough. +Preprocessors can be applied globally or per platform. 
+ :::caution It should be clear that using this feature should be a last resort. Using custom parsers to parse per file or using transforms to do transformations on a per token basis, gives more granular control and reduces the risks of making mistakes. @@ -69,13 +71,38 @@ StyleDictionary.registerPreprocessor(myPreprocessor); ```javascript export default { - preprocessors: { - 'strip-props': myPreprocessor, + registeredHooks: { + preprocessors: { + 'strip-props': myPreprocessor, + }, }, // ... the rest of the configuration }; ``` +### Applying it in config + +```json +{ + "source": ["**/*.tokens.json"], + "preprocessors": ["strip-props"] +} +``` + +or platform-specific: + +```json +{ + "source": ["**/*.tokens.json"], + "platforms": { + "css": { + "transformGroup": "css", + "preprocessors": ["strip-props"] + } + } +} +``` + --- ## Preprocessor examples @@ -100,3 +127,12 @@ StyleDictionary.registerPreprocessor({ }, }); ``` + +--- + +## Default preprocessors + +There are two default preprocessors that are always applied and run before other custom preprocessors do: + +- [`typeDtcgDelegate`](/reference/utils/dtcg#typedtcgdelegate), for DTCG tokens, make sure the `$type` is either already present or gets inherited from the closest ancestor that has it defined, so that the `$type` is always available on the token level, for ease of use +- [`expandObjectTokens`](/reference/config#expand), a private preprocessor that will expand object-value (composite) tokens when user config has this enabled. 
diff --git a/docs/src/content/docs/reference/Utils/DTCG.md b/docs/src/content/docs/reference/Utils/DTCG.md index d010ac4cd..abf360b6e 100644 --- a/docs/src/content/docs/reference/Utils/DTCG.md +++ b/docs/src/content/docs/reference/Utils/DTCG.md @@ -10,6 +10,8 @@ This function processes your ["Design Token Community Group Draft spec"-complian We built this utility because it's cheaper to apply the inheritance once, rather than on every access of a token's "$type" property, checking the ancestor tree to find it. +This utility is ran by default in Style-Dictionary after the parser hook and before the preprocessor hook. + ```js import { typeDtcgDelegate } from 'style-dictionary/utils'; diff --git a/docs/src/content/docs/reference/config.md b/docs/src/content/docs/reference/config.md index f3e9792f3..fd4495b8c 100644 --- a/docs/src/content/docs/reference/config.md +++ b/docs/src/content/docs/reference/config.md @@ -140,17 +140,19 @@ You would then change your npm script or CLI command to run that file with Node: --- -## Attributes +## Properties -| Attribute | Type | Description | +| Property | Type | Description | | :--------------- | :------------------------------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `log` | `Log` | [Configure logging behavior](/reference/logging) to either reduce/silence logs or to make them more verbose for debugging purposes. | | `source` | `string[]` | An array of file path [globs](https://github.com/isaacs/node-glob) to design token files. Style Dictionary will do a deep merge of all of the token files, allowing you to organize your files however you want. 
| | `include` | `string[]` | An array of file path [globs](https://github.com/isaacs/node-glob) to design token files that contain default styles. Style Dictionary uses this as a base collection of design tokens. The tokens found using the "source" attribute will overwrite tokens found using include. | | `tokens` | `Object` | The tokens object is a way to include inline design tokens as opposed to using the `source` and `include` arrays. | +| `expand` | `ExpandConfig` | Configures whether and how composite (object-value) tokens will be expanded into separate tokens. `false` by default. Supports either `boolean`, `ExpandFilter` function or an Object containing a `typesMap` property and optionally an `include` OR `exclude` property. | | `platforms` | `Record` | An object containing [platform](#platform) config objects that describe how the Style Dictionary should build for that platform. You can add any arbitrary attributes on this object that will get passed to formats and actions (more on these in a bit). This is useful for things like build paths, name prefixes, variable names, etc. | -| `parsers` | `Parser[]` | Custom [file parsers](/reference/hooks/parsers) to run on input files | -| `preprocessors` | `Record` | Custom [preprocessors](/reference/hooks/preprocessors) to run on the full token dictionary, before any transforms run, can be registered using `.registerPreprocessor`. The keys in this object will be the preprocessor's name | +| `hooks` | `Hooks` object | Object that contains all configured custom hooks: `preprocessors`. Note: `parsers`, `transforms`, `transformGroups`, `formats`, `fileHeaders`, `filters`, `actions` will be moved under property this later. Can be used to define hooks inline as an alternative to using `register<'Hook'>` methods. 
| +| `parsers` | `Parser[]` | Configured custom [file parsers](/reference/hooks/parsers) to run on input files | +| `preprocessors` | `string[]` | Which [preprocessors](/reference/hooks/preprocessors) (by name) to run on the full token dictionary, before any transforms run, can be registered using `.registerPreprocessor`. You can also configure this on the platform config level if you need to run it on the dictionary only for specific platforms. | | `transform` | `Record` | Custom [transforms](/reference/hooks/transforms) you can include inline rather than using `.registerTransform`. The keys in this object will be the transform's name, the value should be an object with `type` | | `transformGroup` | `Record` | Custom [transformGroups](/reference/hooks/transform_groups) you can include inline rather than using `.registerTransformGroup`. The keys in this object will be the transformGroup's name, the value should be an array with `transform`s | | `format` | `Record` | Custom [formats](/reference/hooks/formats) you can include inline in the configuration rather than using `.registerFormat`. The keys in this object will be for format's name and value should be the formatter function. | @@ -167,20 +169,22 @@ Log configuration object to configure the [logging behavior of Style Dictionary] A platform is a build target that tells Style Dictionary how to properly transform and format your design tokens for output to a specific platform. You can have as many platforms as you need and you can name them anything, there are no restrictions. 
-| Attribute | Type | Description | -| :--------------- | :--------- | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `transforms` | `string[]` | An array of [transform](/reference/hooks/transforms) keys to be performed on the design tokens. These will transform the tokens in a non-destructive way, allowing each platform to transform the tokens. Transforms to apply sequentially to all tokens. Can be a built-in one or you can create your own. | -| `transformGroup` | `string` | A string that maps to an array of transforms. This makes it easier to reference transforms by grouping them together. Can be combined with [transforms](/reference/hooks/transforms). | -| `buildPath` | `string` | Base path to build the files, must end with a trailing slash. | -| `options` | `Object` | Options that apply to all files in the platform, for example [`outputReferences`](/reference/hooks/format#references-in-output-files) and `showFileHeader` | -| `files` | `File[]` | [Files](#file) to be generated for this platform. | -| `actions` | `string[]` | [Actions](/reference/hooks/actions) to be performed after the files are built for that platform. Actions can be any arbitrary code you want to run like copying files, generating assets, etc. You can use pre-defined actions or create custom actions. 
| +| Property | Type | Description | +| :--------------- | :------------- | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `transforms` | `string[]` | An array of [transform](/reference/hooks/transforms) keys to be performed on the design tokens. These will transform the tokens in a non-destructive way, allowing each platform to transform the tokens. Transforms to apply sequentially to all tokens. Can be a built-in one or you can create your own. | +| `transformGroup` | `string` | A string that maps to an array of transforms. This makes it easier to reference transforms by grouping them together. Can be combined with [transforms](/reference/hooks/transforms). | +| `buildPath` | `string` | Base path to build the files, must end with a trailing slash. | +| `expand` | `ExpandConfig` | Configures whether and how composite (object-value) tokens will be expanded into separate tokens. `false` by default. Supports either `boolean`, `ExpandFilter` function or an Object containing a `typesMap` property and optionally an `include` OR `exclude` property. | +| `preprocessors` | `string[]` | Which [preprocessors](/reference/hooks/preprocessors) (by name) to run on the full token dictionary when building for this particular platform, before any transforms run, can be registered using `.registerPreprocessor`. You can also configure this on the global config. | +| `options` | `Object` | Options that apply to all files in the platform, for example [`outputReferences`](/reference/hooks/format#references-in-output-files) and `showFileHeader` | +| `files` | `File[]` | [Files](#file) to be generated for this platform. 
| +| `actions` | `string[]` | [Actions](/reference/hooks/actions) to be performed after the files are built for that platform. Actions can be any arbitrary code you want to run like copying files, generating assets, etc. You can use pre-defined actions or create custom actions. | ### File A File configuration object represents a single output file. The `options` object on the file configuration will take precedence over the `options` object defined at the platform level. Apart from the options listed below, any other options can be added, which can then be used inside custom [formats](/reference/hooks/formats). -| Attribute | Type | Description | +| Property | Type | Description | | :------------------------- | :------------------------------------ | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `destination` | `string` | Location to build the file, will be appended to the buildPath. | | `format` | `string` | [Format](/reference/hooks/formats) used to generate the file. Can be a built-in one or you can create your own via [registerFormat](/reference/api#registerformat). | @@ -189,3 +193,245 @@ A File configuration object represents a single output file. The `options` objec | `options.showFileHeader` | `boolean` | If the generated file should have a comment at the top about being generated. The default fileHeader comment has "Do not edit + Timestamp". By default is "true". 
| | `options.fileHeader` | `string \|function` | A custom fileHeader that can be either a name of a registered file header (string) or an inline [fileHeader](/reference/hooks/formats#customfileheader) function. | | `options.outputReferences` | `boolean \| OutputReferencesFunction` | If the file should keep token [references](/reference/hooks/formats#references-in-output-files). By default this is "false". Also allows passing a function to conditionally output references on a per token basis. | + +### Expand + +You can configure whether and how composite (object-value) tokens will be expanded into separate tokens. +By default, this functionality is disabled and for formats such as CSS where object values are not supported, you'd be relying on either a [custom value transform](/reference/hooks/transforms/#defining-custom-transforms) to turn such token values into strings, or [writing a custom format](/reference/hooks/formats/#custom-formats) to format object values into CSS compatible values. + +#### Expand usage + +Below are examples of how the `expand` property can be used. + +```js +{ + expand: true, // expand all object-value (composite) type tokens + + expand: {}, // equivalent to true + + // conditionally expand, executes this callback for each individual token + expand: (token, config, platformConfig) => true, + + // equivalent to true, but additionally passing a typesMap + expand: { + typesMap: { + width: 'dimension', + }, + }, + + // only expands typography and border tokens, also passes a typesMap + expand: { + include: ['typography', 'border'], + // more info about typesMap later... 
+ typesMap: { + // all width props are mapped to 'dimension' type + width: 'dimension', + typography: { + // fontSize prop is mapped to 'dimension' type if inside a typography composite type token + fontSize: 'dimension', + }, + }, + }, + + // expands everything except for typography and border tokens + expand: { + exclude: ['typography', 'border'], + }, + + // only expands tokens for which this function returns true + expand: { + include: (token, config, platformConfig) => true, + }, + + // expands everything except for tokens for which this function returns true + expand: { + exclude: (token, config, platformConfig) => true, + }, +} +``` + +The value of expand can be multiple things: + +- `boolean`, `false` by default, when set to `true`, any object-value (composite) design token will be expanded into multiple tokens, one for each property. +- a function of type `ExpandFilter`, e.g. `(token, options, platform) => true`, must return a `boolean`, when `true` will expand that individual token, arguments: + - `token`: the design token of which the value is an object (composite) + - `options`: the StyleDictionary config options + - `platform`: this is only passed when expand is used on the platform level, contains the platform specific config options +- An object: + - Empty, which is equivalent of passing `true` + - Containing just a `typesMap`, which is also equivalent of passing `true`, except you're also passing the `typesMap` + - Also containing an `include` or `exclude` property which can be either an array of composite types or an `ExpandFilter` function, to conditionally expand or negate expand of individual tokens + +#### Global vs Platform + +You can enable the expanding of tokens both on a global level and on a platform level. 
+ +One notable difference to keep in mind is that when you configure it on a global level, the token expansion will happen immediately **after** the [parsing hook](/reference/hooks/parsers) and **before** [preprocessing](/reference/hooks/preprocessors) or [transform](/reference/hooks/transforms) hooks.\ +This means that token metadata properties that are added by Style Dictionary such as `name`, `filePath`, `path`, `attributes` etc. are not present yet.\ +The advantage is having the expanded tokens (`sd.tokens` prop) available before doing any exporting to platforms. + +If you configure it on the platform level however, the metadata mentioned earlier is available and can be used to conditionally expand tokens. +It also allows you to expand tokens for some platforms but not for others.\ +The downside there is needing to configure it for every platform separately. + +:::caution +It's also important to note that if you configure expansion on the global level, you cannot undo those token expansions by negating it in the platform-specific expand configs. +::: + +#### Type Mapping + +While our expand utility comes with a `typesMap` out of the box that aligns with the [Design Token Community Group spec](https://design-tokens.github.io/community-group/format/#composite-design-token) to convert composite subtype properties to [defined DTCG types](https://design-tokens.github.io/community-group/format/#types), you can also pass a custom `typesMap` that will allow you to extend or override it. +A `typesMap` allows you to configure how object-value (composite) properties in the original token value should be mapped to the newly expanded individual tokens. 
+ +For example: + +```json title="tokens-input.json" +{ + "value": { + "width": "2px", + "style": "solid", + "color": "#000" + }, + "type": "border" +} +``` + +Here, according to the DTCG spec, you would probably want to map the `"width"` property to type [`"dimension"`](https://design-tokens.github.io/community-group/format/#dimension) and `"style"` property to type [`"strokeStyle"`](https://design-tokens.github.io/community-group/format/#stroke-style). +`"width"` is more of a general property where we always want to map it to `"dimension"` but border `"style"` is more specific to the border composite type, therefore this `typesMap` makes sense: + +```json title="config.json" +{ + "expand": { + "typesMap": { + "width": "dimension", + "border": { + "style": "strokeStyle" + } + } + } +} +``` + +Resulting in the following expanded output: + +```json title="tokens-output.json" +{ + "width": { + "value": "2px", + "type": "dimension" + }, + "style": { + "value": "solid", + "type": "strokeStyle" + }, + "color": { + "value": "#000", + "type": "color" + } +} +``` + +#### Example + +~ sd-playground + +```json tokens +{ + "border": { + "type": "border", + "value": { + "width": "2px", + "style": "solid", + "color": "#000" + } + }, + "typography": { + "type": "typography", + "value": { + "fontWeight": "800", + "fontSize": "16px", + "fontFamily": "Arial Black" + } + } +} +``` + +```json config +{ + "expand": { + "include": ["border"], + "typesMap": { + "border": { + "style": "borderStyle" + } + } + }, + "platforms": { + "css": { + "transformGroup": "css", + "files": [ + { + "destination": "vars.css", + "format": "css/variables" + } + ], + "expand": true + }, + "js": { + "transformGroup": "js", + "files": [ + { + "destination": "tokens.js", + "format": "javascript/es6" + } + ] + } + } +} +``` + +#### DTCG Type Map + +Below is the standard DTCG type map that the expand utility comes out of the box with: + +```js +const DTCGTypesMap = { + // 
https://design-tokens.github.io/community-group/format/#stroke-style + strokeStyle: { + // does not yet have its own type defined, but is an enum of: "round" | "butt" | "square" + lineCap: 'other', + // note that this is spec'd to be a dimension array, which is unspecified in the spec for dimension + // generally speaking, transforms that match dimension type tokens do not account for this potentially being an array + // therefore we map it to "other" for now... + dashArray: 'other', + }, + // https://design-tokens.github.io/community-group/format/#border + border: { + style: 'strokeStyle', + width: 'dimension', + }, + // https://design-tokens.github.io/community-group/format/#transition + transition: { + delay: 'duration', + // needs more discussion https://github.com/design-tokens/community-group/issues/103 + timingFunction: 'cubicBezier', + }, + // https://design-tokens.github.io/community-group/format/#shadow + shadow: { + offsetX: 'dimension', + offsetY: 'dimension', + blur: 'dimension', + spread: 'dimension', + }, + // https://design-tokens.github.io/community-group/format/#gradient + gradient: { + position: 'number', + }, + // https://design-tokens.github.io/community-group/format/#typography + typography: { + fontSize: 'dimension', + letterSpacing: 'dimension', + lineHeight: 'number', + }, +}; +``` diff --git a/docs/src/content/docs/version-4/migration.md b/docs/src/content/docs/version-4/migration.md index bb7c5337f..810efb2b2 100644 --- a/docs/src/content/docs/version-4/migration.md +++ b/docs/src/content/docs/version-4/migration.md @@ -110,6 +110,52 @@ StyleDictionary.registerFormat({ }); ``` +## Hooks APIs + +We've given a name to all of the things that you can register which will execute custom behavior during the Style Dictionary lifecycle: `hooks`. +Available hooks are: `parsers`, `preprocessors`, `transformGroups`, `transforms`, `formats`, `filters`, `fileHeaders`, `actions`. 
+ +:::note +The other hooks are also going to change similarly to preprocessors, in an effort to align these APIs and make them consistent across. +They will all be grouped under the `hooks` property, they will all use plural form vs singular (e.g. `transforms` vs `transform`), and lastly, +they will all use the same signature, with a `name` property and a handler function name that is the same as the hook name (e.g. `transformer` will be `transform`). +Parsers will also have to be applied explicitly similarly to preprocessors. +::: + +### Preprocessors + +Preprocessors, when registered, would always apply on a global level, without explicitly applying them in the config. + +This has been changed now: + +```js title="config.js" del={3-8} ins={9-24} +export default { + // register it inline or by SD.registerPreprocessor + preprocessors: { + foo: (dictionary) => { + // preprocess it + return dictionary; + }, + }, + hooks: { + preprocessors: { + foo: (dictionary) => { + // preprocess it + return dictionary; + }, + }, + }, + // apply it globally + preprocessors: ['foo'], + platforms: { + css: { + // or apply is per platform + preprocessors: ['foo'], + }, + }, +}; +``` + ## CTI reliance [CTI or Category / Type / Item](/info/tokens/#category--type--item) used to be the default way of structuring design tokens in Style Dictionary. 
diff --git a/docs/starlight-config.ts b/docs/starlight-config.ts index 439caf561..c3e6aa4f4 100644 --- a/docs/starlight-config.ts +++ b/docs/starlight-config.ts @@ -14,6 +14,9 @@ export default { slack: 'https://join.slack.com/t/tokens-studio/shared_invite/zt-1p8ea3m6t-C163oJcN9g3~YZTKRgo2hg', }, + tableOfContents: { + maxHeadingLevel: 4, + }, sidebar: [ { label: 'Getting Started', diff --git a/lib/Register.js b/lib/Register.js index 3fa3204e9..52bbde15c 100644 --- a/lib/Register.js +++ b/lib/Register.js @@ -3,6 +3,7 @@ import transformGroup from './common/transformGroups.js'; import format from './common/formats.js'; import action from './common/actions.js'; import filter from './common/filters.js'; +import { deepmerge } from './utils/deepmerge.js'; /** * @typedef {import('../types/File.d.ts').FileHeader} FileHeader @@ -15,6 +16,7 @@ import filter from './common/filters.js'; * @typedef {import('../types/Format.d.ts').Format} Format * @typedef {import('../types/Format.d.ts').Formatter} Formatter * @typedef {import('../types/Action.d.ts').Action} Action + * @typedef {{ preprocessors: Record>}} Hooks */ export class Register { @@ -26,6 +28,11 @@ export class Register { * * Therefore, we have to make use of static props vs instance props and use getters and setters to merge these together. */ + /** @type {Hooks} */ + static hooks = { + preprocessors: {}, + }; + static transform = transform; static transformGroup = transformGroup; static format = format; @@ -35,8 +42,19 @@ export class Register { static fileHeader = {}; /** @type {Parser[]} */ static parsers = []; // we need to initialise the array, since we don't have built-in parsers - /** @type {Record} */ - static preprocessors = {}; + + /** @type {Hooks} */ + get hooks() { + const ctor = /** @type {typeof Register} */ (this.constructor); + return deepmerge(ctor.hooks, this._hooks ?? 
{}); + } + + /** + * @param {Hooks} v + */ + set hooks(v) { + this._hooks = v; + } /** * @param {Transform} cfg @@ -342,23 +360,18 @@ export class Register { throw new Error(`${errorPrefix} Preprocessor.preprocessor must be a function`); } // make sure to trigger the setter - target.preprocessors = { - ...target.preprocessors, - [name]: preprocessor, + target.hooks = { + ...target.hooks, + preprocessors: { + ...target.hooks.preprocessors, + [name]: { + preprocessor, + }, + }, }; return target; } - get preprocessors() { - const ctor = /** @type {typeof Register} */ (this.constructor); - return { ...ctor.preprocessors, ...this._preprocessors }; - } - - /** @param {Record} v */ - set preprocessors(v) { - this._preprocessors = v; - } - /** * @param {{name: string; fileHeader: FileHeader;}} cfg */ @@ -412,6 +425,9 @@ export class Register { this.filter = {}; this.fileHeader = {}; this.parsers = []; // we need to initialise the array, since we don't have built-in parsers - this.preprocessors = {}; + + this.hooks = { + preprocessors: {}, + }; } } diff --git a/lib/StyleDictionary.js b/lib/StyleDictionary.js index 68e0450d5..c637bc6e1 100644 --- a/lib/StyleDictionary.js +++ b/lib/StyleDictionary.js @@ -27,6 +27,7 @@ import GroupMessages, { verbosityInfo } from './utils/groupMessages.js'; import flattenTokens from './utils/flattenTokens.js'; import { detectDtcgSyntax } from './utils/detectDtcgSyntax.js'; import { preprocess } from './utils/preprocess.js'; +import { typeDtcgDelegate } from './utils/typeDtcgDelegate.js'; import transformObject from './transform/object.js'; import transformConfig from './transform/config.js'; @@ -35,12 +36,15 @@ import buildFiles from './buildFiles.js'; import cleanFiles from './cleanFiles.js'; import cleanDirs from './cleanDirs.js'; import cleanActions from './cleanActions.js'; +import { expandTokens } from './utils/expandObjectTokens.js'; /** * @typedef {import('../types/Volume.d.ts').Volume} Volume * @typedef 
{import('../types/Config.d.ts').Config} Config * @typedef {import('../types/Config.d.ts').PlatformConfig} PlatformConfig * @typedef {import('../types/Config.d.ts').LogConfig} LogConfig + * @typedef {import('../types/Config.d.ts').Expand} Expand + * @typedef {import('../types/Config.d.ts').ExpandConfig} ExpandConfig * @typedef {import('../types/DesignToken.d.ts').DesignToken} Token * @typedef {import('../types/DesignToken.d.ts').TransformedToken} TransformedToken * @typedef {import('../types/DesignToken.d.ts').DesignTokens} Tokens @@ -82,7 +86,7 @@ export default class StyleDictionary extends Register { filter: this.filter, fileHeader: this.fileHeader, parsers: this.parsers, - preprocessors: this.preprocessors, + hooks: this.hooks, }, this._options ?? {}, ); @@ -121,8 +125,14 @@ export default class StyleDictionary extends Register { this.source = []; /** @type {string[]} */ this.include = []; + /** @type {ExpandConfig|undefined} */ + this.expand = undefined; + /** @type {Record} */ + this.expandTypesMap = {}; /** @type {Record} */ this.platforms = {}; + /** @type {string[]} */ + this.preprocessors = []; if (volume) { // when a user sets a custom FS shim, mark it for later reference volume.__custom_fs__ = true; @@ -314,8 +324,14 @@ export default class StyleDictionary extends Register { } // Merge inline, include, and source tokens - const unprocessedTokens = deepExtend([{}, inlineTokens, includeTokens, sourceTokens]); - this.tokens = await preprocess(unprocessedTokens, this.preprocessors); + let tokens = deepExtend([{}, inlineTokens, includeTokens, sourceTokens]); + if (this.usesDtcg) { + tokens = typeDtcgDelegate(tokens); + } + if (this.shouldRunExpansion(this.options.expand)) { + tokens = expandTokens(tokens, this.options); + } + this.tokens = await preprocess(tokens, this.preprocessors, this.hooks.preprocessors); this.options = { ...this.options, usesDtcg: this.usesDtcg }; this.hasInitializedResolve(null); @@ -323,6 +339,26 @@ export default class 
StyleDictionary extends Register { return this; } + /** + * @param {ExpandConfig} [expandCfg] + * @returns + */ + shouldRunExpansion(expandCfg) { + if (expandCfg !== undefined) { + if ( + // run tokens expansion if the config is not false or if it's an object + // and not every prop of this object is false + !( + (typeof expandCfg === 'boolean' && expandCfg === false) || + (typeof expandCfg === 'object' && Object.values(expandCfg).every((exp) => exp === false)) + ) + ) { + return true; + } + } + return false; + } + /** * @param {string} platform * @returns {Promise} @@ -337,7 +373,17 @@ export default class StyleDictionary extends Register { // We don't want to mutate the original object const platformConfig = transformConfig(this.platforms[platform], this, platform); - let exportableResult = this.tokens; + let platformProcessedTokens = this.tokens; + if (this.shouldRunExpansion(platformConfig.expand)) { + platformProcessedTokens = expandTokens(platformProcessedTokens, this.options, platformConfig); + } + platformProcessedTokens = await preprocess( + platformProcessedTokens, + platformConfig.preprocessors, + this.hooks.preprocessors, + ); + + let exportableResult = platformProcessedTokens; /** * @type {string[]} diff --git a/lib/utils/deepmerge.js b/lib/utils/deepmerge.js index 276749db4..0e2e56c1b 100644 --- a/lib/utils/deepmerge.js +++ b/lib/utils/deepmerge.js @@ -1,3 +1,15 @@ +/* + * Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with + * the License. A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR + * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions + * and limitations under the License. 
+ */ import _deepmerge from '@bundled-es-modules/deepmerge'; import isPlainObject from 'is-plain-obj'; diff --git a/lib/utils/detectDtcgSyntax.js b/lib/utils/detectDtcgSyntax.js index a3f824986..6751f9880 100644 --- a/lib/utils/detectDtcgSyntax.js +++ b/lib/utils/detectDtcgSyntax.js @@ -1,3 +1,15 @@ +/* + * Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with + * the License. A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR + * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions + * and limitations under the License. + */ import isPlainObject from 'is-plain-obj'; /** diff --git a/lib/utils/expandObjectTokens.js b/lib/utils/expandObjectTokens.js new file mode 100644 index 000000000..7be36b1f0 --- /dev/null +++ b/lib/utils/expandObjectTokens.js @@ -0,0 +1,231 @@ +/* + * Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with + * the License. A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR + * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions + * and limitations under the License. 
+ */ +import { resolveReferences } from './references/resolveReferences.js'; +import usesReferences from './references/usesReferences.js'; +import { deepmerge } from './deepmerge.js'; + +/** + * @typedef {import('../../types/DesignToken.d.ts').DesignToken} DesignToken + * @typedef {import('../../types/DesignToken.d.ts').DesignToken} DesignTokens + * @typedef {import('../../types/Config.d.ts').Expand} Expand + * @typedef {import('../../types/Config.d.ts').ExpandConfig} ExpandConfig + * @typedef {import('../../types/Config.d.ts').ExpandFilter} ExpandFilter + * @typedef {import('../../types/Config.d.ts').Config} Config + * @typedef {import('../../types/Config.d.ts').PlatformConfig} PlatformConfig + */ + +export const DTCGTypesMap = { + // https://design-tokens.github.io/community-group/format/#border + border: { + style: 'strokeStyle', + width: 'dimension', + }, + // https://design-tokens.github.io/community-group/format/#transition + transition: { + delay: 'duration', + // needs more discussion https://github.com/design-tokens/community-group/issues/103 + timingFunction: 'cubicBezier', + }, + // https://design-tokens.github.io/community-group/format/#shadow + shadow: { + offsetX: 'dimension', + offsetY: 'dimension', + blur: 'dimension', + spread: 'dimension', + }, + // https://design-tokens.github.io/community-group/format/#gradient + gradient: { + position: 'number', + }, + // https://design-tokens.github.io/community-group/format/#typography + typography: { + fontSize: 'dimension', + letterSpacing: 'dimension', + lineHeight: 'number', + }, +}; + +/** + * expandTypesMap and this function may be slightly confusing, + * refer to the unit tests for a better explanation + * @param {string} subtype + * @param {string} compositionType + * @param {Expand['typesMap']} expandTypesMap + * @returns {string} + */ +export function getTypeFromMap(subtype, compositionType, expandTypesMap = {}) { + const typeMap = deepmerge(DTCGTypesMap, expandTypesMap); + // the map might exist 
within the compositionType + const mapObjForComp = typeMap[compositionType]; + // or instead, it may be on the top-level, independent of the compositionType + const mappedSubType = typeMap[subtype]; + if (typeof mapObjForComp === 'object' && mapObjForComp[subtype]) { + return mapObjForComp[subtype]; + // the type mapping might be on the top-level, independent of the compositionType + } else if (typeof mappedSubType === 'string') { + return mappedSubType; + } + return subtype; +} + +/** + * @param {DesignToken} token + * @param {Config} opts + * @param {PlatformConfig} [platform] + */ +function shouldExpand(token, opts, platform) { + const expand = platform?.expand ?? opts.expand ?? false; + + /** @type {ExpandFilter | boolean} */ + let condition = false; + let reverse = false; + + if (typeof expand === 'function' || typeof expand === 'boolean') { + condition = expand; + } else { + const type = /** @type {string} */ (opts.usesDtcg ? token.$type : token.type); + if (expand.include === undefined && expand.exclude === undefined) { + condition = true; + } + + if (expand.include) { + condition = + typeof expand.include === 'function' ? expand.include : expand.include.includes(type); + } + + if (/** @type {Expand} */ (expand).exclude) { + if (expand.include) { + throw Error( + 'expand.include should not be combined with expand.exclude, use one or the other.', + ); + } + condition = + typeof expand.exclude === 'function' ? expand.exclude : expand.exclude.includes(type); + reverse = true; + } + } + + let result = condition; + if (typeof condition === 'function') { + result = condition(token, opts, platform); + } + + return reverse ? 
!result : result; +} + +/** + * @param {DesignToken} token + * @param {Config} opts + * @param {PlatformConfig} [platform] + */ +export function expandToken(token, opts, platform) { + const uses$ = opts.usesDtcg; + // create a copy of the token without the value/type, so that we have all the meta props + // which have to be inherited in the expanded tokens. + /** @type {Record} */ + const copyMeta = {}; + Object.keys(token) + // either filter $value & $type, or value and type depending on whether $ is used + .filter( + (key) => + !['$value', 'value', '$type', 'type'] + .filter((key) => (uses$ ? key.startsWith('$') : !key.startsWith('$'))) + .includes(key), + ) + .forEach((key) => { + copyMeta[key] = token[key]; + }); + + const value = uses$ ? token.$value : token.value; + // the $type and type may both be missing if the $type is coming from an ancestor token group, + // however, prior to expand and preprocessors, we run a step so missing $type is added from the closest ancestor + const compositionType = /** @type {string} */ (token.$type ?? token.type); + /** @type {DesignTokens} */ + const expandedTokenObj = {}; + /** @type {Expand['typesMap']} */ + let typesMap = {}; + const expand = platform?.expand ?? opts.expand; + if (typeof expand === 'object') { + typesMap = expand.typesMap ?? {}; + } + + // array of objects is also valid e.g. multi-shadow values + // https://github.com/design-tokens/community-group/issues/100 there seems to be a consensus for this + // so this code adds forward-compatibility with that + const _value = Array.isArray(value) ? 
value : [value]; + + _value.forEach((objectVal, index, arr) => { + let expandedTokenObjRef = expandedTokenObj; + // more than 1 means multi-value, meaning we should add nested token group + // with index to the expanded result + if (arr.length > 1) { + expandedTokenObj[index + 1] = {}; + expandedTokenObjRef = expandedTokenObjRef[index + 1]; + } + Object.entries(objectVal).forEach(([key, value]) => { + expandedTokenObjRef[key] = { + ...copyMeta, + [`${uses$ ? '$' : ''}value`]: value, + [`${uses$ ? '$' : ''}type`]: getTypeFromMap(key, compositionType, typesMap), + }; + }); + }); + + return expandedTokenObj; +} + +/** + * @param {DesignTokens | DesignToken} slice + * @param {DesignTokens} original + * @param {Config} opts + * @param {PlatformConfig} [platform] + */ +function expandTokensRecurse(slice, original, opts, platform) { + for (const key in slice) { + const token = slice[key]; + if (typeof token !== 'object' || token === null) { + continue; + } + const uses$ = opts.usesDtcg; + let value = uses$ ? token.$value : token.value; + if (value) { + // if our token is a ref, we have to resolve it first in order to expand its value + if (typeof value === 'string' && usesReferences(value)) { + value = resolveReferences(value, original, { usesDtcg: uses$ }); + token[uses$ ? '$value' : 'value'] = value; + } + if (typeof value === 'object' && shouldExpand(token, opts, platform)) { + // TODO: Support nested objects, e.g. a border can have a style prop (strokeStyle) which itself + // can also be an object value with dashArray and lineCap props. 
+ // More info: https://design-tokens.github.io/community-group/format/#example-border-composite-token-examples + slice[key] = expandToken(token, opts, platform); + } + } else { + expandTokensRecurse(token, original, opts, platform); + } + } +} + +/** + * @param {DesignTokens} dictionary + * @param {Config} opts + * @param {PlatformConfig} [platform] + */ +export function expandTokens(dictionary, opts, platform) { + // create a copy in which we will do mutations + const copy = structuredClone(dictionary); + // create a separate copy to check as the original object + const original = structuredClone(dictionary); + expandTokensRecurse(copy, original, opts, platform); + return copy; +} diff --git a/lib/utils/index.js b/lib/utils/index.js index 6c1fbc699..b278a19d7 100644 --- a/lib/utils/index.js +++ b/lib/utils/index.js @@ -17,7 +17,7 @@ import { resolveReferences } from './references/resolveReferences.js'; import { outputReferencesFilter } from './references/outputReferencesFilter.js'; import { outputReferencesTransformed } from './references/outputReferencesTransformed.js'; import flattenTokens from './flattenTokens.js'; -import { typeDtcgDelegate } from './preprocess.js'; +import { typeDtcgDelegate } from './typeDtcgDelegate.js'; // Public style-dictionary/utils API export { diff --git a/lib/utils/preprocess.js b/lib/utils/preprocess.js index 1525a7907..2584a3970 100644 --- a/lib/utils/preprocess.js +++ b/lib/utils/preprocess.js @@ -11,64 +11,31 @@ * and limitations under the License. 
*/ -import isPlainObject from 'is-plain-obj'; - /** * @typedef {import('../../types/DesignToken.d.ts').DesignTokens} DesignTokens - * @typedef {import('../../types/DesignToken.d.ts').DesignToken} DesignToken * @typedef {import('../../types/Preprocessor.d.ts').Preprocessor} Preprocessor - * @typedef {import('../../types/Preprocessor.d.ts').preprocessor} preprocessor */ -/** - * @param {DesignTokens} tokens - * @returns - */ -export function typeDtcgDelegate(tokens) { - const clone = structuredClone(tokens); - - /** - * @param {DesignTokens | DesignToken} slice - * @param {string} [_type] - */ - const recurse = (slice, _type) => { - let type = _type; // keep track of type through the stack - const keys = Object.keys(slice); - if (!keys.includes('$type') && type && keys.includes('$value')) { - slice.$type = type; - } - - Object.entries(slice).forEach(([key, val]) => { - if (key === '$type') { - type = val; - } - - if (isPlainObject(val)) { - recurse(val, type); - } - }); - }; - - recurse(clone); - return clone; -} - /** * Run all registered preprocessors on the dictionary, * returning the preprocessed dictionary in each step. 
* * @param {DesignTokens} tokens - * @param {Record} [preprocessorObj] + * @param {string[]} [appliedPreprocessors] + * @param {Record>} [preprocessorObj] * @returns {Promise} */ -export async function preprocess(tokens, preprocessorObj = {}) { - let processedTokens = typeDtcgDelegate(tokens); +export async function preprocess(tokens, appliedPreprocessors = [], preprocessorObj = {}) { + let processedTokens = tokens; - const preprocessors = Object.values(preprocessorObj); + const preprocessors = Object.entries(preprocessorObj); if (preprocessors.length > 0) { - for (const preprocessor of preprocessors) { - processedTokens = await preprocessor(processedTokens); + for (const [key, pre] of preprocessors) { + if (appliedPreprocessors.includes(key)) { + processedTokens = await pre.preprocessor(processedTokens); + } } } + return processedTokens; } diff --git a/lib/utils/typeDtcgDelegate.js b/lib/utils/typeDtcgDelegate.js new file mode 100644 index 000000000..7b0ce5108 --- /dev/null +++ b/lib/utils/typeDtcgDelegate.js @@ -0,0 +1,39 @@ +import isPlainObject from 'is-plain-obj'; + +/** + * @typedef {import('../../types/DesignToken.d.ts').DesignTokens} DesignTokens + * @typedef {import('../../types/DesignToken.d.ts').DesignToken} DesignToken + */ + +/** + * @param {DesignTokens} tokens + * @returns + */ +export function typeDtcgDelegate(tokens) { + const clone = structuredClone(tokens); + + /** + * @param {DesignTokens | DesignToken} slice + * @param {string} [_type] + */ + const recurse = (slice, _type) => { + let type = _type; // keep track of type through the stack + const keys = Object.keys(slice); + if (!keys.includes('$type') && type && keys.includes('$value')) { + slice.$type = type; + } + + Object.entries(slice).forEach(([key, val]) => { + if (key === '$type') { + type = val; + } + + if (isPlainObject(val)) { + recurse(val, type); + } + }); + }; + + recurse(clone); + return clone; +} diff --git a/types/Config.d.ts b/types/Config.d.ts index f427c3837..6cd23a3dd 100644 
--- a/types/Config.d.ts +++ b/types/Config.d.ts @@ -11,7 +11,7 @@ * and limitations under the License. */ -import type { DesignTokens, TransformedToken } from './DesignToken.d.ts'; +import type { DesignToken, DesignTokens, TransformedToken } from './DesignToken.d.ts'; import type { Filter, Matcher } from './Filter.d.ts'; import type { FileHeader, File, FormattingOptions } from './File.d.ts'; import type { Parser } from './Parser.d.ts'; @@ -19,6 +19,7 @@ import type { Preprocessor } from './Preprocessor.d.ts'; import type { Transform } from './Transform.d.ts'; import type { Formatter, OutputReferences } from './Format.d.ts'; import type { Action } from './Action.d.ts'; +import type { Hooks } from '../lib/Register.js'; export interface LocalOptions { showFileHeader?: boolean; @@ -59,16 +60,36 @@ export interface LogConfig { verbosity?: 'default' | 'silent' | 'verbose'; } +export type ExpandFilter = ( + token: DesignToken, + options: Config, + platform?: PlatformConfig, +) => boolean; + +export interface Expand { + typesMap?: Record | string>; + include?: string[] | ExpandFilter; + exclude?: string[] | ExpandFilter; +} + +export type ExpandConfig = Expand | boolean | ExpandFilter; + +export interface Hooks { + preprocessors?: Record>; +} + export interface PlatformConfig extends RegexOptions { log?: LogConfig; transformGroup?: string; transforms?: string[] | Omit[]; - basePxFontSize?: number; + expand?: ExpandConfig; prefix?: string; buildPath?: string; files?: File[]; actions?: string[] | Omit[]; options?: LocalOptions; + // Allows adding custom options on the platform level which is how you can pass external options to transforms + [key: string]: any; } export interface Config { @@ -76,9 +97,11 @@ export interface Config { source?: string[]; include?: string[]; tokens?: DesignTokens; + hooks?: Hooks; + expand?: ExpandConfig; platforms?: Record; parsers?: Parser[]; - preprocessors?: Record; + preprocessors?: string[]; transform?: Record; transformGroup?: Record; 
format?: Record;