diff --git a/.changeset/fuzzy-beers-sell.md b/.changeset/fuzzy-beers-sell.md new file mode 100644 index 0000000..68c88a3 --- /dev/null +++ b/.changeset/fuzzy-beers-sell.md @@ -0,0 +1,5 @@ +--- +'@tokens-studio/sd-transforms': minor +--- + +BREAKING: swap expandComposites 2nd argument and 3rd argument. 2nd argument is now TransformOptions and 3rd argument the filePath (string). This used to be vice versa and was inconsistent with the other parser functions. diff --git a/.changeset/nice-tips-attack.md b/.changeset/nice-tips-attack.md new file mode 100644 index 0000000..2487395 --- /dev/null +++ b/.changeset/nice-tips-attack.md @@ -0,0 +1,5 @@ +--- +'@tokens-studio/sd-transforms': minor +--- + +Add and expose a parseTokens function to make it more developer friendly to add the sd-transforms parsing step to your own custom parser. diff --git a/.changeset/seven-comics-fly.md b/.changeset/seven-comics-fly.md new file mode 100644 index 0000000..53589d8 --- /dev/null +++ b/.changeset/seven-comics-fly.md @@ -0,0 +1,5 @@ +--- +'@tokens-studio/sd-transforms': minor +--- + +BREAKING: Use structuredClone instead of shallow Object.assign copy in parsers. Must use NodeJS v17 at minimum now, as opposed to v15.14.0 before this update. diff --git a/.changeset/silver-lobsters-know.md b/.changeset/silver-lobsters-know.md new file mode 100644 index 0000000..4a68745 --- /dev/null +++ b/.changeset/silver-lobsters-know.md @@ -0,0 +1,5 @@ +--- +'@tokens-studio/sd-transforms': minor +--- + +Add the addAttributeCTI option to registerTransforms function, to automatically add `attribute/cti` predefined transform to the `tokens-studio` transformGroup. 
diff --git a/README.md b/README.md index 9202b81..a92e7af 100644 --- a/README.md +++ b/README.md @@ -9,8 +9,8 @@ - [Using the transforms](#using-the-transforms) - [Custom Transform Group](#custom-transform-group) - [Options](#options) -- [Full example](#full-example) - - [Themes full example](#themes-full-example) +- [Complete example](#full-example) + - [Themes complete example](#themes-complete-example) - [Multi-dimensional theming](#multi-dimensional-theming) - [Transforms](#transforms) - [Troubleshooting](#not-sure-how-to-fix-your-issue) @@ -112,7 +112,7 @@ StyleDictionary.registerTransform({ In Style-Dictionary, [`transformGroup` and `transforms` cannot be combined in a platform inside your config](https://github.com/amzn/style-dictionary/issues/813). -Therefore, if you wish to use the transform group, but adjust, add or remove a few transforms, your best option is creating a custom transformGroup: +Therefore, if you wish to use the transform group, but adjust, add or remove a few transforms, your best option is to create a custom transform group: ```js const { transforms } = require('@tokens-studio/sd-transforms'); @@ -127,6 +127,9 @@ StyleDictionary.registerTransformGroup({ }); ``` +> Note: it is easy to change the casing or to add attributes/cti transform to the group, without needing to create a custom transform group. +> For this, see section "Options" below for the `casing` and `addAttributeCTI` option. + ### Options You can pass options to the `registerTransforms` function. 
@@ -152,6 +155,7 @@ Options: | name | type | required | default | description | | ----------------------------- | ------------------------ | -------- | --------------- | ------------------------------------------------------------------------------------------------------------------------------------- | | excludeParentKeys | boolean | ❌ | `false` | Whether or not to exclude parent keys from your token files | +| addAttributeCTI | boolean | ❌ | `false` | Whether or not to add `'attribute/cti'` predefined transform to the `tokens-studio` transformGroup | | alwaysAddFontStyle | boolean | ❌ | `false` | Whether or not to always add a 'normal' fontStyle property to typography tokens which do not have explicit fontStyle | | casing | string | ❌ | `camel` | What kind of casing to use for token names. Options: [`camel`, `pascal`, `snake`, `kebab`, `constant`] | | expand | boolean \| ExpandOptions | ❌ | See props below | `false` to not register the parser at all. By default, expands composition tokens. Optionally, border, shadow and typography as well. | @@ -163,8 +167,8 @@ Options: | ['ts/color/modifiers'].format | ColorModifierFormat | ❌ | `undefined` | Color modifier output format override ('hex' \| 'hsl' \| 'lch' \| 'p3' \| 'srgb'), uses local format or modifier space as default | | | -> Note: you can also import and use the `expandComposites` function to run the expansion on your token object manually. -> Handy if you have your own parsers set up (e.g. for JS files), and you want the expansions to work there too. +> Note: you can also import and use the `parseTokens` function to run the parsing steps on your tokens object manually. +> Handy if you have your own parsers set up (e.g. for JS files), and you want the parser-based features like composites-expansion to work there too. 
## Full example @@ -221,7 +225,7 @@ sd.cleanAllPlatforms(); sd.buildAllPlatforms(); ``` -#### To run it use following command +To run it use the following command ```sh node build-output.js @@ -230,7 +234,7 @@ node build-output.js > Note: make sure to choose either the full transformGroup, **OR** its separate transforms so you can adjust or add your own. > [Combining a transformGroup with a transforms array can give unexpected results](https://github.com/amzn/style-dictionary/issues/813). -### Themes full example +### Themes: complete example You might be using Themes in the PRO version of Tokens Studio. @@ -378,7 +382,7 @@ run(); ### ts/descriptionToComment -This transform maps token descriptions to comments. +This transform maps token descriptions into comments. **matches**: All tokens that have a description property. @@ -443,7 +447,7 @@ This transform checks and evaluates math expressions ### ts/size/px -This transform adds `px` as a unit when missing to tokens. +This transform adds `px` as a unit when dimension-like tokens do not have a unit. **matches**: `token.type` is one of `['sizing', 'spacing', 'borderRadius', 'borderWidth', 'fontSizes', 'dimension']` @@ -471,7 +475,7 @@ This transform adds `px` as a unit when missing to tokens. ### ts/opacity -This transforms opacity token values declared with `%` to a number between `0` and `1`. +This transforms opacity token values declared with `%` into a number between `0` and `1`. **matches**: `token.type` is `'opacity'` @@ -499,7 +503,7 @@ This transforms opacity token values declared with `%` to a number between `0` a ### ts/size/lineheight -This transforms line-height token values declared with `%` to a a unitless value. +This transforms line-height token values declared with `%` into a unitless value. 
**matches**: `token.type` is `'lineHeights'` @@ -645,7 +649,7 @@ This transforms letter-spacing token values declared with `%` to a value with `e ### ts/color/css/hexrgba -This transforms color token values with Figma's "hex code RGBA" to an actual `rgba()` format +This transforms color token values with Figma's "hex code RGBA" into actual `rgba()` format **matches**: `token.type` is `'color'` @@ -701,7 +705,7 @@ This transforms font-family token values into valid CSS, adding single quotes if ### ts/typography/css/shorthand -This transforms typography tokens to a valid CSS shorthand +This transforms typography tokens into a valid CSS shorthand **matches**: `token.type` is `'typography'` @@ -733,7 +737,7 @@ This transforms typography tokens to a valid CSS shorthand ### ts/shadow/css/shorthand -This transforms shadow tokens to a valid CSS shadow shorthand +This transforms shadow tokens into a valid CSS shadow shorthand **matches**: `token.type` is `'boxShadow'` @@ -766,7 +770,7 @@ This transforms shadow tokens to a valid CSS shadow shorthand ### ts/border/css/shorthand -This transforms border tokens to a valid CSS border shorthand +This transforms border tokens into a valid CSS border shorthand **matches**: `token.type` is `'border'` @@ -795,13 +799,13 @@ This transforms border tokens to a valid CSS border shorthand } ``` -## Not sure how to fix your issue ? +## Not sure how to fix your issue? -### Create a reproduction by :- +### Create a reproduction by:- -1. Open configurator tool [link](https://configurator.tokens.studio/) +1. Open the configurator tool [link](https://configurator.tokens.studio/) 2. Upload your tokens and add your style dictionary config and transforms -3. Copy the Url as it will include your settings. -4. Join our slack [link](https://tokens.studio/slack) -5. Open style-dictionary-configurator channel. -6. Create a thread about your issue and paste your reproduction link inside it. +3. Copy the URL as it will include your settings +4. 
Join our Slack [link](https://tokens.studio/slack) +5. Open style-dictionary-configurator channel +6. Create a thread about your issue and paste your reproduction link inside it diff --git a/package.json b/package.json index e3f9a24..566fae3 100644 --- a/package.json +++ b/package.json @@ -80,7 +80,7 @@ "style-dictionary" ], "engines": { - "node": ">=15.14.0" + "node": ">=17.0.0" }, "prettier": { "printWidth": 100, diff --git a/src/TransformOptions.ts b/src/TransformOptions.ts index 917283b..7643738 100644 --- a/src/TransformOptions.ts +++ b/src/TransformOptions.ts @@ -15,7 +15,7 @@ export type Expandables = export const expandablesAsStringsArr = ['composition', 'typography', 'border', 'boxShadow']; export type ExpandablesAsStrings = (typeof expandablesAsStringsArr)[number]; -export type ExpandFilter = (token: T, filePath: string) => boolean; +export type ExpandFilter = (token: T, filePath?: string) => boolean; export interface ExpandOptions { typography?: boolean | ExpandFilter; // default false @@ -31,6 +31,7 @@ export interface ColorModifierOptions { } export interface TransformOptions { + addAttributeCTI?: boolean; casing?: 'camel' | 'pascal' | 'snake' | 'kebab' | 'constant'; alwaysAddFontStyle?: boolean; expand?: ExpandOptions | false; diff --git a/src/index.ts b/src/index.ts index e41a5f2..70a5266 100644 --- a/src/index.ts +++ b/src/index.ts @@ -4,6 +4,7 @@ export { transforms } from './registerTransforms.js'; export { expandComposites } from './parsers/expand-composites.js'; export { excludeParentKeys } from './parsers/exclude-parent-keys.js'; export { addFontStyles } from './parsers/add-font-styles.js'; +export { parseTokens } from './parsers/parse-tokens.js'; export { mapDescriptionToComment } from './mapDescriptionToComment.js'; export { checkAndEvaluateMath } from './checkAndEvaluateMath.js'; diff --git a/src/parsers/add-font-styles.ts b/src/parsers/add-font-styles.ts index 431bc02..7595117 100644 --- a/src/parsers/add-font-styles.ts +++ 
b/src/parsers/add-font-styles.ts @@ -50,7 +50,7 @@ export function addFontStyles( dictionary: DeepKeyTokenMap, transformOpts?: TransformOptions, ): DeepKeyTokenMap { - const copy = { ...dictionary }; + const copy = structuredClone(dictionary); recurse(copy, copy, transformOpts?.alwaysAddFontStyle); return copy; } diff --git a/src/parsers/expand-composites.ts b/src/parsers/expand-composites.ts index 65fd181..c652e31 100644 --- a/src/parsers/expand-composites.ts +++ b/src/parsers/expand-composites.ts @@ -64,7 +64,7 @@ export function expandToken(compToken: SingleToken, isShadow = false): Si function shouldExpand( token: T, condition: boolean | ExpandFilter, - filePath: string, + filePath?: string, ): boolean { if (typeof condition === 'function') { return condition(token, filePath); @@ -75,8 +75,8 @@ function shouldExpand( function recurse( slice: DeepKeyTokenMap | SingleToken, copy: DeepKeyTokenMap | SingleToken, - filePath: string, transformOpts: TransformOptions = {}, + filePath?: string, ) { const opts = { ...transformOpts, @@ -110,17 +110,17 @@ function recurse( } } } else { - recurse(token, copy, filePath, transformOpts); + recurse(token, copy, transformOpts, filePath); } } } export function expandComposites( dictionary: DeepKeyTokenMap | SingleToken, - filePath: string, transformOpts?: TransformOptions, + filePath?: string, ): DeepKeyTokenMap | SingleToken { - const copy = { ...dictionary }; - recurse(copy, copy, filePath, transformOpts); + const copy = structuredClone(dictionary); + recurse(copy, copy, transformOpts, filePath); return copy; } diff --git a/src/parsers/parse-tokens.ts b/src/parsers/parse-tokens.ts new file mode 100644 index 0000000..c34a93d --- /dev/null +++ b/src/parsers/parse-tokens.ts @@ -0,0 +1,16 @@ +import { DeepKeyTokenMap } from '@tokens-studio/types'; +import { TransformOptions } from '../TransformOptions.js'; +import { excludeParentKeys } from './exclude-parent-keys.js'; +import { addFontStyles } from './add-font-styles.js'; +import { 
expandComposites } from './expand-composites.js'; + +export function parseTokens( + tokens: DeepKeyTokenMap, + transformOpts?: TransformOptions, + filePath?: string, +) { + const excluded = excludeParentKeys(tokens, transformOpts); + const withFontStyles = addFontStyles(excluded, transformOpts); + const expanded = expandComposites(withFontStyles, transformOpts, filePath); + return expanded; +} diff --git a/src/registerTransforms.ts b/src/registerTransforms.ts index 8539576..77c21d1 100644 --- a/src/registerTransforms.ts +++ b/src/registerTransforms.ts @@ -12,10 +12,8 @@ import { checkAndEvaluateMath } from './checkAndEvaluateMath.js'; import { mapDescriptionToComment } from './mapDescriptionToComment.js'; import { transformColorModifiers } from './color-modifiers/transformColorModifiers.js'; import { TransformOptions } from './TransformOptions.js'; -import { expandComposites } from './parsers/expand-composites.js'; -import { excludeParentKeys } from './parsers/exclude-parent-keys.js'; import { transformOpacity } from './transformOpacity.js'; -import { addFontStyles } from './parsers/add-font-styles.js'; +import { parseTokens } from './parsers/parse-tokens.js'; const isBrowser = typeof window === 'object'; @@ -58,11 +56,8 @@ export async function registerTransforms(sd: Core, transformOpts?: TransformOpti _sd.registerParser({ pattern: /\.json$/, parse: ({ filePath, contents }) => { - const obj = JSON.parse(contents); - const excluded = excludeParentKeys(obj, transformOpts); - const withFontStyles = addFontStyles(excluded, transformOpts); - const expanded = expandComposites(withFontStyles, filePath, transformOpts); - return expanded as DesignTokens; + const tokens = JSON.parse(contents); + return parseTokens(tokens, transformOpts, filePath) as DesignTokens; }, }); } @@ -195,6 +190,7 @@ export async function registerTransforms(sd: Core, transformOpts?: TransformOpti _sd.registerTransformGroup({ name: 'tokens-studio', transforms: [ + ...(transformOpts?.addAttributeCTI 
=== true ? ['attribute/cti'] : []), ...transforms, // by default we go with camel, as having no default will likely give the user // errors straight away. This can be overridden by manually passing an array of transforms diff --git a/test/integration/sd-transforms.test.ts b/test/integration/sd-transforms.test.ts index ff502a4..9dd666e 100644 --- a/test/integration/sd-transforms.test.ts +++ b/test/integration/sd-transforms.test.ts @@ -124,4 +124,16 @@ describe('sd-transforms smoke tests', () => { --sd-uses-color: rgba(255, 0, 255, 1); }`); }); + + it('allows easily adding attribute/cti transform to tokens-studio group', async () => { + if (dict) { + cleanup(dict); + } + dict = init(cfg, { 'ts/color/modifiers': { format: 'hex' }, addAttributeCTI: true }); + const enrichedTokens = dict.exportPlatform('css'); // platform to parse for is 'css' in this case + expect(enrichedTokens.dimension.scale.attributes).to.eql({ + category: 'dimension', + type: 'scale', + }); + }); }); diff --git a/test/integration/utils.ts b/test/integration/utils.ts index bab6166..15be926 100644 --- a/test/integration/utils.ts +++ b/test/integration/utils.ts @@ -4,8 +4,7 @@ import { registerTransforms } from '../../src/registerTransforms.js'; export function init(cfg: Config, transformOpts = {}) { registerTransforms(StyleDictionary, transformOpts); const dict = StyleDictionary.extend(cfg); - dict.buildAllPlatforms(); - return dict; + return dict.buildAllPlatforms(); } export function cleanup(dict: StyleDictionary.Core) { diff --git a/test/spec/parsers/expand.spec.ts b/test/spec/parsers/expand.spec.ts index 83b5ea7..b31c80a 100644 --- a/test/spec/parsers/expand.spec.ts +++ b/test/spec/parsers/expand.spec.ts @@ -285,67 +285,372 @@ describe('expand', () => { describe(`expand ${type}`, () => { it(`should expand ${type} tokens`, () => { expect( - expandComposites({ [type]: tokensInput[type] }, 'foo/bar.json', { - expand: { typography: true, border: true, shadow: true }, - }), + expandComposites( + { 
[type]: tokensInput[type] }, + { + expand: { typography: true, border: true, shadow: true }, + }, + ), ).to.eql({ [type]: tokensOutput[type] }); }); it(`should expand composition tokens by default`, () => { const output = type === 'composition' ? tokensOutput[type] : tokensInput[type]; - expect(expandComposites({ [type]: tokensInput[type] }, 'foo/bar.json')).to.eql({ + expect(expandComposites({ [type]: tokensInput[type] })).to.eql({ [type]: output, }); }); it('should not expand composition tokens when options dictate it should not', () => { expect( - expandComposites({ [type]: tokensInput[type] }, 'foo/bar.json', { - expand: { composition: false }, - }), + expandComposites( + { [type]: tokensInput[type] }, + { + expand: { composition: false }, + }, + ), ).to.eql({ [type]: tokensInput[type] }); - const filter = (_: SingleToken, filePath: string) => !filePath.startsWith('foo'); + const filter = (_: SingleToken, filePath?: string) => { + return !filePath?.startsWith('foo'); + }; - expect( - expandComposites({ [type]: tokensInput[type] }, 'foo/bar.json', { + const output = expandComposites( + { [type]: tokensInput[type] }, + { expand: { composition: filter, typography: filter, border: filter, shadow: filter }, - }), - ).to.eql({ [type]: tokensInput[type] }); - }); + }, + 'foo/bar.json', + ); - it('should expand composition tokens recursing multiple levels deep', () => { - expect( - expandComposites({ foo: { bar: { qux: tokensInput[type] } } }, 'foo/bar.json', { - expand: { typography: true, border: true, shadow: true }, - }), - ).to.eql({ foo: { bar: { qux: tokensOutput[type] } } }); + expect(output).to.eql({ [type]: tokensInput[type] }); }); }); }); + it('should expand composition tokens recursing multiple levels deep', () => { + const input = { + foo: { + bar: { + qux: { + composition: { + foo: { + value: { + fontFamilies: '24px', + fontSizes: '96', + fontWeights: '500', + }, + type: 'composition', + }, + }, + typography: { + foo: { + value: { + fontFamily: 'Arial', 
+ fontWeight: '500', + lineHeight: '1.25', + fontSize: '26', + }, + type: 'typography', + }, + ref: { + value: '{foo.bar.qux.typography.foo}', + type: 'typography', + }, + }, + border: { + foo: { + value: { + color: '#FFFF00', + strokeStyle: 'solid', + borderWidth: '4', + }, + type: 'border', + }, + }, + shadow: { + single: { + value: { + x: '0', + y: '4', + blur: '10', + spread: '0', + color: 'rgba(0,0,0,0.4)', + type: 'innerShadow', + }, + type: 'boxShadow', + }, + double: { + value: [ + { + x: '0', + y: '4', + blur: '10', + spread: '0', + color: 'rgba(0,0,0,0.4)', + type: 'innerShadow', + }, + { + x: '0', + y: '8', + blur: '12', + spread: '5', + color: 'rgba(0,0,0,0.4)', + }, + ], + type: 'boxShadow', + }, + ref: { + value: '{foo.bar.qux.shadow.double}', + type: 'boxShadow', + }, + }, + }, + }, + }, + } as DeepKeyTokenMap; + + const output = { + foo: { + bar: { + qux: { + composition: { + foo: { + fontFamilies: { + value: '24px', + type: 'fontFamilies', + }, + fontSizes: { + value: '96', + type: 'fontSizes', + }, + fontWeights: { + value: '500', + type: 'fontWeights', + }, + }, + }, + typography: { + foo: { + fontFamily: { + value: 'Arial', + type: 'fontFamilies', + }, + fontWeight: { + value: '500', + type: 'fontWeights', + }, + lineHeight: { + value: '1.25', + type: 'lineHeights', + }, + fontSize: { + value: '26', + type: 'fontSizes', + }, + }, + ref: { + fontFamily: { + value: 'Arial', + type: 'fontFamilies', + }, + fontWeight: { + value: '500', + type: 'fontWeights', + }, + lineHeight: { + value: '1.25', + type: 'lineHeights', + }, + fontSize: { + value: '26', + type: 'fontSizes', + }, + }, + }, + border: { + foo: { + color: { + value: '#FFFF00', + type: 'color', + }, + strokeStyle: { + value: 'solid', + type: 'strokeStyle', + }, + borderWidth: { + value: '4', + type: 'borderWidth', + }, + }, + }, + shadow: { + single: { + x: { + value: '0', + type: 'dimension', + }, + y: { + value: '4', + type: 'dimension', + }, + blur: { + value: '10', + type: 'dimension', 
+ }, + spread: { + value: '0', + type: 'dimension', + }, + color: { + value: 'rgba(0,0,0,0.4)', + type: 'color', + }, + type: { + value: 'innerShadow', + type: 'other', + }, + }, + double: { + 1: { + x: { + value: '0', + type: 'dimension', + }, + y: { + value: '4', + type: 'dimension', + }, + blur: { + value: '10', + type: 'dimension', + }, + spread: { + value: '0', + type: 'dimension', + }, + color: { + value: 'rgba(0,0,0,0.4)', + type: 'color', + }, + type: { + value: 'innerShadow', + type: 'other', + }, + }, + 2: { + x: { + value: '0', + type: 'dimension', + }, + y: { + value: '8', + type: 'dimension', + }, + blur: { + value: '12', + type: 'dimension', + }, + spread: { + value: '5', + type: 'dimension', + }, + color: { + value: 'rgba(0,0,0,0.4)', + type: 'color', + }, + }, + }, + ref: { + 1: { + x: { + value: '0', + type: 'dimension', + }, + y: { + value: '4', + type: 'dimension', + }, + blur: { + value: '10', + type: 'dimension', + }, + spread: { + value: '0', + type: 'dimension', + }, + color: { + value: 'rgba(0,0,0,0.4)', + type: 'color', + }, + type: { + value: 'innerShadow', + type: 'other', + }, + }, + 2: { + x: { + value: '0', + type: 'dimension', + }, + y: { + value: '8', + type: 'dimension', + }, + blur: { + value: '12', + type: 'dimension', + }, + spread: { + value: '5', + type: 'dimension', + }, + color: { + value: 'rgba(0,0,0,0.4)', + type: 'color', + }, + }, + }, + }, + }, + }, + }, + // casting to unknown because strokeStyle is not a recognized type by Tokens Studio, we create this type by expanding border tokens + } as unknown as DeepKeyTokenMap; + + expect( + expandComposites( + input, + { + expand: { typography: true, border: true, shadow: true }, + }, + 'foo/bar.json', + ), + ).to.eql(output); + }); + it(`should allow a filter condition function for expanding tokens`, () => { expect( expandComposites( { typography: tokensInput.typography } as DeepKeyTokenMap, - 'foo/bar.json', { expand: { typography: (_, filePath) => filePath === 
'foo/bar.json', }, }, + 'foo/bar.json', ), ).to.eql({ typography: tokensOutput.typography }); expect( expandComposites( { typography: tokensInput.typography } as DeepKeyTokenMap, - 'foo/bar.json', { expand: { typography: (_, filePath) => filePath === 'foo/qux.json', }, }, + 'foo/bar.json', ), ).to.eql({ typography: tokensInput.typography }); }); @@ -359,12 +664,12 @@ describe('expand', () => { type: 'typography', }, } as DeepKeyTokenMap, - 'foo/bar.json', { expand: { typography: true, }, }, + 'foo/bar.json', ), ).to.eql({ ref: { @@ -382,8 +687,8 @@ describe('expand', () => { type: 'color', comment: null, } as SingleToken, - 'foo/bar.json', { expand: { typography: true } }, + 'foo/bar.json', ), ); });