diff --git a/README.md b/README.md
index 990b3be64..82b03a691 100644
--- a/README.md
+++ b/README.md
@@ -98,7 +98,7 @@ npm i @xenova/transformers
Alternatively, you can use it in vanilla JS, without any bundler, by using a CDN or static hosting. For example, using [ES Modules](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules), you can import the library with:
```html
<script type="module">
-    import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.1.1';
+    import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.2.0';
</script>
```
@@ -121,7 +121,7 @@ Want to jump straight in? Get started with one of our sample applications/templa
-By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@xenova/transformers@2.1.1/dist/), which should work out-of-the-box. You can customize this as follows:
+By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@xenova/transformers@2.2.0/dist/), which should work out-of-the-box. You can customize this as follows:
### Settings
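(Not part of the diff itself.) As a rough sketch of the customization the Settings section above refers to, the library exposes an `env` object whose documented properties can be overridden before loading any model; the paths below are placeholders:

```js
import { env } from '@xenova/transformers';

// Load models from your own server instead of the Hugging Face Hub (placeholder path).
env.localModelPath = '/path/to/models/';
env.allowRemoteModels = false;

// Host the ONNX Runtime .wasm binaries yourself instead of fetching them from the jsDelivr CDN (placeholder path).
env.backends.onnx.wasm.wasmPaths = '/path/to/files/';
```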
diff --git a/docs/snippets/2_installation.snippet b/docs/snippets/2_installation.snippet
index 85b273d05..d4e50652a 100644
--- a/docs/snippets/2_installation.snippet
+++ b/docs/snippets/2_installation.snippet
@@ -7,6 +7,6 @@ npm i @xenova/transformers
Alternatively, you can use it in vanilla JS, without any bundler, by using a CDN or static hosting. For example, using [ES Modules](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules), you can import the library with:
```html
<script type="module">
-    import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.1.1';
+    import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.2.0';
</script>
```
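(Not part of the diff itself.) For context on the ES-module import above, a minimal sketch of how the imported `pipeline` function might then be used; the task and input text are illustrative:

```js
// Create a pipeline for an illustrative task and run it on sample text.
const classifier = await pipeline('sentiment-analysis');
const output = await classifier('Transformers.js runs entirely in the browser!');
console.log(output); // array of { label, score } objects
```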
diff --git a/docs/snippets/4_custom-usage.snippet b/docs/snippets/4_custom-usage.snippet
index 3a9e0ee72..57a5962d4 100644
--- a/docs/snippets/4_custom-usage.snippet
+++ b/docs/snippets/4_custom-usage.snippet
@@ -1,6 +1,6 @@
-By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@xenova/transformers@2.1.1/dist/), which should work out-of-the-box. You can customize this as follows:
+By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@xenova/transformers@2.2.0/dist/), which should work out-of-the-box. You can customize this as follows:
### Settings
diff --git a/package-lock.json b/package-lock.json
index bd8a6112f..e4ab273a0 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "@xenova/transformers",
- "version": "2.1.1",
+ "version": "2.2.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@xenova/transformers",
- "version": "2.1.1",
+ "version": "2.2.0",
"license": "Apache-2.0",
"dependencies": {
"onnxruntime-web": "1.14.0",
diff --git a/package.json b/package.json
index 554e8bee5..fae968102 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "@xenova/transformers",
- "version": "2.1.1",
+ "version": "2.2.0",
"description": "State-of-the-art Machine Learning for the web. Run 🤗 Transformers directly in your browser, with no need for a server!",
"main": "./src/transformers.js",
"types": "./types/transformers.d.ts",
diff --git a/src/env.js b/src/env.js
index bd38dba1a..aa4e5a932 100644
--- a/src/env.js
+++ b/src/env.js
@@ -29,7 +29,7 @@ import url from 'url';
import { ONNX } from './backends/onnx.js';
const { env: onnx_env } = ONNX;
-const VERSION = '2.1.1';
+const VERSION = '2.2.0';
// Check if various APIs are available (depends on environment)
const WEB_CACHE_AVAILABLE = typeof self !== 'undefined' && 'caches' in self;