From 8e075f4a990c420f4a12fa82a2f0e5de0076297b Mon Sep 17 00:00:00 2001
From: Joshua Lochner
Date: Wed, 25 Dec 2024 12:34:53 +0200
Subject: [PATCH] [version] Update to 3.2.3

---
 README.md                            | 5 +++--
 docs/snippets/2_installation.snippet | 2 +-
 docs/snippets/4_custom-usage.snippet | 2 +-
 package-lock.json                    | 4 ++--
 package.json                         | 2 +-
 src/env.js                           | 2 +-
 6 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/README.md b/README.md
index 2dd668278..0804bb605 100644
--- a/README.md
+++ b/README.md
@@ -47,7 +47,7 @@ npm i @huggingface/transformers
 Alternatively, you can use it in vanilla JS, without any bundler, by using a CDN or static hosting. For example, using [ES Modules](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules), you can import the library with:
 ```html
 <script type="module">
-    import { pipeline } from 'https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.2.2';
+    import { pipeline } from 'https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.2.3';
 </script>
 ```
 
@@ -155,7 +155,7 @@ Check out the Transformers.js [template](https://huggingface.co/new-space?templa
 
 ## Custom usage
 
-By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models?library=transformers.js) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.2.2/dist/), which should work out-of-the-box. You can customize this as follows:
+By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models?library=transformers.js) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.2.3/dist/), which should work out-of-the-box. You can customize this as follows:
 
 ### Settings
 
@@ -313,6 +313,7 @@ You can refine your search by selecting the task you're interested in (e.g., [te
 1. **Depth Pro** (from Apple) released with the paper [Depth Pro: Sharp Monocular Metric Depth in Less Than a Second](https://arxiv.org/abs/2410.02073) by Aleksei Bochkovskii, Amaël Delaunoy, Hugo Germain, Marcel Santos, Yichao Zhou, Stephan R. Richter, Vladlen Koltun.
 1. **[DETR](https://huggingface.co/docs/transformers/model_doc/detr)** (from Facebook) released with the paper [End-to-End Object Detection with Transformers](https://arxiv.org/abs/2005.12872) by Nicolas Carion, Francisco Massa, Gabriel Synnaeve, Nicolas Usunier, Alexander Kirillov, Sergey Zagoruyko.
 1. **[DINOv2](https://huggingface.co/docs/transformers/model_doc/dinov2)** (from Meta AI) released with the paper [DINOv2: Learning Robust Visual Features without Supervision](https://arxiv.org/abs/2304.07193) by Maxime Oquab, Timothée Darcet, Théo Moutakanni, Huy Vo, Marc Szafraniec, Vasil Khalidov, Pierre Fernandez, Daniel Haziza, Francisco Massa, Alaaeldin El-Nouby, Mahmoud Assran, Nicolas Ballas, Wojciech Galuba, Russell Howes, Po-Yao Huang, Shang-Wen Li, Ishan Misra, Michael Rabbat, Vasu Sharma, Gabriel Synnaeve, Hu Xu, Hervé Jegou, Julien Mairal, Patrick Labatut, Armand Joulin, Piotr Bojanowski.
+1. **[DINOv2 with Registers](https://huggingface.co/docs/transformers/model_doc/dinov2_with_registers)** (from Meta AI) released with the paper [DINOv2 with Registers](https://arxiv.org/abs/2309.16588) by Timothée Darcet, Maxime Oquab, Julien Mairal, Piotr Bojanowski.
 1. **[DistilBERT](https://huggingface.co/docs/transformers/model_doc/distilbert)** (from HuggingFace), released together with the paper [DistilBERT, a distilled version of BERT: smaller, faster, cheaper and lighter](https://arxiv.org/abs/1910.01108) by Victor Sanh, Lysandre Debut and Thomas Wolf. The same method has been applied to compress GPT2 into [DistilGPT2](https://github.com/huggingface/transformers/tree/main/examples/research_projects/distillation), RoBERTa into [DistilRoBERTa](https://github.com/huggingface/transformers/tree/main/examples/research_projects/distillation), Multilingual BERT into [DistilmBERT](https://github.com/huggingface/transformers/tree/main/examples/research_projects/distillation) and a German version of DistilBERT.
 1. **[DiT](https://huggingface.co/docs/transformers/model_doc/dit)** (from Microsoft Research) released with the paper [DiT: Self-supervised Pre-training for Document Image Transformer](https://arxiv.org/abs/2203.02378) by Junlong Li, Yiheng Xu, Tengchao Lv, Lei Cui, Cha Zhang, Furu Wei.
 1. **[Donut](https://huggingface.co/docs/transformers/model_doc/donut)** (from NAVER), released together with the paper [OCR-free Document Understanding Transformer](https://arxiv.org/abs/2111.15664) by Geewook Kim, Teakgyu Hong, Moonbin Yim, Jeongyeon Nam, Jinyoung Park, Jinyeong Yim, Wonseok Hwang, Sangdoo Yun, Dongyoon Han, Seunghyun Park.
diff --git a/docs/snippets/2_installation.snippet b/docs/snippets/2_installation.snippet
index b245eddd0..069313d04 100644
--- a/docs/snippets/2_installation.snippet
+++ b/docs/snippets/2_installation.snippet
@@ -7,6 +7,6 @@ npm i @huggingface/transformers
 Alternatively, you can use it in vanilla JS, without any bundler, by using a CDN or static hosting. For example, using [ES Modules](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules), you can import the library with:
 ```html
 <script type="module">
-    import { pipeline } from 'https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.2.2';
+    import { pipeline } from 'https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.2.3';
 </script>
 ```
diff --git a/docs/snippets/4_custom-usage.snippet b/docs/snippets/4_custom-usage.snippet
index 29bf676ab..46447c0d8 100644
--- a/docs/snippets/4_custom-usage.snippet
+++ b/docs/snippets/4_custom-usage.snippet
@@ -1,6 +1,6 @@
-By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models?library=transformers.js) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.2.2/dist/), which should work out-of-the-box. You can customize this as follows:
+By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models?library=transformers.js) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.2.3/dist/), which should work out-of-the-box. You can customize this as follows:
 
 ### Settings
 
 ```javascript
 import { env } from '@huggingface/transformers';
diff --git a/package-lock.json b/package-lock.json
index 4998d8991..14094bd24 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "@huggingface/transformers",
-  "version": "3.2.2",
+  "version": "3.2.3",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@huggingface/transformers",
-      "version": "3.2.2",
+      "version": "3.2.3",
       "license": "Apache-2.0",
       "dependencies": {
         "@huggingface/jinja": "^0.3.2",
diff --git a/package.json b/package.json
index 98f3cab98..b2ea407a6 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@huggingface/transformers",
-  "version": "3.2.2",
+  "version": "3.2.3",
   "description": "State-of-the-art Machine Learning for the web. Run 🤗 Transformers directly in your browser, with no need for a server!",
   "main": "./src/transformers.js",
   "types": "./types/transformers.d.ts",
diff --git a/src/env.js b/src/env.js
index 311b687e1..8c060e6fd 100644
--- a/src/env.js
+++ b/src/env.js
@@ -26,7 +26,7 @@ import fs from 'fs';
 import path from 'path';
 import url from 'url';
 
-const VERSION = '3.2.2';
+const VERSION = '3.2.3';
 
 // Check if various APIs are available (depends on environment)
 const IS_BROWSER_ENV = typeof window !== "undefined" && typeof window.document !== "undefined";
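
For quick verification of the bump, here is a minimal sketch of consuming the pinned release from a plain HTML page, following the snippets touched above. The task, the `Xenova/distilbert-base-uncased-finetuned-sst-2-english` model, and the `wasmPaths` override are illustrative choices, not part of this patch:

```html
<script type="module">
    // Import the library from the same pinned CDN version this patch publishes.
    import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.2.3';

    // Optionally point the ONNX WASM backend at the matching precompiled binaries,
    // keeping them in sync with the VERSION constant bumped in src/env.js.
    env.backends.onnx.wasm.wasmPaths = 'https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.2.3/dist/';

    // Run a pipeline with a hosted pretrained model.
    const classifier = await pipeline('sentiment-analysis', 'Xenova/distilbert-base-uncased-finetuned-sst-2-english');
    const output = await classifier('Transformers.js 3.2.3 works out of the box!');
    console.log(output); // e.g. [{ label: 'POSITIVE', score: 0.99... }]
</script>
```

Pinning both the module import and `wasmPaths` to the same `@3.2.3` tag avoids mixing the library with WASM binaries from a different release.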