README: inference first (#144) · huggingface/huggingface.js@126df2e · GitHub
[go: up one dir, main page]

Skip to content

Commit 126df2e

Browse files
gary149 and coyotte508
authored
README: inference first (#144)
Co-authored-by: Eliott C <coyotte508@gmail.com>
1 parent 75b85ad commit 126df2e

File tree

1 file changed

+56
-29
lines changed

1 file changed

+56
-29
lines changed

README.md

Lines changed: 56 additions & 29 deletions
<td data-grid-cell-id="diff-b335630551682c19a781afebcf4d07bf978fb1f8ac04c6bf87428ed5106870f5-110-111-0" data-selected="false" role="gridcell" style="background-color:var(--diffBlob-additionNum-bgColor, var(--diffBlob-addition-bgColor-num));text-align:center" tabindex="-1" valign="top" class="focusable-grid-cell diff-line-number position-relative left-side">
Original file line numberDiff line numberDiff line change
@@ -9,12 +9,28 @@
99
<br/>
1010
</p>
1111

12+
```ts
13+
await inference.translation({
14+
model: 't5-base',
15+
inputs: 'My name is Wolfgang and I live in Berlin'
16+
})
17+
18+
await inference.textToImage({
19+
model: 'stabilityai/stable-diffusion-2',
20+
inputs: 'award winning high resolution photo of a giant tortoise/((ladybird)) hybrid, [trending on artstation]',
21+
parameters: {
22+
negative_prompt: 'blurry',
23+
}
24+
})
25+
```
26+
1227
# Hugging Face JS libraries
1328

1429
This is a collection of JS libraries to interact with the Hugging Face API, with TS types included.
1530

31+
- [@huggingface/inference](packages/inference/README.md): Use the Inference API to make calls to 100,000+ Machine Learning models, or your own [inference endpoints](https://hf.co/docs/inference-endpoints/)!
1632
- [@huggingface/hub](packages/hub/README.md): Interact with huggingface.co to create or delete repos and commit / download files
17-
- [@huggingface/inference](packages/inference/README.md): Use the Inference API to make calls to 100,000+ Machine Learning models, or to your own [inference endpoints](https://hf.co/docs/inference-endpoints/)!
33+
1834

1935
With more to come, like `@huggingface/endpoints` to manage your HF Endpoints!
2036

@@ -29,15 +45,15 @@ The libraries are still very young, please help us by opening issues!
2945
To install via NPM, you can download the libraries as needed:
3046

3147
```bash
32-
npm install @huggingface/hub
3348
npm install @huggingface/inference
49+
npm install @huggingface/hub
3450
```
3551

3652
Then import the libraries in your code:
3753

3854
```ts
39-
import { createRepo, commit, deleteRepo, listFiles } from "@huggingface/hub";
4055
import { HfInference } from "@huggingface/inference";
56+
import { createRepo, commit, deleteRepo, listFiles } from "@huggingface/hub";
4157
import type { RepoId, Credentials } from "@huggingface/hub";
4258
```
4359

@@ -53,36 +69,17 @@ You can run our packages with vanilla JS, without any bundler, by using a CDN or
5369
</script>
5470
```
5571

56-
## Usage example
72+
## Usage examples
73+
74+
Get your HF access token in your [account settings](https://huggingface.co/settings/tokens).
75+
76+
### @huggingface/inference examples
5777

5878
```ts
59-
import { createRepo, uploadFile, deleteFiles } from "@huggingface/hub";
6079
import { HfInference } from "@huggingface/inference";
6180

62-
// use an access token from your free account
6381
const HF_ACCESS_TOKEN = "hf_...";
6482

65-
await createRepo({
66-
repo: "my-user/nlp-model", // or {type: "model", name: "my-user/nlp-test"},
67-
credentials: {accessToken: HF_ACCESS_TOKEN}
68-
});
69-
70-
await uploadFile({
71-
repo: "my-user/nlp-model",
72-
credentials: {accessToken: HF_ACCESS_TOKEN},
73-
// Can work with native File in browsers
74-
file: {
75-
path: "pytorch_model.bin",
76-
content: new Blob(...)
77-
}
78-
});
79-
80-
await deleteFiles({
81-
repo: {type: "space", name: "my-user/my-space"}, // or "spaces/my-user/my-space"
82-
credentials: {accessToken: HF_ACCESS_TOKEN},
83-
paths: ["README.md", ".gitattributes"]
84-
});
85-
8683
const inference = new HfInference(HF_ACCESS_TOKEN);
8784

8885
await inference.translation({
@@ -91,8 +88,8 @@ await inference.translation({
9188
})
9289

9390
await inference.textToImage({
94-
inputs: 'award winning high resolution photo of a giant tortoise/((ladybird)) hybrid, [trending on artstation]',
9591
model: 'stabilityai/stable-diffusion-2',
92+
inputs: 'award winning high resolution photo of a giant tortoise/((ladybird)) hybrid, [trending on artstation]',
9693
parameters: {
9794
negative_prompt: 'blurry',
9895
}
@@ -103,11 +100,41 @@ await inference.imageToText({
103100
model: 'nlpconnect/vit-gpt2-image-captioning',
104101
})
105102

103+
106104
// Using your own inference endpoint: https://hf.co/docs/inference-endpoints/
107-
const gpt2 = hf.endpoint('https://xyz.eu-west-1.aws.endpoints.huggingface.cloud/gpt2');
105+
const gpt2 = inference.endpoint('https://xyz.eu-west-1.aws.endpoints.huggingface.cloud/gpt2');
108106
const { generated_text } = await gpt2.textGeneration({inputs: 'The answer to the universe is'});
109107
```
110108

109+
### @huggingface/hub examples
110+
111+
```ts
112+
import { createRepo, uploadFile, deleteFiles } from "@huggingface/hub";
113+
114+
const HF_ACCESS_TOKEN = "hf_...";
115+
116+
await createRepo({
117+
repo: "my-user/nlp-model", // or {type: "model", name: "my-user/nlp-test"},
118+
credentials: {accessToken: HF_ACCESS_TOKEN}
119+
});
120+
121+
await uploadFile({
122+
repo: "my-user/nlp-model",
123+
credentials: {accessToken: HF_ACCESS_TOKEN},
124+
// Can work with native File in browsers
125+
file: {
126+
path: "pytorch_model.bin",
127+
content: new Blob(...)
128+
}
129+
});
130+
131+
await deleteFiles({
132+
repo: {type: "space", name: "my-user/my-space"}, // or "spaces/my-user/my-space"
133+
credentials: {accessToken: HF_ACCESS_TOKEN},
134+
paths: ["README.md", ".gitattributes"]
135+
});
136+
```
137+
111138
There are more features of course, check each library's README!
112139

113140
## Formatting & testing

0 commit comments

Comments (0)