Sarah Ciston committed
Commit efa39e6 · 1 Parent(s): ea39f35

test without OAUTH

Files changed (2):
  1. README.md +7 -5
  2. sketch.js +14 -14
README.md CHANGED
@@ -8,11 +8,13 @@ pinned: false
 models:
 - distilroberta-base
 - bert-base-uncased
-hf_oauth: true
-hf_oauth_scopes:
-- read-repos
-- write-repos
-- inference-api
+
+
+# hf_oauth: true
+# hf_oauth_scopes:
+# - read-repos
+# - write-repos
+# - inference-api
 
 
 ---
sketch.js CHANGED
@@ -1,27 +1,27 @@
 // connect to API via module
 
 // import { AutoTokenizer, env } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers';
-// import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@xenova/[email protected]';
+import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@xenova/[email protected]';
 
 /// AUTHORIZATION
 // import { textGeneration } from 'https://esm.sh/@huggingface/inference';
-import { oauthLoginUrl, oauthHandleRedirectIfPresent } from 'https://esm.sh/@huggingface/[email protected]';
+// import { oauthLoginUrl, oauthHandleRedirectIfPresent } from 'https://esm.sh/@huggingface/[email protected]';
 
-const oauthResult = await oauthHandleRedirectIfPresent();
+// const oauthResult = await oauthHandleRedirectIfPresent();
 
-if (!oauthResult) {
-  // If the user is not logged in, redirect to the login page
-  window.location.href = await oauthLoginUrl();
-}
+// if (!oauthResult) {
+//   // If the user is not logged in, redirect to the login page
+//   window.location.href = await oauthLoginUrl();
+// }
 
-// You can use oauthResult.accessToken, oauthResult.accessTokenExpiresAt and oauthResult.userInfo
-// console.log(oauthResult);
-const HF_TOKEN = window.huggingface.variables.OAUTH_CLIENT_SECRET
-// const HF_TOKEN = oauthResult.accessToken
-console.log(HF_TOKEN)
+// // You can use oauthResult.accessToken, oauthResult.accessTokenExpiresAt and oauthResult.userInfo
+// // console.log(oauthResult);
+// const HF_TOKEN = window.huggingface.variables.OAUTH_CLIENT_SECRET
+// // const HF_TOKEN = oauthResult.accessToken
+// console.log(HF_TOKEN)
 
-import { HfInference } from 'https://esm.sh/@huggingface/inference';
-const inference = new HfInference(HF_TOKEN);
+// import { HfInference } from 'https://esm.sh/@huggingface/inference';
+// const inference = new HfInference(HF_TOKEN);
 
 // PIPELINE MODELS
 // models('Xenova/gpt2', 'Xenova/gpt-3.5-turbo', 'mistralai/Mistral-7B-Instruct-v0.2', 'Xenova/llama-68m', 'meta-llama/Meta-Llama-3-8B', 'Xenova/bloom-560m', 'Xenova/distilgpt2')
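
With OAuth disabled, the Space can run inference entirely in the browser through the Transformers.js pipeline import this commit switches on. A minimal sketch of that path, assuming a fill-mask task with one of the models listed in README.md (the exact task, model ID, and pinned package version used by sketch.js are not shown in this hunk):

// Hedged sketch: browser-side inference with Transformers.js, no token needed.
// 'fill-mask' and 'Xenova/bert-base-uncased' are assumptions based on README.md.
import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers';

env.allowLocalModels = false; // fetch model weights from the Hugging Face Hub CDN

const unmasker = await pipeline('fill-mask', 'Xenova/bert-base-uncased');
const results = await unmasker('The goal of life is [MASK].');
console.log(results); // array of candidates: { token_str, score, sequence, ... }

Because the model runs client-side, no access token or OAuth scope is needed, which is what lets the authorization block above stay commented out for this test.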
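
For comparison, the authorization path this commit comments out pairs the Space's OAuth login with the hosted Inference API. If it were re-enabled, the usual wiring is the commented-out alternative already present in the original code: pass oauthResult.accessToken to HfInference rather than OAUTH_CLIENT_SECRET, and avoid logging the token. A hedged sketch under those assumptions (unversioned CDN URLs, fill-mask as the example task):

// Hedged sketch: hosted Inference API call authorized by the Space's OAuth flow.
// Assumes hf_oauth: true and the inference-api scope from README.md are restored.
import { oauthLoginUrl, oauthHandleRedirectIfPresent } from 'https://esm.sh/@huggingface/hub';
import { HfInference } from 'https://esm.sh/@huggingface/inference';

const oauthResult = await oauthHandleRedirectIfPresent();
if (!oauthResult) {
  // Not logged in yet: send the user to the Hugging Face login page
  window.location.href = await oauthLoginUrl();
} else {
  // Use the user's access token, not the OAuth client secret, and don't log it
  const inference = new HfInference(oauthResult.accessToken);
  const out = await inference.fillMask({
    model: 'bert-base-uncased',
    inputs: 'The goal of life is [MASK].',
  });
  console.log(out);
}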