Add diffusers format weights (#3)

- Add diffusers format weights (5d4717a3dc82fa40a286f48bb71f1ead5c517800)
- update readme with diffusers goodies (97a16425ff5c88f218d76851f32820e703ea813d)
- Update README.md (9ae394376d269f108989e648378f3ee54cfe7d7b)
- Upload folder using huggingface_hub (f2a94b28d7167624c443a40aaa0334351bd62c02)
- update scheduler (fe1bfe0b434c9a621a4d4ddad4f57204dcf55d23)

Co-authored-by: Dhruv Nair <dn6@users.noreply.huggingface.co>
Co-authored-by: Apolinário from multimodal AI art <multimodalart@users.noreply.huggingface.co>
This commit is contained in:
ai-modelscope
2024-08-02 22:15:25 +08:00
parent a91722bf11
commit 7f9644d9a7
25 changed files with 230467 additions and 2 deletions

48895
tokenizer/merges.txt Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,30 @@
{
"bos_token": {
"content": "<|startoftext|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false
},
"eos_token": {
"content": "<|endoftext|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"pad_token": {
"content": "<|endoftext|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"unk_token": {
"content": "<|endoftext|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
}
}

View File

@@ -0,0 +1,30 @@
{
"add_prefix_space": false,
"added_tokens_decoder": {
"49406": {
"content": "<|startoftext|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": true
},
"49407": {
"content": "<|endoftext|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
}
},
"bos_token": "<|startoftext|>",
"clean_up_tokenization_spaces": true,
"do_lower_case": true,
"eos_token": "<|endoftext|>",
"errors": "replace",
"model_max_length": 77,
"pad_token": "<|endoftext|>",
"tokenizer_class": "CLIPTokenizer",
"unk_token": "<|endoftext|>"
}

49410
tokenizer/vocab.json Normal file

File diff suppressed because it is too large Load Diff