# Copyright 2022 EleutherAI and The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from transformers.utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,
    is_sentencepiece_available,
    is_tokenizers_available,
    is_torch_available,
)


# Lazy-import map for the LLaMA model package.
#
# The configuration is always importable; the tokenizer, fast tokenizer and
# modeling entries are appended below only when their backend dependency
# (sentencepiece, tokenizers, torch respectively) is actually installed, so
# importing this package never fails on a missing optional backend.
_import_structure = {
    "configuration_llama": ["LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP", "LlamaConfig"],
}

# Slow (sentencepiece-based) tokenizer — optional.
try:
    if not is_sentencepiece_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_llama"] = ["LlamaTokenizer"]

# Fast (Rust `tokenizers`-based) tokenizer — optional.
try:
    if not is_tokenizers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_llama_fast"] = ["LlamaTokenizerFast"]

# PyTorch model classes — optional.
try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_llama"] = [
        "LlamaForCausalLM",
        "LlamaModel",
        "LlamaPreTrainedModel",
        "LlamaForSequenceClassification",
    ]

if TYPE_CHECKING:
    # Direct imports for static type checkers / IDEs only; mirrors the
    # availability guards above so annotations resolve without importing
    # heavy backends at runtime.
    from .configuration_llama import LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP, LlamaConfig

    try:
        if not is_sentencepiece_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_llama import LlamaTokenizer

    try:
        if not is_tokenizers_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_llama_fast import LlamaTokenizerFast

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_llama import LlamaForCausalLM, LlamaForSequenceClassification, LlamaModel, LlamaPreTrainedModel

else:
    import sys

    # Replace this module object with a _LazyModule so that submodules and
    # classes listed in _import_structure are only imported on first access.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)