# mpt-30b / hf_causal_lm.py
# irenedea's picture
# LLM-foundry update March 26, 2024 23:50:31
# ce13d72 verified
# raw
# history blame
# 568 Bytes
"""Implements a Hugging Causal LM wrapped inside a :class:`.ComposerModel`."""
import logging
import os
import warnings
from typing import TYPE_CHECKING, Any, Dict, Mapping
from transformers import AutoConfig, AutoModelForCausalLM, PreTrainedModel, PreTrainedTokenizerBase
from .hf_fsdp import hf_get_init_device
from .model_wrapper import HuggingFaceModelWithFSDP
from .attention import is_flash_v2_installed
from .utils import init_empty_weights
from .config_utils import pop_config
if TYPE_CHECKING:
from peft import PeftConfig
log = logging.getLogger(__name__)