- mosei_unaligned_ood_binary_CIDER.pt (440 MB, LFS)
Detected Pickle imports (42)
- "torch.nn.modules.activation.Tanh",
- "transformers.models.bert.configuration_bert.BertConfig",
- "modules.transformer.TransformerEncoder",
- "torch.nn.modules.activation.ReLU",
- "modules.position_embedding.PositionalEmbedding",
- "transformers.models.bert.modeling_bert.BertSelfOutput",
- "transformers.activations.GELUActivation",
- "transformers.models.bert.modeling_bert.BertModel",
- "transformers.models.bert.modeling_bert.BertOutput",
- "transformers.models.bert.tokenization_bert.BasicTokenizer",
- "torch.nn.modules.conv.Conv1d",
- "transformers.models.bert.tokenization_bert.WordpieceTokenizer",
- "tokenizers.AddedToken",
- "transformers.models.bert.modeling_bert.BertEmbeddings",
- "torch.FloatStorage",
- "transformers.models.bert.modeling_bert.BertPooler",
- "torch.LongStorage",
- "torch.nn.modules.linear.Linear",
- "torch._utils._rebuild_parameter",
- "__builtin__.set",
- "src.models.CIDERModel",
- "transformers.models.bert.modeling_bert.BertLayer",
- "transformers.models.bert.modeling_bert.BertIntermediate",
- "torch.nn.modules.container.Sequential",
- "modules.transformer.TransformerEncoderLayer",
- "transformers.models.bert.modeling_bert.BertEncoder",
- "modules.multimodal_multihead_attention.MultimodalMultiheadAttention",
- "src.models.BertTextEncoder",
- "transformers.tokenization_utils.Trie",
- "torch.nn.modules.normalization.LayerNorm",
- "modules.compressed_multihead_attention.CompressedMultiheadAttention",
- "collections.OrderedDict",
- "torch._C._nn.gelu",
- "torch.nn.modules.rnn.GRU",
- "torch._utils._rebuild_tensor_v2",
- "torch.nn.modules.sparse.Embedding",
- "transformers.models.bert.modeling_bert.BertSdpaSelfAttention",
- "torch.nn.modules.container.ModuleList",
- "transformers.models.bert.tokenization_bert.BertTokenizer",
- "transformers.models.bert.modeling_bert.BertAttention",
- "src.models.NaiveAttention",
- "torch.nn.modules.dropout.Dropout"
- mosei_unaligned_ood_seven_CIDER.pt (440 MB, LFS)
Detected Pickle imports (42)
- "torch._utils._rebuild_parameter",
- "transformers.models.bert.modeling_bert.BertIntermediate",
- "torch.nn.modules.activation.Tanh",
- "transformers.models.bert.tokenization_bert.WordpieceTokenizer",
- "torch.nn.modules.activation.ReLU",
- "torch._utils._rebuild_tensor_v2",
- "transformers.models.bert.modeling_bert.BertLayer",
- "src.models.NaiveAttention",
- "tokenizers.AddedToken",
- "modules.compressed_multihead_attention.CompressedMultiheadAttention",
- "modules.multimodal_multihead_attention.MultimodalMultiheadAttention",
- "src.models.BertTextEncoder",
- "collections.OrderedDict",
- "torch.nn.modules.linear.Linear",
- "torch.nn.modules.container.ModuleList",
- "transformers.activations.GELUActivation",
- "torch.nn.modules.rnn.GRU",
- "src.models.CIDERModel",
- "transformers.models.bert.tokenization_bert.BertTokenizer",
- "torch._C._nn.gelu",
- "transformers.models.bert.modeling_bert.BertModel",
- "transformers.models.bert.modeling_bert.BertAttention",
- "transformers.models.bert.modeling_bert.BertSdpaSelfAttention",
- "torch.LongStorage",
- "__builtin__.set",
- "transformers.models.bert.modeling_bert.BertSelfOutput",
- "transformers.models.bert.modeling_bert.BertEmbeddings",
- "transformers.models.bert.configuration_bert.BertConfig",
- "transformers.models.bert.tokenization_bert.BasicTokenizer",
- "torch.FloatStorage",
- "torch.nn.modules.container.Sequential",
- "transformers.models.bert.modeling_bert.BertPooler",
- "modules.transformer.TransformerEncoderLayer",
- "torch.nn.modules.normalization.LayerNorm",
- "transformers.models.bert.modeling_bert.BertOutput",
- "modules.transformer.TransformerEncoder",
- "transformers.tokenization_utils.Trie",
- "torch.nn.modules.dropout.Dropout",
- "torch.nn.modules.conv.Conv1d",
- "transformers.models.bert.modeling_bert.BertEncoder",
- "modules.position_embedding.PositionalEmbedding",
- "torch.nn.modules.sparse.Embedding"
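The scan results above indicate that both checkpoints pickle full model objects and custom classes (e.g. src.models.CIDERModel, modules.transformer.TransformerEncoder), not just plain tensor state dicts. A minimal loading sketch under that assumption: it presumes the CIDER repository's `src/` and `modules/` packages are importable on the Python path, and the filename is taken from the listing above.

```python
import torch

# These files reference custom classes during unpickling, so loading
# requires weights_only=False and the repo code importable. Unpickling
# executes arbitrary code; only load checkpoints from a trusted source.
ckpt = torch.load(
    "mosei_unaligned_ood_binary_CIDER.pt",
    map_location="cpu",
    weights_only=False,
)
print(type(ckpt))  # e.g. the pickled model object, or a dict wrapping it
```

With a recent PyTorch release, the stricter default of `weights_only=True` would reject these files outright, which is why the flag is set explicitly here.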