Dataset schema (column name, dtype, observed min-max in this preview):

    code                       string   lengths 81 to 54k
    code_codestyle             int64    0 to 721
    style_context              string   lengths 91 to 41.9k
    style_context_codestyle    int64    0 to 699
    label                      int64    0 to 1

Each row of the preview below lists its five fields in that order: the
truncated code string, its code_codestyle id, the truncated style_context
string, its style_context_codestyle id, and the label. In this particular
slice every row carries code_codestyle = 11, style_context_codestyle = 11,
and label = 1; long strings are cut off at a fixed preview length ("...").
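The schema above is enough to load and sanity-check the data with the
datasets library. The sketch below is a minimal example under one stated
assumption: the repository id "org/dataset-name" is a placeholder, since
the source does not name the dataset.

```python
# Minimal sketch: load the dataset and confirm it matches the schema header.
# "org/dataset-name" is a placeholder repo id -- substitute the real one.
from datasets import load_dataset

ds = load_dataset("org/dataset-name", split="train")

# The five columns described above.
print(ds.features)
# Expected, per the schema header:
#   code:                    string (lengths 81 to ~54k)
#   code_codestyle:          int64  (0 to 721)
#   style_context:           string (lengths 91 to ~41.9k)
#   style_context_codestyle: int64  (0 to 699)
#   label:                   int64  (0 or 1)

# Spot-check one record against the preview rows below.
row = ds[0]
print(len(row["code"]), row["code_codestyle"],
      len(row["style_context"]), row["style_context_codestyle"],
      row["label"])
```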
'''simple docstring''' import bza import gzip import lzma import os import shutil import struct import tarfile import warnings import zipfile from abc import ABC, abstractmethod from pathlib import Path from typing import Dict, List, Optional, Type, Union from .. import config from .filelock import FileLock fro...
11
'''simple docstring''' import os import re import unicodedata from shutil import copyfile from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union import sentencepiece as spm from ...tokenization_utils import PreTrainedTokenizer from ...utils import is_torch_available, logging if is_torch_av...
11
1
'''simple docstring''' from collections import OrderedDict from typing import Any, Mapping, Optional, Union from ...configuration_utils import PretrainedConfig from ...feature_extraction_utils import FeatureExtractionMixin from ...onnx import OnnxConfig from ...onnx.utils import compute_effective_axis_dimension...
11
'''simple docstring''' from __future__ import annotations from collections.abc import Iterable, Iterator from dataclasses import dataclass UpperCAmelCase_ : str = (3, 9, -11, 0, 7, 5, 1, -1) UpperCAmelCase_ : int = (4, 6, 2, 0, 8, 10, 3, -2) @dataclass class UpperCAmelCase__ ...
11
1
'''simple docstring''' from __future__ import annotations def A_ ( _lowerCAmelCase : list[int] , _lowerCAmelCase : int ): """simple docstring""" if len(_lowerCAmelCase ) == 0: return False _lowerCamelCase : int = len(_lowe...
11
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import _LazyModule UpperCAmelCase_ : Tuple = {'tokenization_wav2vec2_phoneme': ['Wav2Vec2PhonemeCTCTokenizer']} if TYPE_CHECKING: from .tokenization_wavaveca_phoneme import WavaVecaPhonemeCTCTokenizer else: import sys ...
11
1
'''simple docstring''' import math import random from typing import Any from .hill_climbing import SearchProblem def A_ ( _lowerCAmelCase : Union[str, Any] , _lowerCAmelCase : bool = True , _lowerCAmelCase : float = math.inf , _lowerCAmelCase : float = -math.inf , ...
11
'''simple docstring''' import unittest import numpy as np from transformers import AlbertConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor, random_attention_mask if is_flax_available(): import jax.nu...
11
1
'''simple docstring''' from dataclasses import dataclass from typing import List, Optional, Union import numpy as np import PIL import torch from transformers import CLIPImageProcessor, CLIPVisionModel from ...models import PriorTransformer from ...pipelines import DiffusionPipeline from ...schedulers import H...
11
'''simple docstring''' import argparse import json import os import tensorstore as ts import torch from flax import serialization from flax.traverse_util import flatten_dict, unflatten_dict from tensorflow.io import gfile from transformers.modeling_utils import dtype_byte_size from transformers.models.switch_t...
11
1
'''simple docstring''' from __future__ import annotations def A_ ( _lowerCAmelCase : str ): """simple docstring""" return [ord(_lowerCAmelCase ) - 96 for elem in plain] def A_ ( _lowerCAmelCase : list[int] ): """simple docstrin...
11
'''simple docstring''' from math import sqrt def A_ ( _lowerCAmelCase : int = 1000000 ): """simple docstring""" _lowerCamelCase : int = 0 _lowerCamelCase : int = 0 _lowerCamelCase : int while num_cuboids <= limit: ...
11
1
'''simple docstring''' from __future__ import annotations def A_ ( _lowerCAmelCase : list[int] , _lowerCAmelCase : int ): """simple docstring""" if len(_lowerCAmelCase ) < k or k < 0: raise ValueError("Invalid Input" ) _lowerCamel...
11
'''simple docstring''' def A_ ( _lowerCAmelCase : int ): """simple docstring""" if isinstance(_lowerCAmelCase , _lowerCAmelCase ): raise TypeError("'float' object cannot be interpreted as an integer" ) if isinstance(_lowerCAmelCase , _lowe...
11
1
'''simple docstring''' import warnings from ...utils import logging from .image_processing_clip import CLIPImageProcessor UpperCAmelCase_ : Union[str, Any] = logging.get_logger(__name__) class UpperCAmelCase__ ( A ): def __init__( self : Optional[Any],*__A :...
11
'''simple docstring''' from dataclasses import dataclass from typing import List, Optional, Union import numpy as np import PIL import torch from transformers import CLIPImageProcessor, CLIPVisionModel from ...models import PriorTransformer from ...pipelines import DiffusionPipeline from ...schedulers import H...
11
1
'''simple docstring''' import multiprocessing import time from arguments import PretokenizationArguments from datasets import load_dataset from transformers import AutoTokenizer, HfArgumentParser def A_ ( _lowerCAmelCase : Union[str, Any] ): """simple docstring""" _...
11
'''simple docstring''' import random from typing import Any def A_ ( _lowerCAmelCase : list ): """simple docstring""" for _ in range(len(_lowerCAmelCase ) ): _lowerCamelCase : Any = random.randint(0 , len(_lowerCAmelCase ...
11
1
'''simple docstring''' def A_ ( _lowerCAmelCase : str ): """simple docstring""" assert column_title.isupper() _lowerCamelCase : Optional[Any] = 0 _lowerCamelCase : Union[str, Any] = len(_lowerCAmelCase ) - 1 _lowerCamelCase...
11
'''simple docstring''' import unittest import numpy as np from transformers import RobertaConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask if is_flax_available()...
11
1
'''simple docstring''' import math from dataclasses import dataclass from typing import Optional, Tuple, Union import numpy as np import torch from ..configuration_utils import ConfigMixin, register_to_config from ..utils import BaseOutput, randn_tensor from .scheduling_utils import SchedulerMixin @dataclass ...
11
'''simple docstring''' from typing import List, Optional from tokenizers import ByteLevelBPETokenizer from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import logging from .tokenization_blenderbot_small import BlenderbotSmallTokenizer UpperCAmelCase_ : Union[str, Any] = ...
11
1
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available UpperCAmelCase_ : List[Any] = { 'configuration_m2m_100': ['M2M_100_PRETRAINED_CONFIG_ARCHIVE_MAP', 'M2M100Config', 'M2M100On...
11
'''simple docstring''' import contextlib import copy import random from typing import Any, Dict, Iterable, Optional, Union import numpy as np import torch from .utils import deprecate, is_transformers_available if is_transformers_available(): import transformers def A_ ( _lowerCAmelCase : ...
11
1
'''simple docstring''' from __future__ import annotations import math import numpy as np from numpy.linalg import norm def A_ ( _lowerCAmelCase : np.ndarray , _lowerCAmelCase : np.ndarray ): """simple docstring""" return math.sqrt(sum(pow(a - b , 2 ...
11
'''simple docstring''' import argparse from pathlib import Path import torch from transformers import OPTConfig, OPTModel from transformers.utils import logging logging.set_verbosity_info() UpperCAmelCase_ : Optional[int] = logging.get_logger(__name__) def A_ ( _lowerCAmelCase ...
11
1
'''simple docstring''' def A_ ( _lowerCAmelCase : int , _lowerCAmelCase : int ): """simple docstring""" return "\n".join( F'{number} * {i} = {number * i}' for i in range(1 , number_of_terms + 1 ) ) if __name__ == "__main__": print(multipli...
11
'''simple docstring''' import argparse import requests import torch from PIL import Image from transformers import CLIPProcessor, GroupViTConfig, GroupViTModel def A_ ( _lowerCAmelCase : Union[str, Any] ): """simple docstring""" if "img_encoder.pos_embed" in name: ...
11
1
'''simple docstring''' import random import unittest from torch.utils.data import BatchSampler, DataLoader, IterableDataset from accelerate import Accelerator from accelerate.data_loader import ( BatchSamplerShard, DataLoaderDispatcher, DataLoaderShard, IterableDatasetShard, SkipBatchSample...
11
'''simple docstring''' from __future__ import annotations def A_ ( _lowerCAmelCase : list[int] , _lowerCAmelCase : int , _lowerCAmelCase : int , _lowerCAmelCase : int ): """simple docstring""" if (direction == 1 and array[indexa] > array[indexa...
11
1
'''simple docstring''' from ...configuration_utils import PretrainedConfig from ...utils import logging UpperCAmelCase_ : Any = logging.get_logger(__name__) UpperCAmelCase_ : Union[str, Any] = { 'edbeeching/decision-transformer-gym-hopper-medium': ( 'https://hugging...
11
'''simple docstring''' import math def A_ ( _lowerCAmelCase : int ): """simple docstring""" _lowerCamelCase : Optional[int] = math.loga(math.sqrt(4 * positive_integer + 1 ) / 2 + 1 / 2 ) return exponent == int(_lowerCAmelCase ) ...
11
1
'''simple docstring''' import functools import logging import os import sys import threading from logging import ( CRITICAL, # NOQA DEBUG, # NOQA ERROR, # NOQA FATAL, # NOQA INFO, # NOQA NOTSET, # NOQA WARN, # NOQA WARNING, # NOQA ) from typing import Optional import hugg...
11
'''simple docstring''' import warnings from ..trainer import Trainer from ..utils import logging UpperCAmelCase_ : Union[str, Any] = logging.get_logger(__name__) class UpperCAmelCase__ ( A ): def __init__( self : int,__A : Any=None,**__A : O...
11
1
'''simple docstring''' from __future__ import annotations def A_ ( _lowerCAmelCase : int ): """simple docstring""" _lowerCamelCase : Union[str, Any] = 2 _lowerCamelCase : Dict = [] while i * i <= n: if n % i: ...
11
'''simple docstring''' import json from typing import TYPE_CHECKING, List, Optional, Tuple from tokenizers import pre_tokenizers, processors from ...tokenization_utils_base import AddedToken, BatchEncoding from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import logging from .tokeniz...
11
1
'''simple docstring''' import json import os import unittest from transformers.models.blenderbot_small.tokenization_blenderbot_small import ( VOCAB_FILES_NAMES, BlenderbotSmallTokenizer, ) from ...test_tokenization_common import TokenizerTesterMixin class UpperCAmelCase__ ( A , unitte...
11
'''simple docstring''' def A_ ( _lowerCAmelCase : float ): """simple docstring""" return 10 - x * x def A_ ( _lowerCAmelCase : float , _lowerCAmelCase : float ): """simple docstring""" if equation(_lowerCAmelCase ) *...
11
1
'''simple docstring''' import unittest import numpy as np import torch from torch import nn from transformers import ( CLIPImageProcessor, CLIPTextConfig, CLIPTextModelWithProjection, CLIPTokenizer, CLIPVisionConfig, CLIPVisionModelWithProjection, ) from diffusers import KandinskyVaaPri...
11
'''simple docstring''' import gzip import hashlib import json import multiprocessing import os import re import shutil import time from pathlib import Path import numpy as np from arguments import PreprocessingArguments from datasets import load_dataset from minhash_deduplication import deduplicate_dataset fro...
11
1
'''simple docstring''' import os from argparse import ArgumentParser from typing import List import torch.utils.data from datasets import Dataset, IterableDataset from datasets.distributed import split_dataset_by_node UpperCAmelCase_ : Dict = 4 UpperCAmelCase_ : List[str] = 3 ...
11
'''simple docstring''' import os import time from dataclasses import dataclass, field from enum import Enum from typing import Dict, List, Optional, Union import torch from filelock import FileLock from torch.utils.data import Dataset from ...models.auto.modeling_auto import MODEL_FOR_QUESTION_ANSWERING_MAPPIN...
11
1
'''simple docstring''' from ...configuration_utils import PretrainedConfig from ...utils import logging UpperCAmelCase_ : Any = logging.get_logger(__name__) UpperCAmelCase_ : Optional[Any] = { 'alibaba-damo/mgp-str-base': 'https://huggingface.co/alibaba-damo/mgp-str-base/re...
11
'''simple docstring''' import os from shutil import copyfile from typing import List, Optional, Tuple from ...tokenization_utils import AddedToken from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import is_sentencepiece_available, logging if is_sentencepiece_available(): from .tok...
11
1
'''simple docstring''' def A_ ( _lowerCAmelCase : int , _lowerCAmelCase : int ): """simple docstring""" while b: _lowerCamelCase , _lowerCamelCase : List[str] = b, a % b return a def A_ ( _lowerCAmelCase : int...
11
'''simple docstring''' import os import re import unicodedata from shutil import copyfile from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union import sentencepiece as spm from ...tokenization_utils import PreTrainedTokenizer from ...utils import is_torch_available, logging if is_torch_av...
11
1
'''simple docstring''' from collections import OrderedDict from typing import Any, List, Mapping, Optional from ... import PreTrainedTokenizer, TensorType, is_torch_available from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfigWithPast, PatchingSpec from ...utils import logging U...
11
'''simple docstring''' from __future__ import annotations from collections.abc import Iterable, Iterator from dataclasses import dataclass UpperCAmelCase_ : str = (3, 9, -11, 0, 7, 5, 1, -1) UpperCAmelCase_ : int = (4, 6, 2, 0, 8, 10, 3, -2) @dataclass class UpperCAmelCase__ ...
11
1
'''simple docstring''' import numpy as np from transformers import BatchFeature from transformers.testing_utils import require_tf, require_torch from .test_feature_extraction_common import FeatureExtractionSavingTestMixin class UpperCAmelCase__ ( A ): # to overwrite at feature extractactor spe...
11
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import _LazyModule UpperCAmelCase_ : Tuple = {'tokenization_wav2vec2_phoneme': ['Wav2Vec2PhonemeCTCTokenizer']} if TYPE_CHECKING: from .tokenization_wavaveca_phoneme import WavaVecaPhonemeCTCTokenizer else: import sys ...
11
1
'''simple docstring''' def A_ ( _lowerCAmelCase : int , _lowerCAmelCase : bool = False ): """simple docstring""" if n == 2: return True if not n % 2 or n < 2: return False if n > 5 and n % 10 not in (1, 3, 7, 9): # can quickly che...
11
'''simple docstring''' import unittest import numpy as np from transformers import AlbertConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor, random_attention_mask if is_flax_available(): import jax.nu...
11
1
'''simple docstring''' import warnings from ..trainer import Trainer from ..utils import logging UpperCAmelCase_ : Union[str, Any] = logging.get_logger(__name__) class UpperCAmelCase__ ( A ): def __init__( self : int,__A : Any=None,**__A : O...
11
'''simple docstring''' import argparse import json import os import tensorstore as ts import torch from flax import serialization from flax.traverse_util import flatten_dict, unflatten_dict from tensorflow.io import gfile from transformers.modeling_utils import dtype_byte_size from transformers.models.switch_t...
11
1
'''simple docstring''' import argparse import hashlib import os import urllib import warnings import torch from torch import nn from tqdm import tqdm from transformers import WhisperConfig, WhisperForConditionalGeneration UpperCAmelCase_ : Dict = { 'tiny.en': 'https://openaipublic.azureed...
11
'''simple docstring''' from math import sqrt def A_ ( _lowerCAmelCase : int = 1000000 ): """simple docstring""" _lowerCamelCase : int = 0 _lowerCamelCase : int = 0 _lowerCamelCase : int while num_cuboids <= limit: ...
11
1
'''simple docstring''' import sys import turtle def A_ ( _lowerCAmelCase : tuple[float, float] , _lowerCAmelCase : tuple[float, float] ): """simple docstring""" return (pa[0] + pa[0]) / 2, (pa[1] + pa[1]) / 2 def A_ ( _lowerCAmelCase : tupl...
11
'''simple docstring''' def A_ ( _lowerCAmelCase : int ): """simple docstring""" if isinstance(_lowerCAmelCase , _lowerCAmelCase ): raise TypeError("'float' object cannot be interpreted as an integer" ) if isinstance(_lowerCAmelCase , _lowe...
11
1
'''simple docstring''' def A_ ( _lowerCAmelCase : int , _lowerCAmelCase : int ): """simple docstring""" if b == 0: return 1 if (b % 2) == 0: return actual_power(_lowerCAmelCase , int(b / 2 ) ) * actual_power(_lowerCAmelCas...
11
'''simple docstring''' from dataclasses import dataclass from typing import List, Optional, Union import numpy as np import PIL import torch from transformers import CLIPImageProcessor, CLIPVisionModel from ...models import PriorTransformer from ...pipelines import DiffusionPipeline from ...schedulers import H...
11
1
'''simple docstring''' import copy from collections import OrderedDict from typing import Dict, Mapping from packaging import version from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging from ..auto import CONFIG_MAPPING UpperCAmelCase_ : int...
11
'''simple docstring''' import random from typing import Any def A_ ( _lowerCAmelCase : list ): """simple docstring""" for _ in range(len(_lowerCAmelCase ) ): _lowerCamelCase : Any = random.randint(0 , len(_lowerCAmelCase ...
11
1
'''simple docstring''' import inspect from typing import Optional, Union import numpy as np import PIL import torch from torch.nn import functional as F from torchvision import transforms from transformers import CLIPFeatureExtractor, CLIPModel, CLIPTextModel, CLIPTokenizer from diffusers import ( Autoenco...
11
'''simple docstring''' import unittest import numpy as np from transformers import RobertaConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask if is_flax_available()...
11
1
'''simple docstring''' import argparse import re from flax.traverse_util import flatten_dict, unflatten_dict from tax import checkpoints from transformers import SwitchTransformersConfig, SwitchTransformersForConditionalGeneration from transformers.modeling_flax_pytorch_utils import load_flax_weights_in_pytorc...
11
'''simple docstring''' from typing import List, Optional from tokenizers import ByteLevelBPETokenizer from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import logging from .tokenization_blenderbot_small import BlenderbotSmallTokenizer UpperCAmelCase_ : Union[str, Any] = ...
11
1
'''simple docstring''' import warnings from ...utils import logging from .image_processing_deformable_detr import DeformableDetrImageProcessor UpperCAmelCase_ : Optional[Any] = logging.get_logger(__name__) class UpperCAmelCase__ ( A ): def __init__( self : Option...
11
'''simple docstring''' import contextlib import copy import random from typing import Any, Dict, Iterable, Optional, Union import numpy as np import torch from .utils import deprecate, is_transformers_available if is_transformers_available(): import transformers def A_ ( _lowerCAmelCase : ...
11
1
'''simple docstring''' from typing import Any class UpperCAmelCase__ : def __init__( self : Any,__A : Any ): _lowerCamelCase : Optional[Any] = data _lowerCamelCase : List[str] = None def __repr__( self : int...
11
'''simple docstring''' import argparse from pathlib import Path import torch from transformers import OPTConfig, OPTModel from transformers.utils import logging logging.set_verbosity_info() UpperCAmelCase_ : Optional[int] = logging.get_logger(__name__) def A_ ( _lowerCAmelCase ...
11
1
'''simple docstring''' import pprint import requests UpperCAmelCase_ : Tuple = 'https://zenquotes.io/api' def A_ ( ): """simple docstring""" return requests.get(API_ENDPOINT_URL + "/today" ).json() def A_ ( ): """simple docstring""...
11
'''simple docstring''' import argparse import requests import torch from PIL import Image from transformers import CLIPProcessor, GroupViTConfig, GroupViTModel def A_ ( _lowerCAmelCase : Union[str, Any] ): """simple docstring""" if "img_encoder.pos_embed" in name: ...
11
1
'''simple docstring''' def A_ ( _lowerCAmelCase : list ): """simple docstring""" if any(not isinstance(_lowerCAmelCase , _lowerCAmelCase ) or x < 0 for x in sequence ): raise TypeError("Sequence must be list of non-negative integers" ) ...
11
'''simple docstring''' from __future__ import annotations def A_ ( _lowerCAmelCase : list[int] , _lowerCAmelCase : int , _lowerCAmelCase : int , _lowerCAmelCase : int ): """simple docstring""" if (direction == 1 and array[indexa] > array[indexa...
11
1
'''simple docstring''' import argparse from pathlib import Path import torch from transformers import OPTConfig, OPTModel from transformers.utils import logging logging.set_verbosity_info() UpperCAmelCase_ : Optional[int] = logging.get_logger(__name__) def A_ ( _lowerCAmelCase ...
11
'''simple docstring''' import math def A_ ( _lowerCAmelCase : int ): """simple docstring""" _lowerCamelCase : Optional[int] = math.loga(math.sqrt(4 * positive_integer + 1 ) / 2 + 1 / 2 ) return exponent == int(_lowerCAmelCase ) ...
11
1
'''simple docstring''' def A_ ( _lowerCAmelCase : str , _lowerCAmelCase : int ): """simple docstring""" return (pointa[0] - pointa[0]) ** 2 + (pointa[1] - pointa[1]) ** 2 def A_ ( _lowerCAmelCase : str , _lowerCAmelCase : Any=0 ): ...
11
'''simple docstring''' import warnings from ..trainer import Trainer from ..utils import logging UpperCAmelCase_ : Union[str, Any] = logging.get_logger(__name__) class UpperCAmelCase__ ( A ): def __init__( self : int,__A : Any=None,**__A : O...
11
1
'''simple docstring''' from __future__ import annotations import math def A_ ( _lowerCAmelCase : int ): """simple docstring""" if num <= 0: _lowerCamelCase : str = F'{num}: Invalid input, please enter a positive integer.' raise V...
11
'''simple docstring''' import json from typing import TYPE_CHECKING, List, Optional, Tuple from tokenizers import pre_tokenizers, processors from ...tokenization_utils_base import AddedToken, BatchEncoding from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import logging from .tokeniz...
11
1
'''simple docstring''' import copy import unittest from transformers.models.auto import get_values from transformers.testing_utils import require_torch, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import ConfigTest...
11
'''simple docstring''' def A_ ( _lowerCAmelCase : float ): """simple docstring""" return 10 - x * x def A_ ( _lowerCAmelCase : float , _lowerCAmelCase : float ): """simple docstring""" if equation(_lowerCAmelCase ) *...
11
1
'''simple docstring''' import gc import unittest import torch from transformers import CLIPTextConfig, CLIPTextModel, CLIPTextModelWithProjection, CLIPTokenizer from diffusers import ( AutoencoderKL, DDIMScheduler, DDPMScheduler, PriorTransformer, StableUnCLIPPipeline, UNetaDConditionMo...
11
'''simple docstring''' import gzip import hashlib import json import multiprocessing import os import re import shutil import time from pathlib import Path import numpy as np from arguments import PreprocessingArguments from datasets import load_dataset from minhash_deduplication import deduplicate_dataset fro...
11
1
'''simple docstring''' from ...configuration_utils import PretrainedConfig from ...utils import logging UpperCAmelCase_ : Optional[Any] = logging.get_logger(__name__) UpperCAmelCase_ : Optional[int] = { 'RWKV/rwkv-4-169m-pile': 'https://huggingface.co/RWKV/rwkv-4-169m-pile/...
11
'''simple docstring''' import os import time from dataclasses import dataclass, field from enum import Enum from typing import Dict, List, Optional, Union import torch from filelock import FileLock from torch.utils.data import Dataset from ...models.auto.modeling_auto import MODEL_FOR_QUESTION_ANSWERING_MAPPIN...
11
1
'''simple docstring''' import logging import os from typing import List, Tuple import numpy as np import psutil import torch import torch.distributed as dist from transformers import RagRetriever UpperCAmelCase_ : int = logging.getLogger(__name__) class UpperCAmelCase__ ( A ): ...
11
'''simple docstring''' import os from shutil import copyfile from typing import List, Optional, Tuple from ...tokenization_utils import AddedToken from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import is_sentencepiece_available, logging if is_sentencepiece_available(): from .tok...
11
1
'''simple docstring''' import os import re from shutil import copyfile from typing import List, Optional, Tuple from ...tokenization_utils import PreTrainedTokenizer from ...utils import logging UpperCAmelCase_ : str = logging.get_logger(__name__) UpperCAmelCase_ : int = { ...
11
'''simple docstring''' import os import re import unicodedata from shutil import copyfile from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union import sentencepiece as spm from ...tokenization_utils import PreTrainedTokenizer from ...utils import is_torch_available, logging if is_torch_av...
11
1
'''simple docstring''' from math import loga def A_ ( _lowerCAmelCase : int ): """simple docstring""" if a < 0: raise ValueError("Input value must be a positive integer" ) elif isinstance(_lowerCAmelCase , _lowerCAmelCase ): ...
11
'''simple docstring''' from __future__ import annotations from collections.abc import Iterable, Iterator from dataclasses import dataclass UpperCAmelCase_ : str = (3, 9, -11, 0, 7, 5, 1, -1) UpperCAmelCase_ : int = (4, 6, 2, 0, 8, 10, 3, -2) @dataclass class UpperCAmelCase__ ...
11
1
'''simple docstring''' import math from enum import Enum from typing import Optional, Union from torch.optim import Optimizer from torch.optim.lr_scheduler import LambdaLR from .utils import logging UpperCAmelCase_ : Dict = logging.get_logger(__name__) class UpperCAmelCase__ ( A ...
11
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import _LazyModule UpperCAmelCase_ : Tuple = {'tokenization_wav2vec2_phoneme': ['Wav2Vec2PhonemeCTCTokenizer']} if TYPE_CHECKING: from .tokenization_wavaveca_phoneme import WavaVecaPhonemeCTCTokenizer else: import sys ...
11
1
'''simple docstring''' import argparse import logging import os from datetime import datetime import numpy as np import torch from torch import nn from torch.utils.data import DataLoader, RandomSampler, TensorDataset from tqdm import tqdm from transformers import GPTaLMHeadModel UpperCAmelCase_ : Opt...
11
'''simple docstring''' import unittest import numpy as np from transformers import AlbertConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor, random_attention_mask if is_flax_available(): import jax.nu...
11
1
'''simple docstring''' import unittest import numpy as np from transformers import RobertaConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask if is_flax_available()...
11
'''simple docstring''' import argparse import json import os import tensorstore as ts import torch from flax import serialization from flax.traverse_util import flatten_dict, unflatten_dict from tensorflow.io import gfile from transformers.modeling_utils import dtype_byte_size from transformers.models.switch_t...
11
1
'''simple docstring''' import mpmath # for roots of unity import numpy as np class UpperCAmelCase__ : def __init__( self : Any,__A : int=None,__A : str=None ): # Input as list _lowerCamelCase : List[Any] = list(poly_a or [0] ...
11
'''simple docstring''' from math import sqrt def A_ ( _lowerCAmelCase : int = 1000000 ): """simple docstring""" _lowerCamelCase : int = 0 _lowerCamelCase : int = 0 _lowerCamelCase : int while num_cuboids <= limit: ...
11
1
'''simple docstring''' import inspect import unittest from huggingface_hub import hf_hub_download from transformers import ConvNextConfig, UperNetConfig from transformers.testing_utils import require_torch, require_torch_multi_gpu, require_vision, slow, torch_device from transformers.utils import is_torch_avai...
11
'''simple docstring''' def A_ ( _lowerCAmelCase : int ): """simple docstring""" if isinstance(_lowerCAmelCase , _lowerCAmelCase ): raise TypeError("'float' object cannot be interpreted as an integer" ) if isinstance(_lowerCAmelCase , _lowe...
11
1
'''simple docstring''' import shutil import tempfile import unittest import numpy as np import pytest from transformers.testing_utils import require_vision from transformers.utils import is_vision_available if is_vision_available(): from PIL import Image from transformers import AutoProcessor, BlipaProces...
11
'''simple docstring''' from dataclasses import dataclass from typing import List, Optional, Union import numpy as np import PIL import torch from transformers import CLIPImageProcessor, CLIPVisionModel from ...models import PriorTransformer from ...pipelines import DiffusionPipeline from ...schedulers import H...
11
1
'''simple docstring''' def A_ ( _lowerCAmelCase : float ): """simple docstring""" return 10 - x * x def A_ ( _lowerCAmelCase : float , _lowerCAmelCase : float ): """simple docstring""" if equation(_lowerCAmelCase ) *...
11
'''simple docstring''' import random from typing import Any def A_ ( _lowerCAmelCase : list ): """simple docstring""" for _ in range(len(_lowerCAmelCase ) ): _lowerCamelCase : Any = random.randint(0 , len(_lowerCAmelCase ...
11
1
'''simple docstring''' import unittest from pathlib import Path from tempfile import TemporaryDirectory from transformers import AutoConfig, TFGPTaLMHeadModel, is_keras_nlp_available, is_tf_available from transformers.models.gpta.tokenization_gpta import GPTaTokenizer from transformers.testing_utils import requ...
11
'''simple docstring''' import unittest import numpy as np from transformers import RobertaConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask if is_flax_available()...
11
1
'''simple docstring''' import unittest import numpy as np import torch from diffusers import KarrasVePipeline, KarrasVeScheduler, UNetaDModel from diffusers.utils.testing_utils import enable_full_determinism, require_torch, slow, torch_device enable_full_determinism() class UpperCAmelCase__ ( un...
11
'''simple docstring''' from typing import List, Optional from tokenizers import ByteLevelBPETokenizer from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import logging from .tokenization_blenderbot_small import BlenderbotSmallTokenizer UpperCAmelCase_ : Union[str, Any] = ...
11
1
'''simple docstring''' import functools import operator from ...configuration_utils import PretrainedConfig from ...utils import logging UpperCAmelCase_ : Any = logging.get_logger(__name__) UpperCAmelCase_ : str = { 'facebook/wav2vec2-base-960h': 'https://huggingface.co/fa...
11
'''simple docstring''' import contextlib import copy import random from typing import Any, Dict, Iterable, Optional, Union import numpy as np import torch from .utils import deprecate, is_transformers_available if is_transformers_available(): import transformers def A_ ( _lowerCAmelCase : ...
11
1
'''simple docstring''' def A_ ( _lowerCAmelCase : int = 600851475143 ): """simple docstring""" try: _lowerCamelCase : int = int(_lowerCAmelCase ) except (TypeError, ValueError): raise TypeError("Parameter n must be int or cas...
11
'''simple docstring''' import argparse from pathlib import Path import torch from transformers import OPTConfig, OPTModel from transformers.utils import logging logging.set_verbosity_info() UpperCAmelCase_ : Optional[int] = logging.get_logger(__name__) def A_ ( _lowerCAmelCase ...
11
1
'''simple docstring''' from __future__ import annotations from math import pi # Define the Reduced Planck Constant ℏ (H bar), speed of light C, value of # Pi and the function UpperCAmelCase_ : List[str] = 1.0_5457_1817E-34 # unit of ℏ : J * s UpperCAmelCase_ : Any = 3E8 # uni...
11
'''simple docstring''' import argparse import requests import torch from PIL import Image from transformers import CLIPProcessor, GroupViTConfig, GroupViTModel def A_ ( _lowerCAmelCase : Union[str, Any] ): """simple docstring""" if "img_encoder.pos_embed" in name: ...
11
1
'''simple docstring''' import copy import os from collections import OrderedDict from typing import TYPE_CHECKING, Any, Dict, Mapping, Optional, Union if TYPE_CHECKING: from ...processing_utils import ProcessorMixin from ...utils import TensorType from ...configuration_utils import PretrainedConfig from ......
11
'''simple docstring''' from __future__ import annotations def A_ ( _lowerCAmelCase : list[int] , _lowerCAmelCase : int , _lowerCAmelCase : int , _lowerCAmelCase : int ): """simple docstring""" if (direction == 1 and array[indexa] > array[indexa...
11
1
'''simple docstring''' import argparse from pathlib import Path from transformers import AutoConfig, AutoTokenizer, RagConfig, RagSequenceForGeneration, RagTokenForGeneration def A_ ( _lowerCAmelCase : Optional[int] , _lowerCAmelCase : str , _lowerCAmelCase : str , _lowerC...
11
'''simple docstring''' import math def A_ ( _lowerCAmelCase : int ): """simple docstring""" _lowerCamelCase : Optional[int] = math.loga(math.sqrt(4 * positive_integer + 1 ) / 2 + 1 / 2 ) return exponent == int(_lowerCAmelCase ) ...
11
1
'''simple docstring''' import math def A_ ( _lowerCAmelCase : int ): """simple docstring""" _lowerCamelCase : List[str] = [True] * n _lowerCamelCase : Dict = False _lowerCamelCase : Any = False _lowerCamelCas...
11
'''simple docstring''' import warnings from ..trainer import Trainer from ..utils import logging UpperCAmelCase_ : Union[str, Any] = logging.get_logger(__name__) class UpperCAmelCase__ ( A ): def __init__( self : int,__A : Any=None,**__A : O...
11
1
'''simple docstring''' import os import tempfile from functools import partial from unittest import TestCase from unittest.mock import patch import datasets import datasets.config from .utils import require_beam class UpperCAmelCase__ ( datasets.BeamBasedBuilder ): def lowerCamelCase_ ( ...
11
'''simple docstring''' import json from typing import TYPE_CHECKING, List, Optional, Tuple from tokenizers import pre_tokenizers, processors from ...tokenization_utils_base import AddedToken, BatchEncoding from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import logging from .tokeniz...
11
1
'''simple docstring''' import itertools import json import os import unittest from transformers import AddedToken, RobertaTokenizer, RobertaTokenizerFast from transformers.models.roberta.tokenization_roberta import VOCAB_FILES_NAMES from transformers.testing_utils import require_tokenizers, slow from ...test_t...
11
'''simple docstring''' def A_ ( _lowerCAmelCase : float ): """simple docstring""" return 10 - x * x def A_ ( _lowerCAmelCase : float , _lowerCAmelCase : float ): """simple docstring""" if equation(_lowerCAmelCase ) *...
11
1
'''simple docstring''' UpperCAmelCase_ : Optional[Any] = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' def A_ ( ): """simple docstring""" _lowerCamelCase : List[Any] = input("Enter message: " ) _lowerCamelCase : List[str] = input("Enter key...
11
'''simple docstring''' import gzip import hashlib import json import multiprocessing import os import re import shutil import time from pathlib import Path import numpy as np from arguments import PreprocessingArguments from datasets import load_dataset from minhash_deduplication import deduplicate_dataset fro...
11
1
'''simple docstring''' import os import time from dataclasses import dataclass, field from enum import Enum from typing import Dict, List, Optional, Union import torch from filelock import FileLock from torch.utils.data import Dataset from ...models.auto.modeling_auto import MODEL_FOR_QUESTION_ANSWERING_MAPPIN...
11
'''simple docstring''' import os import time from dataclasses import dataclass, field from enum import Enum from typing import Dict, List, Optional, Union import torch from filelock import FileLock from torch.utils.data import Dataset from ...models.auto.modeling_auto import MODEL_FOR_QUESTION_ANSWERING_MAPPIN...
11
1
'''simple docstring''' import logging import os import sys from dataclasses import dataclass, field from itertools import chain from typing import Optional, Union import datasets import numpy as np import torch from datasets import load_dataset import transformers from transformers import ( AutoConfig, ...
11
'''simple docstring''' import os from shutil import copyfile from typing import List, Optional, Tuple from ...tokenization_utils import AddedToken from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import is_sentencepiece_available, logging if is_sentencepiece_available(): from .tok...
11
1
'''simple docstring''' import os import re import unicodedata from shutil import copyfile from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union import sentencepiece as spm from ...tokenization_utils import PreTrainedTokenizer from ...utils import is_torch_available, logging if is_torch_av...
11
'''simple docstring''' import os import re import unicodedata from shutil import copyfile from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union import sentencepiece as spm from ...tokenization_utils import PreTrainedTokenizer from ...utils import is_torch_available, logging if is_torch_av...
11
1
'''simple docstring''' import copy from ...configuration_utils import PretrainedConfig from ...utils import logging from ..auto.configuration_auto import CONFIG_MAPPING UpperCAmelCase_ : List[Any] = logging.get_logger(__name__) class UpperCAmelCase__ ( A ): lowerCAmelCase_ ...
11
'''simple docstring''' from __future__ import annotations from collections.abc import Iterable, Iterator from dataclasses import dataclass UpperCAmelCase_ : str = (3, 9, -11, 0, 7, 5, 1, -1) UpperCAmelCase_ : int = (4, 6, 2, 0, 8, 10, 3, -2) @dataclass class UpperCAmelCase__ ...
11
1
'''simple docstring''' import math UpperCAmelCase_ : Any = 10 UpperCAmelCase_ : List[str] = 7 UpperCAmelCase_ : int = BALLS_PER_COLOUR * NUM_COLOURS def A_ ( _lowerCAmelCase : int = 20 ): """simple docstring""" _lowerCa...
11
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import _LazyModule UpperCAmelCase_ : Tuple = {'tokenization_wav2vec2_phoneme': ['Wav2Vec2PhonemeCTCTokenizer']} if TYPE_CHECKING: from .tokenization_wavaveca_phoneme import WavaVecaPhonemeCTCTokenizer else: import sys ...
11
1
'''simple docstring''' import unittest from transformers import PegasusConfig, PegasusTokenizer, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_configuration_common import ConfigTester from ...test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor if is_fl...
11
'''simple docstring''' import unittest import numpy as np from transformers import AlbertConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor, random_attention_mask if is_flax_available(): import jax.nu...
11
1
'''simple docstring''' import pytest from datasets import Dataset, DatasetDict, Features, NamedSplit, Value from datasets.io.text import TextDatasetReader from ..utils import assert_arrow_memory_doesnt_increase, assert_arrow_memory_increases def A_ ( _lowerCAmelCase : int , _lowerCAmelCa...
11
'''simple docstring''' import argparse import json import os import tensorstore as ts import torch from flax import serialization from flax.traverse_util import flatten_dict, unflatten_dict from tensorflow.io import gfile from transformers.modeling_utils import dtype_byte_size from transformers.models.switch_t...
11
1
'''simple docstring''' import random from typing import Any def A_ ( _lowerCAmelCase : list ): """simple docstring""" for _ in range(len(_lowerCAmelCase ) ): _lowerCamelCase : Any = random.randint(0 , len(_lowerCAmelCase ...
11
'''simple docstring''' from math import sqrt def A_ ( _lowerCAmelCase : int = 1000000 ): """simple docstring""" _lowerCamelCase : int = 0 _lowerCamelCase : int = 0 _lowerCamelCase : int while num_cuboids <= limit: ...
11
1
'''simple docstring''' def A_ ( _lowerCAmelCase : str ): """simple docstring""" if not all(x.isalpha() for x in string ): raise ValueError("String must only contain alphabetic characters." ) _lowerCamelCase : Any = sorted(string.l...
11
'''simple docstring''' def A_ ( _lowerCAmelCase : int ): """simple docstring""" if isinstance(_lowerCAmelCase , _lowerCAmelCase ): raise TypeError("'float' object cannot be interpreted as an integer" ) if isinstance(_lowerCAmelCase , _lowe...
11
1
'''simple docstring''' import argparse import requests import torch from PIL import Image from transformers import CLIPProcessor, GroupViTConfig, GroupViTModel def A_ ( _lowerCAmelCase : Union[str, Any] ): """simple docstring""" if "img_encoder.pos_embed" in name: ...
11
'''simple docstring''' from dataclasses import dataclass from typing import List, Optional, Union import numpy as np import PIL import torch from transformers import CLIPImageProcessor, CLIPVisionModel from ...models import PriorTransformer from ...pipelines import DiffusionPipeline from ...schedulers import H...
11
1
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_sentencepiece_available, is_speech_available, is_torch_available, ) UpperCAmelCase_ : str = { 'configuration_trocr': ['TROCR_PRETRAINED_CONFIG_...
11
'''simple docstring''' import random from typing import Any def A_ ( _lowerCAmelCase : list ): """simple docstring""" for _ in range(len(_lowerCAmelCase ) ): _lowerCamelCase : Any = random.randint(0 , len(_lowerCAmelCase ...
11
1
'''simple docstring''' import torch from transformers import CamembertForMaskedLM, CamembertTokenizer def A_ ( _lowerCAmelCase : List[str] , _lowerCAmelCase : Any , _lowerCAmelCase : List[str] , _lowerCAmelCase : Dict=5 ): """simple docstring""" ...
11
'''simple docstring''' import unittest import numpy as np from transformers import RobertaConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask if is_flax_available()...
11
1
'''simple docstring''' from __future__ import annotations def A_ ( _lowerCAmelCase : list[int] ): """simple docstring""" return len(set(_lowerCAmelCase ) ) == len(_lowerCAmelCase ) if __name__ == "__main__": import doctest doctest.testmod()
11
'''simple docstring''' from typing import List, Optional from tokenizers import ByteLevelBPETokenizer from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import logging from .tokenization_blenderbot_small import BlenderbotSmallTokenizer UpperCAmelCase_ : Union[str, Any] = ...
11
1
'''simple docstring''' import json import os import subprocess import unittest from ast import literal_eval import pytest from parameterized import parameterized, parameterized_class from . import is_sagemaker_available if is_sagemaker_available(): from sagemaker import Session, TrainingJobAnalytics from s...
11
'''simple docstring''' import contextlib import copy import random from typing import Any, Dict, Iterable, Optional, Union import numpy as np import torch from .utils import deprecate, is_transformers_available if is_transformers_available(): import transformers def A_ ( _lowerCAmelCase : ...
11
1
'''simple docstring''' def A_ ( _lowerCAmelCase : List[str] , _lowerCAmelCase : Any ): """simple docstring""" _lowerCamelCase : Any = "" for i in table: res += inp[i - 1] return res def A_ ( _lowerCAmelCase : ...
11
'''simple docstring''' import argparse from pathlib import Path import torch from transformers import OPTConfig, OPTModel from transformers.utils import logging logging.set_verbosity_info() UpperCAmelCase_ : Optional[int] = logging.get_logger(__name__) def A_ ( _lowerCAmelCase ...
11
1
'''simple docstring''' from collections import defaultdict from graphs.minimum_spanning_tree_prims import prisms_algorithm as mst def A_ ( ): """simple docstring""" _lowerCamelCase , _lowerCamelCase : Union[str, Any] = 9, 14 # noqa: F841 _lowerCamelCa...
11
'''simple docstring''' import argparse import requests import torch from PIL import Image from transformers import CLIPProcessor, GroupViTConfig, GroupViTModel def A_ ( _lowerCAmelCase : Union[str, Any] ): """simple docstring""" if "img_encoder.pos_embed" in name: ...
11
1
'''simple docstring''' from ...processing_utils import ProcessorMixin class UpperCAmelCase__ ( A ): lowerCAmelCase_ = ['image_processor', 'feature_extractor'] lowerCAmelCase_ = 'TvltImageProcessor' lowerCAmelCase_ = 'TvltFeatureExtractor' def __init__( self...
11
'''simple docstring''' from __future__ import annotations def A_ ( _lowerCAmelCase : list[int] , _lowerCAmelCase : int , _lowerCAmelCase : int , _lowerCAmelCase : int ): """simple docstring""" if (direction == 1 and array[indexa] > array[indexa...
11
1
'''simple docstring''' import argparse import json from pathlib import Path import requests import torch from huggingface_hub import cached_download, hf_hub_url from PIL import Image from transformers import DPTConfig, DPTForDepthEstimation, DPTForSemanticSegmentation, DPTImageProcessor from transformers.utils...
11
'''simple docstring''' import math def A_ ( _lowerCAmelCase : int ): """simple docstring""" _lowerCamelCase : Optional[int] = math.loga(math.sqrt(4 * positive_integer + 1 ) / 2 + 1 / 2 ) return exponent == int(_lowerCAmelCase ) ...
11
1
'''simple docstring''' import logging import os import random import sys from dataclasses import dataclass, field from typing import Optional import datasets import numpy as np import pandas as pd from datasets import load_dataset import transformers from transformers import ( AutoConfig, BartForSequen...
11
'''simple docstring''' import warnings from ..trainer import Trainer from ..utils import logging UpperCAmelCase_ : Union[str, Any] = logging.get_logger(__name__) class UpperCAmelCase__ ( A ): def __init__( self : int,__A : Any=None,**__A : O...
11
1
'''simple docstring''' import json import os import unittest from transformers import DebertaTokenizer, DebertaTokenizerFast from transformers.models.deberta.tokenization_deberta import VOCAB_FILES_NAMES from transformers.testing_utils import slow from ...test_tokenization_common import TokenizerTesterMixin c...
11
'''simple docstring''' import json from typing import TYPE_CHECKING, List, Optional, Tuple from tokenizers import pre_tokenizers, processors from ...tokenization_utils_base import AddedToken, BatchEncoding from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import logging from .tokeniz...
11
1
'''simple docstring''' import torch from diffusers import DPMSolverSDEScheduler from diffusers.utils import torch_device from diffusers.utils.testing_utils import require_torchsde from .test_schedulers import SchedulerCommonTest @require_torchsde class UpperCAmelCase__ ( A ): lowerCAmelCase_ ...
11
'''simple docstring''' def A_ ( _lowerCAmelCase : float ): """simple docstring""" return 10 - x * x def A_ ( _lowerCAmelCase : float , _lowerCAmelCase : float ): """simple docstring""" if equation(_lowerCAmelCase ) *...
11
1
'''simple docstring''' import numpy as np def A_ ( _lowerCAmelCase : Dict , _lowerCAmelCase : Tuple , _lowerCAmelCase : Optional[int] , _lowerCAmelCase : str , _lowerCAmelCase : int ): """simple docstring""" _lowerCamelCase : str ...
11
'''simple docstring''' import gzip import hashlib import json import multiprocessing import os import re import shutil import time from pathlib import Path import numpy as np from arguments import PreprocessingArguments from datasets import load_dataset from minhash_deduplication import deduplicate_dataset fro...
11
1
'''simple docstring''' import argparse import ast import logging import os import sys import pandas as pd import torch from tqdm import tqdm from transformers import BartForConditionalGeneration, RagRetriever, RagSequenceForGeneration, RagTokenForGeneration from transformers import logging as transformers_logg...
11
'''simple docstring''' import os import time from dataclasses import dataclass, field from enum import Enum from typing import Dict, List, Optional, Union import torch from filelock import FileLock from torch.utils.data import Dataset from ...models.auto.modeling_auto import MODEL_FOR_QUESTION_ANSWERING_MAPPIN...
11
1
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_torch_available, ) UpperCAmelCase_ : str = { 'configuration_falcon': ['FALCON_PRETRAINED_CONFIG_ARCHIVE_MAP', 'FalconConfig'], } try: if not is_torch...
11
'''simple docstring''' import os from shutil import copyfile from typing import List, Optional, Tuple from ...tokenization_utils import AddedToken from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import is_sentencepiece_available, logging if is_sentencepiece_available(): from .tok...
11
1
'''simple docstring''' import os import tempfile import unittest from pathlib import Path from transformers import AutoConfig, is_torch_available from transformers.testing_utils import require_torch, torch_device if is_torch_available(): from transformers import PyTorchBenchmark, PyTorchBenchmarkArguments @...
11
'''simple docstring''' import os import re import unicodedata from shutil import copyfile from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union import sentencepiece as spm from ...tokenization_utils import PreTrainedTokenizer from ...utils import is_torch_available, logging if is_torch_av...
11
1
'''simple docstring''' import collections from typing import List, Optional, Union from ...tokenization_utils_base import BatchEncoding from ...utils import TensorType, add_end_docstrings, add_start_docstrings, logging from ..bert.tokenization_bert_fast import BertTokenizerFast from .tokenization_dpr import DPR...
11
'''simple docstring''' from __future__ import annotations from collections.abc import Iterable, Iterator from dataclasses import dataclass UpperCAmelCase_ : str = (3, 9, -11, 0, 7, 5, 1, -1) UpperCAmelCase_ : int = (4, 6, 2, 0, 8, 10, 3, -2) @dataclass class UpperCAmelCase__ ...
11
1
'''simple docstring''' import warnings from ...utils import logging from .image_processing_layoutlmva import LayoutLMvaImageProcessor UpperCAmelCase_ : Optional[Any] = logging.get_logger(__name__) class UpperCAmelCase__ ( A ): def __init__( self : Any,*__A :...
11
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import _LazyModule UpperCAmelCase_ : Tuple = {'tokenization_wav2vec2_phoneme': ['Wav2Vec2PhonemeCTCTokenizer']} if TYPE_CHECKING: from .tokenization_wavaveca_phoneme import WavaVecaPhonemeCTCTokenizer else: import sys ...
11
1
'''simple docstring''' import warnings from diffusers import StableDiffusionInpaintPipeline as StableDiffusionInpaintPipeline # noqa F401 warnings.warn( 'The `inpainting.py` script is outdated. Please use directly `from diffusers import' ' StableDiffusionInpaintPipeline` instead.' )
11
'''simple docstring''' import unittest import numpy as np from transformers import AlbertConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor, random_attention_mask if is_flax_available(): import jax.nu...
11
1
'''simple docstring''' import gzip import hashlib import json import multiprocessing import os import re import shutil import time from pathlib import Path import numpy as np from arguments import PreprocessingArguments from datasets import load_dataset from minhash_deduplication import deduplicate_dataset fro...
11
'''simple docstring''' import argparse import json import os import tensorstore as ts import torch from flax import serialization from flax.traverse_util import flatten_dict, unflatten_dict from tensorflow.io import gfile from transformers.modeling_utils import dtype_byte_size from transformers.models.switch_t...
11
1
'''simple docstring''' import inspect import unittest from transformers import BitConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_backbone_common import BackboneT...
11
'''simple docstring''' from math import sqrt def A_ ( _lowerCAmelCase : int = 1000000 ): """simple docstring""" _lowerCamelCase : int = 0 _lowerCamelCase : int = 0 _lowerCamelCase : int while num_cuboids <= limit: ...
11
1
'''simple docstring''' import os import string import sys UpperCAmelCase_ : List[Any] = 1 << 8 UpperCAmelCase_ : int = { 'tab': ord('\t'), 'newline': ord('\r'), 'esc': 27, 'up': 65 + ARROW_KEY_FLAG, 'down': 66 + ARROW_KEY_FLAG, 'right': 67 + ARROW_KEY_FL...
11
'''simple docstring''' def A_ ( _lowerCAmelCase : int ): """simple docstring""" if isinstance(_lowerCAmelCase , _lowerCAmelCase ): raise TypeError("'float' object cannot be interpreted as an integer" ) if isinstance(_lowerCAmelCase , _lowe...
11
1
'''simple docstring''' import subprocess import sys from transformers import BertConfig, BertModel, BertTokenizer, pipeline from transformers.testing_utils import TestCasePlus, require_torch class UpperCAmelCase__ ( A ): @require_torch def lowerCamelCase_ ( self : int )...
11
'''simple docstring''' from dataclasses import dataclass from typing import List, Optional, Union import numpy as np import PIL import torch from transformers import CLIPImageProcessor, CLIPVisionModel from ...models import PriorTransformer from ...pipelines import DiffusionPipeline from ...schedulers import H...
11
1
'''simple docstring''' from math import factorial class UpperCAmelCase__ : def __init__( self : Optional[Any],__A : int,__A : List[Any] ): _lowerCamelCase : Dict = real if isinstance(__A,__A ): _lowerCamelCas...
11
'''simple docstring''' import random from typing import Any def A_ ( _lowerCAmelCase : list ): """simple docstring""" for _ in range(len(_lowerCAmelCase ) ): _lowerCamelCase : Any = random.randint(0 , len(_lowerCAmelCase ...
11
1
'''simple docstring''' import argparse import os import pickle import sys import torch from transformers import TransfoXLConfig, TransfoXLLMHeadModel, load_tf_weights_in_transfo_xl from transformers.models.transfo_xl import tokenization_transfo_xl as data_utils from transformers.models.transfo_xl.tokenization_...
11
'''simple docstring''' import unittest import numpy as np from transformers import RobertaConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask if is_flax_available()...
11
1
'''simple docstring''' from ...configuration_utils import PretrainedConfig UpperCAmelCase_ : Tuple = { 'google/tapas-base-finetuned-sqa': ( 'https://huggingface.co/google/tapas-base-finetuned-sqa/resolve/main/config.json' ), 'google/tapas-base-finetuned-wtq': ( 'http...
11
'''simple docstring''' from typing import List, Optional from tokenizers import ByteLevelBPETokenizer from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import logging from .tokenization_blenderbot_small import BlenderbotSmallTokenizer UpperCAmelCase_ : Union[str, Any] = ...
11
1
'''simple docstring''' import unittest from transformers import JukeboxTokenizer from transformers.testing_utils import require_torch class UpperCAmelCase__ ( unittest.TestCase ): lowerCAmelCase_ = JukeboxTokenizer lowerCAmelCase_ = { 'artist': 'Zac Brown Band', ...
11
'''simple docstring''' import contextlib import copy import random from typing import Any, Dict, Iterable, Optional, Union import numpy as np import torch from .utils import deprecate, is_transformers_available if is_transformers_available(): import transformers def A_ ( _lowerCAmelCase : ...
11
1
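Nothing in the preview states how the columns are meant to be consumed, but
the (code, style_context, label) triple suggests a pairwise style-matching
task. The sketch below prepares such pairs; treat the pairing as an
assumption rather than the dataset's documented usage, and note that the
repository id is again a placeholder.

```python
# Assumption: label marks whether `code` matches the style of
# `style_context`. The source does not confirm this, so the pairing
# here is illustrative only.
from datasets import load_dataset

ds = load_dataset("org/dataset-name", split="train")  # placeholder repo id

SEP = "\n# <style-context boundary>\n"

def to_pair(example):
    # Join context and code into one sequence a text classifier could consume.
    example["text"] = example["style_context"] + SEP + example["code"]
    return example

paired = ds.map(to_pair)
print(paired[0]["label"], paired[0]["text"][:120])
```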