Dataset columns:

- code: string (lengths 87 to 55.2k)
- code_codestyle: int64 (0 to 349)
- style_context: string (lengths 135 to 49.1k)
- style_context_codestyle: int64 (0 to 349)
- label: int64 (0 to 1)
"""simple docstring""" from dataclasses import dataclass, field from typing import Tuple from ..utils import cached_property, is_tf_available, logging, requires_backends from .benchmark_args_utils import BenchmarkArguments if is_tf_available(): import tensorflow as tf _a = logging.get_logge...
61
import unittest from transformers import ( MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, TextaTextGenerationPipeline, pipeline, ) from transformers.testing_utils import is_pipeline_test, require_tf, require_torch from transformers.utils import is_torch_available ...
13
0
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available _A = { 'configuration_luke': ['LUKE_PRETRAINED_CONFIG_ARCHIVE_MAP', 'LukeConfig'], 'tokenization_luke': ['LukeTokenizer'], } try: if not is_torch_available(): raise ...
62
def A_ ( _UpperCAmelCase ): SCREAMING_SNAKE_CASE_: List[str] = [0] * len(_UpperCAmelCase ) SCREAMING_SNAKE_CASE_: List[Any] = [] SCREAMING_SNAKE_CASE_: str = [] SCREAMING_SNAKE_CASE_: List[str] = 0 for values in graph.values(): ...
13
0
'''simple docstring''' def _lowerCamelCase ( lowercase : list , lowercase : int , lowercase : int = 0 , lowercase : int = 0 ) -> int: _a = right or len(lowercase ) - 1 if left > right: return -1 elif...
63
import argparse import evaluate import torch from datasets import load_dataset from torch.optim import AdamW from torch.utils.data import DataLoader from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed from accelerate import Accelerator, DistributedTy...
13
0
"""simple docstring""" import argparse import dataclasses import json import logging import os import shutil from typing import List, Optional import datasets from accelerate import Accelerator from datasets import load_dataset from finetuning import finetune from tqdm.auto import tqdm im...
64
from collections.abc import Callable class __lowercase : """simple docstring""" def __init__( self : Tuple , lowerCAmelCase__ : Callable | None = None): # Stores actual heap items. SCREAMING_SNAKE_CASE_: list = [] # Stores indexes of each i...
13
0
import importlib.util import os import platform from argparse import ArgumentParser import huggingface_hub from .. import __version__ as version from ..utils import ( is_accelerate_available, is_flax_available, is_safetensors_available, is_tf_available, is_torch_available, ) from . ...
65
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_sentencepiece_available, is_tf_available, is_tokenizers_available, is_torch_available, ) if is_sentencepiece_available(): from ..ta.tokenization_ta import TaT...
13
0
"""simple docstring""" import argparse import math import traceback import dateutil.parser as date_parser import requests def A_ ( _lowercase ): '''simple docstring''' snake_case_ :int = {} snake_case_ :List[Any] = job["""started_at"""] snake_case_ :int = ...
66
import gc import unittest import numpy as np import torch from transformers import CLIPTextConfig, CLIPTextModelWithProjection, CLIPTokenizer from diffusers import HeunDiscreteScheduler, PriorTransformer, ShapEPipeline from diffusers.pipelines.shap_e import ShapERenderer from diffusers.utils import load_numpy, slow...
13
0
'''simple docstring''' import argparse import os import evaluate import torch from datasets import load_dataset from torch.optim import AdamW from torch.utils.data import DataLoader from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed from accelerat...
67
from ...utils import ( OptionalDependencyNotAvailable, is_torch_available, is_transformers_available, is_transformers_version, ) try: if not (is_transformers_available() and is_torch_available() and is_transformers_version(""">=""", """4.25.0""")): raise OptionalDependencyNotAvaila...
13
0
import unittest import numpy as np from transformers.testing_utils import require_torch, require_vision from transformers.utils import is_torch_available, is_vision_available from ...test_image_processing_common import ImageProcessingSavingTestMixin if is_torch_available(): import torch if is_vision_available(...
68
class __lowercase : """simple docstring""" def __init__( self : List[Any] , lowerCAmelCase__ : Optional[int] , lowerCAmelCase__ : List[Any]): SCREAMING_SNAKE_CASE_: List[str] = name SCREAMING_SNAKE_CASE_: Union[str, Any] = val ...
13
0
"""simple docstring""" from collections import OrderedDict from typing import Mapping from packaging import version from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging __UpperCamelCase = logging.get_logger(__name__) __UpperCamelCase = ...
69
import random import unittest import numpy as np import transformers from transformers import is_flax_available, is_torch_available from transformers.testing_utils import is_pt_flax_cross_test, require_flax if is_flax_available(): import os import jax.numpy as jnp from jax import jit fro...
13
0
'''simple docstring''' from typing import List, Optional, Tuple, Union import PIL import torch from torchvision import transforms from diffusers.pipeline_utils import DiffusionPipeline, ImagePipelineOutput from diffusers.schedulers import DDIMScheduler from diffusers.utils import rand...
70
import argparse import re import torch from CLAP import create_model from transformers import AutoFeatureExtractor, ClapConfig, ClapModel lowerCAmelCase : Union[str, Any] = { """text_branch""": """text_model""", """audio_branch""": """audio_model.audio_encoder""", """attn""": """attenti...
13
0
import math from numpy import inf from scipy.integrate import quad def A ( a_ ) -> float: if num <= 0: raise ValueError('math domain error' ) return quad(a_ ,0 ,a_ ,args=(a_) )[0] def A ...
71
import inspect import unittest from transformers import ViTConfig from transformers.testing_utils import ( require_accelerate, require_torch, require_torch_gpu, require_vision, slow, torch_device, ) from transformers.utils import cached_property, is_torch_available, is_vision_available from ...
13
0
"""simple docstring""" import inspect import unittest import numpy as np from tests.test_modeling_common import floats_tensor from transformers import DetrConfig, MaskFormerConfig, SwinConfig, is_torch_available, is_vision_available from transformers.testing_utils import require_torch, require_torch_multi_g...
72
import json import os from functools import lru_cache from typing import List, Optional, Tuple import regex as re from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...utils import logging lowerCAmelCase : Any = logging.get_logger(__name__) lowerCAmelCase : Tuple = ...
13
0
def SCREAMING_SNAKE_CASE__ ( lowerCamelCase__ , lowerCamelCase__ , lowerCamelCase__ , lowerCamelCase__ , lowerCamelCase__ , ) -> float: __lowerCamelCase : Dict = [redshift, radiation_density, matter_density, dark_energy] if any(p < 0 for p in parameters ): raise ValueError('...
73
import unittest from transformers import is_torch_available from transformers.testing_utils import require_torch if is_torch_available(): import torch from transformers.generation import DisjunctiveConstraint @require_torch class __lowercase ( unittest.TestCase ): """simple docstring"""...
13
0
"""simple docstring""" import math from numpy import inf from scipy.integrate import quad def _snake_case ( snake_case__ : float ): if num <= 0: raise ValueError('math domain error' ) return quad(snake_case__ , 0 , snake_case__ , args=(snake_case__) )[0] def ...
74
import pickle import shutil import tempfile import unittest from transformers import SPIECE_UNDERLINE, XGLMTokenizer, XGLMTokenizerFast from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow from transformers.utils import cached_property from ...test_tokenization_commo...
13
0
'''simple docstring''' from math import loga def a_ ( __snake_case : int ) -> int: """simple docstring""" if a < 0: raise ValueError('''Input value must be a positive integer''' ) elif isinstance(__snake_case , __snake_ca...
75
def A_ ( _UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase , ): SCREAMING_SNAKE_CASE_: Optional[int] = [redshift, radiation_density, matter_density, dark_energy] if any(p < 0 for p in parameters ): raise ValueError("Al...
13
0
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_tf_available, is_tokenizers_available, is_torch_available, ) a_ = { 'configuration_roformer': ['ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP', 'RoFormerConfig', 'Ro...
76
from typing import Optional, Union import torch from torch import nn from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss from ...activations import ACTaFN from ...modeling_outputs import BaseModelOutputWithPoolingAndNoAttention, ImageClassifierOutputWithNoAttention from ...modeling_utils import PreTra...
13
0
"""simple docstring""" from typing import List, Union from ..utils import ( add_end_docstrings, is_tf_available, is_torch_available, is_vision_available, logging, requires_backends, ) from .base import PIPELINE_INIT_ARGS, Pipeline if is_vision_available(): from PIL import Image ...
77
def A_ ( _UpperCAmelCase , _UpperCAmelCase = False ): if not isinstance(_UpperCAmelCase , _UpperCAmelCase ): SCREAMING_SNAKE_CASE_: str = f"Expected string as input, found {type(_UpperCAmelCase )}" raise ValueError(_UpperCAmelCase ) if not isi...
13
0
"""simple docstring""" import torch from diffusers import DDPMScheduler from .test_schedulers import SchedulerCommonTest class A_ ( SCREAMING_SNAKE_CASE_ ): """simple docstring""" __UpperCamelCase = (DDPMScheduler,) def ...
78
import os import tempfile import unittest from transformers import is_torch_available from transformers.testing_utils import require_torch if is_torch_available(): import torch from torch import nn from transformers import ( Adafactor, AdamW, get_constant_schedule, ...
13
0
'''simple docstring''' from __future__ import annotations def __lowercase ( __lowercase , __lowercase = None , __lowercase = None , __lowercase = False , ) -> tuple[int, float, str]: '''simple docstring''' _A = cipher_alphabet or [chr(__lowercase ...
79
import copy from dataclasses import dataclass, field from typing import ClassVar, Dict from ..features import Audio, Features, Value from .base import TaskTemplate @dataclass(frozen=UpperCAmelCase_ ) class __lowercase ( UpperCAmelCase_ ): """simple docstring""" _UpperCAmelCase : str ...
13
0
'''simple docstring''' from dataclasses import dataclass from typing import Dict, Optional, Tuple, Union import torch import torch.nn as nn from ..configuration_utils import ConfigMixin, register_to_config from ..utils import BaseOutput, apply_forward_hook from .attention_processor import AttentionProcessor...
80
import unittest import numpy as np from transformers import is_flax_available from transformers.testing_utils import require_flax from ..test_modeling_flax_common import ids_tensor if is_flax_available(): import jax import jax.numpy as jnp from transformers.generation import ( FlaxFo...
13
0
"""simple docstring""" import warnings from ...processing_utils import ProcessorMixin from ...tokenization_utils_base import BatchEncoding class __A ( _SCREAMING_SNAKE_CASE ): """simple docstring""" __lowerCAmelCase = ["image_processor...
81
import math import sys def A_ ( _UpperCAmelCase ): if number != int(_UpperCAmelCase ): raise ValueError("the value of input must be a natural number" ) if number < 0: raise ValueError("the value of input must not be a negative number" ) if number...
13
0
import argparse import json from pathlib import Path import requests import torch from huggingface_hub import hf_hub_download from PIL import Image from transformers import ViTConfig, ViTForImageClassification, ViTImageProcessor, ViTModel from transformers.utils import logging logging.set_verbosity_info() A__ ...
82
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available lowerCAmelCase : Optional[int] = {"""configuration_wavlm""": ["""WAVLM_PRETRAINED_CONFIG_ARCHIVE_MAP""", """WavLMConfig"""]} try: if not is_torch_available(): rais...
13
0
'''simple docstring''' from ..utils import DummyObject, requires_backends class lowercase__ ( metaclass=lowercase ): lowercase__ = ["""note_seq"""] def __init__( self : Tuple ,*lowerCamelCase__ : Union[str, Any] ,**lowerCamelCase__ : Tuple ): '''...
83
import unittest from transformers import ( MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, TextaTextGenerationPipeline, pipeline, ) from transformers.testing_utils import is_pipeline_test, require_tf, require_torch from transformers.utils import is_torch_available ...
13
0
"""simple docstring""" # Copyright 2021 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/...
84
def A_ ( _UpperCAmelCase ): SCREAMING_SNAKE_CASE_: List[str] = [0] * len(_UpperCAmelCase ) SCREAMING_SNAKE_CASE_: List[Any] = [] SCREAMING_SNAKE_CASE_: str = [] SCREAMING_SNAKE_CASE_: List[str] = 0 for values in graph.values(): ...
13
0
'''simple docstring''' import shutil import tempfile import unittest from transformers import SPIECE_UNDERLINE, BatchEncoding, MBartTokenizer, MBartTokenizerFast, is_torch_available from transformers.testing_utils import ( get_tests_dir, nested_simplify, require_sentencepiece, require_tokeniz...
85
import argparse import evaluate import torch from datasets import load_dataset from torch.optim import AdamW from torch.utils.data import DataLoader from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed from accelerate import Accelerator, DistributedTy...
13
0
"""simple docstring""" from typing import Any, Dict, List, Union from ..utils import add_end_docstrings, is_torch_available, is_vision_available, logging, requires_backends from .base import PIPELINE_INIT_ARGS, Pipeline if is_vision_available(): from ..image_utils import load_image if is_torch_avail...
86
from collections.abc import Callable class __lowercase : """simple docstring""" def __init__( self : Tuple , lowerCAmelCase__ : Callable | None = None): # Stores actual heap items. SCREAMING_SNAKE_CASE_: list = [] # Stores indexes of each i...
13
0
UpperCamelCase = [0, 2, 4, 6, 8] UpperCamelCase = [1, 3, 5, 7, 9] def lowercase_ ( _lowerCamelCase : int , _lowerCamelCase : int , _lowerCamelCase : list[int] , _lowerCamelCase : int): if remaining_length == 0: if dig...
87
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_sentencepiece_available, is_tf_available, is_tokenizers_available, is_torch_available, ) if is_sentencepiece_available(): from ..ta.tokenization_ta import TaT...
13
0
def a__ ( A_, A_ ): '''simple docstring''' return 1 if input_a == input_a else 0 def a__ ( ): '''simple docstring''' assert xnor_gate(0, 0 ) == 1 assert xnor_gate(0, 1 ) == 0 assert xnor_gate(1, 0 ) == 0...
88
import gc import unittest import numpy as np import torch from transformers import CLIPTextConfig, CLIPTextModelWithProjection, CLIPTokenizer from diffusers import HeunDiscreteScheduler, PriorTransformer, ShapEPipeline from diffusers.pipelines.shap_e import ShapERenderer from diffusers.utils import load_numpy, slow...
13
0
'''simple docstring''' import logging import os import sys from pathlib import Path from unittest.mock import patch from parameterized import parameterized from run_eval import run_generate from run_eval_search import run_search from transformers.testing_utils import CaptureStdout, TestCasePlus, slow from utils ...
89
from ...utils import ( OptionalDependencyNotAvailable, is_torch_available, is_transformers_available, is_transformers_version, ) try: if not (is_transformers_available() and is_torch_available() and is_transformers_version(""">=""", """4.25.0""")): raise OptionalDependencyNotAvaila...
13
0
from collections import defaultdict from typing import Optional from ..image_utils import load_image from ..utils import ( add_end_docstrings, is_torch_available, logging, requires_backends, ) from .base import PIPELINE_INIT_ARGS, ChunkPipeline if is_torch_available(): im...
90
class __lowercase : """simple docstring""" def __init__( self : List[Any] , lowerCAmelCase__ : Optional[int] , lowerCAmelCase__ : List[Any]): SCREAMING_SNAKE_CASE_: List[str] = name SCREAMING_SNAKE_CASE_: Union[str, Any] = val ...
13
0
"""simple docstring""" # Copyright 2023 The HuggingFace Inc. team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LIC...
91
import random import unittest import numpy as np import transformers from transformers import is_flax_available, is_torch_available from transformers.testing_utils import is_pt_flax_cross_test, require_flax if is_flax_available(): import os import jax.numpy as jnp from jax import jit fro...
13
0
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_tf_available, is_torch_available, is_vision_available, ) UpperCamelCase__ = {"""configuration_vit""": ["""VIT_PRETRAINED_CONFIG_ARCHIVE_MAP""", """ViTConfig"...
92
import argparse import re import torch from CLAP import create_model from transformers import AutoFeatureExtractor, ClapConfig, ClapModel lowerCAmelCase : Union[str, Any] = { """text_branch""": """text_model""", """audio_branch""": """audio_model.audio_encoder""", """attn""": """attenti...
13
0
'''simple docstring''' def snake_case_ ( __SCREAMING_SNAKE_CASE : str , __SCREAMING_SNAKE_CASE : list[str] ): """simple docstring""" lowercase_ : List[str] = '''''' for word_or_phrase in separated: ...
93
import inspect import unittest from transformers import ViTConfig from transformers.testing_utils import ( require_accelerate, require_torch, require_torch_gpu, require_vision, slow, torch_device, ) from transformers.utils import cached_property, is_torch_available, is_vision_available from ...
13
0
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available snake_case : str = { '''configuration_upernet''': ['''UperNetConfig'''], } try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except O...
94
import json import os from functools import lru_cache from typing import List, Optional, Tuple import regex as re from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...utils import logging lowerCAmelCase : Any = logging.get_logger(__name__) lowerCAmelCase : Tuple = ...
13
0
import argparse import json from pathlib import Path import requests import torch from huggingface_hub import hf_hub_download from PIL import Image from transformers import YolosConfig, YolosForObjectDetection, YolosImageProcessor from transformers.utils import logging logging.set_verbosity...
95
import unittest from transformers import is_torch_available from transformers.testing_utils import require_torch if is_torch_available(): import torch from transformers.generation import DisjunctiveConstraint @require_torch class __lowercase ( unittest.TestCase ): """simple docstring"""...
13
0
"""simple docstring""" def _snake_case ( lowercase__ , lowercase__ , lowercase__ ): if exponent == 1: return base if exponent % 2 == 0: _lowerCamelCase : int = _modexpt(lowercase__ , exponent // 2 , lowercase...
96
import pickle import shutil import tempfile import unittest from transformers import SPIECE_UNDERLINE, XGLMTokenizer, XGLMTokenizerFast from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow from transformers.utils import cached_property from ...test_tokenization_commo...
13
0
'''simple docstring''' from ...configuration_utils import PretrainedConfig from ...utils import logging __snake_case = logging.get_logger(__name__) __snake_case = { '''google/vivit-b-16x2-kinetics400''': ( '''https://huggingface.co/google/vivit-b-16x2-kinetics400/resolv...
97
def A_ ( _UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase , ): SCREAMING_SNAKE_CASE_: Optional[int] = [redshift, radiation_density, matter_density, dark_energy] if any(p < 0 for p in parameters ): raise ValueError("Al...
13
0
"""simple docstring""" import warnings from typing import Dict, List, Optional, Tuple from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...utils import logging lowerCAmelCase__ : Optional[int] = logging.get_logger(__name__) class snake_case ( __UpperCAmelCase ): ...
98
from typing import Optional, Union import torch from torch import nn from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss from ...activations import ACTaFN from ...modeling_outputs import BaseModelOutputWithPoolingAndNoAttention, ImageClassifierOutputWithNoAttention from ...modeling_utils import PreTra...
13
0
from dataclasses import dataclass, field from typing import ClassVar, Dict from ..features import Features, Value from .base import TaskTemplate @dataclass(frozen=__UpperCAmelCase ) class A__ ( __UpperCAmelCase ): """simple docstring""" __A : str = field(default='''summa...
99
def A_ ( _UpperCAmelCase , _UpperCAmelCase = False ): if not isinstance(_UpperCAmelCase , _UpperCAmelCase ): SCREAMING_SNAKE_CASE_: str = f"Expected string as input, found {type(_UpperCAmelCase )}" raise ValueError(_UpperCAmelCase ) if not isi...
13
0
"""simple docstring""" from collections import OrderedDict from typing import Any, List, Mapping, Optional from ... import PreTrainedTokenizer, TensorType, is_torch_available from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfigWithPast, PatchingSpec from ...utils import logging __...
100
import os import tempfile import unittest from transformers import is_torch_available from transformers.testing_utils import require_torch if is_torch_available(): import torch from torch import nn from transformers import ( Adafactor, AdamW, get_constant_schedule, ...
13
0
import gc import unittest import numpy as np import torch from transformers import CLIPTextConfig, CLIPTextModelWithProjection, CLIPTokenizer from diffusers import HeunDiscreteScheduler, PriorTransformer, ShapEPipeline from diffusers.pipelines.shap_e import ShapERenderer from diffusers.utils import load_numpy, slow ...
101
import copy from dataclasses import dataclass, field from typing import ClassVar, Dict from ..features import Audio, Features, Value from .base import TaskTemplate @dataclass(frozen=UpperCAmelCase_ ) class __lowercase ( UpperCAmelCase_ ): """simple docstring""" _UpperCAmelCase : str ...
13
0
"""simple docstring""" from datetime import datetime import matplotlib.pyplot as plt import torch def lowercase ( _snake_case : int ) ->Union[str, Any]: """simple docstring""" for param in module.parameters(): __snake_case : int = False def lowercase...
102
import unittest import numpy as np from transformers import is_flax_available from transformers.testing_utils import require_flax from ..test_modeling_flax_common import ids_tensor if is_flax_available(): import jax import jax.numpy as jnp from transformers.generation import ( FlaxFo...
13
0
from bisect import bisect from itertools import accumulate def UpperCamelCase( __UpperCamelCase : Optional[int] ,__UpperCamelCase : Optional[Any] ,__UpperCamelCase : Dict ,__UpperCamelCase : str ): lowerCAmelCase_ : Union[str, Any] = sorted(zip(__UpperCa...
103
import math import sys def A_ ( _UpperCAmelCase ): if number != int(_UpperCAmelCase ): raise ValueError("the value of input must be a natural number" ) if number < 0: raise ValueError("the value of input must not be a negative number" ) if number...
13
0
'''simple docstring''' import random def _A ( A__ , A__ , A__ ): """simple docstring""" __lowercase = a[left_index] __lowercase = left_index + 1 for j in range(left_index + 1 , A__ ): if a[j] < pivot: __lowercase , __lowercase ...
104
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available lowerCAmelCase : Optional[int] = {"""configuration_wavlm""": ["""WAVLM_PRETRAINED_CONFIG_ARCHIVE_MAP""", """WavLMConfig"""]} try: if not is_torch_available(): rais...
13
0
"""simple docstring""" import os from shutil import copyfile from typing import Any, Dict, List, Optional, Tuple import sentencepiece as spm from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...utils import logging a : List[Any] = logging.get_logger(__...
105
import unittest from transformers import ( MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, TextaTextGenerationPipeline, pipeline, ) from transformers.testing_utils import is_pipeline_test, require_tf, require_torch from transformers.utils import is_torch_available ...
13
0
"""simple docstring""" from ...processing_utils import ProcessorMixin from ...tokenization_utils_base import BatchEncoding class SCREAMING_SNAKE_CASE ( a_ ): """simple docstring""" lowercase__ = ["image_processor", "tokenizer"] lowercase__ = "Auto...
106
def A_ ( _UpperCAmelCase ): SCREAMING_SNAKE_CASE_: List[str] = [0] * len(_UpperCAmelCase ) SCREAMING_SNAKE_CASE_: List[Any] = [] SCREAMING_SNAKE_CASE_: str = [] SCREAMING_SNAKE_CASE_: List[str] = 0 for values in graph.values(): ...
13
0
from __future__ import annotations import collections import pprint from pathlib import Path def __magic_name__ ( A : str ): '''simple docstring''' return "".join(sorted(A ) ) def __magic_name__ ( A : str ): '''simple docstring''' return ...
107
import argparse import evaluate import torch from datasets import load_dataset from torch.optim import AdamW from torch.utils.data import DataLoader from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed from accelerate import Accelerator, DistributedTy...
13
0
"""simple docstring""" from ...configuration_utils import PretrainedConfig from ...utils import logging lowerCAmelCase__ = logging.get_logger(__name__) lowerCAmelCase__ = { '''facebook/dpr-ctx_encoder-single-nq-base''': ( '''https://huggingface.co/facebook/dpr-ctx_encoder-...
108
from collections.abc import Callable class __lowercase : """simple docstring""" def __init__( self : Tuple , lowerCAmelCase__ : Callable | None = None): # Stores actual heap items. SCREAMING_SNAKE_CASE_: list = [] # Stores indexes of each i...
13
0
"""simple docstring""" import json import os from pathlib import Path import pytest from datasets.download.download_config import DownloadConfig from datasets.download.download_manager import DownloadManager from datasets.utils.file_utils import hash_url_to_filename A: Optional[Any] = "http://www.mock...
109
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_sentencepiece_available, is_tf_available, is_tokenizers_available, is_torch_available, ) if is_sentencepiece_available(): from ..ta.tokenization_ta import TaT...
13
0
import itertools import os import random import tempfile import unittest import numpy as np from transformers import TvltFeatureExtractor, is_datasets_available from transformers.testing_utils import check_json_file_has_correct_format, require_torch, require_torchaudio from transformers.utils.import_u...
110
import gc import unittest import numpy as np import torch from transformers import CLIPTextConfig, CLIPTextModelWithProjection, CLIPTokenizer from diffusers import HeunDiscreteScheduler, PriorTransformer, ShapEPipeline from diffusers.pipelines.shap_e import ShapERenderer from diffusers.utils import load_numpy, slow...
13
0
import argparse import logging import os from pathlib import Path from typing import Any, Dict import pytorch_lightning as pl from pytorch_lightning.utilities import rank_zero_info from transformers import ( AdamW, AutoConfig, AutoModel, AutoModelForPreTraining, AutoModelForQuestionA...
65
from ...utils import ( OptionalDependencyNotAvailable, is_torch_available, is_transformers_available, is_transformers_version, ) try: if not (is_transformers_available() and is_torch_available() and is_transformers_version(""">=""", """4.25.0""")): raise OptionalDependencyNotAvaila...
13
0
import copy from dataclasses import dataclass, field from typing import ClassVar, Dict from ..features import Audio, Features, Value from .base import TaskTemplate @dataclass(frozen=UpperCAmelCase_ ) class UpperCAmelCase__ ( UpperCAmelCase_ ): '''simple docstring''' ...
226
class __lowercase : """simple docstring""" def __init__( self : List[Any] , lowerCAmelCase__ : Optional[int] , lowerCAmelCase__ : List[Any]): SCREAMING_SNAKE_CASE_: List[str] = name SCREAMING_SNAKE_CASE_: Union[str, Any] = val ...
13
0
import os from huggingface_hub.constants import HUGGINGFACE_HUB_CACHE, hf_cache_home a_ :Union[str, Any] = HUGGINGFACE_HUB_CACHE a_ :int = """config.json""" a_ :str = """diffusion_pytorch_model.bin""" a_ :Dict = """diffusion_flax_model.msgpack""" a_ :Op...
277
import random import unittest import numpy as np import transformers from transformers import is_flax_available, is_torch_available from transformers.testing_utils import is_pt_flax_cross_test, require_flax if is_flax_available(): import os import jax.numpy as jnp from jax import jit fro...
13
0
from __future__ import annotations def _snake_case( SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) -> int: # noqa: E741 while r - l > 1: lowercase : Tuple = (l + r) // 2 if v[m...
20
import argparse import re import torch from CLAP import create_model from transformers import AutoFeatureExtractor, ClapConfig, ClapModel lowerCAmelCase : Union[str, Any] = { """text_branch""": """text_model""", """audio_branch""": """audio_model.audio_encoder""", """attn""": """attenti...
13
0
import torch from diffusers import KDPMaDiscreteScheduler from diffusers.utils import torch_device from .test_schedulers import SchedulerCommonTest class __snake_case ( UpperCAmelCase_ ): __lowerCamelCase : int = (KDPMaDiscreteScheduler,) __lowerCamelCase : Optional[A...
348
import inspect import unittest from transformers import ViTConfig from transformers.testing_utils import ( require_accelerate, require_torch, require_torch_gpu, require_vision, slow, torch_device, ) from transformers.utils import cached_property, is_torch_available, is_vision_available from ...
13
0
"""simple docstring""" from __future__ import annotations def __lowerCamelCase ( a_ : Any , a_ : Optional[Any] = None , a_ : Dict = None ) -> Dict: if start is None: __SCREAMING_SNAKE_CASE :Tuple = 0 if end is None: ...
191
import json import os from functools import lru_cache from typing import List, Optional, Tuple import regex as re from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...utils import logging lowerCAmelCase : Any = logging.get_logger(__name__) lowerCAmelCase : Tuple = ...
13
0
def lowerCAmelCase_ ( snake_case_=28123 ): _A : Any = [1] * (limit + 1) for i in range(2,int(limit**0.5 ) + 1 ): sum_divs[i * i] += i for k in range(i + 1,limit // i + 1 ): sum_divs[k * i] += k + i _A : ...
26
import unittest from transformers import is_torch_available from transformers.testing_utils import require_torch if is_torch_available(): import torch from transformers.generation import DisjunctiveConstraint @require_torch class __lowercase ( unittest.TestCase ): """simple docstring"""...
13
0
import pickle import shutil import tempfile import unittest from transformers import SPIECE_UNDERLINE, XGLMTokenizer, XGLMTokenizerFast from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow from transformers.utils import cached_property from ...test_token...
283
import pickle import shutil import tempfile import unittest from transformers import SPIECE_UNDERLINE, XGLMTokenizer, XGLMTokenizerFast from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow from transformers.utils import cached_property from ...test_tokenization_commo...
13
0
'''simple docstring''' def _A ( _lowerCAmelCase = 2_000_000 ): """simple docstring""" __lowercase =[0 for i in range(n + 1 )] __lowercase =1 __lowercase =1 for i in range(2 , int(n**0.5 ) + 1 ): if primality_...
166
def A_ ( _UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase , ): SCREAMING_SNAKE_CASE_: Optional[int] = [redshift, radiation_density, matter_density, dark_energy] if any(p < 0 for p in parameters ): raise ValueError("Al...
13
0
# Lint as: python3 import dataclasses import re from dataclasses import dataclass from functools import total_ordering from typing import Optional, Union A : List[Any] = re.compile(R'''^(?P<major>\d+)''' R'''\.(?P<minor>\d+)''' R'''\.(?P<patch>\d+)$''') @total_ordering @dataclass...
274
from typing import Optional, Union import torch from torch import nn from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss from ...activations import ACTaFN from ...modeling_outputs import BaseModelOutputWithPoolingAndNoAttention, ImageClassifierOutputWithNoAttention from ...modeling_utils import PreTra...
13
0
'''simple docstring''' import warnings from diffusers import StableDiffusionImgaImgPipeline # noqa F401 warnings.warn( "The `image_to_image.py` script is outdated. Please use directly `from diffusers import" " StableDiffusionImg2ImgPipeline` instead." )
311
def A_ ( _UpperCAmelCase , _UpperCAmelCase = False ): if not isinstance(_UpperCAmelCase , _UpperCAmelCase ): SCREAMING_SNAKE_CASE_: str = f"Expected string as input, found {type(_UpperCAmelCase )}" raise ValueError(_UpperCAmelCase ) if not isi...
13
0
import argparse import gdown import numpy as np import torch from huggingface_hub import hf_hub_download from transformers import ( CLIPTokenizer, CLIPTokenizerFast, VideoMAEImageProcessor, XCLIPConfig, XCLIPModel, XCLIPProcessor, XCLIPTextConfig, XCLIPVisionConfig, ) ...
65
import os import tempfile import unittest from transformers import is_torch_available from transformers.testing_utils import require_torch if is_torch_available(): import torch from torch import nn from transformers import ( Adafactor, AdamW, get_constant_schedule, ...
13
0
# We ignore warnings about stepping the scheduler since we step it ourselves during gradient accumulation import warnings from .state import AcceleratorState, GradientState warnings.filterwarnings("ignore", category=UserWarning, module="torch.optim.lr_scheduler") class UpperCAmelCase__ : ...
226
import copy from dataclasses import dataclass, field from typing import ClassVar, Dict from ..features import Audio, Features, Value from .base import TaskTemplate @dataclass(frozen=UpperCAmelCase_ ) class __lowercase ( UpperCAmelCase_ ): """simple docstring""" _UpperCAmelCase : str ...
13
0
def lowercase_ (A : List[str] = 1_0_0_0 ): return sum(e for e in range(3 , _UpperCAmelCase ) if e % 3 == 0 or e % 5 == 0 ) if __name__ == "__main__": print(F"""{solution() = }""")
277
import unittest import numpy as np from transformers import is_flax_available from transformers.testing_utils import require_flax from ..test_modeling_flax_common import ids_tensor if is_flax_available(): import jax import jax.numpy as jnp from transformers.generation import ( FlaxFo...
13
0
import os import unittest from transformers.models.transfo_xl.tokenization_transfo_xl import VOCAB_FILES_NAMES, TransfoXLTokenizer from ...test_tokenization_common import TokenizerTesterMixin class __snake_case ( UpperCAmelCase_ , unittest.TestCase ): _a : List[Any]= TransfoXLTokenizer ...
20
import math import sys def A_ ( _UpperCAmelCase ): if number != int(_UpperCAmelCase ): raise ValueError("the value of input must be a natural number" ) if number < 0: raise ValueError("the value of input must not be a negative number" ) if number...
13
0
def lowerCAmelCase_ ( __lowerCAmelCase )-> str: '''simple docstring''' UpperCAmelCase : List[str] =[0] * len(_UpperCAmelCase ) UpperCAmelCase : List[Any] =[] UpperCAmelCase : str =[] UpperCAmelCase : List[str]...
348
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available lowerCAmelCase : Optional[int] = {"""configuration_wavlm""": ["""WAVLM_PRETRAINED_CONFIG_ARCHIVE_MAP""", """WavLMConfig"""]} try: if not is_torch_available(): rais...
13
0
"""simple docstring""" import copy from typing import TYPE_CHECKING, Any, Mapping, Optional, OrderedDict from packaging import version from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging from ..auto.configuration_auto import AutoConfig ...
191
import unittest from transformers import ( MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, TextaTextGenerationPipeline, pipeline, ) from transformers.testing_utils import is_pipeline_test, require_tf, require_torch from transformers.utils import is_torch_available ...
13
0
import os import re from shutil import copyfile from typing import List, Optional, Tuple from ...tokenization_utils import PreTrainedTokenizer from ...utils import logging _snake_case = logging.get_logger(__name__) _snake_case = { """vocab_file""": """vocab.txt""", """merg...
26
def A_ ( _UpperCAmelCase ): SCREAMING_SNAKE_CASE_: List[str] = [0] * len(_UpperCAmelCase ) SCREAMING_SNAKE_CASE_: List[Any] = [] SCREAMING_SNAKE_CASE_: str = [] SCREAMING_SNAKE_CASE_: List[str] = 0 for values in graph.values(): ...
13
0
import os import unicodedata from shutil import copyfile from typing import Any, Dict, List, Optional, Tuple import sentencepiece as spm from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...utils import SPIECE_UNDERLINE, logging _snake_case = logging.get_logger(...
283
import argparse import evaluate import torch from datasets import load_dataset from torch.optim import AdamW from torch.utils.data import DataLoader from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed from accelerate import Accelerator, DistributedTy...
13
0
'''simple docstring''' import logging import os from typing import List, Tuple import numpy as np import psutil import torch import torch.distributed as dist from transformers import RagRetriever lowerCamelCase = logging.getLogger(__name__) class _UpperCamelCase ( UpperCAmelCase_ ...
166
from collections.abc import Callable class __lowercase : """simple docstring""" def __init__( self : Tuple , lowerCAmelCase__ : Callable | None = None): # Stores actual heap items. SCREAMING_SNAKE_CASE_: list = [] # Stores indexes of each i...
13
0
from __future__ import annotations class A : '''simple docstring''' def __init__( self : Dict , __lowerCAmelCase : List[Any]=None ) -> Union[str, Any]: """simple docstring""" A__ = data A__ = None ...
274
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_sentencepiece_available, is_tf_available, is_tokenizers_available, is_torch_available, ) if is_sentencepiece_available(): from ..ta.tokenization_ta import TaT...
13
0
'''simple docstring''' import gc import unittest import numpy as np import torch from transformers import CLIPTextConfig, CLIPTextModelWithProjection, CLIPTokenizer from diffusers import HeunDiscreteScheduler, PriorTransformer, ShapEPipeline from diffusers.pipelines.shap_e import ShapERenderer from diffusers.utils...
311
import gc import unittest import numpy as np import torch from transformers import CLIPTextConfig, CLIPTextModelWithProjection, CLIPTokenizer from diffusers import HeunDiscreteScheduler, PriorTransformer, ShapEPipeline from diffusers.pipelines.shap_e import ShapERenderer from diffusers.utils import load_numpy, slow...
13
0
from argparse import ArgumentParser, Namespace from typing import Any, List, Optional from ..pipelines import Pipeline, get_supported_tasks, pipeline from ..utils import logging from . import BaseTransformersCLICommand try: from fastapi import Body, FastAPI, HTTPException from fastapi.routing i...
65
from ...utils import ( OptionalDependencyNotAvailable, is_torch_available, is_transformers_available, is_transformers_version, ) try: if not (is_transformers_available() and is_torch_available() and is_transformers_version(""">=""", """4.25.0""")): raise OptionalDependencyNotAvaila...
13
0
from math import sqrt def a ( _UpperCAmelCase : Optional[int] ): '''simple docstring''' assert isinstance(_UpperCAmelCase , _UpperCAmelCase ) and ( number >= 0 ), "'number' must been an int and positive" __UpperCAmelCase :...
226
class __lowercase : """simple docstring""" def __init__( self : List[Any] , lowerCAmelCase__ : Optional[int] , lowerCAmelCase__ : List[Any]): SCREAMING_SNAKE_CASE_: List[str] = name SCREAMING_SNAKE_CASE_: Union[str, Any] = val ...
13
0
def lowercase_ (A : List[str] , A : str , A : Dict , A : List[Any] , A : List[Any] , ): snake_case__ : Optional[int] = [redshift, radiation_density, matter_density, dark_energy] if any(p < 0 for p in parameters ): raise ValueError('All ...
277
import random import unittest import numpy as np import transformers from transformers import is_flax_available, is_torch_available from transformers.testing_utils import is_pt_flax_cross_test, require_flax if is_flax_available(): import os import jax.numpy as jnp from jax import jit fro...
13
0
def _snake_case( SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) -> Tuple: lowercase : Union[str, Any] = word.split() def justify(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ) -> str: lowercase : Dic...
20
import argparse import re import torch from CLAP import create_model from transformers import AutoFeatureExtractor, ClapConfig, ClapModel lowerCAmelCase : Union[str, Any] = { """text_branch""": """text_model""", """audio_branch""": """audio_model.audio_encoder""", """attn""": """attenti...
13
0
import tempfile import unittest import numpy as np import transformers from transformers import GPTaTokenizer, GPTJConfig, is_flax_available, is_torch_available from transformers.testing_utils import is_pt_flax_cross_test, require_flax, tooslow from ...generation.test_flax_utils import FlaxGenerationTesterMixin from...
348
import inspect import unittest from transformers import ViTConfig from transformers.testing_utils import ( require_accelerate, require_torch, require_torch_gpu, require_vision, slow, torch_device, ) from transformers.utils import cached_property, is_torch_available, is_vision_available from ...
13
0
"""simple docstring""" def __lowerCamelCase ( a_ : str ) -> int: return number & 1 == 0 if __name__ == "__main__": import doctest doctest.testmod()
191
import json import os from functools import lru_cache from typing import List, Optional, Tuple import regex as re from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...utils import logging lowerCAmelCase : Any = logging.get_logger(__name__) lowerCAmelCase : Tuple = ...
13
0
import inspect from typing import List, Optional, Tuple, Union import numpy as np import PIL import torch import torch.utils.checkpoint from ...models import UNetaDModel, VQModel from ...schedulers import ( DDIMScheduler, DPMSolverMultistepScheduler, EulerAncestralDiscreteScheduler, EulerDiscrete...
26
import unittest from transformers import is_torch_available from transformers.testing_utils import require_torch if is_torch_available(): import torch from transformers.generation import DisjunctiveConstraint @require_torch class __lowercase ( unittest.TestCase ): """simple docstring"""...
13
0
from pathlib import Path import numpy as np from PIL import Image def lowercase_( SCREAMING_SNAKE_CASE_ ): '''simple docstring''' lowerCamelCase : Optional[Any] = rgb[:, :, 0], rgb[:, :, 1], rgb[:, :, 2] return 0.2989 * r + 0.5870 * g + 0.1140 * b def lo...
283
import pickle import shutil import tempfile import unittest from transformers import SPIECE_UNDERLINE, XGLMTokenizer, XGLMTokenizerFast from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow from transformers.utils import cached_property from ...test_tokenization_commo...
13
0
'''simple docstring''' def _A ( _lowerCAmelCase ): """simple docstring""" if not all(char in '01' for char in bin_string ): raise ValueError('Non-binary value was passed to the function' ) if not bin_string: raise ValueError('Empty stri...
166
def A_ ( _UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase , _UpperCAmelCase , ): SCREAMING_SNAKE_CASE_: Optional[int] = [redshift, radiation_density, matter_density, dark_energy] if any(p < 0 for p in parameters ): raise ValueError("Al...
13
0
from __future__ import annotations def __lowerCamelCase ( __a :Union[str, Any] , __a :List[Any] , __a :Optional[int] , __a :List[str] , __a :Dict , ) -> Optional[Any]: """simple docstring""" A__ = len(_...
274
from typing import Optional, Union import torch from torch import nn from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss from ...activations import ACTaFN from ...modeling_outputs import BaseModelOutputWithPoolingAndNoAttention, ImageClassifierOutputWithNoAttention from ...modeling_utils import PreTra...
13
0
'''simple docstring''' from __future__ import annotations from collections import Counter from random import random class UpperCamelCase__ : """simple docstring""" def __init__( self ): '''simple docstring''' UpperCAmelCase : Optional[Any] = {} def ...
311
def A_ ( _UpperCAmelCase , _UpperCAmelCase = False ): if not isinstance(_UpperCAmelCase , _UpperCAmelCase ): SCREAMING_SNAKE_CASE_: str = f"Expected string as input, found {type(_UpperCAmelCase )}" raise ValueError(_UpperCAmelCase ) if not isi...
13
0
import unittest from transformers import ( MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, TextaTextGenerationPipeline, pipeline, ) from transformers.testing_utils import is_pipeline_test, require_tf, require_torch from transformers.utils import is_torch_avai...
65
import os import tempfile import unittest from transformers import is_torch_available from transformers.testing_utils import require_torch if is_torch_available(): import torch from torch import nn from transformers import ( Adafactor, AdamW, get_constant_schedule, ...
13
0
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available __A ={"""configuration_wavlm""": ["""WAVLM_PRETRAINED_CONFIG_ARCHIVE_MAP""", """WavLMConfig"""]} try: if not is_torch_available(): raise OptionalDependencyNotAvaila...
226
import copy from dataclasses import dataclass, field from typing import ClassVar, Dict from ..features import Audio, Features, Value from .base import TaskTemplate @dataclass(frozen=UpperCAmelCase_ ) class __lowercase ( UpperCAmelCase_ ): """simple docstring""" _UpperCAmelCase : str ...
13
0
import shutil import tempfile import unittest from transformers import ClapFeatureExtractor, ClapProcessor, RobertaTokenizer, RobertaTokenizerFast from transformers.testing_utils import require_sentencepiece, require_torchaudio from .test_feature_extraction_clap import floats_list @require_torchaudio @require...
277
import unittest import numpy as np from transformers import is_flax_available from transformers.testing_utils import require_flax from ..test_modeling_flax_common import ids_tensor if is_flax_available(): import jax import jax.numpy as jnp from transformers.generation import ( FlaxFo...
13
0
import json import os from functools import lru_cache from typing import List, Optional, Tuple import regex as re from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...utils import logging lowercase : Optional[int] = logging.get_logger(__name__) lowercase : Optional[i...
20
import math import sys def A_ ( _UpperCAmelCase ): if number != int(_UpperCAmelCase ): raise ValueError("the value of input must be a natural number" ) if number < 0: raise ValueError("the value of input must not be a negative number" ) if number...
13
0
import os import numpy import onnx def lowerCAmelCase_ ( __lowerCAmelCase , __lowerCAmelCase )-> Tuple: '''simple docstring''' UpperCAmelCase : Union[str, Any] =a.name UpperCAmelCase : int =b.name UpperCAmelCase : An...
348
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available lowerCAmelCase : Optional[int] = {"""configuration_wavlm""": ["""WAVLM_PRETRAINED_CONFIG_ARCHIVE_MAP""", """WavLMConfig"""]} try: if not is_torch_available(): rais...
13
0
"""simple docstring""" from typing import Optional, Union import torch from torch import nn from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss from ...activations import ACTaFN from ...modeling_outputs import BaseModelOutputWithPoolingAndNoAttention, ImageClassifierOutputWithNoAtten...
191
import unittest from transformers import ( MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, TextaTextGenerationPipeline, pipeline, ) from transformers.testing_utils import is_pipeline_test, require_tf, require_torch from transformers.utils import is_torch_available ...
13
0
_snake_case = [ 999, 800, 799, 600, 599, 500, 400, 399, 377, 355, 333, 311, 288, 266, 244, 222, 200, 199, 177, 155, 133, 111, 88, 66, 44, 22, 0, ] _snake_case = [ 999, ...
26
def A_ ( _UpperCAmelCase ): SCREAMING_SNAKE_CASE_: List[str] = [0] * len(_UpperCAmelCase ) SCREAMING_SNAKE_CASE_: List[Any] = [] SCREAMING_SNAKE_CASE_: str = [] SCREAMING_SNAKE_CASE_: List[str] = 0 for values in graph.values(): ...
13
0
import argparse import os import re # All paths are set with the intent you should run this script from the root of the repo with the command # python utils/check_dummies.py _snake_case = """src/diffusers""" # Matches is_xxx_available() _snake_case = re.compile(R'''is\...
283
import argparse import evaluate import torch from datasets import load_dataset from torch.optim import AdamW from torch.utils.data import DataLoader from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed from accelerate import Accelerator, DistributedTy...
13
0
'''simple docstring''' import json import os import shutil import tempfile from unittest import TestCase from transformers import BartTokenizer, BartTokenizerFast, DPRQuestionEncoderTokenizer, DPRQuestionEncoderTokenizerFast from transformers.models.bart.configuration_bart import BartConfig from transformers.mo...
166
from collections.abc import Callable class __lowercase : """simple docstring""" def __init__( self : Tuple , lowerCAmelCase__ : Callable | None = None): # Stores actual heap items. SCREAMING_SNAKE_CASE_: list = [] # Stores indexes of each i...
13
0
def __lowerCamelCase ( ) -> Dict: """simple docstring""" A__ = 0 for i in range(1 , 1_0_0_1 ): total += i**i return str(_UpperCAmelCase )[-1_0:] if __name__ == "__main__": print(solution())
274
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_sentencepiece_available, is_tf_available, is_tokenizers_available, is_torch_available, ) if is_sentencepiece_available(): from ..ta.tokenization_ta import TaT...
13
0
'''simple docstring''' from __future__ import annotations def lowercase ( __magic_name__ ): '''simple docstring''' UpperCAmelCase : int = [True] * limit UpperCAmelCase : int = False UpperCAmelCase : Dict = False ...
311
import gc import unittest import numpy as np import torch from transformers import CLIPTextConfig, CLIPTextModelWithProjection, CLIPTokenizer from diffusers import HeunDiscreteScheduler, PriorTransformer, ShapEPipeline from diffusers.pipelines.shap_e import ShapERenderer from diffusers.utils import load_numpy, slow...
13
0
import pprint import requests UpperCamelCase__ = """https://zenquotes.io/api""" def lowerCAmelCase_ ( ) -> List[Any]: '''simple docstring''' return requests.get(API_ENDPOINT_URL + "/today" ).json() def lowerCAmelCase_ ( ) ...
65
from ...utils import ( OptionalDependencyNotAvailable, is_torch_available, is_transformers_available, is_transformers_version, ) try: if not (is_transformers_available() and is_torch_available() and is_transformers_version(""">=""", """4.25.0""")): raise OptionalDependencyNotAvaila...
13
0
import random import unittest import torch from diffusers import IFInpaintingPipeline from diffusers.utils import floats_tensor from diffusers.utils.import_utils import is_xformers_available from diffusers.utils.testing_utils import skip_mps, torch_device from ..pipeline_params import ( TEXT_G...
226
class __lowercase : """simple docstring""" def __init__( self : List[Any] , lowerCAmelCase__ : Optional[int] , lowerCAmelCase__ : List[Any]): SCREAMING_SNAKE_CASE_: List[str] = name SCREAMING_SNAKE_CASE_: Union[str, Any] = val ...
13
0
import fire from transformers import AutoConfig, AutoModelForSeqaSeqLM, AutoTokenizer def lowercase_ (A : List[Any] , A : Tuple , **A : Tuple ): snake_case__ : Any = AutoConfig.from_pretrained(_UpperCAmelCase , **_UpperCAmelCase ) snake_case__ : Di...
277
import random import unittest import numpy as np import transformers from transformers import is_flax_available, is_torch_available from transformers.testing_utils import is_pt_flax_cross_test, require_flax if is_flax_available(): import os import jax.numpy as jnp from jax import jit fro...
13
0
import random import unittest import torch from diffusers import IFInpaintingSuperResolutionPipeline from diffusers.utils import floats_tensor from diffusers.utils.import_utils import is_xformers_available from diffusers.utils.testing_utils import skip_mps, torch_device from ..pipeline_params import ( TEXT_GUID...
20
import argparse import re import torch from CLAP import create_model from transformers import AutoFeatureExtractor, ClapConfig, ClapModel lowerCAmelCase : Union[str, Any] = { """text_branch""": """text_model""", """audio_branch""": """audio_model.audio_encoder""", """attn""": """attenti...
13
0
from collections import OrderedDict from typing import Any, Mapping, Optional from ... import PreTrainedTokenizer, TensorType, is_torch_available from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfigWithPast from ...utils import logging __snake_case = logging.get_logger(__name__) ...
348
import inspect import unittest from transformers import ViTConfig from transformers.testing_utils import ( require_accelerate, require_torch, require_torch_gpu, require_vision, slow, torch_device, ) from transformers.utils import cached_property, is_torch_available, is_vision_available from ...
13
0
"""simple docstring""" import math import os from copy import deepcopy import datasets import evaluate import torch import transformers from datasets import load_dataset from torch.utils.data import DataLoader from transformers import AutoModelForSequenceClassification, AutoTokenizer from acc...
191
import json import os from functools import lru_cache from typing import List, Optional, Tuple import regex as re from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...utils import logging lowerCAmelCase : Any = logging.get_logger(__name__) lowerCAmelCase : Tuple = ...
13
0
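To work with rows shaped like the preview above, the dataset can be loaded with the Hugging Face `datasets` library. This is a minimal sketch only: the dataset's repository ID is not given in this preview, so the name below is a hypothetical placeholder, and the meaning of `label` is not stated here.

```python
# Minimal sketch: load and inspect one row of a dataset with the columns listed above.
# "user/python-codestyles" is a hypothetical placeholder; the real repository ID is
# not given in this preview.
from datasets import load_dataset

dataset = load_dataset("user/python-codestyles", split="train")  # hypothetical ID

example = dataset[0]
# A code snippet and a style-context snippet, each with a codestyle class id,
# plus a binary label whose exact semantics are not stated in the preview.
print(example["code"][:200])
print(example["code_codestyle"], example["style_context_codestyle"], example["label"])
```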