Dataset schema (as shown in the preview; string ranges are character lengths):

  Column                    Type     Min    Max
  code                      string   82     54.1k
  code_codestyle            int64    0      699
  style_context             string   111    35.6k
  style_context_codestyle   int64    0      699
  label                     int64    0      1
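The preview rows below are flattened: each record appears as five consecutive values in column order (code, code_codestyle, style_context, style_context_codestyle, label), with the two string fields truncated by the viewer. The snippets look like Python from open-source ML repositories with systematically renamed identifiers, which fits a code-style classification task. As a minimal sketch (assuming the data is available as parquet files; the path below is a placeholder, since the preview does not name the dataset repository), the records could be loaded and inspected with the `datasets` library:

```python
# Minimal sketch: load and inspect records with this schema.
# "data/*.parquet" is a placeholder path; the preview does not name the
# actual dataset repository, so adjust it to wherever the files live.
from datasets import load_dataset

ds = load_dataset("parquet", data_files="data/*.parquet", split="train")

for row in ds.select(range(3)):
    # Each record pairs a code snippet (with a style id) against a style
    # context (with its own style id) and a binary label.
    print(
        f"code_codestyle={row['code_codestyle']} "
        f"style_context_codestyle={row['style_context_codestyle']} "
        f"label={row['label']}"
    )
    print(row["code"][:80].replace("\n", " "), "...")
```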
"""simple docstring""" import numpy as np from transformers import BatchFeature from transformers.testing_utils import require_tf, require_torch from .test_feature_extraction_common import FeatureExtractionSavingTestMixin class __a ( __UpperCAmelCase ): # to overwri...
552
"""simple docstring""" import contextlib import copy import random from typing import Any, Dict, Iterable, Optional, Union import numpy as np import torch from .utils import deprecate, is_transformers_available if is_transformers_available(): import transformers def lowerCamelCase (a_ :int)...
677
0
'''simple docstring''' import unittest from pathlib import Path from tempfile import NamedTemporaryFile, TemporaryDirectory from transformers import BertConfig, BertTokenizerFast, FeatureExtractionPipeline from transformers.convert_graph_to_onnx import ( convert, ensure_valid_input, generate_identi...
418
"""simple docstring""" import argparse import collections import numpy as np import torch from flax import traverse_util from tax import checkpoints from transformers import MTaConfig, UMTaEncoderModel, UMTaForConditionalGeneration from transformers.utils import logging logging.set_verbosity_info() def ...
677
0
'''simple docstring''' import json import os import unittest from transformers import AutoTokenizer, GPTaTokenizer, GPTaTokenizerFast from transformers.models.gpta.tokenization_gpta import VOCAB_FILES_NAMES from transformers.testing_utils import require_tokenizers from ...test_tokenization_common import Tokeni...
120
"""simple docstring""" from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_tf_available, is_tokenizers_available, is_torch_available, ) UpperCAmelCase = { '''configuration_blenderbot''': [ '''BLE...
677
0
from typing import Any, Dict, Optional import torch import torch.nn.functional as F from torch import nn from ..utils import maybe_allow_in_graph from .activations import get_activation from .attention_processor import Attention from .embeddings import CombinedTimestepLabelEmbeddings @maybe_allow_in_graph clas...
0
"""simple docstring""" from ...configuration_utils import PretrainedConfig from ...utils import logging UpperCAmelCase = logging.get_logger(__name__) UpperCAmelCase = { '''naver-clova-ix/donut-base''': '''https://huggingface.co/naver-clova-ix/donut-base/resolve/main/config.json''', # ...
677
0
from ..models.auto import AutoModelForSeqaSeqLM, AutoTokenizer from .base import PipelineTool class __A ( __UpperCAmelCase ): '''simple docstring''' a_ = "philschmid/bart-large-cnn-samsum" a_ = ( "This is a tool that summarizes an English text. It takes...
424
"""simple docstring""" import argparse import os import shutil from pathlib import Path import onnx import torch from packaging import version from torch.onnx import export from diffusers import OnnxRuntimeModel, OnnxStableDiffusionPipeline, StableDiffusionPipeline UpperCAmelCase = version.parse(vers...
677
0
'''simple docstring''' def UpperCamelCase_( snake_case : list ): '''simple docstring''' def merge(snake_case : list , snake_case : list ) -> list: def _merge(): while left and right: yield (left ...
400
"""simple docstring""" import itertools import json import linecache import os import pickle import re import socket import string from collections import Counter from logging import getLogger from pathlib import Path from typing import Callable, Dict, Iterable, List import git import torch from torch.utils.data...
677
0
'''simple docstring''' import os import time import numpy as np import onnxruntime as ort lowercase__ ='1' lowercase__ ='0' lowercase__ ='1' lowercase__ =ort.SessionOptions() lowercase__ =ort.GraphOptimizationLevel.ORT_DISABLE_ALL print('Create inference session......
263
"""simple docstring""" def lowerCamelCase (a_ :Tuple , a_ :int , a_ :Tuple , a_ :List[Any]) -> str: if height >= 1: move_tower(height - 1 , a_ , a_ , a_) move_disk(a_ , a_) move_tower(...
677
0
"""simple docstring""" def lowerCamelCase__ ( __snake_case ) -> str: """simple docstring""" return " ".join( ''''''.join(word[::-1] ) if len(a_ ) > 4 else word for word in sentence.split() ) if __name__ == "__main__": impor...
19
"""simple docstring""" from sklearn.metrics import mean_squared_error import datasets UpperCAmelCase = '''\ @article{scikit-learn, title={Scikit-learn: Machine Learning in {P}ython}, author={Pedregosa, F. and Varoquaux, G. and Gramfort, A. and Michel, V. and Thirion, B. and Grisel, O. and B...
677
0
"""simple docstring""" from ..utils import DummyObject, requires_backends class UpperCAmelCase (metaclass=__UpperCAmelCase ): """simple docstring""" _UpperCAmelCase :str = ["torch", "torchsde"] def __init__( self , *_UpperCAmelCase , **_Uppe...
586
"""simple docstring""" from abc import ABC, abstractmethod from argparse import ArgumentParser class __magic_name__ ( __UpperCAmelCase ): @staticmethod @abstractmethod def __snake_case ( snake_case__ : ArgumentParser ): '''simple docstring''' ...
677
0
def UpperCamelCase ( __lowerCamelCase : list[int] , __lowerCamelCase : list[int] ): snake_case : Optional[int] = len(a_ ) print("The following activities are selected:" ) # The first activity is always selected snake_case ...
204
"""simple docstring""" from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_torch_available, ) UpperCAmelCase = { '''configuration_encodec''': [ '''ENCODEC_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''EncodecConfig''', ], ...
677
0
import os import shutil from pathlib import Path from typing import Optional, Union import numpy as np from huggingface_hub import hf_hub_download from ..utils import ONNX_EXTERNAL_WEIGHTS_NAME, ONNX_WEIGHTS_NAME, is_onnx_available, logging if is_onnx_available(): import onnxruntime as ort UpperCame...
45
"""simple docstring""" import inspect import unittest from transformers import RegNetConfig, is_flax_available from transformers.testing_utils import require_flax, slow from transformers.utils import cached_property, is_vision_available from ...test_configuration_common import ConfigTester from ...test_modeling...
677
0
"""simple docstring""" import argparse import os import re # All paths are set with the intent you should run this script from the root of the repo with the command # python utils/check_dummies.py lowercase_ = 'src/diffusers' # Matches is_xxx_available() lowercase_ = re...
552
"""simple docstring""" UpperCAmelCase = { '''A''': ['''B''', '''C''', '''E'''], '''B''': ['''A''', '''D''', '''E'''], '''C''': ['''A''', '''F''', '''G'''], '''D''': ['''B'''], '''E''': ['''A''', '''B''', '''D'''], '''F''': ['''C'''], '''G''': ['''C'''], } def lowerCam...
677
0
'''simple docstring''' import os import tempfile import unittest from transformers import FlaubertConfig, is_torch_available from transformers.testing_utils import require_torch, require_torch_gpu, slow, torch_device from ...test_configuration_common import ConfigTester from ...test_modeling_common import Mod...
418
"""simple docstring""" import logging import os from dataclasses import dataclass, field from functools import partial from pathlib import Path from tempfile import TemporaryDirectory from typing import List, Optional import faiss import torch from datasets import Features, Sequence, Value, load_dataset from tr...
677
0
'''simple docstring''' import unittest from transformers import TrOCRConfig from transformers.testing_utils import is_torch_available, require_torch, torch_device from ...generation.test_utils import GenerationTesterMixin from ...test_configuration_common import ConfigTester from ...test_modeling_common import...
120
"""simple docstring""" from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_torch_available UpperCAmelCase = { '''configuration_longt5''': ['''LONGT5_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''LongT5Config''', '''LongT5OnnxConfig'''], }...
677
0
import warnings from ...utils import logging from .image_processing_flava import FlavaImageProcessor SCREAMING_SNAKE_CASE__ : List[str] = logging.get_logger(__name__) class lowerCamelCase_ ( __UpperCAmelCase ): def __init__( self , *__lowerCAmelCase , ...
0
"""simple docstring""" import uuid from typing import Any, Dict, List, Optional, Union from ..utils import add_end_docstrings, is_tf_available, is_torch_available, logging from .base import PIPELINE_INIT_ARGS, Pipeline if is_tf_available(): import tensorflow as tf if is_torch_available(): import torch Upp...
677
0
import argparse import gc import json import os import shutil import warnings import torch from transformers import LlamaConfig, LlamaForCausalLM, LlamaTokenizer try: from transformers import LlamaTokenizerFast except ImportError as e: warnings.warn(e) warnings.warn( "The converted token...
424
"""simple docstring""" def lowerCamelCase (a_ :int = 100) -> int: lowercase :Union[str, Any] = set() lowercase :List[Any] = 0 lowercase :Dict = n + 1 # maximum limit for a in range(2 , a_): for b in ...
677
0
'''simple docstring''' import unittest from typing import Tuple import torch from diffusers.utils import floats_tensor, randn_tensor, torch_all_close, torch_device from diffusers.utils.testing_utils import require_torch @require_torch class _snake_case : @property def lowerCAmelCas...
400
"""simple docstring""" from typing import Callable, Optional, Union from ...configuration_utils import PretrainedConfig from ...utils import logging UpperCAmelCase = logging.get_logger(__name__) UpperCAmelCase = { '''microsoft/xprophetnet-large-wiki100-cased''': ( '''https://hugg...
677
0
'''simple docstring''' def UpperCamelCase_ ( A__ , A__ , A__ , A__ , A__ ): if index == number_of_items: return 0 a_ = 0 a_ = 0 a_ = knapsack(a_ , a_ , a_ , a_ , index + 1 ) if weights[index] <= max_...
263
"""simple docstring""" from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_tensorflow_text_available, is_tf_available, is_tokenizers_available, is_torch_available, ) UpperCAmelCase = { '''configurati...
677
0
"""simple docstring""" import os from datetime import datetime as dt from github import Github _a = [ """good first issue""", """good second issue""", """good difficult issue""", """enhancement""", """new pipeline/model""", """new scheduler""", """wip...
19
"""simple docstring""" from typing import List, Optional, Union from ...image_utils import ImageInput from ...processing_utils import ProcessorMixin from ...tokenization_utils_base import BatchEncoding, PaddingStrategy, PreTokenizedInput, TextInput, TruncationStrategy from ...utils import TensorType class ...
677
0
"""simple docstring""" from __future__ import annotations import inspect import unittest import numpy as np from transformers import ResNetConfig from transformers.testing_utils import require_tf, require_vision, slow from transformers.utils import cached_property, is_tf_available, is_vision_available from .....
586
"""simple docstring""" import subprocess import sys from transformers import BertConfig, BertModel, BertTokenizer, pipeline from transformers.testing_utils import TestCasePlus, require_torch class __magic_name__ ( __UpperCAmelCase ): @require_torch def __snake_case ( self ...
677
0
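Note the split in the preview: the twenty rows above all carry label 0 with mismatched style ids (e.g., 552 vs. 677), while every row below carries label 1 with both ids equal to 678; a consistency check for this pattern is sketched after the last row.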
from __future__ import annotations import os from collections.abc import Mapping lowerCAmelCase_ = tuple[int, int] class _lowerCAmelCase : def __init__( self , __UpperCAmelCase , __UpperCAmelCase ): lowerCAmelCase__ : set[int] = vertices low...
678
import unittest from transformers import ( MODEL_FOR_CAUSAL_LM_MAPPING, TF_MODEL_FOR_CAUSAL_LM_MAPPING, TextGenerationPipeline, logging, pipeline, ) from transformers.testing_utils import ( CaptureLogger, is_pipeline_test, require_accelerate, require_tf, require_torch, ...
678
1
# # This a `torch.distributed` diagnostics script that checks that all GPUs in the cluster (one or # many nodes) can talk to each other via nccl and allocate gpu memory. # # To run first adjust the number of processes and nodes: # # python -m torch.distributed.run --nproc_per_node 2 --nnodes 1 torch-distributed-gp...
678
def __lowerCAmelCase ( UpperCamelCase ) -> str: return "".join([hex(UpperCamelCase )[2:].zfill(2 ).upper() for byte in list(UpperCamelCase )] ) def __lowerCAmelCase ( UpperCamelCase ) -> bytes: # Check data validity, following RFC3548 # https://www.ietf.or...
678
1
import math from typing import Any, Callable, List, Optional, Tuple, Union import numpy as np import torch from ...models import TaFilmDecoder from ...schedulers import DDPMScheduler from ...utils import is_onnx_available, logging, randn_tensor if is_onnx_available(): from ..onnx_utils import OnnxRuntimeM...
678
import torch from diffusers import DPMSolverSDEScheduler from diffusers.utils import torch_device from diffusers.utils.testing_utils import require_torchsde from .test_schedulers import SchedulerCommonTest @require_torchsde class _lowerCAmelCase ( _lowercase ): A__ = (DPMSolverSDESchedul...
678
1
import datasets lowerCAmelCase_ = """\ @InProceedings{conneau2018xnli, author = \"Conneau, Alexis and Rinott, Ruty and Lample, Guillaume and Williams, Adina and Bowman, Samuel R. and Schwenk, Holger ...
678
import time import unittest from transformers import is_torch_available from transformers.testing_utils import require_torch, torch_device from ..test_modeling_common import ids_tensor if is_torch_available(): import torch from transformers.generation import ( MaxLengthCriteria, Ma...
678
1
import argparse import json from pathlib import Path import torch import torchaudio from datasets import load_dataset from huggingface_hub import hf_hub_download from transformers import ASTConfig, ASTFeatureExtractor, ASTForAudioClassification from transformers.utils import logging logging.set_verbosity_info(...
678
from functools import reduce lowerCAmelCase_ = ( """73167176531330624919225119674426574742355349194934""" """96983520312774506326239578318016984801869478851843""" """85861560789112949495459501737958331952853208805511""" """12540698747158523863050715693290963295227443043557""" """6...
678
1
from __future__ import annotations from fractions import Fraction from math import gcd, sqrt def __lowerCAmelCase ( UpperCamelCase ) -> bool: lowerCAmelCase__ : int = int(number**0.5 ) return number == sq * sq def __lowerCAmelCase ( UpperCamelCase , ...
678
import re import jax.numpy as jnp from flax.traverse_util import flatten_dict, unflatten_dict from jax.random import PRNGKey from ..utils import logging lowerCAmelCase_ = logging.get_logger(__name__) def __lowerCAmelCase ( UpperCamelCase ) -> List[str]: lowerCAmelCase__ ...
678
1
import inspect import unittest from transformers import ViTMSNConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import ConfigTester from ...test_...
678
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available lowerCAmelCase_ = { """configuration_nezha""": ["""NEZHA_PRETRAINED_CONFIG_ARCHIVE_MAP""", """NezhaConfig"""], } try: if not is_torch_available():...
678
1
from __future__ import annotations from scipy.special import comb # type: ignore class _lowerCAmelCase : def __init__( self , __UpperCAmelCase ): lowerCAmelCase__ : Optional[Any] = list_of_points # Degree determines the flexibility of the curve. # Degr...
678
import tempfile import unittest import numpy as np from huggingface_hub import HfFolder, delete_repo from requests.exceptions import HTTPError from transformers import BertConfig, is_flax_available from transformers.testing_utils import TOKEN, USER, is_staging_test, require_flax if is_flax_available(): im...
678
1
import os import unittest from tempfile import TemporaryDirectory import torch import torch.nn as nn from accelerate.utils import ( OffloadedWeightsLoader, extract_submodules_state_dict, load_offloaded_weight, offload_state_dict, offload_weight, ) class _lowerCAmelCase ( nn.Module ...
678
from random import randint from tempfile import TemporaryFile import numpy as np def __lowerCAmelCase ( UpperCamelCase , UpperCamelCase , UpperCamelCase ) -> Any: lowerCAmelCase__ : Optional[Any] = 0 if start < end: lowerCAmelCase__ : Union[str, Any] ...
678
1
import shutil import tempfile import unittest import numpy as np import pytest from transformers.testing_utils import require_vision from transformers.utils import is_vision_available if is_vision_available(): from PIL import Image from transformers import AutoProcessor, BertTokenizer, BlipImageProc...
678
import pyarrow.parquet as pq import pytest from datasets import Audio, Dataset, DatasetDict, Features, NamedSplit, Sequence, Value, config from datasets.features.image import Image from datasets.io.parquet import ParquetDatasetReader, ParquetDatasetWriter, get_writer_batch_size from ..utils import assert_arrow_m...
678
1
from __future__ import annotations import math import random from collections.abc import Collection from typing import overload class _lowerCAmelCase : def __init__( self , __UpperCAmelCase = None ): if components is None: lowerCAmelCase__ : int = [] ...
678
from ...configuration_utils import PretrainedConfig from ...utils import logging from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices lowerCAmelCase_ = logging.get_logger(__name__) lowerCAmelCase_ = { """microsoft/focalnet-tiny""": """...
678
1
import unittest from transformers import is_vision_available from transformers.pipelines import pipeline from transformers.testing_utils import ( is_pipeline_test, nested_simplify, require_tf, require_torch, require_vision, slow, ) from .test_pipelines_common import ANY if is_vision_ava...
678
from scipy.stats import pearsonr import datasets lowerCAmelCase_ = """ Pearson correlation coefficient and p-value for testing non-correlation. The Pearson correlation coefficient measures the linear relationship between two datasets. The calculation of the p-value relies on the assumption that eac...
678
1
import copy import os from collections import OrderedDict from typing import TYPE_CHECKING, Any, Dict, Mapping, Optional, Union if TYPE_CHECKING: from ...processing_utils import ProcessorMixin from ...utils import TensorType from ...configuration_utils import PretrainedConfig from ...onnx import OnnxC...
678
from manim import * class _lowerCAmelCase ( _lowercase ): def __magic_name__( self ): lowerCAmelCase__ : Tuple = Rectangle(height=0.5 , width=0.5 ) lowerCAmelCase__ : Dict = Rectangle(height=0.25 , width=0.25 ) lowerCAmelC...
678
1
from manim import * class _lowerCAmelCase ( _lowercase ): def __magic_name__( self ): lowerCAmelCase__ : Tuple = Rectangle(height=0.5 , width=0.5 ) lowerCAmelCase__ : Dict = Rectangle(height=0.25 , width=0.25 ) lowerCAmelC...
678
import collections import os import re from pathlib import Path lowerCAmelCase_ = """src/transformers""" # Matches is_xxx_available() lowerCAmelCase_ = re.compile(R"""is\_([a-z_]*)_available()""") # Catches a one-line _import_struct = {xxx} lowerCAmelCase_ = re.compile(R"...
678
1
from collections import OrderedDict from typing import Mapping from packaging import version from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging from ..auto import CONFIG_MAPPING lowerCAmelCase_ = logging.get_logger(__name__) lowerCAmelC...
678
import math import os import unittest from transformers import MegatronBertConfig, is_torch_available from transformers.models.auto import get_values from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device from ...test_configuration_common import Config...
678
1
def __lowerCAmelCase ( UpperCamelCase = 1000 ) -> int: lowerCAmelCase__ , lowerCAmelCase__ : Dict = 1, 1 lowerCAmelCase__ : str = 2 while True: lowerCAmelCase__ : Tuple = 0 lowerCAmelCase__ : Tuple = fa + fa lower...
678
import warnings from collections import OrderedDict from typing import Any, Mapping, Optional from ... import PreTrainedTokenizer from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig, OnnxConfigWithPast, OnnxSeqaSeqConfigWithPast from ...onnx.utils import compute_effective_axis_dimen...
678
1
import importlib import json import os from collections import OrderedDict from typing import Dict, Optional, Union # Build the list of all image processors from ...configuration_utils import PretrainedConfig from ...dynamic_module_utils import get_class_from_dynamic_module, resolve_trust_remote_code from ...imag...
678
import functools import operator from ...configuration_utils import PretrainedConfig from ...utils import logging lowerCAmelCase_ = logging.get_logger(__name__) lowerCAmelCase_ = { """asapp/sew-d-tiny-100k""": """https://huggingface.co/asapp/sew-d-tiny-100k/resolve/main/config.jso...
678
1
lowerCAmelCase_ = 8.31_44_62 # Unit - J mol-1 K-1 def __lowerCAmelCase ( UpperCamelCase , UpperCamelCase , UpperCamelCase ) -> float: if moles < 0 or kelvin < 0 or volume < 0: raise ValueError('''Invalid inputs. Enter positive value.''' ) return moles *...
678
import unittest from transformers import PegasusTokenizer, PegasusTokenizerFast from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, require_torch, slow from transformers.utils import cached_property from ...test_tokenization_common import TokenizerTesterMixin lowerC...
678
1
import inspect import unittest from transformers import MobileNetVaConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import ConfigTester from ......
678
from ...configuration_utils import PretrainedConfig from ...utils import logging lowerCAmelCase_ = logging.get_logger(__name__) lowerCAmelCase_ = { """naver-clova-ix/donut-base""": """https://huggingface.co/naver-clova-ix/donut-base/resolve/main/config.json""", # See all Donut ...
678
1
import inspect import os import unittest from dataclasses import dataclass import torch from accelerate import Accelerator, DistributedDataParallelKwargs, GradScalerKwargs from accelerate.state import AcceleratorState from accelerate.test_utils import execute_subprocess_async, require_cuda, require_multi_gpu fro...
678
lowerCAmelCase_ = { """meter""": """m""", """kilometer""": """km""", """megametre""": """Mm""", """gigametre""": """Gm""", """terametre""": """Tm""", """petametre""": """Pm""", """exametre""": """Em""", """zettametre""": """Zm""", """yottametre""": """Ym""", } # Exp...
678
1
import argparse from transformers import BigBirdConfig, BigBirdForPreTraining, BigBirdForQuestionAnswering, load_tf_weights_in_big_bird from transformers.utils import logging logging.set_verbosity_info() def __lowerCAmelCase ( UpperCamelCase , UpperCamelCase , UpperCamelCase ,...
678
import unittest from transformers import is_vision_available from transformers.pipelines import pipeline from transformers.testing_utils import ( is_pipeline_test, nested_simplify, require_tf, require_torch, require_vision, slow, ) from .test_pipelines_common import ANY if is_vision_ava...
678
1
import contextlib import csv import json import os import sqlitea import tarfile import textwrap import zipfile import pyarrow as pa import pyarrow.parquet as pq import pytest import datasets import datasets.config @pytest.fixture(scope='''session''' ) def __lowerCAmelCase ( ) -> Optional...
678
lowerCAmelCase_ = [ [0, 16, 13, 0, 0, 0], [0, 0, 10, 12, 0, 0], [0, 4, 0, 0, 14, 0], [0, 0, 9, 0, 0, 20], [0, 0, 0, 7, 0, 4], [0, 0, 0, 0, 0, 0], ] def __lowerCAmelCase ( UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase ) ...
678
1
import argparse import json from pathlib import Path import requests import torch from huggingface_hub import cached_download, hf_hub_url from PIL import Image from transformers import DPTConfig, DPTForDepthEstimation, DPTForSemanticSegmentation, DPTImageProcessor from transformers.utils import logging logging...
678
import unittest from transformers import ( MODEL_FOR_CAUSAL_LM_MAPPING, TF_MODEL_FOR_CAUSAL_LM_MAPPING, TextGenerationPipeline, logging, pipeline, ) from transformers.testing_utils import ( CaptureLogger, is_pipeline_test, require_accelerate, require_tf, require_torch, ...
678
1
from __future__ import annotations import inspect import unittest from typing import List, Tuple from transformers import RegNetConfig from transformers.testing_utils import require_tf, require_vision, slow from transformers.utils import cached_property, is_tf_available, is_vision_available from ...test_configu...
678
def __lowerCAmelCase ( UpperCamelCase ) -> str: return "".join([hex(UpperCamelCase )[2:].zfill(2 ).upper() for byte in list(UpperCamelCase )] ) def __lowerCAmelCase ( UpperCamelCase ) -> bytes: # Check data validity, following RFC3548 # https://www.ietf.or...
678
1
import collections import os import re from pathlib import Path lowerCAmelCase_ = """src/transformers""" # Matches is_xxx_available() lowerCAmelCase_ = re.compile(R"""is\_([a-z_]*)_available()""") # Catches a one-line _import_struct = {xxx} lowerCAmelCase_ = re.compile(R"...
678
import torch from diffusers import DPMSolverSDEScheduler from diffusers.utils import torch_device from diffusers.utils.testing_utils import require_torchsde from .test_schedulers import SchedulerCommonTest @require_torchsde class _lowerCAmelCase ( _lowercase ): A__ = (DPMSolverSDESchedul...
678
1
print((lambda quine: quine % quine)("""print((lambda quine: quine %% quine)(%r))"""))
678
import time import unittest from transformers import is_torch_available from transformers.testing_utils import require_torch, torch_device from ..test_modeling_common import ids_tensor if is_torch_available(): import torch from transformers.generation import ( MaxLengthCriteria, Ma...
678
1
def __lowerCAmelCase ( UpperCamelCase = 10 , UpperCamelCase = 22 ) -> int: lowerCAmelCase__ : Tuple = range(1 , UpperCamelCase ) lowerCAmelCase__ : Tuple = range(1 , UpperCamelCase ) return sum( 1 for power in powers for base in ba...
678
from functools import reduce lowerCAmelCase_ = ( """73167176531330624919225119674426574742355349194934""" """96983520312774506326239578318016984801869478851843""" """85861560789112949495459501737958331952853208805511""" """12540698747158523863050715693290963295227443043557""" """6...
678
1
import torch from diffusers import DPMSolverSDEScheduler from diffusers.utils import torch_device from diffusers.utils.testing_utils import require_torchsde from .test_schedulers import SchedulerCommonTest @require_torchsde class _lowerCAmelCase ( _lowercase ): A__ = (DPMSolverSDESchedul...
678
import re import jax.numpy as jnp from flax.traverse_util import flatten_dict, unflatten_dict from jax.random import PRNGKey from ..utils import logging lowerCAmelCase_ = logging.get_logger(__name__) def __lowerCAmelCase ( UpperCamelCase ) -> List[str]: lowerCAmelCase__ ...
678
1
import argparse import math import os import torch from neural_compressor.utils.pytorch import load from PIL import Image from transformers import CLIPTextModel, CLIPTokenizer from diffusers import AutoencoderKL, StableDiffusionPipeline, UNetaDConditionModel def __lowerCAmelCase ( ) -> Opti...
678
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available lowerCAmelCase_ = { """configuration_nezha""": ["""NEZHA_PRETRAINED_CONFIG_ARCHIVE_MAP""", """NezhaConfig"""], } try: if not is_torch_available():...
678
1
import os import sys import unittest lowerCAmelCase_ = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__)))) sys.path.append(os.path.join(git_repo_path, """utils""")) import check_dummies # noqa: E402 from check_dummies import create_dummy_files, create_dummy_object, find_ba...
678
import tempfile import unittest import numpy as np from huggingface_hub import HfFolder, delete_repo from requests.exceptions import HTTPError from transformers import BertConfig, is_flax_available from transformers.testing_utils import TOKEN, USER, is_staging_test, require_flax if is_flax_available(): im...
678
1
import argparse import json import os import re import shutil import torch from transformers import BioGptConfig, BioGptForCausalLM from transformers.models.biogpt.tokenization_biogpt import VOCAB_FILES_NAMES from transformers.tokenization_utils_base import TOKENIZER_CONFIG_FILE from transformers.utils import WE...
678
from random import randint from tempfile import TemporaryFile import numpy as np def __lowerCAmelCase ( UpperCamelCase , UpperCamelCase , UpperCamelCase ) -> Any: lowerCAmelCase__ : Optional[Any] = 0 if start < end: lowerCAmelCase__ : Union[str, Any] ...
678
1
lowerCAmelCase_ = [ [0, 16, 13, 0, 0, 0], [0, 0, 10, 12, 0, 0], [0, 4, 0, 0, 14, 0], [0, 0, 9, 0, 0, 20], [0, 0, 0, 7, 0, 4], [0, 0, 0, 0, 0, 0], ] def __lowerCAmelCase ( UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase ) ...
678
import pyarrow.parquet as pq import pytest from datasets import Audio, Dataset, DatasetDict, Features, NamedSplit, Sequence, Value, config from datasets.features.image import Image from datasets.io.parquet import ParquetDatasetReader, ParquetDatasetWriter, get_writer_batch_size from ..utils import assert_arrow_m...
678
1
import json import os import shutil import tempfile import unittest import numpy as np import pytest from transformers import BertTokenizer, BertTokenizerFast from transformers.models.bert.tokenization_bert import VOCAB_FILES_NAMES from transformers.testing_utils import require_vision from transformers.utils imp...
678
from ...configuration_utils import PretrainedConfig from ...utils import logging from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices lowerCAmelCase_ = logging.get_logger(__name__) lowerCAmelCase_ = { """microsoft/focalnet-tiny""": """...
678
1
import unittest import numpy as np from transformers.testing_utils import require_torch, require_vision from transformers.utils import is_torch_available, is_vision_available from ...test_image_processing_common import ImageProcessingSavingTestMixin, prepare_video_inputs if is_torch_available(): import t...
678
from scipy.stats import pearsonr import datasets lowerCAmelCase_ = """ Pearson correlation coefficient and p-value for testing non-correlation. The Pearson correlation coefficient measures the linear relationship between two datasets. The calculation of the p-value relies on the assumption that eac...
678
1
def __lowerCAmelCase ( UpperCamelCase , UpperCamelCase ) -> float: _validate_point(UpperCamelCase ) _validate_point(UpperCamelCase ) if len(UpperCamelCase ) != len(UpperCamelCase ): raise ValueError('''Both points must be in the same n-dimensional space''' ) retur...
678
from manim import * class _lowerCAmelCase ( _lowercase ): def __magic_name__( self ): lowerCAmelCase__ : Tuple = Rectangle(height=0.5 , width=0.5 ) lowerCAmelCase__ : Dict = Rectangle(height=0.25 , width=0.25 ) lowerCAmelC...
678
1
lowerCAmelCase_ = """ # Transformers installation ! pip install transformers datasets # To install from source instead of the last release, comment the command above and uncomment the following one. # ! pip install git+https://github.com/huggingface/transformers.git """ lowerCAmelCase_ = ...
678
import collections import os import re from pathlib import Path lowerCAmelCase_ = """src/transformers""" # Matches is_xxx_available() lowerCAmelCase_ = re.compile(R"""is\_([a-z_]*)_available()""") # Catches a one-line _import_struct = {xxx} lowerCAmelCase_ = re.compile(R"...
678
1
from typing import Dict, List, Optional, Union import numpy as np from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict from ...image_transforms import ( center_crop, convert_to_rgb, get_resize_output_image_size, normalize, rescale, resize, to_channel_d...
678
import math import os import unittest from transformers import MegatronBertConfig, is_torch_available from transformers.models.auto import get_values from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device from ...test_configuration_common import Config...
678
1
import unittest import numpy as np from transformers.testing_utils import is_flaky, require_torch, require_vision from transformers.utils import is_torch_available, is_vision_available from ...test_image_processing_common import ImageProcessingSavingTestMixin, prepare_image_inputs if is_torch_available(): ...
678
import warnings from collections import OrderedDict from typing import Any, Mapping, Optional from ... import PreTrainedTokenizer from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig, OnnxConfigWithPast, OnnxSeqaSeqConfigWithPast from ...onnx.utils import compute_effective_axis_dimen...
678
1
import argparse import re import requests import torch # git clone https://github.com/salesforce/BLIP.git from models.blip import blip_decoder from models.blip_itm import blip_itm from models.blip_vqa import blip_vqa from PIL import Image from torchvision import transforms from torchvision.transforms.functional ...
678
import functools import operator from ...configuration_utils import PretrainedConfig from ...utils import logging lowerCAmelCase_ = logging.get_logger(__name__) lowerCAmelCase_ = { """asapp/sew-d-tiny-100k""": """https://huggingface.co/asapp/sew-d-tiny-100k/resolve/main/config.jso...
678
1
import os from shutil import copyfile from typing import List, Optional, Tuple from ...tokenization_utils import AddedToken from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import is_sentencepiece_available, logging if is_sentencepiece_available(): from .tokenization_barthez imp...
678
import unittest from transformers import PegasusTokenizer, PegasusTokenizerFast from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, require_torch, slow from transformers.utils import cached_property from ...test_tokenization_common import TokenizerTesterMixin lowerC...
678
1
from tempfile import TemporaryDirectory from unittest import TestCase from unittest.mock import MagicMock, patch from transformers import AutoModel, TFAutoModel from transformers.onnx import FeaturesManager from transformers.testing_utils import SMALL_MODEL_IDENTIFIER, require_tf, require_torch @require_tor...
678
from ...configuration_utils import PretrainedConfig from ...utils import logging lowerCAmelCase_ = logging.get_logger(__name__) lowerCAmelCase_ = { """naver-clova-ix/donut-base""": """https://huggingface.co/naver-clova-ix/donut-base/resolve/main/config.json""", # See all Donut ...
678
1
import itertools import os import random import tempfile import unittest import numpy as np from datasets import load_dataset from transformers import is_speech_available from transformers.testing_utils import check_json_file_has_correct_format, require_torch, require_torchaudio from transformers.utils.import_ut...
678
lowerCAmelCase_ = { """meter""": """m""", """kilometer""": """km""", """megametre""": """Mm""", """gigametre""": """Gm""", """terametre""": """Tm""", """petametre""": """Pm""", """exametre""": """Em""", """zettametre""": """Zm""", """yottametre""": """Ym""", } # Exp...
678
1
import sys lowerCAmelCase_ = ( """73167176531330624919225119674426574742355349194934""" """96983520312774506326239578318016984801869478851843""" """85861560789112949495459501737958331952853208805511""" """12540698747158523863050715693290963295227443043557""" """6689664895044524452...
678
import unittest from transformers import is_vision_available from transformers.pipelines import pipeline from transformers.testing_utils import ( is_pipeline_test, nested_simplify, require_tf, require_torch, require_vision, slow, ) from .test_pipelines_common import ANY if is_vision_ava...
678
1
from ...configuration_utils import PretrainedConfig from ...utils import logging lowerCAmelCase_ = logging.get_logger(__name__) lowerCAmelCase_ = { """sayakpaul/vit-msn-base""": """https://huggingface.co/sayakpaul/vit-msn-base/resolve/main/config.json""", # See all ViT MSN mode...
678
lowerCAmelCase_ = [ [0, 16, 13, 0, 0, 0], [0, 0, 10, 12, 0, 0], [0, 4, 0, 0, 14, 0], [0, 0, 9, 0, 0, 20], [0, 0, 0, 7, 0, 4], [0, 0, 0, 0, 0, 0], ] def __lowerCAmelCase ( UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase ) ...
678
1
import logging import os import sys from dataclasses import dataclass, field from typing import Optional from seqaseq_trainer import SeqaSeqTrainer from seqaseq_training_args import SeqaSeqTrainingArguments import transformers from transformers import ( AutoConfig, AutoModelForSeqaSeqLM, AutoTokenize...
678
import unittest from transformers import ( MODEL_FOR_CAUSAL_LM_MAPPING, TF_MODEL_FOR_CAUSAL_LM_MAPPING, TextGenerationPipeline, logging, pipeline, ) from transformers.testing_utils import ( CaptureLogger, is_pipeline_test, require_accelerate, require_tf, require_torch, ...
678
1
from ..utils import DummyObject, requires_backends class _lowerCAmelCase ( metaclass=_lowercase ): A__ = ['torch', 'torchsde'] def __init__( self , *__UpperCAmelCase , **__UpperCAmelCase ): requires_backends(self , ['''torch''', '''torchsde'''] ) @...
678
def __lowerCAmelCase ( UpperCamelCase ) -> str: return "".join([hex(UpperCamelCase )[2:].zfill(2 ).upper() for byte in list(UpperCamelCase )] ) def __lowerCAmelCase ( UpperCamelCase ) -> bytes: # Check data validity, following RFC3548 # https://www.ietf.or...
678
1
import multiprocessing import time from arguments import PretokenizationArguments from datasets import load_dataset from transformers import AutoTokenizer, HfArgumentParser def __lowerCAmelCase ( UpperCamelCase ) -> List[str]: lowerCAmelCase__ : Tuple = {} lowerCAmelCase_...
678
import torch from diffusers import DPMSolverSDEScheduler from diffusers.utils import torch_device from diffusers.utils.testing_utils import require_torchsde from .test_schedulers import SchedulerCommonTest @require_torchsde class _lowerCAmelCase ( _lowercase ): A__ = (DPMSolverSDESchedul...
678
1
import warnings from collections import OrderedDict from typing import Any, Mapping, Optional from ... import PreTrainedTokenizer from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig, OnnxConfigWithPast, OnnxSeqaSeqConfigWithPast from ...onnx.utils import compute_effective_axis_dimen...
678
import time import unittest from transformers import is_torch_available from transformers.testing_utils import require_torch, torch_device from ..test_modeling_common import ids_tensor if is_torch_available(): import torch from transformers.generation import ( MaxLengthCriteria, Ma...
678
1
import dataclasses import re import string from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple import numpy as np from . import residue_constants lowerCAmelCase_ = Mapping[str, np.ndarray] lowerCAmelCase_ = Mapping[str, Any] # Is a nested dict. lowerCAmel...
678
from functools import reduce lowerCAmelCase_ = ( """73167176531330624919225119674426574742355349194934""" """96983520312774506326239578318016984801869478851843""" """85861560789112949495459501737958331952853208805511""" """12540698747158523863050715693290963295227443043557""" """6...
678
1
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available lowerCAmelCase_ = { """configuration_nezha""": ["""NEZHA_PRETRAINED_CONFIG_ARCHIVE_MAP""", """NezhaConfig"""], } try: if not is_torch_available():...
678
import re import jax.numpy as jnp from flax.traverse_util import flatten_dict, unflatten_dict from jax.random import PRNGKey from ..utils import logging lowerCAmelCase_ = logging.get_logger(__name__) def __lowerCAmelCase ( UpperCamelCase ) -> List[str]: lowerCAmelCase__ ...
678
1
from importlib import import_module from .logging import get_logger lowerCAmelCase_ = get_logger(__name__) class _lowerCAmelCase : def __init__( self , __UpperCAmelCase , __UpperCAmelCase=None ): lowerCAmelCase__ : Dict = attrs or [] if mod...
678
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available lowerCAmelCase_ = { """configuration_nezha""": ["""NEZHA_PRETRAINED_CONFIG_ARCHIVE_MAP""", """NezhaConfig"""], } try: if not is_torch_available():...
678
1
import inspect import unittest import numpy as np from transformers import BeitConfig from transformers.testing_utils import require_flax, require_vision, slow from transformers.utils import cached_property, is_flax_available, is_vision_available from ...test_configuration_common import ConfigTester from ...tes...
678
import tempfile import unittest import numpy as np from huggingface_hub import HfFolder, delete_repo from requests.exceptions import HTTPError from transformers import BertConfig, is_flax_available from transformers.testing_utils import TOKEN, USER, is_staging_test, require_flax if is_flax_available(): im...
678
1
import math import time from transformers import Trainer, is_torch_tpu_available from transformers.trainer_utils import PredictionOutput, speed_metrics if is_torch_tpu_available(check_device=False): import torch_xla.core.xla_model as xm import torch_xla.debug.metrics as met class _lowerCAmelC...
678
from random import randint from tempfile import TemporaryFile import numpy as np def __lowerCAmelCase ( UpperCamelCase , UpperCamelCase , UpperCamelCase ) -> Any: lowerCAmelCase__ : Optional[Any] = 0 if start < end: lowerCAmelCase__ : Union[str, Any] ...
678
1
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_sentencepiece_available, is_tf_available, is_tokenizers_available, is_torch_available, ) if is_sentencepiece_available(): from ..ta.tokenization_ta import T...
678
import pyarrow.parquet as pq import pytest from datasets import Audio, Dataset, DatasetDict, Features, NamedSplit, Sequence, Value, config from datasets.features.image import Image from datasets.io.parquet import ParquetDatasetReader, ParquetDatasetWriter, get_writer_batch_size from ..utils import assert_arrow_m...
678
1
import timeit import numpy as np import datasets from datasets.arrow_writer import ArrowWriter from datasets.features.features import _ArrayXD def __lowerCAmelCase ( UpperCamelCase ) -> List[Any]: def wrapper(*UpperCamelCase , **UpperCamelCase ): lowerCAmelCase__ : List...
678
from ...configuration_utils import PretrainedConfig from ...utils import logging from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices lowerCAmelCase_ = logging.get_logger(__name__) lowerCAmelCase_ = { """microsoft/focalnet-tiny""": """...
678
1
import copy import inspect import unittest from transformers import PretrainedConfig, SwiftFormerConfig from transformers.testing_utils import ( require_torch, require_vision, slow, torch_device, ) from transformers.utils import cached_property, is_torch_available, is_vision_available from ...tes...
678
from scipy.stats import pearsonr import datasets lowerCAmelCase_ = """ Pearson correlation coefficient and p-value for testing non-correlation. The Pearson correlation coefficient measures the linear relationship between two datasets. The calculation of the p-value relies on the assumption that eac...
678
1
import unittest import numpy as np from transformers import MODEL_FOR_AUDIO_CLASSIFICATION_MAPPING, TF_MODEL_FOR_AUDIO_CLASSIFICATION_MAPPING from transformers.pipelines import AudioClassificationPipeline, pipeline from transformers.testing_utils import ( is_pipeline_test, nested_simplify, require_tf...
678
from manim import * class _lowerCAmelCase ( _lowercase ): def __magic_name__( self ): lowerCAmelCase__ : Tuple = Rectangle(height=0.5 , width=0.5 ) lowerCAmelCase__ : Dict = Rectangle(height=0.25 , width=0.25 ) lowerCAmelC...
678
1
import unittest from transformers import PegasusTokenizer, PegasusTokenizerFast from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, require_torch, slow from transformers.utils import cached_property from ...test_tokenization_common import TokenizerTesterMixin lowerC...
678
import collections import os import re from pathlib import Path lowerCAmelCase_ = """src/transformers""" # Matches is_xxx_available() lowerCAmelCase_ = re.compile(R"""is\_([a-z_]*)_available()""") # Catches a one-line _import_struct = {xxx} lowerCAmelCase_ = re.compile(R"...
678
1
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_tf_available, is_tokenizers_available, is_torch_available, ) lowerCAmelCase_ = { """configuration_whisper""": ["""WHISPER_PRETRAINED_CONFIG_ARCHIVE_...
678
import math import os import unittest from transformers import MegatronBertConfig, is_torch_available from transformers.models.auto import get_values from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device from ...test_configuration_common import Config...
678
1
import time import unittest from transformers import is_torch_available from transformers.testing_utils import require_torch, torch_device from ..test_modeling_common import ids_tensor if is_torch_available(): import torch from transformers.generation import ( MaxLengthCriteria, Ma...
678
import warnings from collections import OrderedDict from typing import Any, Mapping, Optional from ... import PreTrainedTokenizer from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig, OnnxConfigWithPast, OnnxSeqaSeqConfigWithPast from ...onnx.utils import compute_effective_axis_dimen...
678
1
import math import os import unittest from transformers import MegatronBertConfig, is_torch_available from transformers.models.auto import get_values from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device from ...test_configuration_common import Config...
678
import functools import operator from ...configuration_utils import PretrainedConfig from ...utils import logging lowerCAmelCase_ = logging.get_logger(__name__) lowerCAmelCase_ = { """asapp/sew-d-tiny-100k""": """https://huggingface.co/asapp/sew-d-tiny-100k/resolve/main/config.jso...
678
1
import collections import inspect import unittest from typing import Dict, List, Tuple from transformers import MaskFormerSwinConfig from transformers.testing_utils import require_torch, require_torch_multi_gpu, torch_device from transformers.utils import is_torch_available from ...test_backbone_common import Ba...
678
import unittest from transformers import PegasusTokenizer, PegasusTokenizerFast from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, require_torch, slow from transformers.utils import cached_property from ...test_tokenization_common import TokenizerTesterMixin lowerC...
678
1
import os from argparse import ArgumentParser from typing import List import torch.utils.data from datasets import Dataset, IterableDataset from datasets.distributed import split_dataset_by_node lowerCAmelCase_ = 4 lowerCAmelCase_ = 3 class _lowerCAmelCase ( _lowercase ...
678
from ...configuration_utils import PretrainedConfig from ...utils import logging lowerCAmelCase_ = logging.get_logger(__name__) lowerCAmelCase_ = { """naver-clova-ix/donut-base""": """https://huggingface.co/naver-clova-ix/donut-base/resolve/main/config.json""", # See all Donut ...
678
1
def __lowerCAmelCase ( UpperCamelCase ) -> List[str]: lowerCAmelCase__ : List[str] = [] lowerCAmelCase__ : List[str] = [] lowerCAmelCase__ : Dict = { '''^''': 3, '''*''': 2, '''/''': 2, '''%''': 2, '''+''': 1, '''...
678
lowerCAmelCase_ = { """meter""": """m""", """kilometer""": """km""", """megametre""": """Mm""", """gigametre""": """Gm""", """terametre""": """Tm""", """petametre""": """Pm""", """exametre""": """Em""", """zettametre""": """Zm""", """yottametre""": """Ym""", } # Exp...
678
1
import logging import os import sys from dataclasses import dataclass, field from typing import Optional import torch from datasets import load_dataset from torchvision.transforms import Compose, Lambda, Normalize, RandomHorizontalFlip, RandomResizedCrop, ToTensor from torchvision.transforms.functional import Int...
678
import unittest from transformers import is_vision_available from transformers.pipelines import pipeline from transformers.testing_utils import ( is_pipeline_test, nested_simplify, require_tf, require_torch, require_vision, slow, ) from .test_pipelines_common import ANY if is_vision_ava...
678
1
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_sentencepiece_available, is_tokenizers_available, is_torch_available, ) lowerCAmelCase_ = {} try: if not is_sentencepiece_available(): raise OptionalDependency...
678
lowerCAmelCase_ = [ [0, 16, 13, 0, 0, 0], [0, 0, 10, 12, 0, 0], [0, 4, 0, 0, 14, 0], [0, 0, 9, 0, 0, 20], [0, 0, 0, 7, 0, 4], [0, 0, 0, 0, 0, 0], ] def __lowerCAmelCase ( UpperCamelCase , UpperCamelCase , UpperCamelCase , UpperCamelCase ) ...
678
1
from argparse import ArgumentParser, Namespace from ..utils import logging from . import BaseTransformersCLICommand def __lowerCAmelCase ( UpperCamelCase ) -> Dict: return ConvertCommand( args.model_type , args.tf_checkpoint , args.pytorch_dump_output , args.co...
678
import unittest from transformers import ( MODEL_FOR_CAUSAL_LM_MAPPING, TF_MODEL_FOR_CAUSAL_LM_MAPPING, TextGenerationPipeline, logging, pipeline, ) from transformers.testing_utils import ( CaptureLogger, is_pipeline_test, require_accelerate, require_tf, require_torch, ...
678
1
import unittest from pathlib import Path from tempfile import TemporaryDirectory from transformers import AutoConfig, TFGPTaLMHeadModel, is_keras_nlp_available, is_tf_available from transformers.models.gpta.tokenization_gpta import GPTaTokenizer from transformers.testing_utils import require_keras_nlp, require_tf...
678
def __lowerCAmelCase ( UpperCamelCase ) -> str: return "".join([hex(UpperCamelCase )[2:].zfill(2 ).upper() for byte in list(UpperCamelCase )] ) def __lowerCAmelCase ( UpperCamelCase ) -> bytes: # Check data validity, following RFC3548 # https://www.ietf.or...
678
1
lowerCAmelCase_ = tuple[float, float, float] lowerCAmelCase_ = tuple[float, float, float] def __lowerCAmelCase ( UpperCamelCase , UpperCamelCase ) -> Vectorad: lowerCAmelCase__ : Dict = end_pointa[0] - end_pointa[0] lowerCAmelCase__ : Di...
678
import torch from diffusers import DPMSolverSDEScheduler from diffusers.utils import torch_device from diffusers.utils.testing_utils import require_torchsde from .test_schedulers import SchedulerCommonTest @require_torchsde class _lowerCAmelCase ( _lowercase ): A__ = (DPMSolverSDESchedul...
678
1
from ...configuration_utils import PretrainedConfig from ...utils import logging lowerCAmelCase_ = logging.get_logger(__name__) lowerCAmelCase_ = { """naver-clova-ix/donut-base""": """https://huggingface.co/naver-clova-ix/donut-base/resolve/main/config.json""", # See all Donut ...
678
import time import unittest from transformers import is_torch_available from transformers.testing_utils import require_torch, torch_device from ..test_modeling_common import ids_tensor if is_torch_available(): import torch from transformers.generation import ( MaxLengthCriteria, Ma...
678
1
from .glue import GlueDataset, GlueDataTrainingArguments from .language_modeling import ( LineByLineTextDataset, LineByLineWithRefDataset, LineByLineWithSOPTextDataset, TextDataset, TextDatasetForNextSentencePrediction, ) from .squad import SquadDataset, SquadDataTrainingArguments
678
from functools import reduce lowerCAmelCase_ = ( """73167176531330624919225119674426574742355349194934""" """96983520312774506326239578318016984801869478851843""" """85861560789112949495459501737958331952853208805511""" """12540698747158523863050715693290963295227443043557""" """6...
678
1
import argparse import requests import torch # pip3 install salesforce-lavis # I'm actually installing a slightly modified version: pip3 install git+https://github.com/nielsrogge/LAVIS.git@fix_lavis_float32 (there's also the fix_lavis branch) # also note: to convert Vicuna checkpoints, we had to include /home/ni...
678
import re import jax.numpy as jnp from flax.traverse_util import flatten_dict, unflatten_dict from jax.random import PRNGKey from ..utils import logging lowerCAmelCase_ = logging.get_logger(__name__) def __lowerCAmelCase ( UpperCamelCase ) -> List[str]: lowerCAmelCase__ ...
678
1
from typing import Dict, List, Optional, Union import numpy as np from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict from ...image_transforms import ( center_crop, convert_to_rgb, get_resize_output_image_size, normalize, rescale, resize, to_channel_d...
678
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available lowerCAmelCase_ = { """configuration_nezha""": ["""NEZHA_PRETRAINED_CONFIG_ARCHIVE_MAP""", """NezhaConfig"""], } try: if not is_torch_available():...
678
1
def __lowerCAmelCase ( UpperCamelCase , UpperCamelCase , UpperCamelCase ) -> int: def count_of_possible_combinations(UpperCamelCase ) -> int: if target < 0: return 0 if target == 0: return 1 return sum(count_of_possible_combinations(target - item ) ...
678
import tempfile import unittest import numpy as np from huggingface_hub import HfFolder, delete_repo from requests.exceptions import HTTPError from transformers import BertConfig, is_flax_available from transformers.testing_utils import TOKEN, USER, is_staging_test, require_flax if is_flax_available(): im...
678
1
import copy from typing import TYPE_CHECKING, Any, Mapping, Optional, OrderedDict from packaging import version from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging from ..auto.configuration_auto import AutoConfig if TYPE_CHECKING: from ... import...
678
from random import randint from tempfile import TemporaryFile import numpy as np def __lowerCAmelCase ( UpperCamelCase , UpperCamelCase , UpperCamelCase ) -> Any: lowerCAmelCase__ : Optional[Any] = 0 if start < end: lowerCAmelCase__ : Union[str, Any] ...
678
1
import torch from diffusers import KDPMaDiscreteScheduler from diffusers.utils import torch_device from .test_schedulers import SchedulerCommonTest class _lowerCAmelCase ( _lowercase ): A__ = (KDPMaDiscreteScheduler,) A__ = 10 def __magic_name__( self , **__U...
678
import pyarrow.parquet as pq import pytest from datasets import Audio, Dataset, DatasetDict, Features, NamedSplit, Sequence, Value, config from datasets.features.image import Image from datasets.io.parquet import ParquetDatasetReader, ParquetDatasetWriter, get_writer_batch_size from ..utils import assert_arrow_m...
678
1
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available lowerCAmelCase_ = { """configuration_poolformer""": [ """POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP""", """PoolFormerConfig""", """Poo...
678
from ...configuration_utils import PretrainedConfig from ...utils import logging from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices lowerCAmelCase_ = logging.get_logger(__name__) lowerCAmelCase_ = { """microsoft/focalnet-tiny""": """...
678
1
from __future__ import annotations def __lowerCAmelCase ( UpperCamelCase ) -> int: # preprocessing the first row for i in range(1 , len(matrix[0] ) ): matrix[0][i] += matrix[0][i - 1] # preprocessing the first column for i in range(1 , len(UpperCamelCase ) ...
678
from scipy.stats import pearsonr import datasets lowerCAmelCase_ = """ Pearson correlation coefficient and p-value for testing non-correlation. The Pearson correlation coefficient measures the linear relationship between two datasets. The calculation of the p-value relies on the assumption that eac...
678
1
from maths.is_square_free import is_square_free from maths.prime_factors import prime_factors def __lowerCAmelCase ( UpperCamelCase ) -> int: lowerCAmelCase__ : int = prime_factors(UpperCamelCase ) if is_square_free(UpperCamelCase ): return -1 if len(UpperCamelCase ...
678
from manim import * class _lowerCAmelCase ( _lowercase ): def __magic_name__( self ): lowerCAmelCase__ : Tuple = Rectangle(height=0.5 , width=0.5 ) lowerCAmelCase__ : Dict = Rectangle(height=0.25 , width=0.25 ) lowerCAmelC...
678
1
import argparse import json import os import evaluate import torch from datasets import load_dataset from torch.optim import AdamW from torch.utils.data import DataLoader from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed from accelerate import A...
678
import collections import os import re from pathlib import Path lowerCAmelCase_ = """src/transformers""" # Matches is_xxx_available() lowerCAmelCase_ = re.compile(R"""is\_([a-z_]*)_available()""") # Catches a one-line _import_struct = {xxx} lowerCAmelCase_ = re.compile(R"...
678
1
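Across the rows shown, label is 1 exactly when code_codestyle equals style_context_codestyle, which suggests the label marks whether a snippet and its context share a code style. This is an observation about the previewed rows, not documented dataset semantics; a short sketch of testing it over the loaded records (reusing the `ds` object from the sketch above):

```python
# Sketch: test whether label == (code_codestyle == style_context_codestyle)
# holds beyond the previewed rows. `ds` is the Dataset loaded earlier.
mismatches = [
    i
    for i, row in enumerate(ds)
    if row["label"] != int(row["code_codestyle"] == row["style_context_codestyle"])
]
print(f"{len(mismatches)} of {len(ds)} rows break the pattern")
```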