Dataset preview columns:

- code: string (lengths 81 to 54k)
- code_codestyle: int64 (0 to 721)
- style_context: string (lengths 91 to 41.9k)
- style_context_codestyle: int64 (0 to 699)
- label: int64 (0 or 1)
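The preview rows that follow list each record as five consecutive lines in the column order above (code, code_codestyle, style_context, style_context_codestyle, label), with long code fields truncated by the viewer. As a minimal sketch of how records with this schema could be represented and iterated using the Hugging Face `datasets` library; the dataset's repository id is not given in this preview, so the example builds a tiny in-memory dataset, and the sample values and the reading of `label` are placeholders rather than actual entries:

```python
# Hedged sketch: the dataset's repo id and the meaning of `label` are not stated
# in this preview, so the rows and the 0/1 interpretation below are placeholders.
from datasets import Dataset, Features, Value

# Column schema mirroring the preview header above.
features = Features(
    {
        "code": Value("string"),
        "code_codestyle": Value("int64"),
        "style_context": Value("string"),
        "style_context_codestyle": Value("int64"),
        "label": Value("int64"),
    }
)

# Tiny in-memory stand-in with the same columns as the preview.
ds = Dataset.from_dict(
    {
        "code": ["def add(a, b):\n    return a + b"],
        "code_codestyle": [680],
        "style_context": ["from math import sqrt"],
        "style_context_codestyle": [680],
        "label": [1],
    },
    features=features,
)

for row in ds:
    # Each record pairs a code snippet with a style-context snippet plus two
    # integer style indices and a binary label.
    print(row["code_codestyle"], row["style_context_codestyle"], row["label"])
```

If the dataset is published on the Hub, `datasets.load_dataset("<repo_id>")` would replace the in-memory construction; the repo id is not recoverable from this preview.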
'''simple docstring''' from __future__ import annotations import math def __UpperCAmelCase ( _UpperCAmelCase : int ) -> bool: if 1 < number < 4: # 2 and 3 are primes return True elif number < 2 or number % 2 == 0 or number % 3 == 0: # Negatives, 0, 1, ...
680
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_torch_available, ) a : str = { '''configuration_gpt_bigcode''': ['''GPT_BIGCODE_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''GPTBigCodeConfig'''],...
680
1
'''simple docstring''' import jax.numpy as jnp from ...utils import logging from ..ta.modeling_flax_ta import FlaxTaEncoderModel, FlaxTaForConditionalGeneration, FlaxTaModel from .configuration_mta import MTaConfig a : List[str] = logging.get_logger(__name__) a : Union[str, An...
680
'''simple docstring''' # HF Trainer benchmarking tool # # This tool can be used to run and compare multiple dimensions of the HF Trainers args. # # It then prints a report once in github format with all the information that needs to be shared # with others and second time in a console-friendly format,...
680
1
'''simple docstring''' from collections.abc import Callable import numpy as np def __UpperCAmelCase ( _UpperCAmelCase : Callable , _UpperCAmelCase : float , _UpperCAmelCase : float , _UpperCAmelCase : float , _UpperCAmelCase : float ) ...
680
'''simple docstring''' import pytest from datasets.parallel import ParallelBackendConfig, parallel_backend from datasets.utils.py_utils import map_nested from .utils import require_dill_gt_0_3_2, require_joblibspark, require_not_windows def __UpperCAmelCase ( _UpperCAmelCase : Dict ...
680
1
'''simple docstring''' from math import sqrt def __UpperCAmelCase ( _UpperCAmelCase : int ) -> bool: if 1 < number < 4: # 2 and 3 are primes return True elif number < 2 or number % 2 == 0 or number % 3 == 0: # Negatives, 0, 1, all even numbers, all multip...
680
'''simple docstring''' from collections import OrderedDict from typing import Mapping from packaging import version from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging a : Union[str, Any] = logging.get_logger(__name__) ...
680
1
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_tf_available, is_tokenizers_available, is_torch_available, ) a : Tuple = { '''configuration_electra''': ...
680
'''simple docstring''' from collections import OrderedDict from typing import Mapping from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging a : Union[str, Any] = logging.get_logger(__name__) a : List[Any] = { ...
680
1
'''simple docstring''' from ..utils import DummyObject, requires_backends class SCREAMING_SNAKE_CASE__ ( metaclass=_UpperCamelCase ): __SCREAMING_SNAKE_CASE = ["""flax"""] def __init__( self : Dict , *a_ : int , **a_ : Tuple ): ...
680
'''simple docstring''' import logging import torch from torch import nn from torch.nn import CrossEntropyLoss, MSELoss from transformers.file_utils import add_start_docstrings, add_start_docstrings_to_model_forward from transformers.models.bert.modeling_bert import ( BERT_INPUTS_DOCSTRING, ...
680
1
'''simple docstring''' from manim import * class SCREAMING_SNAKE_CASE__ ( _UpperCamelCase ): def A ( self : List[str] ): """simple docstring""" __snake_case = Rectangle(height=0.5 , width=0.5 ) __snake_case ...
680
'''simple docstring''' import inspect import unittest from transformers import DPTConfig from transformers.file_utils import is_torch_available, is_vision_available from transformers.models.auto import get_values from transformers.testing_utils import require_torch, require_vision, slow, torch_devic...
680
1
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : int ) -> int: if not isinstance(SCREAMING_SNAKE_CASE_ , SCREAMING_SNAKE_CASE_ ): raise ValueError("Input must be an integer" ) if input_num <= 0: raise ValueError("Input must be positive" ) retu...
700
'''simple docstring''' import copy from dataclasses import dataclass from pathlib import Path from typing import Dict, Optional, Union @dataclass class SCREAMING_SNAKE_CASE__ : __SCREAMING_SNAKE_CASE = None __SCREAMING_SNAKE_CASE = False __SCREAMING_SNAKE_CASE = ...
680
0
'''simple docstring''' import unittest import numpy as np from transformers.testing_utils import require_pytesseract, require_torch from transformers.utils import is_pytesseract_available, is_torch_available from ...test_image_processing_common import ImageProcessingSavingTestMixin, prepare_image...
701
'''simple docstring''' import gc import tempfile import unittest import numpy as np import torch from diffusers import VersatileDiffusionTextToImagePipeline from diffusers.utils.testing_utils import nightly, require_torch_gpu, torch_device a : Optional[Any] = False class ...
680
0
'''simple docstring''' import json import os import re import unicodedata from json.encoder import INFINITY from typing import Any, Dict, List, Optional, Tuple, Union import numpy as np import regex from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...tokenization_utils_ba...
702
'''simple docstring''' import os import torch from ..logging import get_logger from .constants import FSDP_PYTORCH_VERSION, MODEL_NAME, OPTIMIZER_NAME from .versions import is_torch_version if is_torch_version('''>=''', FSDP_PYTORCH_VERSION): import torch.distributed.checkpoint as dist_...
680
0
'''simple docstring''' def __UpperCAmelCase ( ) -> str: for n in range(1 , 1_00_00_00 ): yield n * (n + 1) // 2 def __UpperCAmelCase ( _UpperCAmelCase : Dict ) -> List[Any]: __snake_case = 1 __snake_case = 2 while i * i <= n: __s...
703
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : int , _UpperCAmelCase : int ) -> str: if not isinstance(_UpperCAmelCase , _UpperCAmelCase ): raise ValueError("iterations must be defined as integers" ) if not isinstance(_UpperCAmelCase , ...
680
0
'''simple docstring''' from typing import Dict, List, Optional, Union import numpy as np from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict from ...image_transforms import ( center_crop, get_resize_output_image_size, normalize, rescale, resize, to_chann...
704
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : int ) -> str: if number > 0: raise ValueError("input must be a negative integer" ) __snake_case = len(bin(_UpperCAmelCase )[3:] ) __snake_case = bin(abs(_UpperCAmelCase ) - (1 << binary_number_length)...
680
0
'''simple docstring''' from typing import List, Optional, Union import numpy as np from ...feature_extraction_sequence_utils import SequenceFeatureExtractor from ...feature_extraction_utils import BatchFeature from ...utils import PaddingStrategy, TensorType, logging a : Optional[Any] ...
705
'''simple docstring''' from timeit import timeit def __UpperCAmelCase ( _UpperCAmelCase : int ) -> int: if number < 0: raise ValueError("the value of input must not be negative" ) __snake_case = 0 while number: number &= number - 1 result += 1 r...
680
0
'''simple docstring''' from typing import Any def __UpperCAmelCase ( _UpperCAmelCase : Tuple ) -> list[Any]: if not input_list: return [] __snake_case = [input_list.count(lowerCamelCase_ ) for value in input_list] __snake_case = max(lowerCamelCase_ ) # Gets the ...
706
'''simple docstring''' import tempfile import unittest from make_student import create_student_by_copying_alternating_layers from transformers import AutoConfig from transformers.file_utils import cached_property from transformers.testing_utils import require_torch a : Dict = '''...
680
0
'''simple docstring''' from manim import * class SCREAMING_SNAKE_CASE__ ( UpperCAmelCase_ ): def A ( self : List[str] ): """simple docstring""" __snake_case = Rectangle(height=0.5 , width=0.5 ) __snake_case ...
707
'''simple docstring''' import argparse import glob import logging import os import time from argparse import Namespace import numpy as np import torch from lightning_base import BaseTransformer, add_generic_args, generic_train from torch.utils.data import DataLoader, TensorDataset from transf...
680
0
'''simple docstring''' from __future__ import annotations import math def __UpperCAmelCase ( _UpperCAmelCase : list , _UpperCAmelCase : list ) -> list: if len(__A ) != 2 or len(a[0] ) != 2 or len(__A ) != 2 or len(b[0] ) != 2: raise Exception("Matrice...
708
'''simple docstring''' import pytest import datasets.config from datasets.utils.info_utils import is_small_dataset @pytest.mark.parametrize("dataset_size" , [None, 4_00 * 2**20, 6_00 * 2**20] ) @pytest.mark.parametrize("input_in_memory_max_size" , ["default", 0, 1_00 * 2**20, 9_00 *...
680
0
'''simple docstring''' import pprint import requests a : Union[str, Any] = 'https://zenquotes.io/api' def __UpperCAmelCase ( ) -> Optional[Any]: return requests.get(API_ENDPOINT_URL + "/today" ).json() def __UpperCAmelCase ( ) -> Optional[Any]: retur...
709
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : float ) -> float: if edge <= 0 or not isinstance(_UpperCAmelCase , _UpperCAmelCase ): raise ValueError("Length must be a positive." ) return 3 * ((25 + 10 * (5 ** (1 / 2))) ** (1 / 2)) * (edge**2) d...
680
0
'''simple docstring''' from ...configuration_utils import PretrainedConfig from ...utils import logging a = logging.get_logger(__name__) a = { "microsoft/swinv2-tiny-patch4-window8-256": ( "https://huggingface.co/microsoft/swinv2-tiny-patch4-window8-256/resolve/main...
710
'''simple docstring''' from math import atan, cos, radians, sin, tan from .haversine_distance import haversine_distance a : Any = 6_378_137.0 a : List[Any] = 6_356_752.314_245 a : Dict = 6_378_137 def __UpperCAmelCase ( _UpperCAmelCase : float...
680
0
'''simple docstring''' import argparse import ast import logging import os import sys import pandas as pd import torch from tqdm import tqdm from transformers import BartForConditionalGeneration, RagRetriever, RagSequenceForGeneration, RagTokenForGeneration from transformers import logging as ...
711
'''simple docstring''' import math import sys import cva import numpy as np def __UpperCAmelCase ( _UpperCAmelCase : np.ndarray , _UpperCAmelCase : float ) -> np.ndarray: # For applying gaussian function for each element in matrix. __snake_case = math.sqrt...
680
0
'''simple docstring''' import unittest import numpy as np from diffusers import LMSDiscreteScheduler, OnnxStableDiffusionInpaintPipeline from diffusers.utils.testing_utils import ( is_onnx_available, load_image, nightly, require_onnxruntime, require_torch_gpu, ) from ..test_...
712
'''simple docstring''' class SCREAMING_SNAKE_CASE__ : def __init__( self : Any , a_ : Dict , a_ : Union[str, Any] , a_ : Tuple ): """simple docstring""" __snake_case = name __snake_case = value __snak...
680
0
'''simple docstring''' a : int = [0, 2, 4, 6, 8] a : Any = [1, 3, 5, 7, 9] def __UpperCAmelCase ( _UpperCAmelCase : Optional[Any] , _UpperCAmelCase : Dict , _UpperCAmelCase : Optional[Any] , _UpperCAmelCase : Dict ) -> i...
713
'''simple docstring''' import os from math import logaa def __UpperCAmelCase ( _UpperCAmelCase : str = "base_exp.txt" ) -> int: __snake_case = 0 __snake_case = 0 for i, line in enumerate(open(os.path.join(os.path.dirname(_UpperCAmelCase ) , _UpperCAmelCase )...
680
0
'''simple docstring''' import argparse import dataclasses import json import logging import os import shutil from typing import List, Optional import datasets from accelerate import Accelerator from datasets import load_dataset from finetuning import finetune from tqdm.auto import tqdm impo...
714
'''simple docstring''' from typing import List, Optional from tokenizers import ByteLevelBPETokenizer from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import logging from .tokenization_blenderbot_small import BlenderbotSmallTokenizer a : List[Any] = log...
680
0
'''simple docstring''' from collections import OrderedDict from typing import Mapping from packaging import version from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging a : Union[str, Any] = logging.get_logger(__name__) a : Any ...
715
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_torch_available, ) a : str = { '''configuration_gpt_bigcode''': ['''GPT_BIGCODE_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''GPTBigCodeConfig'''],...
680
0
'''simple docstring''' from random import randint from tempfile import TemporaryFile import numpy as np def A__ ( _UpperCAmelCase : Tuple , _UpperCAmelCase : str , _UpperCAmelCase : Dict ) -> Union[str, Any]: __snake_case = 0 if start < end: ...
716
'''simple docstring''' # HF Trainer benchmarking tool # # This tool can be used to run and compare multiple dimensions of the HF Trainers args. # # It then prints a report once in github format with all the information that needs to be shared # with others and second time in a console-friendly format,...
680
0
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available a : Union[str, Any] = { '''configuration_instructblip''': [ '''INSTRUCTBLIP_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''InstructB...
717
'''simple docstring''' import pytest from datasets.parallel import ParallelBackendConfig, parallel_backend from datasets.utils.py_utils import map_nested from .utils import require_dill_gt_0_3_2, require_joblibspark, require_not_windows def __UpperCAmelCase ( _UpperCAmelCase : Dict ...
680
0
'''simple docstring''' import re def __UpperCAmelCase ( _UpperCAmelCase : str ) -> list: return [char.split() for char in re.split(R"[^ a-z A-Z 0-9 \s]" , str_ )] def __UpperCAmelCase ( _UpperCAmelCase : str ) -> str: __snake_case = split_input(str_ ) return...
718
'''simple docstring''' from collections import OrderedDict from typing import Mapping from packaging import version from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging a : Union[str, Any] = logging.get_logger(__name__) ...
680
0
'''simple docstring''' import fire from transformers import AutoConfig, AutoModelForSeqaSeqLM, AutoTokenizer def __UpperCAmelCase ( _UpperCAmelCase : str , _UpperCAmelCase : str , **_UpperCAmelCase : List[Any] ) -> str: __snake_case = AutoConfig.fro...
719
'''simple docstring''' from collections import OrderedDict from typing import Mapping from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging a : Union[str, Any] = logging.get_logger(__name__) a : List[Any] = { ...
680
0
import unittest from transformers import is_torch_available from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow if is_torch_available(): import torch from transformers import XLMRobertaModel @require_sentencepiece @require_toke...
720
'''simple docstring''' import logging import torch from torch import nn from torch.nn import CrossEntropyLoss, MSELoss from transformers.file_utils import add_start_docstrings, add_start_docstrings_to_model_forward from transformers.models.bert.modeling_bert import ( BERT_INPUTS_DOCSTRING, ...
680
0
'''simple docstring''' import os from typing import Optional import fsspec from fsspec.archive import AbstractArchiveFileSystem from fsspec.utils import DEFAULT_BLOCK_SIZE class SCREAMING_SNAKE_CASE__ ( SCREAMING_SNAKE_CASE__ ): __SCREAMING_SNAKE_CASE = """""" __SCREAMING_S...
721
'''simple docstring''' import inspect import unittest from transformers import DPTConfig from transformers.file_utils import is_torch_available, is_vision_available from transformers.models.auto import get_values from transformers.testing_utils import require_torch, require_vision, slow, torch_devic...
680
0
'''simple docstring''' import json import os import tempfile import unittest import numpy as np from datasets import load_dataset from transformers.testing_utils import require_torch, require_vision, slow from transformers.utils import is_torch_available, is_vision_available from ...test_imag...
700
'''simple docstring''' import copy from dataclasses import dataclass from pathlib import Path from typing import Dict, Optional, Union @dataclass class SCREAMING_SNAKE_CASE__ : __SCREAMING_SNAKE_CASE = None __SCREAMING_SNAKE_CASE = False __SCREAMING_SNAKE_CASE = ...
680
0
'''simple docstring''' import unittest from transformers import CamembertTokenizer, CamembertTokenizerFast from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow from transformers.utils import is_torch_available from ...test_tokenization_common import ...
701
'''simple docstring''' import gc import tempfile import unittest import numpy as np import torch from diffusers import VersatileDiffusionTextToImagePipeline from diffusers.utils.testing_utils import nightly, require_torch_gpu, torch_device a : Optional[Any] = False class ...
680
0
'''simple docstring''' from typing import Optional from torch import nn from .transformer_ad import TransformeraDModel, TransformeraDModelOutput class SCREAMING_SNAKE_CASE__ ( nn.Module ): def __init__( self : List[str] , a_ : List[str] = 16 , a_ : L...
702
'''simple docstring''' import os import torch from ..logging import get_logger from .constants import FSDP_PYTORCH_VERSION, MODEL_NAME, OPTIMIZER_NAME from .versions import is_torch_version if is_torch_version('''>=''', FSDP_PYTORCH_VERSION): import torch.distributed.checkpoint as dist_...
680
0
'''simple docstring''' import os import tempfile import unittest from transformers import DistilBertConfig, is_torch_available from transformers.testing_utils import require_torch, require_torch_gpu, slow, torch_device from ...test_configuration_common import ConfigTester from ...test_modeling_co...
703
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : int , _UpperCAmelCase : int ) -> str: if not isinstance(_UpperCAmelCase , _UpperCAmelCase ): raise ValueError("iterations must be defined as integers" ) if not isinstance(_UpperCAmelCase , ...
680
0
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_sentencepiece_available, is_tokenizers_available, is_torch_available, ) a : Tuple = {"configuration_fnet": ["FNET_PRETRAINED_CONFIG_ARCHIVE_MAP", "FNet...
704
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : int ) -> str: if number > 0: raise ValueError("input must be a negative integer" ) __snake_case = len(bin(_UpperCAmelCase )[3:] ) __snake_case = bin(abs(_UpperCAmelCase ) - (1 << binary_number_length)...
680
0
'''simple docstring''' from random import shuffle import tensorflow as tf from numpy import array def __UpperCAmelCase ( _UpperCAmelCase : Dict , _UpperCAmelCase : Optional[Any] ) -> str: __snake_case = int(_UpperCAmelCase ) assert noofclusters < len(_Upper...
705
'''simple docstring''' from timeit import timeit def __UpperCAmelCase ( _UpperCAmelCase : int ) -> int: if number < 0: raise ValueError("the value of input must not be negative" ) __snake_case = 0 while number: number &= number - 1 result += 1 r...
680
0
'''simple docstring''' from argparse import ArgumentParser, Namespace from ..utils import logging from . import BaseTransformersCLICommand def __UpperCAmelCase ( _UpperCAmelCase : Namespace ) -> Optional[int]: return ConvertCommand( args.model_type , args.tf_checkpoi...
706
'''simple docstring''' import tempfile import unittest from make_student import create_student_by_copying_alternating_layers from transformers import AutoConfig from transformers.file_utils import cached_property from transformers.testing_utils import require_torch a : Dict = '''...
680
0
'''simple docstring''' import argparse import json import re from pathlib import Path import requests import torch from huggingface_hub import hf_hub_download from PIL import Image from transformers import ( MobileNetVaConfig, MobileNetVaForImageClassification, MobileNetVaImagePr...
707
'''simple docstring''' import argparse import glob import logging import os import time from argparse import Namespace import numpy as np import torch from lightning_base import BaseTransformer, add_generic_args, generic_train from torch.utils.data import DataLoader, TensorDataset from transf...
680
0
'''simple docstring''' from collections import OrderedDict from typing import Any, List, Mapping, Optional from ... import PreTrainedTokenizer, TensorType, is_torch_available from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfigWithPast, PatchingSpec from ...utils import...
708
'''simple docstring''' import pytest import datasets.config from datasets.utils.info_utils import is_small_dataset @pytest.mark.parametrize("dataset_size" , [None, 4_00 * 2**20, 6_00 * 2**20] ) @pytest.mark.parametrize("input_in_memory_max_size" , ["default", 0, 1_00 * 2**20, 9_00 *...
680
0
'''simple docstring''' from __future__ import annotations def __UpperCAmelCase ( _UpperCAmelCase : list ) -> list: if len(_lowercase ) == 0: return [] __snake_case = min(_lowercase ), max(_lowercase ) __snake_case = int(max_value - min_value ) + 1...
709
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : float ) -> float: if edge <= 0 or not isinstance(_UpperCAmelCase , _UpperCAmelCase ): raise ValueError("Length must be a positive." ) return 3 * ((25 + 10 * (5 ** (1 / 2))) ** (1 / 2)) * (edge**2) d...
680
0
'''simple docstring''' from __future__ import annotations def __UpperCAmelCase ( _UpperCAmelCase : float , _UpperCAmelCase : float , _UpperCAmelCase : float ) -> int: if days_between_payments <= 0: raise ValueError("days_between_payments must be >...
710
'''simple docstring''' from math import atan, cos, radians, sin, tan from .haversine_distance import haversine_distance a : Any = 6_378_137.0 a : List[Any] = 6_356_752.314_245 a : Dict = 6_378_137 def __UpperCAmelCase ( _UpperCAmelCase : float...
680
0
'''simple docstring''' import os import shutil import tempfile import unittest import numpy as np from transformers import AutoTokenizer, BarkProcessor from transformers.testing_utils import require_torch, slow @require_torch class SCREAMING_SNAKE_CASE__ ( unittest.TestCase ): ...
711
'''simple docstring''' import math import sys import cva import numpy as np def __UpperCAmelCase ( _UpperCAmelCase : np.ndarray , _UpperCAmelCase : float ) -> np.ndarray: # For applying gaussian function for each element in matrix. __snake_case = math.sqrt...
680
0
'''simple docstring''' from __future__ import annotations class SCREAMING_SNAKE_CASE__ : def __init__( self : Tuple , a_ : int ): """simple docstring""" __snake_case = data __snake_case = None __snake_case = N...
712
'''simple docstring''' class SCREAMING_SNAKE_CASE__ : def __init__( self : Any , a_ : Dict , a_ : Union[str, Any] , a_ : Tuple ): """simple docstring""" __snake_case = name __snake_case = value __snak...
680
0
'''simple docstring''' from __future__ import annotations def __UpperCAmelCase ( _UpperCAmelCase : Tuple ) -> int: # preprocessing the first row for i in range(1 , len(matrix[0] ) ): matrix[0][i] += matrix[0][i - 1] # preprocessing the first column for...
713
'''simple docstring''' import os from math import logaa def __UpperCAmelCase ( _UpperCAmelCase : str = "base_exp.txt" ) -> int: __snake_case = 0 __snake_case = 0 for i, line in enumerate(open(os.path.join(os.path.dirname(_UpperCAmelCase ) , _UpperCAmelCase )...
680
0
'''simple docstring''' import unittest from transformers import MODEL_FOR_ZERO_SHOT_OBJECT_DETECTION_MAPPING, is_vision_available, pipeline from transformers.testing_utils import ( is_pipeline_test, nested_simplify, require_tf, require_torch, require_vision, slow, ) fr...
714
'''simple docstring''' from typing import List, Optional from tokenizers import ByteLevelBPETokenizer from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import logging from .tokenization_blenderbot_small import BlenderbotSmallTokenizer a : List[Any] = log...
680
0
'''simple docstring''' import itertools import os import random import tempfile import unittest import numpy as np from datasets import load_dataset from transformers import is_speech_available from transformers.testing_utils import check_json_file_has_correct_format, require_torch, require_torchaudio from tran...
715
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_torch_available, ) a : str = { '''configuration_gpt_bigcode''': ['''GPT_BIGCODE_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''GPTBigCodeConfig'''],...
680
0
'''simple docstring''' import gc import unittest import torch from transformers import CLIPTextConfig, CLIPTextModel, CLIPTextModelWithProjection, CLIPTokenizer from diffusers import ( AutoencoderKL, DDIMScheduler, DDPMScheduler, PriorTransformer, StableUnCLIPPipeline, ...
716
'''simple docstring''' # HF Trainer benchmarking tool # # This tool can be used to run and compare multiple dimensions of the HF Trainers args. # # It then prints a report once in github format with all the information that needs to be shared # with others and second time in a console-friendly format,...
680
0
'''simple docstring''' import argparse import struct import unittest class SCREAMING_SNAKE_CASE__ : def __init__( self : Optional[int] , a_ : str ): """simple docstring""" __snake_case = data # Initialize hash values ...
717
'''simple docstring''' import pytest from datasets.parallel import ParallelBackendConfig, parallel_backend from datasets.utils.py_utils import map_nested from .utils import require_dill_gt_0_3_2, require_joblibspark, require_not_windows def __UpperCAmelCase ( _UpperCAmelCase : Dict ...
680
0
'''simple docstring''' from __future__ import annotations import unittest import numpy as np from transformers import BlipTextConfig from transformers.testing_utils import require_tf, slow from transformers.utils import is_tf_available from ...test_configuration_common import ConfigTester from ...test_modelin...
718
'''simple docstring''' from collections import OrderedDict from typing import Mapping from packaging import version from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging a : Union[str, Any] = logging.get_logger(__name__) ...
680
0
'''simple docstring''' from ...configuration_utils import PretrainedConfig from ...utils import logging a : int = logging.get_logger(__name__) a : int = { '''google/switch-base-8''': '''https://huggingface.co/google/switch-base-8/blob/main/config.json''', } c...
719
'''simple docstring''' from collections import OrderedDict from typing import Mapping from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging a : Union[str, Any] = logging.get_logger(__name__) a : List[Any] = { ...
680
0
from collections.abc import Sequence from queue import Queue class SCREAMING_SNAKE_CASE__ : def __init__( self : int , a_ : List[str] , a_ : Union[str, Any] , a_ : Union[str, Any] , a_ : Tuple=None , a_ : str=None ): ...
720
'''simple docstring''' import logging import torch from torch import nn from torch.nn import CrossEntropyLoss, MSELoss from transformers.file_utils import add_start_docstrings, add_start_docstrings_to_model_forward from transformers.models.bert.modeling_bert import ( BERT_INPUTS_DOCSTRING, ...
680
0
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available a : str = {"configuration_ibert": ["IBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "IBertConfig", "IBertOnnxConfig"]} try: if not is_torch_avail...
721
'''simple docstring''' import inspect import unittest from transformers import DPTConfig from transformers.file_utils import is_torch_available, is_vision_available from transformers.models.auto import get_values from transformers.testing_utils import require_torch, require_vision, slow, torch_devic...
680
0
'''simple docstring''' from ...configuration_utils import PretrainedConfig from ...utils import logging a : List[str] = logging.get_logger(__name__) a : List[str] = { '''studio-ousia/luke-base''': '''https://huggingface.co/studio-ousia/luke-base/resolve/main/config.j...
700
'''simple docstring''' import copy from dataclasses import dataclass from pathlib import Path from typing import Dict, Optional, Union @dataclass class SCREAMING_SNAKE_CASE__ : __SCREAMING_SNAKE_CASE = None __SCREAMING_SNAKE_CASE = False __SCREAMING_SNAKE_CASE = ...
680
0
'''simple docstring''' from ...processing_utils import ProcessorMixin from ...tokenization_utils_base import BatchEncoding class SCREAMING_SNAKE_CASE__ ( _UpperCamelCase ): __SCREAMING_SNAKE_CASE = 'ClapFeatureExtractor' __SCREAMING_SNAKE_CASE = ('RobertaTokenizer', 'Robert...
701
'''simple docstring''' import gc import tempfile import unittest import numpy as np import torch from diffusers import VersatileDiffusionTextToImagePipeline from diffusers.utils.testing_utils import nightly, require_torch_gpu, torch_device a : Optional[Any] = False class ...
680
0
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available a : Optional[int] = { '''configuration_megatron_bert''': ['''MEGATRON_BERT_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''MegatronBertConfig'''], } ...
702
'''simple docstring''' import os import torch from ..logging import get_logger from .constants import FSDP_PYTORCH_VERSION, MODEL_NAME, OPTIMIZER_NAME from .versions import is_torch_version if is_torch_version('''>=''', FSDP_PYTORCH_VERSION): import torch.distributed.checkpoint as dist_...
680
0
'''simple docstring''' import unittest from transformers.utils.backbone_utils import ( BackboneMixin, get_aligned_output_features_output_indices, verify_out_features_out_indices, ) class SCREAMING_SNAKE_CASE__ ( unittest.TestCase ): def A ( self : ...
703
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : int , _UpperCAmelCase : int ) -> str: if not isinstance(_UpperCAmelCase , _UpperCAmelCase ): raise ValueError("iterations must be defined as integers" ) if not isinstance(_UpperCAmelCase , ...
680
0
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available a : List[Any] = { '''configuration_time_series_transformer''': [ '''TIME_SERIES_TRANSFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''TimeSeri...
704
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : int ) -> str: if number > 0: raise ValueError("input must be a negative integer" ) __snake_case = len(bin(_UpperCAmelCase )[3:] ) __snake_case = bin(abs(_UpperCAmelCase ) - (1 << binary_number_length)...
680
0
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available a : Union[str, Any] = { '''configuration_maskformer''': ['''MASKFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''MaskFormerC...
705
'''simple docstring''' from timeit import timeit def __UpperCAmelCase ( _UpperCAmelCase : int ) -> int: if number < 0: raise ValueError("the value of input must not be negative" ) __snake_case = 0 while number: number &= number - 1 result += 1 r...
680
0
'''simple docstring''' from ..utils import DummyObject, requires_backends class SCREAMING_SNAKE_CASE__ ( metaclass=_UpperCamelCase ): __SCREAMING_SNAKE_CASE = ["torch", "transformers", "onnx"] def __init__( self : int , *a_ : Any , **a_ : Union[str, A...
706
'''simple docstring''' import tempfile import unittest from make_student import create_student_by_copying_alternating_layers from transformers import AutoConfig from transformers.file_utils import cached_property from transformers.testing_utils import require_torch a : Dict = '''...
680
0
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : Tuple , _UpperCAmelCase : Tuple ) -> None: __snake_case = len(lowercase_ ) print("The following activities are selected:" ) # The first activity is always selected __snake_case = 0 pri...
707
'''simple docstring''' import argparse import glob import logging import os import time from argparse import Namespace import numpy as np import torch from lightning_base import BaseTransformer, add_generic_args, generic_train from torch.utils.data import DataLoader, TensorDataset from transf...
680
0
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : Dict = 10**12 ) -> int: __snake_case = 1 __snake_case = 0 __snake_case = 1 __snake_case = 1 while numerator <= 2 * min_total - 1: prev_numerator += 2 * numerator numerator +...
708
'''simple docstring''' import pytest import datasets.config from datasets.utils.info_utils import is_small_dataset @pytest.mark.parametrize("dataset_size" , [None, 4_00 * 2**20, 6_00 * 2**20] ) @pytest.mark.parametrize("input_in_memory_max_size" , ["default", 0, 1_00 * 2**20, 9_00 *...
680
0
'''simple docstring''' from __future__ import annotations def __UpperCAmelCase ( _UpperCAmelCase : str , _UpperCAmelCase : Union[str, Any] ) -> List[str]: print(F'''Vertex\tShortest Distance from vertex {src}''' ) for i, d in enumerate(_UpperCAmelCase ): pri...
709
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : float ) -> float: if edge <= 0 or not isinstance(_UpperCAmelCase , _UpperCAmelCase ): raise ValueError("Length must be a positive." ) return 3 * ((25 + 10 * (5 ** (1 / 2))) ** (1 / 2)) * (edge**2) d...
680
0
'''simple docstring''' import copy import os from typing import Union from ...configuration_utils import PretrainedConfig from ...utils import logging a = logging.get_logger(__name__) a = { '''BridgeTower/bridgetower-base''': '''https://huggingface.co/BridgeTower/bridge...
710
'''simple docstring''' from math import atan, cos, radians, sin, tan from .haversine_distance import haversine_distance a : Any = 6_378_137.0 a : List[Any] = 6_356_752.314_245 a : Dict = 6_378_137 def __UpperCAmelCase ( _UpperCAmelCase : float...
680
0
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_tf_available, is_tokenizers_available, is_torch_available, ) a : Union[str, Any] = { 'configuration_whis...
711
'''simple docstring''' import math import sys import cva import numpy as np def __UpperCAmelCase ( _UpperCAmelCase : np.ndarray , _UpperCAmelCase : float ) -> np.ndarray: # For applying gaussian function for each element in matrix. __snake_case = math.sqrt...
680
0
'''simple docstring''' import os import unittest from transformers.models.cpmant.tokenization_cpmant import VOCAB_FILES_NAMES, CpmAntTokenizer from transformers.testing_utils import require_jieba, tooslow from ...test_tokenization_common import TokenizerTesterMixin @require_jieba class SCREAM...
712
'''simple docstring''' class SCREAMING_SNAKE_CASE__ : def __init__( self : Any , a_ : Dict , a_ : Union[str, Any] , a_ : Tuple ): """simple docstring""" __snake_case = name __snake_case = value __snak...
680
0
'''simple docstring''' import argparse import torch from transformers import BlenderbotConfig, BlenderbotForConditionalGeneration from transformers.utils import logging logging.set_verbosity_info() a : Dict = logging.get_logger(__name__) a : Optional[int] = [ ...
713
'''simple docstring''' import os from math import logaa def __UpperCAmelCase ( _UpperCAmelCase : str = "base_exp.txt" ) -> int: __snake_case = 0 __snake_case = 0 for i, line in enumerate(open(os.path.join(os.path.dirname(_UpperCAmelCase ) , _UpperCAmelCase )...
680
0
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase ) -> List[Any]: return "".join([hex(a__ )[2:].zfill(2 ).upper() for byte in list(a__ )] ) def __UpperCAmelCase ( _UpperCAmelCase ) -> int: if (len(a__ ) % 2) != 0: raise ValueError( ...
714
'''simple docstring''' from typing import List, Optional from tokenizers import ByteLevelBPETokenizer from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import logging from .tokenization_blenderbot_small import BlenderbotSmallTokenizer a : List[Any] = log...
680
0
'''simple docstring''' from collections import namedtuple import requests from lxml import html # type: ignore a : Dict = namedtuple('''covid_data''', '''cases deaths recovered''') def __UpperCAmelCase ( _UpperCAmelCase : str = "https://www.worldometers.info/coronavirus/" ) -> Op...
715
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_torch_available, ) a : str = { '''configuration_gpt_bigcode''': ['''GPT_BIGCODE_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''GPTBigCodeConfig'''],...
680
0
'''simple docstring''' import heapq import sys import numpy as np a : List[Any] = tuple[int, int] class SCREAMING_SNAKE_CASE__ : def __init__( self : str ): """simple docstring""" __snake_case = [] __snake_ca...
716
'''simple docstring''' # HF Trainer benchmarking tool # # This tool can be used to run and compare multiple dimensions of the HF Trainers args. # # It then prints a report once in github format with all the information that needs to be shared # with others and second time in a console-friendly format,...
680
0
'''simple docstring''' import os import textwrap import pyarrow as pa import pytest from datasets import ClassLabel, Features, Image from datasets.packaged_modules.csv.csv import Csv from ..utils import require_pil @pytest.fixture def __UpperCAmelCase ( _UpperCAmelCase : str )...
717
'''simple docstring''' import pytest from datasets.parallel import ParallelBackendConfig, parallel_backend from datasets.utils.py_utils import map_nested from .utils import require_dill_gt_0_3_2, require_joblibspark, require_not_windows def __UpperCAmelCase ( _UpperCAmelCase : Dict ...
680
0
'''simple docstring''' import os import unittest from transformers import FunnelTokenizer, FunnelTokenizerFast from transformers.models.funnel.tokenization_funnel import VOCAB_FILES_NAMES from transformers.testing_utils import require_tokenizers from ...test_tokenization_common import TokenizerTesterMixin @r...
718
'''simple docstring''' from collections import OrderedDict from typing import Mapping from packaging import version from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging a : Union[str, Any] = logging.get_logger(__name__) ...
680
0
'''simple docstring''' import tempfile import unittest from make_student import create_student_by_copying_alternating_layers from transformers import AutoConfig from transformers.file_utils import cached_property from transformers.testing_utils import require_torch a : Optional[int] ...
719
'''simple docstring''' from collections import OrderedDict from typing import Mapping from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging a : Union[str, Any] = logging.get_logger(__name__) a : List[Any] = { ...
680
0
import warnings from ...utils import logging from .image_processing_clip import CLIPImageProcessor a : List[Any] = logging.get_logger(__name__) class SCREAMING_SNAKE_CASE__ ( _UpperCamelCase ): def __init__( self : Union[str, Any] , *a_ : Dict ...
720
'''simple docstring''' import logging import torch from torch import nn from torch.nn import CrossEntropyLoss, MSELoss from transformers.file_utils import add_start_docstrings, add_start_docstrings_to_model_forward from transformers.models.bert.modeling_bert import ( BERT_INPUTS_DOCSTRING, ...
680
0
'''simple docstring''' import unittest from transformers import JukeboxTokenizer from transformers.testing_utils import require_torch class SCREAMING_SNAKE_CASE__ ( unittest.TestCase ): __SCREAMING_SNAKE_CASE = JukeboxTokenizer __SCREAMING_SNAKE_CASE = { 'artist'...
721
'''simple docstring''' import inspect import unittest from transformers import DPTConfig from transformers.file_utils import is_torch_available, is_vision_available from transformers.models.auto import get_values from transformers.testing_utils import require_torch, require_vision, slow, torch_devic...
680
0
'''simple docstring''' from typing import TYPE_CHECKING # rely on isort to merge the imports from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available a : Dict = { """configuration_cpmant""": ["""CPMANT_PRETRAINED_CONFIG_ARCHIVE...
700
'''simple docstring''' import copy from dataclasses import dataclass from pathlib import Path from typing import Dict, Optional, Union @dataclass class SCREAMING_SNAKE_CASE__ : __SCREAMING_SNAKE_CASE = None __SCREAMING_SNAKE_CASE = False __SCREAMING_SNAKE_CASE = ...
680
0
'''simple docstring''' import numpy as np class SCREAMING_SNAKE_CASE__ : def __init__( self : Any ): """simple docstring""" __snake_case = (0, 0) __snake_case = None __snake_case = 0 __snake_case = 0 ...
701
'''simple docstring''' import gc import tempfile import unittest import numpy as np import torch from diffusers import VersatileDiffusionTextToImagePipeline from diffusers.utils.testing_utils import nightly, require_torch_gpu, torch_device a : Optional[Any] = False class ...
680
0
'''simple docstring''' from __future__ import annotations a : List[str] = 8.988e9 # units = N * m^s * C^-2 def __UpperCAmelCase ( _UpperCAmelCase : float , _UpperCAmelCase : float , _UpperCAmelCase : float , _UpperCAmelCase : floa...
702
'''simple docstring''' import os import torch from ..logging import get_logger from .constants import FSDP_PYTORCH_VERSION, MODEL_NAME, OPTIMIZER_NAME from .versions import is_torch_version if is_torch_version('''>=''', FSDP_PYTORCH_VERSION): import torch.distributed.checkpoint as dist_...
680
0
'''simple docstring''' import math def __UpperCAmelCase ( _UpperCAmelCase : int , _UpperCAmelCase : Union[str, Any] = 0 , _UpperCAmelCase : int = 0 ) -> list: __snake_case = end or len(SCREAMING_SNAKE_CASE_ ) for i in range(SCREAMING_SNAKE_CASE_ ,...
703
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : int , _UpperCAmelCase : int ) -> str: if not isinstance(_UpperCAmelCase , _UpperCAmelCase ): raise ValueError("iterations must be defined as integers" ) if not isinstance(_UpperCAmelCase , ...
680
0
'''simple docstring''' import os import unittest from transformers import BatchEncoding from transformers.models.bert.tokenization_bert import ( BasicTokenizer, WordpieceTokenizer, _is_control, _is_punctuation, _is_whitespace, ) from transformers.models.prophetnet.tokenization_prophetnet impo...
704
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : int ) -> str: if number > 0: raise ValueError("input must be a negative integer" ) __snake_case = len(bin(_UpperCAmelCase )[3:] ) __snake_case = bin(abs(_UpperCAmelCase ) - (1 << binary_number_length)...
680
0
'''simple docstring''' import pytest import datasets # Import fixture modules as plugins a : Tuple = ['''tests.fixtures.files''', '''tests.fixtures.hub''', '''tests.fixtures.fsspec'''] def __UpperCAmelCase ( _UpperCAmelCase : str , _UpperCAmelCase : int )...
705
'''simple docstring''' from timeit import timeit def __UpperCAmelCase ( _UpperCAmelCase : int ) -> int: if number < 0: raise ValueError("the value of input must not be negative" ) __snake_case = 0 while number: number &= number - 1 result += 1 r...
680
0
'''simple docstring''' from copy import deepcopy import torch import torch.nn.functional as F from torch.optim import AdamW from torch.optim.lr_scheduler import LambdaLR from torch.utils.data import DataLoader from accelerate.accelerator import Accelerator from accelerate.state import GradientState ...
706
'''simple docstring''' import tempfile import unittest from make_student import create_student_by_copying_alternating_layers from transformers import AutoConfig from transformers.file_utils import cached_property from transformers.testing_utils import require_torch a : Dict = '''...
680
0
'''simple docstring''' import functools import operator from ...configuration_utils import PretrainedConfig from ...utils import logging a : int = logging.get_logger(__name__) a : Optional[Any] = { '''facebook/wav2vec2-base-960h''': '''https://huggingface.co/face...
707
'''simple docstring''' import argparse import glob import logging import os import time from argparse import Namespace import numpy as np import torch from lightning_base import BaseTransformer, add_generic_args, generic_train from torch.utils.data import DataLoader, TensorDataset from transf...
680
0
'''simple docstring''' from copy import deepcopy import torch import torch.nn.functional as F from torch.optim import AdamW from torch.optim.lr_scheduler import LambdaLR from torch.utils.data import DataLoader from accelerate.accelerator import Accelerator from accelerate.state import GradientSt...
708
'''simple docstring''' import pytest import datasets.config from datasets.utils.info_utils import is_small_dataset @pytest.mark.parametrize("dataset_size" , [None, 4_00 * 2**20, 6_00 * 2**20] ) @pytest.mark.parametrize("input_in_memory_max_size" , ["default", 0, 1_00 * 2**20, 9_00 *...
680
0
'''simple docstring''' # This code is adapted from OpenAI's release # https://github.com/openai/human-eval/blob/master/human_eval/execution.py import contextlib import faulthandler import io import multiprocessing import os import platform import signal import tempfile def __UpperCAmelCase ...
709
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : float ) -> float: if edge <= 0 or not isinstance(_UpperCAmelCase , _UpperCAmelCase ): raise ValueError("Length must be a positive." ) return 3 * ((25 + 10 * (5 ** (1 / 2))) ** (1 / 2)) * (edge**2) d...
680
0
'''simple docstring''' import functools import operator from ...configuration_utils import PretrainedConfig from ...utils import logging a = logging.get_logger(__name__) a = { 'asapp/sew-d-tiny-100k': 'https://huggingface.co/asapp/sew-d-tiny-100k/resolve/main/config.json...
710
'''simple docstring''' from math import atan, cos, radians, sin, tan from .haversine_distance import haversine_distance a : Any = 6_378_137.0 a : List[Any] = 6_356_752.314_245 a : Dict = 6_378_137 def __UpperCAmelCase ( _UpperCAmelCase : float...
680
0
'''simple docstring''' import os import re import unicodedata from shutil import copyfile from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union import sentencepiece as spm from ...tokenization_utils import PreTrainedTokenizer from ...utils import is_torch_available, logging ...
711
'''simple docstring''' import math import sys import cva import numpy as np def __UpperCAmelCase ( _UpperCAmelCase : np.ndarray , _UpperCAmelCase : float ) -> np.ndarray: # For applying gaussian function for each element in matrix. __snake_case = math.sqrt...
680
0
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : int=2_81_23 ) -> Optional[int]: __snake_case = [1] * (limit + 1) for i in range(2 , int(limit**0.5 ) + 1 ): sum_divs[i * i] += i for k in range(i + 1 , limit // i + 1 ): sum_divs[...
712
'''simple docstring''' class SCREAMING_SNAKE_CASE__ : def __init__( self : Any , a_ : Dict , a_ : Union[str, Any] , a_ : Tuple ): """simple docstring""" __snake_case = name __snake_case = value __snak...
680
0
'''simple docstring''' from .glue import glue_convert_examples_to_features, glue_output_modes, glue_processors, glue_tasks_num_labels from .squad import SquadExample, SquadFeatures, SquadVaProcessor, SquadVaProcessor, squad_convert_examples_to_features from .utils import DataProcessor, InputExample, Inpu...
713
'''simple docstring''' import os from math import logaa def __UpperCAmelCase ( _UpperCAmelCase : str = "base_exp.txt" ) -> int: __snake_case = 0 __snake_case = 0 for i, line in enumerate(open(os.path.join(os.path.dirname(_UpperCAmelCase ) , _UpperCAmelCase )...
680
0
'''simple docstring''' import argparse import os from pathlib import Path import fairseq import torch from packaging import version from torch import nn from transformers import ( BartConfig, BartForConditionalGeneration, BartForSequenceClassification, BartModel, BartTok...
714
'''simple docstring''' from typing import List, Optional from tokenizers import ByteLevelBPETokenizer from ...tokenization_utils_fast import PreTrainedTokenizerFast from ...utils import logging from .tokenization_blenderbot_small import BlenderbotSmallTokenizer a : List[Any] = log...
680
0
'''simple docstring''' import torch from diffusers import CMStochasticIterativeScheduler from .test_schedulers import SchedulerCommonTest class SCREAMING_SNAKE_CASE__ ( _UpperCamelCase ): __SCREAMING_SNAKE_CASE = (CMStochasticIterativeScheduler,) __SCREAMING_SNAKE_CASE = 10 def...
715
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_torch_available, ) a : str = { '''configuration_gpt_bigcode''': ['''GPT_BIGCODE_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''GPTBigCodeConfig'''],...
680
0
'''simple docstring''' import functools def A__ ( _UpperCAmelCase : Any , _UpperCAmelCase : Any ) -> int: if not isinstance(a__ , a__ ) or not all(isinstance(a__ , a__ ) for day in days ): raise ValueError("The parameter days should be a list of ...
716
'''simple docstring''' # HF Trainer benchmarking tool # # This tool can be used to run and compare multiple dimensions of the HF Trainers args. # # It then prints a report once in github format with all the information that needs to be shared # with others and second time in a console-friendly format,...
680
0
'''simple docstring''' import time import warnings from abc import ABC from copy import deepcopy from typing import Optional import torch from ..utils import add_start_docstrings, logging a : int = logging.get_logger(__name__) a : Any = r'''\n Args:\n ...
717
'''simple docstring''' import pytest from datasets.parallel import ParallelBackendConfig, parallel_backend from datasets.utils.py_utils import map_nested from .utils import require_dill_gt_0_3_2, require_joblibspark, require_not_windows def __UpperCAmelCase ( _UpperCAmelCase : Dict ...
680
0
'''simple docstring''' from typing import List, Optional, Union import numpy as np from ....audio_utils import mel_filter_bank, optimal_fft_length, spectrogram, window_function from ....feature_extraction_sequence_utils import SequenceFeatureExtractor from ....feature_extraction_utils import BatchFeature from ....
718
'''simple docstring''' from collections import OrderedDict from typing import Mapping from packaging import version from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging a : Union[str, Any] = logging.get_logger(__name__) ...
680
0
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available, is_vision_available, ) a : str = { """configuration_layoutlmv2""": ["""LAYOUTLMV2_PRET...
719
'''simple docstring''' from collections import OrderedDict from typing import Mapping from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfig from ...utils import logging a : Union[str, Any] = logging.get_logger(__name__) a : List[Any] = { ...
680
0
from typing import Any, Callable, Dict, List, Optional, Union import torch from transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer from diffusers import ( AutoencoderKL, DDIMScheduler, DiffusionPipeline, LMSDiscreteScheduler, PNDMScheduler, StableDiffusio...
720
'''simple docstring''' import logging import torch from torch import nn from torch.nn import CrossEntropyLoss, MSELoss from transformers.file_utils import add_start_docstrings, add_start_docstrings_to_model_forward from transformers.models.bert.modeling_bert import ( BERT_INPUTS_DOCSTRING, ...
680
0
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available a : Optional[int] = { """configuration_pix2struct""": [ """PIX2STRUCT_PRETRAINED_CONFIG_ARCHIVE_MAP""", ...
721
'''simple docstring''' import inspect import unittest from transformers import DPTConfig from transformers.file_utils import is_torch_available, is_vision_available from transformers.models.auto import get_values from transformers.testing_utils import require_torch, require_vision, slow, torch_devic...
680
0
'''simple docstring''' import argparse import json from pathlib import Path import requests import torch from huggingface_hub import hf_hub_download from PIL import Image from transformers import DetrConfig, DetrForObjectDetection, DetrForSegmentation, DetrImageProcessor, ResNetConfig from tran...
700
'''simple docstring''' import copy from dataclasses import dataclass from pathlib import Path from typing import Dict, Optional, Union @dataclass class SCREAMING_SNAKE_CASE__ : __SCREAMING_SNAKE_CASE = None __SCREAMING_SNAKE_CASE = False __SCREAMING_SNAKE_CASE = ...
680
0
'''simple docstring''' import json import os from pathlib import Path import pytest from datasets.download.download_config import DownloadConfig from datasets.download.download_manager import DownloadManager from datasets.utils.file_utils import hash_url_to_filename a : Optional[Any] ...
701
'''simple docstring''' import gc import tempfile import unittest import numpy as np import torch from diffusers import VersatileDiffusionTextToImagePipeline from diffusers.utils.testing_utils import nightly, require_torch_gpu, torch_device a : Optional[Any] = False class ...
680
0
'''simple docstring''' import argparse import json import os import torch from transformers.file_utils import has_file from diffusers import UNetaDConditionModel, UNetaDModel a : Any = False a : Tuple = True a : Optional[int] = False if __name__ =...
702
'''simple docstring''' import os import torch from ..logging import get_logger from .constants import FSDP_PYTORCH_VERSION, MODEL_NAME, OPTIMIZER_NAME from .versions import is_torch_version if is_torch_version('''>=''', FSDP_PYTORCH_VERSION): import torch.distributed.checkpoint as dist_...
680
0
'''simple docstring''' from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_tf_available, is_torch_available, is_vision_available, ) a : Optional[Any] = { "configuration_convnext": ["CONVNEXT_PRETRAINED_C...
703
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : int , _UpperCAmelCase : int ) -> str: if not isinstance(_UpperCAmelCase , _UpperCAmelCase ): raise ValueError("iterations must be defined as integers" ) if not isinstance(_UpperCAmelCase , ...
680
0
'''simple docstring''' from __future__ import annotations import random # Maximum size of the population. Bigger could be faster but is more memory expensive. a : List[Any] = 200 # Number of elements selected in every generation of evolution. The selection takes # place from best to worst of that ...
704
'''simple docstring''' def __UpperCAmelCase ( _UpperCAmelCase : int ) -> str: if number > 0: raise ValueError("input must be a negative integer" ) __snake_case = len(bin(_UpperCAmelCase )[3:] ) __snake_case = bin(abs(_UpperCAmelCase ) - (1 << binary_number_length)...
680
0