| column | type | value range |
|---|---|---|
| `code` | string | 81 – 54k characters |
| `code_codestyle` | int64 | 0 – 721 |
| `style_context` | string | 91 – 41.9k characters |
| `style_context_codestyle` | int64 | 0 – 699 |
| `label` | int64 | 0 – 1 |
"""simple docstring"""
def __A ( a_ : int )-> int:
'''simple docstring'''
if n == 1 or not isinstance(a_ , a_ ):
return 0
elif n == 2:
return 1
else:
SCREAMING_SNAKE_CASE : Optional[Any] = [0, 1]
for i in range(... | 707 |
"""simple docstring"""
import os
import sys
lowerCamelCase__ : List[Any] = os.path.join(os.path.dirname(__file__), "src")
sys.path.append(SRC_DIR)
from transformers import (
AutoConfig,
AutoModel,
AutoModelForCausalLM,
AutoModelForMaskedLM,
AutoModelForQuestionAnswering,
... | 18 | 0 |
from ...configuration_utils import PretrainedConfig
from ...utils import logging
lowerCamelCase__ : int = logging.get_logger(__name__)
lowerCamelCase__ : List[Any] = {
"google/canine-s": "https://huggingface.co/google/canine-s/resolve/main/config.json",
# See all CANINE m... | 708 |
"""simple docstring"""
import math
from typing import Optional
import numpy as np
from ...configuration_utils import PretrainedConfig
from ...utils import logging
lowerCamelCase__ : Tuple = logging.get_logger(__name__)
lowerCamelCase__ : Any = {
"facebook/encodec_24kh... | 18 | 0 |
"""simple docstring"""
from collections import OrderedDict
from typing import Mapping
from packaging import version
from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...utils import logging
lowerCamelCase__ : List[str] = logging.get_logger(__name__)
lo... | 709 |
"""simple docstring"""
import copy
import unittest
from transformers.models.auto import get_values
from transformers.testing_utils import require_torch, slow, torch_device
from transformers.utils import cached_property, is_torch_available, is_vision_available
from ...test_configuration_common import ConfigTest... | 18 | 0 |
"""simple docstring"""
import logging
import os
import threading
import time
try:
import warnings
except ImportError:
lowerCamelCase__ : Optional[int] = None
try:
import msvcrt
except ImportError:
lowerCamelCase__ : Any = None
try:
import fcntl
exc... | 710 |
"""simple docstring"""
import logging
import os
import random
import sys
from dataclasses import dataclass, field
from typing import Optional
import datasets
import numpy as np
import pandas as pd
from datasets import load_dataset
import transformers
from transformers import (
AutoConfig,
BartForSequen... | 18 | 0 |
"""simple docstring"""
import timeit
import numpy as np
import datasets
from datasets.arrow_writer import ArrowWriter
from datasets.features.features import _ArrayXD
def __A ( a_ : Optional[int] )-> List[str]:
'''simple docstring'''
def wrapper(*a_ : List[str] ,... | 711 |
"""simple docstring"""
import collections
import inspect
import unittest
from transformers import SwinvaConfig
from transformers.testing_utils import require_torch, require_vision, slow, torch_device
from transformers.utils import cached_property, is_torch_available, is_vision_available
from ...test_configurat... | 18 | 0 |
"""simple docstring"""
def __A ( a_ : int = 1 , a_ : int = 10_00 )-> int:
'''simple docstring'''
SCREAMING_SNAKE_CASE : List[Any] = 1
SCREAMING_SNAKE_CASE : List[str] = 0
for divide_by_number in range(a_ , digit ... | 712 |
"""simple docstring"""
from collections import OrderedDict
from typing import Any, Mapping, Optional
from ... import PreTrainedTokenizer
from ...configuration_utils import PretrainedConfig
from ...file_utils import TensorType, is_torch_available
from ...onnx import OnnxConfig, OnnxConfigWithPast, OnnxSeqaSeqCon... | 18 | 0 |
"""simple docstring"""
def __A ( a_ : Any )-> List[str]:
'''simple docstring'''
SCREAMING_SNAKE_CASE : Optional[int] = [0] * len(a_ )
SCREAMING_SNAKE_CASE : List[Any] = []
SCREAMING_SNAKE_CASE : List[Any] = []
SCREAMING_SNAKE_C... | 713 |
"""simple docstring"""
from collections import OrderedDict
from typing import TYPE_CHECKING, Any, Mapping, Optional
from packaging import version
from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...onnx.utils import compute_effective_axis_dimension
from ...utils import lo... | 18 | 0 |
"""simple docstring"""
import argparse
import json
import requests
import timm
import torch
from huggingface_hub import hf_hub_download
from PIL import Image
from transformers import AutoImageProcessor, SwinConfig, SwinForImageClassification
def __A ( a_ : str )-> Any:
'''simp... | 714 |
"""simple docstring"""
import math
def __A ( a_ : list , a_ : int )-> int:
'''simple docstring'''
SCREAMING_SNAKE_CASE : Optional[int] = len(a_ )
SCREAMING_SNAKE_CASE : Optional[Any] = int(math.floor(math.sqrt(a_ ) ) )
... | 18 | 0 |
"""simple docstring"""
from typing import Mapping
from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxSeqaSeqConfigWithPast
from ...utils import logging
lowerCamelCase__ : Dict = logging.get_logger(__name__)
lowerCamelCase__ : Any = {
"t5-small"... | 715 |
"""simple docstring"""
from sklearn.metrics import fa_score
import datasets
lowerCamelCase__ : List[Any] = "\nThe F1 score is the harmonic mean of the precision and recall. It can be computed with the equation:\nF1 = 2 * (precision * recall) / (precision + recall)\n"
lowerCamelCase__ : ... | 18 | 0 |
"""simple docstring"""
from collections import UserDict
from typing import Union
import numpy as np
import requests
from ..utils import (
add_end_docstrings,
logging,
)
from .audio_classification import ffmpeg_read
from .base import PIPELINE_INIT_ARGS, Pipeline
lowerCamelCase__ : Optional[Any] ... | 716 |
"""simple docstring"""
from __future__ import annotations
from fractions import Fraction
def __A ( a_ : int , a_ : int )-> bool:
'''simple docstring'''
return (
num != den and num % 10 == den // 10 and (num // 10) / (den % 10) == num / den
)
def __A... | 18 | 0 |
"""simple docstring"""
import argparse
import json
from pathlib import Path
import torch
import torchaudio
from datasets import load_dataset
from huggingface_hub import hf_hub_download
from transformers import ASTConfig, ASTFeatureExtractor, ASTForAudioClassification
from transformers.utils i... | 717 |
"""simple docstring"""
from ...configuration_utils import PretrainedConfig
from ...utils import logging
from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices
lowerCamelCase__ : int = logging.get_logger(__name__)
class lowercase__( _Uppe... | 18 | 0 |
"""simple docstring"""
from collections import OrderedDict
from typing import Mapping
from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
lowerCamelCase__ : Any = {
"albert-base-v1": "https://huggingface.co/albert-base-v1/resolve/main/config.json",
"alb... | 718 |
"""simple docstring"""
import math
class lowercase__:
'''simple docstring'''
def __init__( self :Union[str, Any] , lowerCamelCase_ :List[str]=0 ) -> List[Any]: # a graph with Node 0,1,...,N-1
'''simple docstring'''
SCREAMING_SNAKE_CASE : Tuple = n... | 18 | 0 |
"""simple docstring"""
import datasets
from .evaluate import evaluate
lowerCamelCase__ : List[Any] = "\\n@inproceedings{Rajpurkar2016SQuAD10,\n title={SQuAD: 100, 000+ Questions for Machine Comprehension of Text},\n author={Pranav Rajpurkar and Jian Zhang and Konstantin Lopyrev and Percy L... | 719 |
"""simple docstring"""
from typing import TYPE_CHECKING
from ....utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available
lowerCamelCase__ : Tuple = {
"configuration_mctct": ["MCTCT_PRETRAINED_CONFIG_ARCHIVE_MAP", "MCTCTConfig"],
"feature_extraction_mctct": ["MCTC... | 18 | 0 |
"""simple docstring"""
import argparse
import os
import re
lowerCamelCase__ : List[str] = "src/transformers/models/auto"
# re pattern that matches mapping introductions:
# SUPER_MODEL_MAPPING_NAMES = OrderedDict or SUPER_MODEL_MAPPING = OrderedDict
lowerCamelCase__ : str ... | 720 |
"""simple docstring"""
import os
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
import torch
from torch import nn
from ...models.controlnet import ControlNetModel, ControlNetOutput
from ...models.modeling_utils import ModelMixin
from ...utils import logging
lowerCamelCase__ : List... | 18 | 0 |
"""simple docstring"""
import random
def __A ( a_ : int )-> bool:
'''simple docstring'''
SCREAMING_SNAKE_CASE : Optional[Any] = num - 1
SCREAMING_SNAKE_CASE : str = 0
while s % 2 == 0:
SCREAMING_SNAKE_CASE : Union[str, Any] ... | 721 |
"""simple docstring"""
from __future__ import annotations
from numpy import array, cos, cross, floataa, radians, sin
from numpy.typing import NDArray
def __A ( a_ : float , a_ : float , a_ : bool = False )-> list[float]:
'''simple docstring'''
if radian_... | 18 | 0 |
"""simple docstring"""
from __future__ import annotations
from numpy import array, cos, cross, floataa, radians, sin
from numpy.typing import NDArray
def __A ( a_ : float , a_ : float , a_ : bool = False )-> list[float]:
'''simple docstring'''
if radian_... | 700 |
"""simple docstring"""
from __future__ import annotations
import random
# Maximum size of the population. Bigger could be faster but is more memory expensive.
lowerCamelCase__ : Optional[Any] = 200
# Number of elements selected in every generation of evolution. The selection takes
# place fr... | 18 | 0 |
"""simple docstring"""
from ...configuration_utils import PretrainedConfig
from ...utils import logging
lowerCamelCase__ : Tuple = logging.get_logger(__name__)
lowerCamelCase__ : str = {
"studio-ousia/luke-base": "https://huggingface.co/studio-ousia/luke-base/resol... | 701 |
"""simple docstring"""
import argparse
import ast
import logging
import os
import sys
import pandas as pd
import torch
from tqdm import tqdm
from transformers import BartForConditionalGeneration, RagRetriever, RagSequenceForGeneration, RagTokenForGeneration
from transformers import logging as transformers_logg... | 18 | 0 |
"""simple docstring"""
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_tf_available,
is_tokenizers_available,
is_torch_available,
)
lowerCamelCase__ : List[str] = {
"configuration_deberta": ["DEBERTA_PRETRAI... | 702 |
"""simple docstring"""
import json
import os
from typing import Optional, Tuple
from ...tokenization_utils import PreTrainedTokenizer
from ...utils import logging
lowerCamelCase__ : List[str] = logging.get_logger(__name__)
lowerCamelCase__ : Optional[int] = {"vocab_file": ... | 18 | 0 |
import os
from collections import namedtuple
import pytest
from datasets import ClassLabel, Features, Sequence, Value
from datasets.commands.test import TestCommand
from datasets.info import DatasetInfo, DatasetInfosDict
lowerCamelCase__ : Union[str, Any] = namedtuple(
"_TestCommandArgs",
... | 703 |
"""simple docstring"""
from ...configuration_utils import PretrainedConfig
from ...utils import logging
lowerCamelCase__ : Tuple = logging.get_logger(__name__)
lowerCamelCase__ : str = {
"studio-ousia/luke-base": "https://huggingface.co/studio-ousia/luke-base/resolve/ma... | 18 | 0 |
"""simple docstring"""
import sys
from collections.abc import Mapping
from typing import TYPE_CHECKING
import numpy as np
import pyarrow as pa
from .. import config
from ..utils.py_utils import map_nested
from .formatting import TensorFormatter
if TYPE_CHECKING:
import torch
class lowercase__( Te... | 704 |
"""simple docstring"""
def __A ( a_ : list , a_ : int , a_ : int = 0 , a_ : int = 0 )-> int:
'''simple docstring'''
SCREAMING_SNAKE_CASE : str = right or len(a_ ) - 1
if left > right:
return -1
elif list_data[left] == key:
... | 18 | 0 |
"""simple docstring"""
import collections
import inspect
import unittest
from transformers import SwinvaConfig
from transformers.testing_utils import require_torch, require_vision, slow, torch_device
from transformers.utils import cached_property, is_torch_available, is_vision_available
from ...test_configurat... | 705 |
"""simple docstring"""
def __A ( a_ : int )-> list[int]:
'''simple docstring'''
if num <= 0:
raise ValueError('''Input must be a positive integer''' )
SCREAMING_SNAKE_CASE : Optional[int] = [True] * (num + 1)
SCREAMING_SNAKE_CASE : Optiona... | 18 | 0 |
"""simple docstring"""
import argparse
import json
import os
import re
import torch
from transformers import BloomConfig, BloomModel
from transformers.file_utils import CONFIG_NAME, WEIGHTS_NAME
from transformers.utils import logging
logging.set_verbosity_info()
lowerCamelCase__ : Tuple = ... | 706 |
"""simple docstring"""
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_tf_available,
is_tokenizers_available,
is_torch_available,
)
lowerCamelCase__ : Optional[Any] = {
"configuration_funnel": ["FUNNEL_PRETRAINED... | 18 | 0 |
"""simple docstring"""
from ..models.whisper import WhisperForConditionalGeneration, WhisperProcessor
from .base import PipelineTool
class lowercase__( _UpperCAmelCase ):
'''simple docstring'''
UpperCamelCase = """openai/whisper-base"""
UpperCamelCase = (
"""This ... | 707 |
"""simple docstring"""
import os
import sys
lowerCamelCase__ : List[Any] = os.path.join(os.path.dirname(__file__), "src")
sys.path.append(SRC_DIR)
from transformers import (
AutoConfig,
AutoModel,
AutoModelForCausalLM,
AutoModelForMaskedLM,
AutoModelForQuestionAnswering,
... | 18 | 0 |
import gc
import unittest
import torch
from parameterized import parameterized
from diffusers import AutoencoderKL
from diffusers.utils import floats_tensor, load_hf_numpy, require_torch_gpu, slow, torch_all_close, torch_device
from diffusers.utils.import_utils import is_xformers_available
from diffusers.utils.testi... | 708 |
"""simple docstring"""
import math
from typing import Optional
import numpy as np
from ...configuration_utils import PretrainedConfig
from ...utils import logging
lowerCamelCase__ : Tuple = logging.get_logger(__name__)
lowerCamelCase__ : Any = {
"facebook/encodec_24kh... | 18 | 0 |
"""simple docstring"""
from __future__ import annotations
from math import gcd
def __A ( a_ : int , a_ : int = 2 , a_ : int = 1 , a_ : int = 3 , )-> int | None:
'''simple docstring'''
if num < 2:
raise ValueError('''The input value cannot be le... | 709 |
"""simple docstring"""
import copy
import unittest
from transformers.models.auto import get_values
from transformers.testing_utils import require_torch, slow, torch_device
from transformers.utils import cached_property, is_torch_available, is_vision_available
from ...test_configuration_common import ConfigTest... | 18 | 0 |
"""simple docstring"""
from unittest import TestCase
from datasets import Dataset
from minhash_deduplication import deduplicate_dataset, make_duplicate_clusters
def __A ( )-> List[Any]:
'''simple docstring'''
SCREAMING_SNAKE_CASE : Optional[int] = {
'''repo_nam... | 710 |
"""simple docstring"""
import logging
import os
import random
import sys
from dataclasses import dataclass, field
from typing import Optional
import datasets
import numpy as np
import pandas as pd
from datasets import load_dataset
import transformers
from transformers import (
AutoConfig,
BartForSequen... | 18 | 0 |
"""simple docstring"""
from typing import TYPE_CHECKING
from ....utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available
lowerCamelCase__ : Tuple = {
"configuration_mctct": ["MCTCT_PRETRAINED_CONFIG_ARCHIVE_MAP", "MCTCTConfig"],
"feature_extraction_mctct": ["MCTC... | 711 |
"""simple docstring"""
import collections
import inspect
import unittest
from transformers import SwinvaConfig
from transformers.testing_utils import require_torch, require_vision, slow, torch_device
from transformers.utils import cached_property, is_torch_available, is_vision_available
from ...test_configurat... | 18 | 0 |
"""simple docstring"""
import copy
from typing import TYPE_CHECKING, Any, Mapping, Optional, OrderedDict
from packaging import version
from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...utils import logging
from ..auto.configuration_auto import AutoConfig... | 712 |
"""simple docstring"""
from collections import OrderedDict
from typing import Any, Mapping, Optional
from ... import PreTrainedTokenizer
from ...configuration_utils import PretrainedConfig
from ...file_utils import TensorType, is_torch_available
from ...onnx import OnnxConfig, OnnxConfigWithPast, OnnxSeqaSeqCon... | 18 | 0 |
"""simple docstring"""
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_tf_available,
is_tokenizers_available,
is_torch_available,
)
lowerCamelCase__ : Optional[Any] = {
"configuration_funnel": ["FUNNEL_PRETRAINED... | 713 |
"""simple docstring"""
from collections import OrderedDict
from typing import TYPE_CHECKING, Any, Mapping, Optional
from packaging import version
from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...onnx.utils import compute_effective_axis_dimension
from ...utils import lo... | 18 | 0 |
"""simple docstring"""
import random
from typing import Any
def __A ( a_ : list )-> list[Any]:
'''simple docstring'''
for _ in range(len(a_ ) ):
SCREAMING_SNAKE_CASE : Tuple = random.randint(0 , len(a_ ) - 1 )
SCREAMING_SNAKE_CASE ... | 714 |
"""simple docstring"""
import math
def __A ( a_ : list , a_ : int )-> int:
'''simple docstring'''
SCREAMING_SNAKE_CASE : Optional[int] = len(a_ )
SCREAMING_SNAKE_CASE : Optional[Any] = int(math.floor(math.sqrt(a_ ) ) )
... | 18 | 0 |
"""simple docstring"""
import logging
import os
import random
import sys
from dataclasses import dataclass, field
from typing import Optional
import datasets
import numpy as np
import pandas as pd
from datasets import load_dataset
import transformers
from transformers import (
AutoConfig,
BartForSequen... | 715 |
"""simple docstring"""
from sklearn.metrics import fa_score
import datasets
lowerCamelCase__ : List[Any] = "\nThe F1 score is the harmonic mean of the precision and recall. It can be computed with the equation:\nF1 = 2 * (precision * recall) / (precision + recall)\n"
lowerCamelCase__ : ... | 18 | 0 |
"""simple docstring"""
from ...configuration_utils import PretrainedConfig
from ...utils import logging
from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices
lowerCamelCase__ : int = logging.get_logger(__name__)
class lowercase__( _Uppe... | 716 |
"""simple docstring"""
from __future__ import annotations
from fractions import Fraction
def __A ( a_ : int , a_ : int )-> bool:
'''simple docstring'''
return (
num != den and num % 10 == den // 10 and (num // 10) / (den % 10) == num / den
)
def __A... | 18 | 0 |
"""simple docstring"""
from typing import List, Optional, Union
import torch
from ...models import UNetaDConditionModel, VQModel
from ...pipelines import DiffusionPipeline
from ...pipelines.pipeline_utils import ImagePipelineOutput
from ...schedulers import DDPMScheduler
from ...utils import (
... | 717 |
"""simple docstring"""
from ...configuration_utils import PretrainedConfig
from ...utils import logging
from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices
lowerCamelCase__ : int = logging.get_logger(__name__)
class lowercase__( _Uppe... | 18 | 0 |
"""simple docstring"""
import json
import sys
import tempfile
import unittest
from pathlib import Path
import transformers
from transformers import (
CONFIG_MAPPING,
FEATURE_EXTRACTOR_MAPPING,
AutoConfig,
AutoFeatureExtractor,
WavaVecaConfig,
WavaVecaFeatureExtractor,
)
from transformers... | 718 |
"""simple docstring"""
import math
class lowercase__:
'''simple docstring'''
def __init__( self :Union[str, Any] , lowerCamelCase_ :List[str]=0 ) -> List[Any]: # a graph with Node 0,1,...,N-1
'''simple docstring'''
SCREAMING_SNAKE_CASE : Tuple = n... | 18 | 0 |
"""simple docstring"""
from typing import TYPE_CHECKING
from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_torch_available
lowerCamelCase__ : Dict = {
"configuration_gpt_neo": ["GPT_NEO_PRETRAINED_CONFIG_ARCHIVE_MAP", "GPTNeoConfig", "GPTNeoOnnxConfig... | 719 |
"""simple docstring"""
from typing import TYPE_CHECKING
from ....utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available
lowerCamelCase__ : Tuple = {
"configuration_mctct": ["MCTCT_PRETRAINED_CONFIG_ARCHIVE_MAP", "MCTCTConfig"],
"feature_extraction_mctct": ["MCTC... | 18 | 0 |
"""simple docstring"""
import tempfile
import torch
from diffusers import (
DEISMultistepScheduler,
DPMSolverMultistepScheduler,
DPMSolverSinglestepScheduler,
UniPCMultistepScheduler,
)
from .test_schedulers import SchedulerCommonTest
class lowercase__( _UpperCAmelCase ):
'''simp... | 720 |
"""simple docstring"""
import os
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
import torch
from torch import nn
from ...models.controlnet import ControlNetModel, ControlNetOutput
from ...models.modeling_utils import ModelMixin
from ...utils import logging
lowerCamelCase__ : List... | 18 | 0 |
"""simple docstring"""
import os
from argparse import ArgumentParser, Namespace
from ..data import SingleSentenceClassificationProcessor as Processor
from ..pipelines import TextClassificationPipeline
from ..utils import is_tf_available, is_torch_available, logging
from . import BaseTransformersCLICommand
if ... | 721 |
"""simple docstring"""
from __future__ import annotations
from numpy import array, cos, cross, floataa, radians, sin
from numpy.typing import NDArray
def __A ( a_ : float , a_ : float , a_ : bool = False )-> list[float]:
'''simple docstring'''
if radian_... | 18 | 0 |
"""simple docstring"""
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_flax_available,
is_tf_available,
is_tokenizers_available,
is_torch_available,
)
lowerCamelCase__ : List[str] = {
"configuration_whisper":... | 700 |
"""simple docstring"""
from __future__ import annotations
import random
# Maximum size of the population. Bigger could be faster but is more memory expensive.
lowerCamelCase__ : Optional[Any] = 200
# Number of elements selected in every generation of evolution. The selection takes
# place fr... | 18 | 0 |
"""simple docstring"""
import os
import unicodedata
from shutil import copyfile
from typing import Any, Dict, List, Optional, Tuple
import sentencepiece as spm
from ...tokenization_utils import AddedToken, PreTrainedTokenizer
from ...utils import SPIECE_UNDERLINE, logging
lowerCamelCase__ : str ... | 701 |
"""simple docstring"""
import argparse
import ast
import logging
import os
import sys
import pandas as pd
import torch
from tqdm import tqdm
from transformers import BartForConditionalGeneration, RagRetriever, RagSequenceForGeneration, RagTokenForGeneration
from transformers import logging as transformers_logg... | 18 | 0 |
"""simple docstring"""
import argparse
import requests
import torch
from PIL import Image
from transformers import CLIPProcessor, GroupViTConfig, GroupViTModel
def __A ( a_ : List[str] )-> Tuple:
'''simple docstring'''
if "img_encoder.pos_embed" in name:
SCREA... | 702 |
"""simple docstring"""
import json
import os
from typing import Optional, Tuple
from ...tokenization_utils import PreTrainedTokenizer
from ...utils import logging
lowerCamelCase__ : List[str] = logging.get_logger(__name__)
lowerCamelCase__ : Optional[int] = {"vocab_file": ... | 18 | 0 |
import warnings
from ...utils import logging
from .image_processing_layoutlmva import LayoutLMvaImageProcessor
lowerCamelCase__ : Tuple = logging.get_logger(__name__)
class lowercase__( _UpperCAmelCase ):
'''simple docstring'''
def __init__( self :List[Any] , *lowerCamelCa... | 703 |
"""simple docstring"""
from ...configuration_utils import PretrainedConfig
from ...utils import logging
lowerCamelCase__ : Tuple = logging.get_logger(__name__)
lowerCamelCase__ : str = {
"studio-ousia/luke-base": "https://huggingface.co/studio-ousia/luke-base/resolve/ma... | 18 | 0 |
"""simple docstring"""
import pickle
import numpy as np
from matplotlib import pyplot as plt
class lowercase__:
'''simple docstring'''
def __init__( self :Dict , lowerCamelCase_ :Optional[Any] , lowerCamelCase_ :Optional[int] , lowerCamelCase_ :int , lowerCamelCase_ :Optional[int]... | 704 |
"""simple docstring"""
def __A ( a_ : list , a_ : int , a_ : int = 0 , a_ : int = 0 )-> int:
'''simple docstring'''
SCREAMING_SNAKE_CASE : str = right or len(a_ ) - 1
if left > right:
return -1
elif list_data[left] == key:
... | 18 | 0 |
"""simple docstring"""
from collections import namedtuple
lowerCamelCase__ : List[str] = namedtuple("from_to", "from_ to")
lowerCamelCase__ : Any = {
"cubicmeter": from_to(1, 1),
"litre": from_to(0.0_0_1, 1000),
"kilolitre": from_to(1, 1),
"gallon": from_to(0... | 705 |
"""simple docstring"""
def __A ( a_ : int )-> list[int]:
'''simple docstring'''
if num <= 0:
raise ValueError('''Input must be a positive integer''' )
SCREAMING_SNAKE_CASE : Optional[int] = [True] * (num + 1)
SCREAMING_SNAKE_CASE : Optiona... | 18 | 0 |
"""simple docstring"""
from __future__ import annotations
from math import pi, sqrt
def __A ( a_ : float , a_ : float )-> tuple:
'''simple docstring'''
if inductance <= 0:
raise ValueError('''Inductance cannot be 0 or negative''' )
elif capacitance <=... | 706 |
"""simple docstring"""
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_tf_available,
is_tokenizers_available,
is_torch_available,
)
lowerCamelCase__ : Optional[Any] = {
"configuration_funnel": ["FUNNEL_PRETRAINED... | 18 | 0 |
"""simple docstring"""
from collections import defaultdict
def __A ( a_ : str , a_ : str )-> bool:
'''simple docstring'''
SCREAMING_SNAKE_CASE : Tuple = first_str.lower().strip()
SCREAMING_SNAKE_CASE : int = ... | 707 |
"""simple docstring"""
import os
import sys
lowerCamelCase__ : List[Any] = os.path.join(os.path.dirname(__file__), "src")
sys.path.append(SRC_DIR)
from transformers import (
AutoConfig,
AutoModel,
AutoModelForCausalLM,
AutoModelForMaskedLM,
AutoModelForQuestionAnswering,
... | 18 | 0 |
import argparse
from argparse import Namespace
import torch
from torch import nn
from transformers import XGLMConfig, XGLMForCausalLM
def __A ( a_ : List[Any] )-> Any:
'''simple docstring'''
SCREAMING_SNAKE_CASE : Dict = [
'''decoder.version''',
'''d... | 708 |
"""simple docstring"""
import math
from typing import Optional
import numpy as np
from ...configuration_utils import PretrainedConfig
from ...utils import logging
lowerCamelCase__ : Tuple = logging.get_logger(__name__)
lowerCamelCase__ : Any = {
"facebook/encodec_24kh... | 18 | 0 |
"""simple docstring"""
from typing import List, Optional, Tuple, Union
import torch
from ...utils import logging, randn_tensor
from ..pipeline_utils import AudioPipelineOutput, DiffusionPipeline
lowerCamelCase__ : Optional[int] = logging.get_logger(__name__) # pylint: disable=invalid-name
... | 709 |
"""simple docstring"""
import copy
import unittest
from transformers.models.auto import get_values
from transformers.testing_utils import require_torch, slow, torch_device
from transformers.utils import cached_property, is_torch_available, is_vision_available
from ...test_configuration_common import ConfigTest... | 18 | 0 |
"""simple docstring"""
import unittest
from queue import Empty
from threading import Thread
from transformers import AutoTokenizer, TextIteratorStreamer, TextStreamer, is_torch_available
from transformers.testing_utils import CaptureStdout, require_torch, torch_device
from ..test_modeling_common import ids_ten... | 710 |
"""simple docstring"""
import logging
import os
import random
import sys
from dataclasses import dataclass, field
from typing import Optional
import datasets
import numpy as np
import pandas as pd
from datasets import load_dataset
import transformers
from transformers import (
AutoConfig,
BartForSequen... | 18 | 0 |
"""simple docstring"""
import functools
def __A ( a_ : str , a_ : str )-> int:
'''simple docstring'''
SCREAMING_SNAKE_CASE : Union[str, Any] = len(a_ )
SCREAMING_SNAKE_CASE : Tuple = len(a_ )
@functools.cache
def min_dista... | 711 |
"""simple docstring"""
import collections
import inspect
import unittest
from transformers import SwinvaConfig
from transformers.testing_utils import require_torch, require_vision, slow, torch_device
from transformers.utils import cached_property, is_torch_available, is_vision_available
from ...test_configurat... | 18 | 0 |
"""simple docstring"""
import copy
from ...configuration_utils import PretrainedConfig
from ...utils import logging
lowerCamelCase__ : Any = logging.get_logger(__name__)
class lowercase__( _UpperCAmelCase ):
'''simple docstring'''
UpperCamelCase = ""... | 712 |
"""simple docstring"""
from collections import OrderedDict
from typing import Any, Mapping, Optional
from ... import PreTrainedTokenizer
from ...configuration_utils import PretrainedConfig
from ...file_utils import TensorType, is_torch_available
from ...onnx import OnnxConfig, OnnxConfigWithPast, OnnxSeqaSeqCon... | 18 | 0 |
"""simple docstring"""
from ...configuration_utils import PretrainedConfig
from ...utils import logging
lowerCamelCase__ : List[Any] = logging.get_logger(__name__)
lowerCamelCase__ : Optional[int] = {
"microsoft/swinv2-tiny-patch4-window8-256": (
"https://huggin... | 713 |
"""simple docstring"""
from collections import OrderedDict
from typing import TYPE_CHECKING, Any, Mapping, Optional
from packaging import version
from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...onnx.utils import compute_effective_axis_dimension
from ...utils import lo... | 18 | 0 |
"""simple docstring"""
import logging
from transformers.configuration_utils import PretrainedConfig
lowerCamelCase__ : Optional[int] = logging.getLogger(__name__)
class lowercase__( _UpperCAmelCase ):
'''simple docstring'''
UpperCamelCase = """masked_bert"""
def __i... | 714 |
"""simple docstring"""
import math
def __A ( a_ : list , a_ : int )-> int:
'''simple docstring'''
SCREAMING_SNAKE_CASE : Optional[int] = len(a_ )
SCREAMING_SNAKE_CASE : Optional[Any] = int(math.floor(math.sqrt(a_ ) ) )
... | 18 | 0 |
"""simple docstring"""
import gc
import threading
import time
import psutil
import torch
class lowercase__:
'''simple docstring'''
def __init__( self :Tuple ) -> Optional[Any]:
'''simple docstring'''
SCREAMING_SNAKE_CASE : str = psutil.Process()
SC... | 715 |
"""simple docstring"""
from sklearn.metrics import fa_score
import datasets
lowerCamelCase__ : List[Any] = "\nThe F1 score is the harmonic mean of the precision and recall. It can be computed with the equation:\nF1 = 2 * (precision * recall) / (precision + recall)\n"
lowerCamelCase__ : ... | 18 | 0 |
"""simple docstring"""
import copy
from typing import Dict, List, Optional
from ...configuration_utils import PretrainedConfig
from ...utils import logging
from ..auto import CONFIG_MAPPING
lowerCamelCase__ : int = {
"facebook/mask2former-swin-small-coco-instance": (
"https://hug... | 716 |
"""simple docstring"""
from __future__ import annotations
from fractions import Fraction
def __A ( a_ : int , a_ : int )-> bool:
'''simple docstring'''
return (
num != den and num % 10 == den // 10 and (num // 10) / (den % 10) == num / den
)
def __A... | 18 | 0 |
"""simple docstring"""
import math
from ...configuration_utils import PretrainedConfig
from ...utils import logging
lowerCamelCase__ : List[Any] = logging.get_logger(__name__)
lowerCamelCase__ : List[str] = {
"facebook/data2vec-base-960h": "https://hug... | 717 |
"""simple docstring"""
from ...configuration_utils import PretrainedConfig
from ...utils import logging
from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices
lowerCamelCase__ : int = logging.get_logger(__name__)
class lowercase__( _Uppe... | 18 | 0 |
"""simple docstring"""
import warnings
from ...utils import logging
from .image_processing_yolos import YolosImageProcessor
lowerCamelCase__ : Tuple = logging.get_logger(__name__)
class lowercase__( _UpperCAmelCase ):
'''simple docstring'''
def __init__( self :str , ... | 718 |
"""simple docstring"""
import math
class lowercase__:
'''simple docstring'''
def __init__( self :Union[str, Any] , lowerCamelCase_ :List[str]=0 ) -> List[Any]: # a graph with Node 0,1,...,N-1
'''simple docstring'''
SCREAMING_SNAKE_CASE : Tuple = n... | 18 | 0 |
"""simple docstring"""
from collections import OrderedDict
from typing import Mapping
from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...utils import logging
lowerCamelCase__ : int = logging.get_logger(__name__)
lowerCamelCase__ : List[Any] ... | 719 |
"""simple docstring"""
from typing import TYPE_CHECKING
from ....utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available
lowerCamelCase__ : Tuple = {
"configuration_mctct": ["MCTCT_PRETRAINED_CONFIG_ARCHIVE_MAP", "MCTCTConfig"],
"feature_extraction_mctct": ["MCTC... | 18 | 0 |
"""simple docstring"""
from collections.abc import Generator
from math import sin
def __A ( a_ : bytes )-> bytes:
'''simple docstring'''
if len(a_ ) != 32:
raise ValueError('''Input must be of length 32''' )
SCREAMING_SNAKE_CASE : str = B'''''... | 720 |
"""simple docstring"""
import os
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
import torch
from torch import nn
from ...models.controlnet import ControlNetModel, ControlNetOutput
from ...models.modeling_utils import ModelMixin
from ...utils import logging
lowerCamelCase__ : List... | 18 | 0 |
"""simple docstring"""
import argparse
import torch
from transformers import FunnelBaseModel, FunnelConfig, FunnelModel, load_tf_weights_in_funnel
from transformers.utils import logging
logging.set_verbosity_info()
def __A ( a_ : Any , a_ : Union[str, Any] , a_ : Optional[in... | 721 |
"""simple docstring"""
from __future__ import annotations
from numpy import array, cos, cross, floataa, radians, sin
from numpy.typing import NDArray
def __A ( a_ : float , a_ : float , a_ : bool = False )-> list[float]:
'''simple docstring'''
if radian_... | 18 | 0 |
"""simple docstring"""
import argparse
import json
import os
import torch
from torch import nn
from transformers import NllbMoeConfig, NllbMoeModel
from transformers.modeling_utils import dtype_byte_size
from transformers.utils import WEIGHTS_INDEX_NAME, WEIGHTS_NAME
def _UpperCamelCase ( _A ) -> ... | 19 |
"""simple docstring"""
import argparse
import logging
import os
import sys
import numpy as np
import onnxruntime
import torch
from bart_onnx.generation_onnx import BARTBeamSearchGenerator
from bart_onnx.reduce_onnx_size import remove_dup_initializers
import transformers
from transformers import BartForConditional... | 19 | 1 |
"""simple docstring"""
def _UpperCamelCase ( _A ) -> int:
"""simple docstring"""
_UpperCAmelCase = [1]
_UpperCAmelCase ,_UpperCAmelCase ,_UpperCAmelCase = 0, 0, 0
_UpperCAmelCase = ugly_nums[ia] * 2
_UpperCAmelCase = ugly_nu... | 19 |
"""simple docstring"""
import argparse
import json
import math
import os
import time
import traceback
import zipfile
from collections import Counter
import requests
def _UpperCamelCase ( _A , _A=None ) -> Union[str, Any]:
"""simple docstring"""
_UpperCAmelCase = N... | 19 | 1 |
"""simple docstring"""
import math
def _UpperCamelCase ( _A ) -> bool:
"""simple docstring"""
if 1 < number < 4:
# 2 and 3 are primes
return True
elif number < 2 or number % 2 == 0 or number % 3 == 0:
# Negatives, 0, 1, all even numbers, all multiples of 3 ... | 19 |
"""simple docstring"""
import re
import warnings
from contextlib import contextmanager
from ...processing_utils import ProcessorMixin
class a_ ( _UpperCAmelCase ):
a : Any = ['image_processor', 'tokenizer']
a : Optional[int] = 'AutoImageProcessor'
a : An... | 19 | 1 |
"""simple docstring"""
import argparse
import torch
from transformers import LxmertConfig, LxmertForPreTraining, load_tf_weights_in_lxmert
from transformers.utils import logging
logging.set_verbosity_info()
def _UpperCamelCase ( _A , _A , _A ) -> List[Any]:
"""simple ... | 19 |
"""simple docstring"""
import colorsys
from PIL import Image # type: ignore
def _UpperCamelCase ( _A , _A , _A ) -> float:
"""simple docstring"""
_UpperCAmelCase = x
_UpperCAmelCase = y
for step in range(_A ): # noqa: B007
... | 19 | 1 |
"""simple docstring"""
from itertools import product
def _UpperCamelCase ( _A , _A ) -> list[int]:
"""simple docstring"""
_UpperCAmelCase = sides_number
_UpperCAmelCase = max_face_number * dice_number
_UpperCAmelCase = [0] * (max_total... | 19 |
"""simple docstring"""
from typing import Optional
from torch import nn
from .transformer_ad import TransformeraDModel, TransformeraDModelOutput
class a_ ( nn.Module ):
def __init__( self : List[str] , __UpperCamelCase : int = 16 , __UpperCamelCase : ... | 19 | 1 |
"""simple docstring"""
import io
import json
import fsspec
import pytest
from datasets import Dataset, DatasetDict, Features, NamedSplit, Value
from datasets.io.json import JsonDatasetReader, JsonDatasetWriter
from ..utils import assert_arrow_memory_doesnt_increase, assert_arrow_memory_increases
def _UpperCam... | 19 |
"""simple docstring"""
import argparse
import torch
from transformers import LxmertConfig, LxmertForPreTraining, load_tf_weights_in_lxmert
from transformers.utils import logging
logging.set_verbosity_info()
def _UpperCamelCase ( _A , _A , _A ) -> List[Any]:
"""simple ... | 19 | 1 |
"""simple docstring"""
import copy
import os
from typing import Union
from ...configuration_utils import PretrainedConfig
from ...utils import logging
a : Optional[Any] = logging.get_logger(__name__)
a : List[Any] = {
'''BridgeTower/bridgetower-base''': '''https://huggingface.co/Bridge... | 19 |
"""simple docstring"""
import argparse
import os
import re
import packaging.version
a : str = '''examples/'''
a : List[str] = {
'''examples''': (re.compile(r'''^check_min_version\("[^"]+"\)\s*$''', re.MULTILINE), '''check_min_version("VERSION")\n'''),
'''init''': (re.compile(r'''^__... | 19 | 1 |
"""simple docstring"""
from typing import List, Optional, Tuple, Union
import PIL
import torch
from torchvision import transforms
from diffusers.pipeline_utils import DiffusionPipeline, ImagePipelineOutput
from diffusers.schedulers import DDIMScheduler
from diffusers.utils import randn_tensor
a : Any ... | 19 |
"""simple docstring"""
from __future__ import annotations
def _UpperCamelCase ( _A ) -> None:
"""simple docstring"""
create_state_space_tree(_A , [] , 0 , [0 for i in range(len(_A ) )] )
def _UpperCamelCase ( _A , _A , ... | 19 | 1 |
"""simple docstring"""
import unittest
from transformers import (
MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING,
TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING,
Pipeline,
ZeroShotClassificationPipeline,
pipeline,
)
from transformers.testing_utils import is_pipeline_test, nested_simplify, require_tf, ... | 19 |
"""simple docstring"""
import inspect
import unittest
from transformers import DPTConfig
from transformers.file_utils import is_torch_available, is_vision_available
from transformers.models.auto import get_values
from transformers.testing_utils import require_torch, require_vision, slow, torch_device
from ...test... | 19 | 1 |
"""simple docstring"""
import argparse
import datetime
def _UpperCamelCase ( _A ) -> str:
"""simple docstring"""
_UpperCAmelCase = {
"""0""": """Sunday""",
"""1""": """Monday""",
"""2""": """Tuesday""",
"""3""": """Wednesday""",
"""4"... | 19 |
"""simple docstring"""
import enum
import warnings
from ..tokenization_utils import TruncationStrategy
from ..utils import add_end_docstrings, is_tf_available, is_torch_available, logging
from .base import PIPELINE_INIT_ARGS, Pipeline
if is_tf_available():
import tensorflow as tf
from ..models.auto.modeling_t... | 19 | 1 |
"""simple docstring"""
import argparse
import torch
from transformers import (
EncodecConfig,
EncodecFeatureExtractor,
EncodecModel,
logging,
)
# checkpoints downloaded from:
# https://dl.fbaipublicfiles.com/encodec/v0/encodec_24khz-d7cc33bc.th
# https://huggingface.co/facebook/musicgen-small/re... | 19 |
"""simple docstring"""
import unittest
import numpy as np
import torch
from transformers import CLIPTextConfig, CLIPTextModel
from diffusers import DDIMScheduler, LDMPipeline, UNetaDModel, VQModel
from diffusers.utils.testing_utils import enable_full_determinism, require_torch, slow, torch_device
enable_full_de... | 19 | 1 |
"""simple docstring"""
import inspect
import tempfile
import unittest
from huggingface_hub import hf_hub_download
from transformers import is_torch_available
from transformers.testing_utils import is_flaky, require_torch, slow, torch_device
from ...test_configuration_common import ConfigTester
from ...test_model... | 19 |
"""simple docstring"""
import re
from filelock import FileLock
try:
import nltk
a : str = True
except (ImportError, ModuleNotFoundError):
a : List[str] = False
if NLTK_AVAILABLE:
with FileLock('''.lock''') as lock:
nltk.download('''punkt''', quiet=True)
def _UpperCamelCase ( ... | 19 | 1 |
"""simple docstring"""
from typing import Dict, List, Optional, Tuple, Union
import numpy as np
from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict
from ...image_transforms import (
center_crop,
get_resize_output_image_size,
normalize,
rescale,
resize,
to_... | 19 |
"""simple docstring"""
import unittest
from transformers import PegasusConfig, PegasusTokenizer, is_flax_available
from transformers.testing_utils import require_flax, slow
from ...test_configuration_common import ConfigTester
from ...test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor
if is_flax_... | 19 | 1 |
"""simple docstring"""
import os
import tempfile
import unittest
from transformers import NezhaConfig, is_torch_available
from transformers.models.auto import get_values
from transformers.testing_utils import require_torch, require_torch_gpu, slow, torch_device
from ...generation.test_utils import GenerationTeste... | 19 |
"""simple docstring"""
import tempfile
import unittest
from transformers import TaConfig, is_torch_available
from transformers.testing_utils import (
require_sentencepiece,
require_tokenizers,
require_torch,
slow,
torch_device,
)
from ...generation.test_utils import GenerationTesterMixin
from ... | 19 | 1 |
"""simple docstring"""
# Logistic Regression from scratch
# In[62]:
# In[63]:
# importing all the required libraries
import numpy as np
from matplotlib import pyplot as plt
from sklearn import datasets
def _UpperCamelCase ( _A ) -> Tuple:
"""simple docstring"""
return 1 / (1 + np.... | 19 |
"""simple docstring"""
import copy
import os
import tempfile
from unittest import TestCase
from unittest.mock import patch
import numpy as np
import pyarrow as pa
import pyarrow.parquet as pq
import pytest
from datasets.arrow_writer import ArrowWriter, OptimizedTypedSequence, ParquetWriter, TypedSequence
from dat... | 19 | 1 |
"""simple docstring"""
import logging
import os
import sys
from dataclasses import dataclass, field
from typing import Optional
from seqaseq_trainer import SeqaSeqTrainer
from seqaseq_training_args import SeqaSeqTrainingArguments
import transformers
from transformers import (
AutoConfig,
AutoModelForSeqaS... | 19 |
"""simple docstring"""
# Lint as: python3
import sys
from collections.abc import Mapping
from typing import TYPE_CHECKING, Dict, Optional
import numpy as np
import pyarrow as pa
from .. import config
from ..utils.logging import get_logger
from ..utils.py_utils import map_nested
from .formatting import TensorForma... | 19 | 1 |
"""simple docstring"""
import os
from pathlib import Path
import numpy as np
import pytest
from pack_dataset import pack_data_dir
from parameterized import parameterized
from save_len_file import save_len_file
from torch.utils.data import DataLoader
from transformers import AutoTokenizer
from transformers.models.... | 19 |
"""simple docstring"""
from typing import TYPE_CHECKING
from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available
a : Tuple = {
'''configuration_pegasus_x''': ['''PEGASUS_X_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''PegasusXConfig'''],
}
try:
if not is_torch_available():
r... | 19 | 1 |
"""simple docstring"""
from typing import Optional
from torch import nn
from .transformer_ad import TransformeraDModel, TransformeraDModelOutput
class a_ ( nn.Module ):
def __init__( self : List[str] , __UpperCamelCase : int = 16 , __UpperCamelCase : ... | 19 |
"""simple docstring"""
import collections
import json
import math
import os
import re
import time
from fnmatch import fnmatch
from typing import Dict
import requests
from slack_sdk import WebClient
a : int = WebClient(token=os.environ['''CI_SLACK_BOT_TOKEN'''])
def _UpperCamelCase ( _A ) ... | 19 | 1 |
"""simple docstring"""
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_flax_available,
is_tf_available,
is_torch_available,
)
a : List[str] = {
'''configuration_resnet''': ['''RESNET_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''Re... | 19 |
"""simple docstring"""
import asyncio
import os
import re
import sys
import tempfile
import unittest
from contextlib import contextmanager
from copy import deepcopy
from distutils.util import strtobool
from enum import Enum
from importlib.util import find_spec
from pathlib import Path
from unittest.mock import patc... | 19 | 1 |
"""simple docstring"""
from typing import TYPE_CHECKING
# rely on isort to merge the imports
from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available
a : str = {
'''configuration_autoformer''': [
'''AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP''',
'''Autofor... | 19 |
"""simple docstring"""
from pathlib import PurePosixPath
from typing import Optional
import fsspec
from fsspec import AbstractFileSystem
from huggingface_hub.hf_api import DatasetInfo
from ..utils.file_utils import get_authentication_headers_for_url
from ..utils.hub import hf_hub_url
class a_ ( _UpperC... | 19 | 1 |
"""simple docstring"""
import json
import os
import tempfile
import unittest
import numpy as np
from datasets import load_dataset
from transformers.testing_utils import require_torch, require_vision, slow
from transformers.utils import is_torch_available, is_vision_available
from ...test_image_processing_common ... | 19 |
"""simple docstring"""
import itertools
import os
from collections import Counter, defaultdict
from concurrent.futures import ThreadPoolExecutor, as_completed
import numpy as np
import datasets
from .execute import check_correctness
a : Optional[Any] = '''\
@misc{chen2021evaluating,
title={Eva... | 19 | 1 |
"""simple docstring"""
def _UpperCamelCase ( _A ) -> int:
"""simple docstring"""
if divisor % 5 == 0 or divisor % 2 == 0:
return 0
_UpperCAmelCase = 1
_UpperCAmelCase = 1
while repunit:
_UpperCAmelCase = (1_0 * repunit + 1) % ... | 19 |
"""simple docstring"""
from collections.abc import Callable
import numpy as np
def _UpperCamelCase ( _A , _A , _A , _A , _A ) -> np.array:
"""simple docstring"""
_UpperCAmelCase = int(np.ceil((x_end - xa) / step_size ) )
_U... | 19 | 1 |
"""simple docstring"""
from math import pi, sqrt
def _UpperCamelCase ( _A ) -> float:
"""simple docstring"""
if num <= 0:
raise ValueError("""math domain error""" )
if num > 171.5:
raise OverflowError("""math range error""" )
elif num - int(_A ) ... | 19 |
"""simple docstring"""
import argparse
import logging
import os
import sys
import numpy as np
import onnxruntime
import torch
from bart_onnx.generation_onnx import BARTBeamSearchGenerator
from bart_onnx.reduce_onnx_size import remove_dup_initializers
import transformers
from transformers import BartForConditional... | 19 | 1 |
"""simple docstring"""
import json
from typing import List, Optional, Tuple
from tokenizers import normalizers
from ...tokenization_utils_base import BatchEncoding
from ...tokenization_utils_fast import PreTrainedTokenizerFast
from ...utils import PaddingStrategy, logging
from .tokenization_realm import RealmToke... | 19 |
"""simple docstring"""
import argparse
import json
import math
import os
import time
import traceback
import zipfile
from collections import Counter
import requests
def _UpperCamelCase ( _A , _A=None ) -> Union[str, Any]:
"""simple docstring"""
_UpperCAmelCase = N... | 19 | 1 |
"""simple docstring"""
import collections
import inspect
import unittest
from transformers import FocalNetConfig
from transformers.testing_utils import require_torch, require_vision, slow, torch_device
from transformers.utils import cached_property, is_torch_available, is_vision_available
from ...test_backbone_co... | 19 |
"""simple docstring"""
import re
import warnings
from contextlib import contextmanager
from ...processing_utils import ProcessorMixin
class a_ ( _UpperCAmelCase ):
a : Any = ['image_processor', 'tokenizer']
a : Optional[int] = 'AutoImageProcessor'
a : An... | 19 | 1 |
"""simple docstring"""
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_flax_available,
is_tf_available,
is_tokenizers_available,
is_torch_available,
)
a : str = {
'''configuration_roformer''': ['''ROFORMER_PRETRAINED_C... | 19 |
"""simple docstring"""
import colorsys
from PIL import Image # type: ignore
def _UpperCamelCase ( _A , _A , _A ) -> float:
"""simple docstring"""
_UpperCAmelCase = x
_UpperCAmelCase = y
for step in range(_A ): # noqa: B007
... | 19 | 1 |
"""simple docstring"""
import unittest
from transformers import is_torch_available
from transformers.testing_utils import require_torch
if is_torch_available():
import torch
from transformers.activations import gelu_new, gelu_python, get_activation
@require_torch
class a_ ( unittest.TestCase ):
... | 19 |
"""simple docstring"""
from typing import Optional
from torch import nn
from .transformer_ad import TransformeraDModel, TransformeraDModelOutput
class a_ ( nn.Module ):
def __init__( self : List[str] , __UpperCamelCase : int = 16 , __UpperCamelCase : ... | 19 | 1 |
"""simple docstring"""
from ...configuration_utils import PretrainedConfig
from ...utils import logging
from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices
a : Union[str, Any] = logging.get_logger(__name__)
a : List[Any] = {
'''shi-labs/na... | 19 |
"""simple docstring"""
import argparse
import torch
from transformers import LxmertConfig, LxmertForPreTraining, load_tf_weights_in_lxmert
from transformers.utils import logging
logging.set_verbosity_info()
def _UpperCamelCase ( _A , _A , _A ) -> List[Any]:
"""simple ... | 19 | 1 |
"""simple docstring"""
# NOTE: This file is deprecated and will be removed in a future version.
# It only exists so that temporarely `from diffusers.pipelines import DiffusionPipeline` works
from ...utils import deprecate
from ..controlnet.pipeline_flax_controlnet import FlaxStableDiffusionControlNetPipeline # no... | 19 |
"""simple docstring"""
import argparse
import os
import re
import packaging.version
a : str = '''examples/'''
a : List[str] = {
'''examples''': (re.compile(r'''^check_min_version\("[^"]+"\)\s*$''', re.MULTILINE), '''check_min_version("VERSION")\n'''),
'''init''': (re.compile(r'''^__... | 19 | 1 |
"""simple docstring"""
from ...utils import is_torch_available, is_transformers_available
if is_transformers_available() and is_torch_available():
from .pipeline_vq_diffusion import LearnedClassifierFreeSamplingEmbeddings, VQDiffusionPipeline | 19 |
"""simple docstring"""
from __future__ import annotations
def _UpperCamelCase ( _A ) -> None:
"""simple docstring"""
create_state_space_tree(_A , [] , 0 , [0 for i in range(len(_A ) )] )
def _UpperCamelCase ( _A , _A , ... | 19 | 1 |
"""simple docstring"""
from math import acos, sin
from typing import List, Tuple, Union
import numpy as np
import torch
from PIL import Image
from ...models import AutoencoderKL, UNetaDConditionModel
from ...schedulers import DDIMScheduler, DDPMScheduler
from ...utils import randn_tensor
from ..pipeline_utils imp... | 19 |
"""simple docstring"""
import inspect
import unittest
from transformers import DPTConfig
from transformers.file_utils import is_torch_available, is_vision_available
from transformers.models.auto import get_values
from transformers.testing_utils import require_torch, require_vision, slow, torch_device
from ...test... | 19 | 1 |
"""simple docstring"""
from __future__ import annotations
def _UpperCamelCase ( _A ) -> bool:
"""simple docstring"""
if len(_A ) < 2:
raise ValueError("""Monogons and Digons are not polygons in the Euclidean space""" )
if any(i <= 0 for i in nums ):
... | 19 |
"""simple docstring"""
import enum
import warnings
from ..tokenization_utils import TruncationStrategy
from ..utils import add_end_docstrings, is_tf_available, is_torch_available, logging
from .base import PIPELINE_INIT_ARGS, Pipeline
if is_tf_available():
import tensorflow as tf
from ..models.auto.modeling_t... | 19 | 1 |
"""simple docstring"""
import unittest
from transformers import load_tool
from .test_tools_common import ToolTesterMixin
class a_ ( unittest.TestCase , _UpperCAmelCase ):
def _snake_case ( self : Any ) ->Optional[Any]:
'''simple docstring'''
... | 19 |
"""simple docstring"""
import unittest
import numpy as np
import torch
from transformers import CLIPTextConfig, CLIPTextModel
from diffusers import DDIMScheduler, LDMPipeline, UNetaDModel, VQModel
from diffusers.utils.testing_utils import enable_full_determinism, require_torch, slow, torch_device
enable_full_de... | 19 | 1 |
"""simple docstring"""
from __future__ import annotations
class a_ :
def __init__( self : int , __UpperCamelCase : int ) ->None:
'''simple docstring'''
_UpperCAmelCase = data
_UpperCAmelCase = None
... | 19 |
"""simple docstring"""
import re
from filelock import FileLock
try:
import nltk
a : str = True
except (ImportError, ModuleNotFoundError):
a : List[str] = False
if NLTK_AVAILABLE:
with FileLock('''.lock''') as lock:
nltk.download('''punkt''', quiet=True)
def _UpperCamelCase ( ... | 19 | 1 |
"""simple docstring"""
import baseaa
import io
import json
import os
from copy import deepcopy
from ..optimizer import AcceleratedOptimizer
from ..scheduler import AcceleratedScheduler
class a_ :
def __init__( self : Any , __UpperCamelCase : Optional[int] ) ->Tu... | 19 |
"""simple docstring"""
import unittest
from transformers import PegasusConfig, PegasusTokenizer, is_flax_available
from transformers.testing_utils import require_flax, slow
from ...test_configuration_common import ConfigTester
from ...test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor
if is_flax_... | 19 | 1 |
"""simple docstring"""
from ...processing_utils import ProcessorMixin
from ...tokenization_utils_base import BatchEncoding
class a_ ( _UpperCAmelCase ):
a : Any = ['image_processor', 'tokenizer']
a : List[str] = 'AutoImageProcessor'
a : Dict = 'Aut... | 19 |
"""simple docstring"""
import tempfile
import unittest
from transformers import TaConfig, is_torch_available
from transformers.testing_utils import (
require_sentencepiece,
require_tokenizers,
require_torch,
slow,
torch_device,
)
from ...generation.test_utils import GenerationTesterMixin
from ... | 19 | 1 |
"""simple docstring"""
import unittest
from transformers import is_flax_available
from transformers.testing_utils import require_flax, require_sentencepiece, require_tokenizers, require_torch, slow
if is_flax_available():
import optax
from flax.training.common_utils import onehot
from transformers import Aut... | 19 |
"""simple docstring"""
import copy
import os
import tempfile
from unittest import TestCase
from unittest.mock import patch
import numpy as np
import pyarrow as pa
import pyarrow.parquet as pq
import pytest
from datasets.arrow_writer import ArrowWriter, OptimizedTypedSequence, ParquetWriter, TypedSequence
from dat... | 19 | 1 |
"""simple docstring"""
import argparse
import pytorch_lightning as pl
import torch
from torch import nn
from transformers import LongformerForQuestionAnswering, LongformerModel
class a_ ( pl.LightningModule ):
def __init__( self : List[str] , __UpperCamelCase : Unio... | 19 |
"""simple docstring"""
# Lint as: python3
import sys
from collections.abc import Mapping
from typing import TYPE_CHECKING, Dict, Optional
import numpy as np
import pyarrow as pa
from .. import config
from ..utils.logging import get_logger
from ..utils.py_utils import map_nested
from .formatting import TensorForma... | 19 | 1 |
"""simple docstring"""
# Copyright 2023 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
... | 19 |
"""simple docstring"""
from typing import TYPE_CHECKING
from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available
a : Tuple = {
'''configuration_pegasus_x''': ['''PEGASUS_X_PRETRAINED_CONFIG_ARCHIVE_MAP''', '''PegasusXConfig'''],
}
try:
if not is_torch_available():
r... | 19 | 1 |
"""simple docstring"""
# Imports
import numpy as np
class a_ :
def __init__( self : List[str] , __UpperCamelCase : Dict=None , __UpperCamelCase : str=None , __UpperCamelCase : str=None , __UpperCamelCase : List[str]=None... | 19 |
"""simple docstring"""
import collections
import json
import math
import os
import re
import time
from fnmatch import fnmatch
from typing import Dict
import requests
from slack_sdk import WebClient
a : int = WebClient(token=os.environ['''CI_SLACK_BOT_TOKEN'''])
def _UpperCamelCase ( _A ) ... | 19 | 1 |
"""simple docstring"""
def _UpperCamelCase ( _A , _A , _A , _A ) -> bool:
"""simple docstring"""
if graph[path[curr_ind - 1]][next_ver] == 0:
return False
# 2. Validate that next vertex is not already in path
return not any(vertex == next_ver ... | 19 |
"""simple docstring"""
import asyncio
import os
import re
import sys
import tempfile
import unittest
from contextlib import contextmanager
from copy import deepcopy
from distutils.util import strtobool
from enum import Enum
from importlib.util import find_spec
from pathlib import Path
from unittest.mock import patc... | 19 | 1 |
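For orientation, below is a minimal sketch of how a dataset with the schema above could be loaded and inspected using the `datasets` library. The repository id `user/code-style-corpus` is a placeholder assumption, not the actual dataset name; only the column names and types come from the table above.

```python
# Minimal sketch, assuming the schema above describes a Hugging Face dataset.
# "user/code-style-corpus" is a hypothetical repository id; substitute the real one.
from datasets import load_dataset

ds = load_dataset("user/code-style-corpus", split="train")

# Columns expected from the schema:
# code, code_codestyle, style_context, style_context_codestyle, label
print(ds.column_names)

# Inspect one example: snippet length, its codestyle id, and the binary label.
example = ds[0]
print(len(example["code"]), example["code_codestyle"], example["label"])
```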