| code (string, lengths 87–55.2k) | code_codestyle (int64, 0–349) | style_context (string, lengths 135–49.1k) | style_context_codestyle (int64, 0–349) | label (int64, 0–1) |
|---|---|---|---|---|
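Each row below pairs a `code` snippet with a `style_context` snippet, each tagged with a codestyle ID (0–349), plus a binary `label`; the snippets are truncated with `...` in this dump. A minimal sketch of reading rows with this schema via the `datasets` library — the repository path `user/codestyle-pairs` is a hypothetical placeholder, since the actual dataset name is not shown here:

```python
from datasets import load_dataset

# Hypothetical repository path; the real dataset name is not given in this dump.
ds = load_dataset("user/codestyle-pairs", split="train")

for row in ds.select(range(3)):
    # Each row holds two code strings and their style annotations.
    print(row["code_codestyle"], row["style_context_codestyle"], row["label"])
    print(row["code"][:80])  # `code` is a string of 87 to ~55.2k characters
```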
A__ = {
'''Pillow''': '''Pillow<10.0.0''',
'''accelerate''': '''accelerate>=0.20.3''',
'''av''': '''av==9.2.0''',
'''beautifulsoup4''': '''beautifulsoup4''',
'''black''': '''black~=23.1''',
'''codecarbon''': '''codecarbon==1.2.0''',
'''cookiecutter''': '''cookiecut... | 230 |
import unittest
from transformers import MraConfig, is_torch_available
from transformers.testing_utils import require_torch, slow, torch_device
from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
if is_torch... | 330 | 0 |
'''simple docstring'''
import gc
import tempfile
import unittest
import numpy as np
import torch
from diffusers import VersatileDiffusionTextToImagePipeline
from diffusers.utils.testing_utils import nightly, require_torch_gpu, torch_device
__a = False
class A__ ( unittest.Tes... | 145 |
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_flax_available,
is_tf_available,
is_torch_available,
)
a_ = {"""configuration_encoder_decoder""": ["""EncoderDecoderConfig"""]}
try:
if not is_torch_available():
raise Opti... | 330 | 0 |
'''simple docstring'''
class __UpperCAmelCase :
def __init__( self , lowerCAmelCase_ ):
"""simple docstring"""
_snake_case = val
_snake_case = None
_snake_case = None
def lowerCamelCase ( s... | 42 |
from string import ascii_lowercase, ascii_uppercase
def a__ ( _UpperCamelCase : str ):
if not sentence:
return ""
__lowerCamelCase = dict(zip(_UpperCamelCase ,_UpperCamelCase ) )
return lower_to_upper.get(sentence[0] ,sentence[0] ) +... | 330 | 0 |
import argparse
import torch
from transformers import (
SpeechTaConfig,
SpeechTaFeatureExtractor,
SpeechTaForSpeechToSpeech,
SpeechTaForSpeechToText,
SpeechTaForTextToSpeech,
SpeechTaProcessor,
SpeechTaTokenizer,
logging,
)
from transformers.tokenization_utils import AddedToken
... | 342 |
from transformers import BertTokenizer, EncoderDecoderModel, SeqaSeqTrainer, SeqaSeqTrainingArguments
from transformers.testing_utils import TestCasePlus, require_torch, slow
from transformers.utils import is_datasets_available
if is_datasets_available():
import datasets
class __lowerCAmelCase ( lo... | 330 | 0 |
'''simple docstring'''
def snake_case_ (_a : int ):
UpperCAmelCase = int(_UpperCamelCase )
if decimal in (0, 1): # Exit cases for the recursion
return str(_UpperCamelCase )
UpperCAmelCase , UpperCAmelCase = divmod(_UpperCamelCa... | 34 |
# Copyright 2023 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by appli... | 330 | 0 |
import argparse
import os
from pathlib import Path
from typing import Dict
import tensorflow as tf
import torch
from tqdm import tqdm
from transformers import PegasusConfig, PegasusForConditionalGeneration, PegasusTokenizer
from transformers.models.pegasus.configuration_pegasus import DEFAULTS, task_s... | 305 |
import random
import unittest
from torch.utils.data import BatchSampler, DataLoader, IterableDataset
from accelerate import Accelerator
from accelerate.data_loader import (
BatchSamplerShard,
DataLoaderDispatcher,
DataLoaderShard,
IterableDatasetShard,
SkipBatchSampler,
SkipDataLoader,
... | 330 | 0 |
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_torch_available,
)
lowerCAmelCase = {
'configuration_encodec': [
'ENCODEC_PRETRAINED_CONFIG_ARCHIVE_MAP',
'EncodecConfig',
],
'feature_extrac... | 110 |
def a__ ( _UpperCamelCase : int ):
__lowerCamelCase = n ** (1 / 3)
return (val * val * val) == n
if __name__ == "__main__":
print(perfect_cube(27))
print(perfect_cube(4))
| 330 | 0 |
import os
SCREAMING_SNAKE_CASE__ = {"""I""": 1, """V""": 5, """X""": 10, """L""": 50, """C""": 100, """D""": 500, """M""": 1000}
def __SCREAMING_SNAKE_CASE ( SCREAMING_SNAKE_CASE : str ) -> Optional[int]:
__lowercase = 0
__lowercase = 0
w... | 325 |
import argparse
import os
# New Code #
import evaluate
import torch
from datasets import load_dataset
from torch.optim import AdamW
from torch.utils.data import DataLoader
from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed
from accelerate import A... | 330 | 0 |
'''simple docstring'''
# Copyright 2023 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unle... | 198 |
import logging
import os
import threading
import time
try:
import warnings
except ImportError:
a_ = None
try:
import msvcrt
except ImportError:
a_ = None
try:
import fcntl
except ImportError:
a_ = None
# Backward compatibility
# ---------------------------------------... | 330 | 0 |
import os
def a ( snake_case__: str ):
'''simple docstring'''
lowercase_ = len(grid[0] )
lowercase_ = len(_UpperCamelCase )
lowercase_ = 0
lowercase_ = 0
lowercase_ = 0
# Check vertical... | 30 |
import copy
import inspect
import unittest
import numpy as np
from huggingface_hub import hf_hub_download
from transformers import TimesformerConfig
from transformers.models.auto import get_values
from transformers.testing_utils import require_torch, require_vision, slow, torch_device
from transformers.utils impo... | 330 | 0 |
"""simple docstring"""
import pytest
import datasets.config
from datasets.utils.info_utils import is_small_dataset
@pytest.mark.parametrize('''dataset_size''' , [None, 4_00 * 2**20, 6_00 * 2**20] )
@pytest.mark.parametrize('''input_in_memory_max_size''' , ['''default''', 0, 1_00 * 2**... | 191 |
def a__ ( _UpperCamelCase : int ):
if not isinstance(_UpperCamelCase ,_UpperCamelCase ):
__lowerCamelCase = F"""Input value of [number={number}] must be an integer"""
raise TypeError(_UpperCamelCase )
if number < 0:
return False
__lowerCa... | 330 | 0 |
A__ = {
'''A''': ['''B''', '''C''', '''E'''],
'''B''': ['''A''', '''D''', '''E'''],
'''C''': ['''A''', '''F''', '''G'''],
'''D''': ['''B'''],
'''E''': ['''A''', '''B''', '''D'''],
'''F''': ['''C'''],
'''G''': ['''C'''],
}
def _lowerCAmelCase ( ... | 230 |
import gc
import unittest
from parameterized import parameterized
from diffusers import FlaxUNetaDConditionModel
from diffusers.utils import is_flax_available
from diffusers.utils.testing_utils import load_hf_numpy, require_flax, slow
if is_flax_available():
import jax
import jax.numpy as jnp
@slow
@requ... | 330 | 0 |
'''simple docstring'''
import gc
import unittest
import numpy as np
import torch
import torch.nn.functional as F
from transformers import (
ClapTextConfig,
ClapTextModelWithProjection,
RobertaTokenizer,
SpeechTaHifiGan,
SpeechTaHifiGanConfig,
)
from diffusers import (
AudioLDMPipeline,
... | 145 |
from typing import TYPE_CHECKING
from ....utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available
a_ = {"""configuration_mmbt""": ["""MMBTConfig"""]}
try:
if not is_torch_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
... | 330 | 0 |
'''simple docstring'''
import argparse
import json
from pathlib import Path
import requests
import torch
from huggingface_hub import hf_hub_download
from PIL import Image
from transformers import (
SwiftFormerConfig,
SwiftFormerForImageClassification,
ViTImageProcessor,
)
from transformers.utils i... | 42 |
import gc
import importlib.metadata
import tempfile
import unittest
from packaging import version
from transformers import (
AutoModel,
AutoModelForCausalLM,
AutoModelForSeqaSeqLM,
AutoModelForSequenceClassification,
AutoTokenizer,
BitsAndBytesConfig,
pipeline,
)
from transformers.test... | 330 | 0 |
__magic_name__: Dict = [
"DownloadConfig",
"DownloadManager",
"DownloadMode",
"StreamingDownloadManager",
]
from .download_config import DownloadConfig
from .download_manager import DownloadManager, DownloadMode
from .streaming_download_manager import StreamingDownloadManager
| 342 |
from dataclasses import dataclass
from typing import Dict, Optional, Tuple, Union
import torch
import torch.nn as nn
from ..configuration_utils import ConfigMixin, register_to_config
from ..utils import BaseOutput, apply_forward_hook
from .attention_processor import AttentionProcessor, AttnProcessor
from .modelin... | 330 | 0 |
'''simple docstring'''
# Note: if you intend to run this script make sure you look under scripts/fsmt/
# to locate the appropriate script to do the work correctly. There is a set of scripts to:
# - download and prepare data and run the conversion script
# - perform eval to get the best hparam into the confi... | 34 |
import argparse
from typing import Dict
import tensorflow as tf
import torch
from tqdm import tqdm
from transformers import BigBirdPegasusConfig, BigBirdPegasusForConditionalGeneration
a_ = [
# tf -> hf
("""/""", """."""),
("""layer_""", """layers."""),
("""kernel""", """weight"""),
... | 330 | 0 |
import numpy as np
import pandas as pd
from sklearn.preprocessing import Normalizer
from sklearn.svm import SVR
from statsmodels.tsa.statespace.sarimax import SARIMAX
def UpperCamelCase ( __magic_name__ : list , __magic_name__ : list , __magic_name__ : ... | 305 |
import uuid
from typing import Any, Dict, List, Optional, Union
from ..utils import add_end_docstrings, is_tf_available, is_torch_available, logging
from .base import PIPELINE_INIT_ARGS, Pipeline
if is_tf_available():
import tensorflow as tf
if is_torch_available():
import torch
a_ = logging.get... | 330 | 0 |
import collections
import tempfile
import unittest
import numpy as np
from transformers.testing_utils import (
is_pt_flax_cross_test,
require_flax,
require_torch,
require_vision,
slow,
torch_device,
)
from transformers.utils import is_flax_available, is_torch_available, is_... | 110 |
import argparse
import os
from pathlib import Path
from typing import Dict
import tensorflow as tf
import torch
from tqdm import tqdm
from transformers import PegasusConfig, PegasusForConditionalGeneration, PegasusTokenizer
from transformers.models.pegasus.configuration_pegasus import DEFAULTS, task_specific_para... | 330 | 0 |
from __future__ import annotations
import unittest
from transformers import BlenderbotSmallConfig, BlenderbotSmallTokenizer, is_tf_available
from transformers.testing_utils import require_tf, require_tokenizers, slow
from transformers.utils import cached_property
from ...test_configuration_common import ConfigTeste... | 325 |
import argparse
import fairseq
import torch
from transformers import UniSpeechSatConfig, UniSpeechSatForCTC, UniSpeechSatForPreTraining, logging
logging.set_verbosity_info()
a_ = logging.get_logger(__name__)
a_ = {
"""post_extract_proj""": """feature_projection.projection""",
"""enc... | 330 | 0 |
'''simple docstring'''
import colorsys
from PIL import Image # type: ignore
def __UpperCamelCase ( UpperCAmelCase , UpperCAmelCase , UpperCAmelCase ):
lowercase__ : Any = x
lowercase__ : List[str] = y
for step in range(_UpperCamelCase ): # noqa: B007
lowercase... | 198 |
from typing import List, Union
import numpy as np
from ..utils import add_end_docstrings, is_torch_available, is_vision_available, logging, requires_backends
from .base import PIPELINE_INIT_ARGS, Pipeline
if is_vision_available():
from PIL import Image
from ..image_utils import load_image
if is_torch_avai... | 330 | 0 |
import inspect
import os
import torch
from transformers import AutoModel
from transformers.testing_utils import mockenv_context
from transformers.trainer_utils import set_seed
import accelerate
from accelerate.accelerator import Accelerator
from accelerate.state import AcceleratorState
from accelerate.te... | 30 |
from typing import Dict, List, Optional, Union
import numpy as np
from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict
from ...image_transforms import (
center_crop,
convert_to_rgb,
get_resize_output_image_size,
normalize,
rescale,
resize,
to_channel_di... | 330 | 0 |
"""simple docstring"""
import json
import logging
import os
import sys
from time import time
from unittest.mock import patch
from transformers.testing_utils import TestCasePlus, require_torch_tpu
logging.basicConfig(level=logging.DEBUG)
lowerCamelCase_ = logging.getLogger()
d... | 191 |
from __future__ import annotations
from typing import Generic, TypeVar
a_ = TypeVar("""T""")
class __lowerCAmelCase ( Generic[T] ):
def __init__( self , __UpperCAmelCase ):
'''simple docstring'''
__lowerCamelCase = data
__lowerCamelCase = self
... | 330 | 0 |
# NOTE: This file is deprecated and will be removed in a future version.
# It only exists so that temporarely `from diffusers.pipelines import DiffusionPipeline` works
from ...utils import deprecate
from ..controlnet.pipeline_flax_controlnet import FlaxStableDiffusionControlNetPipeline # noqa: F401
d... | 230 |
import unittest
from transformers import MraConfig, is_torch_available
from transformers.testing_utils import require_torch, slow, torch_device
from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
if is_torch... | 330 | 0 |
'''simple docstring'''
def __UpperCAmelCase ( a_: int ):
if divisor % 5 == 0 or divisor % 2 == 0:
return 0
_UpperCAmelCase : str = 1
_UpperCAmelCase : Optional[Any] = 1
while repunit:
_UpperCAmelCase : str = (10 * repunit + 1) % d... | 145 |
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_flax_available,
is_tf_available,
is_torch_available,
)
a_ = {"""configuration_encoder_decoder""": ["""EncoderDecoderConfig"""]}
try:
if not is_torch_available():
raise Opti... | 330 | 0 |
'''simple docstring'''
from typing import List, Optional, Union
import torch
from ...models import UNetaDConditionModel, VQModel
from ...pipelines import DiffusionPipeline
from ...pipelines.pipeline_utils import ImagePipelineOutput
from ...schedulers import DDPMScheduler
from ...utils import (
is_accelera... | 42 |
from string import ascii_lowercase, ascii_uppercase
def a__ ( _UpperCamelCase : str ):
if not sentence:
return ""
__lowerCamelCase = dict(zip(_UpperCamelCase ,_UpperCamelCase ) )
return lower_to_upper.get(sentence[0] ,sentence[0] ) +... | 330 | 0 |
import re
def UpperCamelCase ( _A ):
"""simple docstring"""
if len(re.findall("""[ATCG]""", _UpperCamelCase ) ) != len(_UpperCamelCase ):
raise ValueError("""Invalid Strand""" )
return dna.translate(dna.maketrans("""ATCG""", """TAGC""" ) )
if __... | 342 |
from transformers import BertTokenizer, EncoderDecoderModel, SeqaSeqTrainer, SeqaSeqTrainingArguments
from transformers.testing_utils import TestCasePlus, require_torch, slow
from transformers.utils import is_datasets_available
if is_datasets_available():
import datasets
class __lowerCAmelCase ( lo... | 330 | 0 |
'''simple docstring'''
import unittest
from transformers import SPIECE_UNDERLINE
from transformers.models.speechta import SpeechTaTokenizer
from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow
from transformers.tokenization_utils import AddedToken
from ...t... | 34 |
# Copyright 2023 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by appli... | 330 | 0 |
import inspect
import os
import re
from transformers.configuration_utils import PretrainedConfig
from transformers.utils import direct_transformers_import
# All paths are set with the intent you should run this script from the root of the repo with the command
# python utils/check_config_docstrings.py... | 305 |
import random
import unittest
from torch.utils.data import BatchSampler, DataLoader, IterableDataset
from accelerate import Accelerator
from accelerate.data_loader import (
BatchSamplerShard,
DataLoaderDispatcher,
DataLoaderShard,
IterableDatasetShard,
SkipBatchSampler,
SkipDataLoader,
... | 330 | 0 |
from typing import Dict, List, Optional, Union
import numpy as np
from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict
from ...image_transforms import (
center_crop,
get_resize_output_image_size,
normalize,
rescale,
resize,
to_channel_dimension... | 110 |
def a__ ( _UpperCamelCase : int ):
__lowerCamelCase = n ** (1 / 3)
return (val * val * val) == n
if __name__ == "__main__":
print(perfect_cube(27))
print(perfect_cube(4))
| 330 | 0 |
from unittest.mock import patch
import pyspark
from datasets.packaged_modules.spark.spark import (
Spark,
SparkExamplesIterable,
_generate_iterable_examples,
)
from ..utils import (
require_dill_gt_0_3_2,
require_not_windows,
)
def __SCREAMING_SNAKE_CASE ( SCREAMING_SNAKE_CASE ... | 325 |
import argparse
import os
# New Code #
import evaluate
import torch
from datasets import load_dataset
from torch.optim import AdamW
from torch.utils.data import DataLoader
from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed
from accelerate import A... | 330 | 0 |
'''simple docstring'''
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
__a: Optional[int]... | 198 |
import logging
import os
import threading
import time
try:
import warnings
except ImportError:
a_ = None
try:
import msvcrt
except ImportError:
a_ = None
try:
import fcntl
except ImportError:
a_ = None
# Backward compatibility
# ---------------------------------------... | 330 | 0 |
__a = {
0: '0',
1: '1',
2: '2',
3: '3',
4: '4',
5: '5',
6: '6',
7: '7',
8: '8',
9: '9',
1_0: 'a',
1_1: 'b',
1_2: 'c',
1_3: 'd',
1_4: 'e',
1_5: 'f',
}
def a ( snake_case__: float ):
'''simp... | 30 |
import copy
import inspect
import unittest
import numpy as np
from huggingface_hub import hf_hub_download
from transformers import TimesformerConfig
from transformers.models.auto import get_values
from transformers.testing_utils import require_torch, require_vision, slow, torch_device
from transformers.utils impo... | 330 | 0 |
"""simple docstring"""
import os
from collections.abc import Iterator
def __lowerCamelCase ( a_ : str = "." ) -> List[str]:
for dir_path, dir_names, filenames in os.walk(_UpperCamelCase ):
__SCREAMING_SNAKE_CASE :Union[str, Any] = [d ... | 191 |
def a__ ( _UpperCamelCase : int ):
if not isinstance(_UpperCamelCase ,_UpperCamelCase ):
__lowerCamelCase = F"""Input value of [number={number}] must be an integer"""
raise TypeError(_UpperCamelCase )
if number < 0:
return False
__lowerCa... | 330 | 0 |
class a :
def __init__( self :Any ,__lowercase :List[Any] ,__lowercase :List[Any] ,__lowercase :str ):
snake_case__ : Union[str, Any] = None
snake_case__ : int = None
snake_case__ : Optional[int] = graph
... | 230 |
import gc
import unittest
from parameterized import parameterized
from diffusers import FlaxUNetaDConditionModel
from diffusers.utils import is_flax_available
from diffusers.utils.testing_utils import load_hf_numpy, require_flax, slow
if is_flax_available():
import jax
import jax.numpy as jnp
@slow
@requ... | 330 | 0 |
'''simple docstring'''
from typing import Callable, List, Optional, Tuple, Union
import torch
from transformers import CLIPTextModel, CLIPTokenizer
from ...configuration_utils import ConfigMixin, register_to_config
from ...models import ModelMixin, TransformeraDModel, VQModel
from ...schedulers import VQDiffusi... | 145 |
from typing import TYPE_CHECKING
from ....utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available
a_ = {"""configuration_mmbt""": ["""MMBTConfig"""]}
try:
if not is_torch_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
... | 330 | 0 |
'''simple docstring'''
from typing import TYPE_CHECKING
from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available
lowercase : Union[str, Any] = {
"configuration_altclip": [
"ALTCLIP_PRETRAINED_CONFIG_ARCHIVE_MAP",
"AltC... | 42 |
import gc
import importlib.metadata
import tempfile
import unittest
from packaging import version
from transformers import (
AutoModel,
AutoModelForCausalLM,
AutoModelForSeqaSeqLM,
AutoModelForSequenceClassification,
AutoTokenizer,
BitsAndBytesConfig,
pipeline,
)
from transformers.test... | 330 | 0 |
from __future__ import annotations
def UpperCamelCase ( _A ):
"""simple docstring"""
if not nums:
return 0
__magic_name__ : Dict = nums[0]
__magic_name__ : Union[str, Any] = 0
for num in nums[1:]:
_... | 342 |
from dataclasses import dataclass
from typing import Dict, Optional, Tuple, Union
import torch
import torch.nn as nn
from ..configuration_utils import ConfigMixin, register_to_config
from ..utils import BaseOutput, apply_forward_hook
from .attention_processor import AttentionProcessor, AttnProcessor
from .modelin... | 330 | 0 |
'''simple docstring'''
from dataclasses import dataclass
from typing import List, Optional, Union
import numpy as np
import PIL
from ...utils import BaseOutput, OptionalDependencyNotAvailable, is_torch_available, is_transformers_available
from .timesteps import (
fastaa_timesteps,
smartaa_timestep... | 34 |
import argparse
from typing import Dict
import tensorflow as tf
import torch
from tqdm import tqdm
from transformers import BigBirdPegasusConfig, BigBirdPegasusForConditionalGeneration
a_ = [
# tf -> hf
("""/""", """."""),
("""layer_""", """layers."""),
("""kernel""", """weight"""),
... | 330 | 0 |
from .glue import GlueDataset, GlueDataTrainingArguments
from .language_modeling import (
LineByLineTextDataset,
LineByLineWithRefDataset,
LineByLineWithSOPTextDataset,
TextDataset,
TextDatasetForNextSentencePrediction,
)
from .squad import SquadDataset, SquadDataTrainingArguments
| 305 |
import uuid
from typing import Any, Dict, List, Optional, Union
from ..utils import add_end_docstrings, is_tf_available, is_torch_available, logging
from .base import PIPELINE_INIT_ARGS, Pipeline
if is_tf_available():
import tensorflow as tf
if is_torch_available():
import torch
a_ = logging.get... | 330 | 0 |
'''simple docstring'''
from io import BytesIO
from typing import List, Union
import requests
from ..utils import add_end_docstrings, is_decord_available, is_torch_available, logging, requires_backends
from .base import PIPELINE_INIT_ARGS, Pipeline
if is_decord_available():
import numpy as np
from ... | 331 |
'''simple docstring'''
from typing import List, Union
from ..utils import (
add_end_docstrings,
is_tf_available,
is_torch_available,
is_vision_available,
logging,
requires_backends,
)
from .base import PIPELINE_INIT_ARGS, Pipeline
if is_vision_available():
from PIL import Image
... | 331 | 1 |
'''simple docstring'''
import os
import sys
import unittest
lowerCAmelCase :Dict = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
sys.path.append(os.path.join(git_repo_path, '''utils'''))
import get_test_info # noqa: E402
from get_test_info import ( # noqa: E402
get... | 331 |
'''simple docstring'''
import argparse
import logging
import os
from pathlib import Path
from typing import Any, Dict
import pytorch_lightning as pl
from pytorch_lightning.utilities import rank_zero_info
from transformers import (
AdamW,
AutoConfig,
AutoModel,
AutoModelForPreTraining,
AutoMode... | 331 | 1 |
'''simple docstring'''
import copy
from typing import Any, Dict, List, Optional, Union
import numpy as np
from ...audio_utils import mel_filter_bank, spectrogram, window_function
from ...feature_extraction_sequence_utils import SequenceFeatureExtractor
from ...feature_extraction_utils import BatchFeature
from ...... | 331 |
'''simple docstring'''
import torch
from diffusers import DDPMScheduler
from .test_schedulers import SchedulerCommonTest
class _lowerCamelCase ( lowercase__ ):
'''simple docstring'''
A_ : Dict = (DDPMScheduler,)
def __lowerCAmelCase ( self : Any ... | 331 | 1 |
'''simple docstring'''
import unittest
from transformers import is_torch_available
from transformers.testing_utils import require_torch, slow, torch_device
from ...generation.test_utils import GenerationTesterMixin
from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTest... | 331 |
'''simple docstring'''
import random
import unittest
import torch
from diffusers import IFInpaintingPipeline
from diffusers.utils import floats_tensor
from diffusers.utils.import_utils import is_xformers_available
from diffusers.utils.testing_utils import skip_mps, torch_device
from ..pipeline_params import (
... | 331 | 1 |
'''simple docstring'''
import math
import random
from typing import Any
from .hill_climbing import SearchProblem
def lowerCamelCase ( lowerCAmelCase : List[Any] , lowerCAmelCase : bool = True , lowerCAmelCase : float = math.inf , lowerCAmelCase : float = -math.inf , lowerCAmelCase :... | 331 |
'''simple docstring'''
import unittest
from transformers import DebertaConfig, is_torch_available
from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device
from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTeste... | 331 | 1 |
'''simple docstring'''
import itertools
from dataclasses import dataclass
from typing import List, Optional
import pyarrow as pa
import pyarrow.parquet as pq
import datasets
from datasets.table import table_cast
lowerCAmelCase :Union[str, Any] = datasets.utils.logging.get_logger(__name__)
@dataclass
... | 331 |
'''simple docstring'''
class _lowerCamelCase : # Public class to implement a graph
'''simple docstring'''
def __init__( self : List[Any] , _A : int , _A : int , _A : list[list[bool]] ) -> None:
__magic_name__ : Tuple = row
__... | 331 | 1 |
'''simple docstring'''
from dataclasses import dataclass
from typing import Dict, Optional, Tuple, Union
import torch
import torch.nn as nn
from ..configuration_utils import ConfigMixin, register_to_config
from ..utils import BaseOutput, apply_forward_hook
from .attention_processor import AttentionProcessor, Attn... | 331 |
'''simple docstring'''
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_sentencepiece_available,
is_tokenizers_available,
is_torch_available,
is_vision_available,
)
lowerCAmelCase :Tuple = {'''processing_layoutxlm''': ['''L... | 331 | 1 |
'''simple docstring'''
# Copyright 2023 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# U... | 331 |
'''simple docstring'''
from __future__ import annotations
from math import ceil, floor, sqrt
def lowerCamelCase ( lowerCAmelCase : int = 200_0000 ):
"""simple docstring"""
__magic_name__ : list[int] = [0]
__magic_name__ : int
for idx in range(1 , ceil(sqrt... | 331 | 1 |
'''simple docstring'''
from collections import UserDict
from typing import List, Union
from ..utils import (
add_end_docstrings,
is_tf_available,
is_torch_available,
is_vision_available,
logging,
requires_backends,
)
from .base import PIPELINE_INIT_ARGS, Pipeline
if is_vision_available():... | 331 |
'''simple docstring'''
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_flax_available,
is_sentencepiece_available,
is_tf_available,
is_tokenizers_available,
is_torch_available,
)
lowerCAmelCase :str = {'''configuration... | 331 | 1 |
'''simple docstring'''
import argparse
from collections import OrderedDict
from pathlib import Path
import torch
from transformers import (
VisualBertConfig,
VisualBertForMultipleChoice,
VisualBertForPreTraining,
VisualBertForQuestionAnswering,
VisualBertForVisualReasoning,
)
from transformers... | 331 |
'''simple docstring'''
from typing import Dict, List, Optional, Union
import numpy as np
from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict
from ...image_transforms import (
center_crop,
get_resize_output_image_size,
normalize,
rescale,
resize,
to_channel... | 331 | 1 |
'''simple docstring'''
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_sentencepiece_available,
is_tokenizers_available,
is_torch_available,
is_vision_available,
)
lowerCAmelCase :Tuple = {'''processing_layoutxlm''': ['''L... | 331 |
'''simple docstring'''
from __future__ import annotations
from math import pi
# Define the Reduced Planck Constant ℏ (H bar), speed of light C, value of
# Pi and the function
lowerCAmelCase :Tuple = 1.0_5_4_5_7_1_8_1_7E-3_4 # unit of ℏ : J * s
lowerCAmelCase :Union[str, Any] = 3E8 # unit ... | 331 | 1 |
'''simple docstring'''
import logging
from pathlib import Path
import numpy as np
import pytorch_lightning as pl
import torch
from pytorch_lightning.callbacks import EarlyStopping, ModelCheckpoint
from pytorch_lightning.utilities import rank_zero_only
from utils_rag import save_json
def lowerCamelCase ( l... | 331 |
'''simple docstring'''
import os
from itertools import chain
from random import randrange, shuffle
import pytest
from .sola import PokerHand
lowerCAmelCase :Tuple = (
'''4S 3H 2C 7S 5H''',
'''9D 8H 2C 6S 7H''',
'''2D 6D 9D TH 7D''',
'''TC 8C 2S JH 6C''',
'''JH 8S TH AH QH''',
'... | 331 | 1 |
'''simple docstring'''
import gc
import unittest
import numpy as np
import torch
from transformers import CLIPTextConfig, CLIPTextModel, XLMRobertaTokenizer
from diffusers import AltDiffusionPipeline, AutoencoderKL, DDIMScheduler, PNDMScheduler, UNetaDConditionModel
from diffusers.pipelines.alt_diffusion.modeling... | 331 |
'''simple docstring'''
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_flax_available,
is_tf_available,
is_torch_available,
)
lowerCAmelCase :Union[str, Any] = {
'''configuration_vision_encoder_decoder''': ['''VisionEncode... | 331 | 1 |
'''simple docstring'''
import argparse
import json
from typing import List
from ltp import LTP
from transformers import BertTokenizer
def lowerCamelCase ( lowerCAmelCase : int ):
"""simple docstring"""
if (
(cp >= 0x4e00 and cp <= 0x9fff)
or (cp >= 0x3400 and cp <= 0x4dbf) #
... | 331 |
'''simple docstring'''
from collections import UserDict
from typing import List, Union
from ..utils import (
add_end_docstrings,
is_tf_available,
is_torch_available,
is_vision_available,
logging,
requires_backends,
)
from .base import PIPELINE_INIT_ARGS, Pipeline
if is_vision_available():... | 331 | 1 |
'''simple docstring'''
def lowerCamelCase ( lowerCAmelCase : float ):
"""simple docstring"""
if edge <= 0 or not isinstance(lowerCAmelCase , lowerCAmelCase ):
raise ValueError('Length must be a positive.' )
return 3 * ((25 + 10 * (5 ** (1 / 2))) ** (1 / 2)) * (edge**2)
def lowerCam... | 331 |
'''simple docstring'''
import json
import os
import re
import shutil
import tempfile
import unittest
from typing import Tuple
from transformers import AddedToken, BatchEncoding, ByTaTokenizer
from transformers.utils import cached_property, is_tf_available, is_torch_available
from ...test_tokenization_common impor... | 331 | 1 |
'''simple docstring'''
def lowerCamelCase ( lowerCAmelCase : int = 1000 ):
"""simple docstring"""
__magic_name__ : List[Any] = -1
__magic_name__ : List[str] = 0
for a in range(1 , n // 3 ):
# Solving the two equations a**2+b**2=c**2 and a+b+c=N elimin... | 331 |
'''simple docstring'''
import unittest
from transformers import is_torch_available
from transformers.testing_utils import require_torch
if is_torch_available():
import torch
from transformers.generation import DisjunctiveConstraint
@require_torch
class _lowerCamelCase ( unittest.TestCas... | 331 | 1 |
'''simple docstring'''
def lowerCamelCase ( lowerCAmelCase : int ):
"""simple docstring"""
if not isinstance(lowerCAmelCase , lowerCAmelCase ):
raise ValueError('Input must be an integer' )
if input_num <= 0:
raise ValueError('Input must be positive' )
return sum(
divisor for d... | 331 |
'''simple docstring'''
import re
import tempfile
from pathlib import Path
import pytest
import yaml
from datasets.utils.readme import ReadMe
# @pytest.fixture
# def example_yaml_structure():
lowerCAmelCase :List[str] = yaml.safe_load(
'''\
name: ""
allow_empty: false
allow_empty_text: true
subse... | 331 | 1 |
'''simple docstring'''
import random
import unittest
import torch
from diffusers import IFInpaintingPipeline
from diffusers.utils import floats_tensor
from diffusers.utils.import_utils import is_xformers_available
from diffusers.utils.testing_utils import skip_mps, torch_device
from ..pipeline_params import (
... | 331 |
'''simple docstring'''
from __future__ import annotations
import os
import tempfile
import unittest
from transformers import ConvBertConfig, is_tf_available
from transformers.testing_utils import require_tf, slow
from ...test_configuration_common import ConfigTester
from ...test_modeling_tf_common import TFModel... | 331 | 1 |
'''simple docstring'''
import logging
import os
from typing import List, Tuple
import numpy as np
import psutil
import torch
import torch.distributed as dist
from transformers import RagRetriever
lowerCAmelCase :List[Any] = logging.getLogger(__name__)
class _lowerCamelCase ( lowercase__ ... | 331 |
'''simple docstring'''
import os
import tempfile
from functools import partial
from unittest import TestCase
from unittest.mock import patch
import numpy as np
import pytest
from datasets.arrow_dataset import Dataset
from datasets.search import ElasticSearchIndex, FaissIndex, MissingIndex
from .utils import requ... | 331 | 1 |
'''simple docstring'''
import torch
from diffusers import DDIMParallelScheduler
from .test_schedulers import SchedulerCommonTest
class _lowerCamelCase ( lowercase__ ):
'''simple docstring'''
A_ : str = (DDIMParallelScheduler,)
A_ : Optional[int] = (("""... | 331 |
'''simple docstring'''
import logging
from pathlib import Path
import numpy as np
import pytorch_lightning as pl
import torch
from pytorch_lightning.callbacks import EarlyStopping, ModelCheckpoint
from pytorch_lightning.utilities import rank_zero_only
from utils_rag import save_json
def lowerCamelCase ( l... | 331 | 1 |
'''simple docstring'''
import inspect
import unittest
from huggingface_hub import hf_hub_download
from transformers import ConvNextConfig, UperNetConfig
from transformers.testing_utils import require_torch, require_torch_multi_gpu, require_vision, slow, torch_device
from transformers.utils import is_torch_availab... | 331 |
'''simple docstring'''
def lowerCamelCase ( ):
"""simple docstring"""
return 1
def lowerCamelCase ( lowerCAmelCase : int ):
"""simple docstring"""
return 0 if x < 0 else two_pence(x - 2 ) + one_pence()
def lowerCamelCase ( lowerCAmelCase : int ):
"""s... | 331 | 1 |
'''simple docstring'''
from __future__ import annotations
import inspect
import unittest
from transformers import ViTConfig
from transformers.testing_utils import require_tf, require_vision, slow
from transformers.utils import cached_property, is_tf_available, is_vision_available
from ...test_configuration_commo... | 331 |
'''simple docstring'''
from ..utils import DummyObject, requires_backends
class _lowerCamelCase ( metaclass=lowercase__ ):
'''simple docstring'''
A_ : Optional[Any] = ["""flax""", """transformers"""]
def __init__( self : Union[str, Any] , *_A : ... | 331 | 1 |
'''simple docstring'''
import unittest
from pathlib import Path
from tempfile import NamedTemporaryFile, TemporaryDirectory
from transformers import BertConfig, BertTokenizerFast, FeatureExtractionPipeline
from transformers.convert_graph_to_onnx import (
convert,
ensure_valid_input,
generate_identified... | 331 |
'''simple docstring'''
from typing import List, Union
from ..utils import (
add_end_docstrings,
is_tf_available,
is_torch_available,
is_vision_available,
logging,
requires_backends,
)
from .base import PIPELINE_INIT_ARGS, Pipeline
if is_vision_available():
from PIL import Image
... | 331 | 1 |
'''simple docstring'''
def lowerCamelCase ( lowerCAmelCase : str , lowerCAmelCase : int ):
"""simple docstring"""
__magic_name__ : Optional[int] = word.split()
def justify(lowerCAmelCase : list , lowerCAmelCase : int , lowerCAmelCase : int ) -> str:
__m... | 331 |
'''simple docstring'''
import argparse
import logging
import os
from pathlib import Path
from typing import Any, Dict
import pytorch_lightning as pl
from pytorch_lightning.utilities import rank_zero_info
from transformers import (
AdamW,
AutoConfig,
AutoModel,
AutoModelForPreTraining,
AutoMode... | 331 | 1 |
'''simple docstring'''
def lowerCamelCase ( lowerCAmelCase : int , lowerCAmelCase : list ):
"""simple docstring"""
_enforce_args(lowerCAmelCase , lowerCAmelCase )
if n == 0:
return 0
__magic_name__ : List[str] = float('-inf' )
for i in range(1 , n + 1 ):
__... | 331 |
'''simple docstring'''
import torch
from diffusers import DDPMScheduler
from .test_schedulers import SchedulerCommonTest
class _lowerCamelCase ( lowercase__ ):
'''simple docstring'''
A_ : Dict = (DDPMScheduler,)
def __lowerCAmelCase ( self : Any ... | 331 | 1 |
'''simple docstring'''
import unittest
from transformers import is_torch_available
from transformers.testing_utils import require_torch
if is_torch_available():
import torch
from transformers.generation import DisjunctiveConstraint
@require_torch
class _lowerCamelCase ( unittest.TestCas... | 331 |
'''simple docstring'''
import random
import unittest
import torch
from diffusers import IFInpaintingPipeline
from diffusers.utils import floats_tensor
from diffusers.utils.import_utils import is_xformers_available
from diffusers.utils.testing_utils import skip_mps, torch_device
from ..pipeline_params import (
... | 331 | 1 |
'''simple docstring'''
def lowerCamelCase ( lowerCAmelCase : int ):
"""simple docstring"""
return str(lowerCAmelCase ) == str(lowerCAmelCase )[::-1]
def lowerCamelCase ( lowerCAmelCase : int ):
"""simple docstring"""
return int(lowerCAmelCase ) + int(str(lowerCAme... | 331 |
'''simple docstring'''
import unittest
from transformers import DebertaConfig, is_torch_available
from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device
from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTeste... | 331 | 1 |
'''simple docstring'''
lowerCAmelCase :Optional[int] = '''
# Transformers installation
! pip install transformers datasets
# To install from source instead of the last release, comment the command above and uncomment the following one.
# ! pip install git+https://github.com/huggingface/transformers.git
'... | 331 |
'''simple docstring'''
class _lowerCamelCase : # Public class to implement a graph
'''simple docstring'''
def __init__( self : List[Any] , _A : int , _A : int , _A : list[list[bool]] ) -> None:
__magic_name__ : Tuple = row
__... | 331 | 1 |
'''simple docstring'''
lowerCAmelCase :Tuple = '''ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'''
def lowerCamelCase ( lowerCAmelCase : bytes ):
"""simple docstring"""
if not isinstance(lowerCAmelCase , lowerCAmelCase ):
__magic_name__ : Dict ... | 331 |
'''simple docstring'''
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_sentencepiece_available,
is_tokenizers_available,
is_torch_available,
is_vision_available,
)
lowerCAmelCase :Tuple = {'''processing_layoutxlm''': ['''L... | 331 | 1 |
'''simple docstring'''
# Copyright 2023 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
... | 331 |
'''simple docstring'''
from __future__ import annotations
from math import ceil, floor, sqrt
def lowerCamelCase ( lowerCAmelCase : int = 200_0000 ):
"""simple docstring"""
__magic_name__ : list[int] = [0]
__magic_name__ : int
for idx in range(1 , ceil(sqrt... | 331 | 1 |
'''simple docstring'''
import argparse
import logging
import os
from pathlib import Path
from typing import Any, Dict
import pytorch_lightning as pl
from pytorch_lightning.utilities import rank_zero_info
from transformers import (
AdamW,
AutoConfig,
AutoModel,
AutoModelForPreTraining,
AutoMode... | 331 |
'''simple docstring'''
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_flax_available,
is_sentencepiece_available,
is_tf_available,
is_tokenizers_available,
is_torch_available,
)
lowerCAmelCase :str = {'''configuration... | 331 | 1 |
'''simple docstring'''
from dataclasses import dataclass
from typing import Optional, Tuple, Union
import torch
import torch.nn as nn
from ..configuration_utils import ConfigMixin, register_to_config
from ..utils import BaseOutput, apply_forward_hook
from .modeling_utils import ModelMixin
from .vae import Decoder... | 331 |
'''simple docstring'''
from typing import Dict, List, Optional, Union
import numpy as np
from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict
from ...image_transforms import (
center_crop,
get_resize_output_image_size,
normalize,
rescale,
resize,
to_channel... | 331 | 1 |
'''simple docstring'''
lowerCAmelCase :List[str] = '''
# Transformers installation
! pip install transformers datasets
# To install from source instead of the last release, comment the command above and uncomment the following one.
# ! pip install git+https://github.com/huggingface/transformers.git
'''
... | 331 |
'''simple docstring'''
from __future__ import annotations
from math import pi
# Define the Reduced Planck Constant ℏ (H bar), speed of light C, value of
# Pi and the function
lowerCAmelCase :Tuple = 1.0_5_4_5_7_1_8_1_7E-3_4 # unit of ℏ : J * s
lowerCAmelCase :Union[str, Any] = 3E8 # unit ... | 331 | 1 |
'''simple docstring'''
import collections
import tempfile
import unittest
import numpy as np
from transformers.testing_utils import (
is_pt_flax_cross_test,
require_flax,
require_torch,
require_vision,
slow,
torch_device,
)
from transformers.utils import is_flax_available, is_torch_availab... | 331 |
'''simple docstring'''
import os
from itertools import chain
from random import randrange, shuffle
import pytest
from .sola import PokerHand
lowerCAmelCase :Tuple = (
'''4S 3H 2C 7S 5H''',
'''9D 8H 2C 6S 7H''',
'''2D 6D 9D TH 7D''',
'''TC 8C 2S JH 6C''',
'''JH 8S TH AH QH''',
'... | 331 | 1 |
'''simple docstring'''
from ...configuration_utils import PretrainedConfig
from ...utils import logging
lowerCAmelCase :Optional[Any] = logging.get_logger(__name__)
lowerCAmelCase :Tuple = {
'''microsoft/markuplm-base''': '''https://huggingface.co/microsoft/markuplm-base/resolve/main/co... | 331 |
'''simple docstring'''
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_flax_available,
is_tf_available,
is_torch_available,
)
lowerCAmelCase :Union[str, Any] = {
'''configuration_vision_encoder_decoder''': ['''VisionEncode... | 331 | 1 |
'''simple docstring'''
import unittest
from transformers import TrOCRConfig
from transformers.testing_utils import is_torch_available, require_torch, torch_device
from ...generation.test_utils import GenerationTesterMixin
from ...test_configuration_common import ConfigTester
from ...test_modeling_common import Mo... | 331 |
'''simple docstring'''
from collections import UserDict
from typing import List, Union
from ..utils import (
add_end_docstrings,
is_tf_available,
is_torch_available,
is_vision_available,
logging,
requires_backends,
)
from .base import PIPELINE_INIT_ARGS, Pipeline
if is_vision_available():... | 331 | 1 |
'''simple docstring'''
# Lint as: python3
import dataclasses
import re
from dataclasses import dataclass
from functools import total_ordering
from typing import Optional, Union
lowerCAmelCase :Union[str, Any] = re.compile(r'''^(?P<major>\d+)''' r'''\.(?P<minor>\d+)''' r'''\.(?P<patch>\d+)$''')
@total_o... | 331 |
'''simple docstring'''
import json
import os
import re
import shutil
import tempfile
import unittest
from typing import Tuple
from transformers import AddedToken, BatchEncoding, ByTaTokenizer
from transformers.utils import cached_property, is_tf_available, is_torch_available
from ...test_tokenization_common impor... | 331 | 1 |
'''simple docstring'''
import importlib
import json
import os
from collections import OrderedDict
from typing import Dict, Optional, Union
# Build the list of all image processors
from ...configuration_utils import PretrainedConfig
from ...dynamic_module_utils import get_class_from_dynamic_module, resolve_trust_re... | 331 |
'''simple docstring'''
import unittest
from transformers import is_torch_available
from transformers.testing_utils import require_torch
if is_torch_available():
import torch
from transformers.generation import DisjunctiveConstraint
@require_torch
class _lowerCamelCase ( unittest.TestCas... | 331 | 1 |
'''simple docstring'''
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_flax_available,
is_sentencepiece_available,
is_tf_available,
is_tokenizers_available,
is_torch_available,
)
lowerCAmelCase :Any = {
'''configur... | 331 |
'''simple docstring'''
import re
import tempfile
from pathlib import Path
import pytest
import yaml
from datasets.utils.readme import ReadMe
# @pytest.fixture
# def example_yaml_structure():
lowerCAmelCase :List[str] = yaml.safe_load(
'''\
name: ""
allow_empty: false
allow_empty_text: true
subse... | 331 | 1 |
'''simple docstring'''
import importlib.metadata
import warnings
from copy import deepcopy
from packaging import version
from ..utils import logging
from .import_utils import is_accelerate_available, is_bitsandbytes_available
if is_bitsandbytes_available():
import bitsandbytes as bnb
import torch
... | 331 |
'''simple docstring'''
from __future__ import annotations
import os
import tempfile
import unittest
from transformers import ConvBertConfig, is_tf_available
from transformers.testing_utils import require_tf, slow
from ...test_configuration_common import ConfigTester
from ...test_modeling_tf_common import TFModel... | 331 | 1 |
'''simple docstring'''
import argparse
import json
from pathlib import Path
import requests
import torch
from huggingface_hub import hf_hub_download
from PIL import Image
from timm import create_model
from timm.data import resolve_data_config
from timm.data.transforms_factory import create_transform
from transfor... | 331 |
'''simple docstring'''
import os
import tempfile
from functools import partial
from unittest import TestCase
from unittest.mock import patch
import numpy as np
import pytest
from datasets.arrow_dataset import Dataset
from datasets.search import ElasticSearchIndex, FaissIndex, MissingIndex
from .utils import requ... | 331 | 1 |
'''simple docstring'''
import pickle
import unittest
import torch
from accelerate import Accelerator
from accelerate.state import AcceleratorState
from accelerate.test_utils import require_cpu
@require_cpu
class _lowerCamelCase ( unittest.TestCase ):
'''simple docstring'''
def __... | 331 |
'''simple docstring'''
import logging
from pathlib import Path
import numpy as np
import pytorch_lightning as pl
import torch
from pytorch_lightning.callbacks import EarlyStopping, ModelCheckpoint
from pytorch_lightning.utilities import rank_zero_only
from utils_rag import save_json
def lowerCamelCase ( l... | 331 | 1 |
'''simple docstring'''
from __future__ import annotations
import os
from typing import Any
import requests
lowerCAmelCase :Dict = '''https://api.github.com'''
# https://docs.github.com/en/free-pro-team@latest/rest/reference/users#get-the-authenticated-user
lowerCAmelCase :Dict = BASE_URL +... | 331 |
'''simple docstring'''
def lowerCamelCase ( ):
"""simple docstring"""
return 1
def lowerCamelCase ( lowerCAmelCase : int ):
"""simple docstring"""
return 0 if x < 0 else two_pence(x - 2 ) + one_pence()
def lowerCamelCase ( lowerCAmelCase : int ):
"""s... | 331 | 1 |
'''simple docstring'''
import unittest
from transformers.testing_utils import CaptureStdout
from transformers.tools.python_interpreter import evaluate
def lowerCamelCase ( lowerCAmelCase : int ):
"""simple docstring"""
return x + 2
class _lowerCamelCase ( unittest.TestCas... | 331 |
'''simple docstring'''
from ..utils import DummyObject, requires_backends
class _lowerCamelCase ( metaclass=lowercase__ ):
'''simple docstring'''
A_ : Optional[Any] = ["""flax""", """transformers"""]
def __init__( self : Union[str, Any] , *_A : ... | 331 | 1 |
'''simple docstring'''
from __future__ import annotations
import copy
import inspect
import unittest
import numpy as np
from transformers import is_tf_available, is_vision_available
from transformers.models.auto import get_values
from transformers.testing_utils import require_tf, slow
from transformers.utils imp... | 331 |
'''simple docstring'''
from typing import List, Union
from ..utils import (
add_end_docstrings,
is_tf_available,
is_torch_available,
is_vision_available,
logging,
requires_backends,
)
from .base import PIPELINE_INIT_ARGS, Pipeline
if is_vision_available():
from PIL import Image
... | 331 | 1 |
'''simple docstring'''
# This model implementation is heavily inspired by https://github.com/haofanwang/ControlNet-for-Diffusers/
import gc
import random
import tempfile
import unittest
import numpy as np
import torch
from PIL import Image
from transformers import CLIPTextConfig, CLIPTextModel, CLIPTokenizer
fro... | 331 |
'''simple docstring'''
import argparse
import logging
import os
from pathlib import Path
from typing import Any, Dict
import pytorch_lightning as pl
from pytorch_lightning.utilities import rank_zero_info
from transformers import (
AdamW,
AutoConfig,
AutoModel,
AutoModelForPreTraining,
AutoMode... | 331 | 1 |
'''simple docstring'''
def lowerCamelCase ( ):
"""simple docstring"""
return 1
def lowerCamelCase ( lowerCAmelCase : int ):
"""simple docstring"""
return 0 if x < 0 else two_pence(x - 2 ) + one_pence()
def lowerCamelCase ( lowerCAmelCase : int ):
"""s... | 331 |
'''simple docstring'''
import torch
from diffusers import DDPMScheduler
from .test_schedulers import SchedulerCommonTest
class _lowerCamelCase ( lowercase__ ):
'''simple docstring'''
A_ : Dict = (DDPMScheduler,)
def __lowerCAmelCase ( self : Any ... | 331 | 1 |
'''simple docstring'''
import importlib
import sys
from argparse import REMAINDER, ArgumentParser
from pathlib import Path
import torch_xla.distributed.xla_multiprocessing as xmp
def lowerCamelCase ( ):
"""simple docstring"""
__magic_name__ : int = ArgumentParser(
desc... | 331 |
'''simple docstring'''
import random
import unittest
import torch
from diffusers import IFInpaintingPipeline
from diffusers.utils import floats_tensor
from diffusers.utils.import_utils import is_xformers_available
from diffusers.utils.testing_utils import skip_mps, torch_device
from ..pipeline_params import (
... | 331 | 1 |
'''simple docstring'''
from __future__ import annotations
import math
def lowerCamelCase ( lowerCAmelCase : float , lowerCAmelCase : int ):
"""simple docstring"""
__magic_name__ : List[str] = u
for i in range(1 , lowerCAmelCase ):
__magic_name__ : st... | 331 |
'''simple docstring'''
import unittest
from transformers import DebertaConfig, is_torch_available
from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device
from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTeste... | 331 | 1 |
'''simple docstring'''
from __future__ import annotations
import inspect
import unittest
import numpy as np
from transformers import ResNetConfig
from transformers.testing_utils import require_tf, require_vision, slow
from transformers.utils import cached_property, is_tf_available, is_vision_available
from ...t... | 331 |
'''simple docstring'''
class _lowerCamelCase : # Public class to implement a graph
'''simple docstring'''
def __init__( self : List[Any] , _A : int , _A : int , _A : list[list[bool]] ) -> None:
__magic_name__ : Tuple = row
__... | 331 | 1 |
'''simple docstring'''
import ast
import os
import re
import shutil
import tempfile
import unittest
from unittest import mock
import torch
from accelerate.test_utils.examples import compare_against_test
from accelerate.test_utils.testing import TempDirTestCase, require_trackers, run_command, slow
from accelerate.... | 331 |
'''simple docstring'''
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_sentencepiece_available,
is_tokenizers_available,
is_torch_available,
is_vision_available,
)
lowerCAmelCase :Tuple = {'''processing_layoutxlm''': ['''L... | 331 | 1 |
'''simple docstring'''
from __future__ import annotations
import random
# Maximum size of the population. Bigger could be faster but is more memory expensive.
lowerCAmelCase :Optional[int] = 2_0_0
# Number of elements selected in every generation of evolution. The selection takes
# place from best to ... | 331 |
'''simple docstring'''
from __future__ import annotations
from math import ceil, floor, sqrt
def lowerCamelCase ( lowerCAmelCase : int = 200_0000 ):
"""simple docstring"""
__magic_name__ : list[int] = [0]
__magic_name__ : int
for idx in range(1 , ceil(sqrt... | 331 | 1 |
'''simple docstring'''
import argparse
from pathlib import Path
import torch
from packaging import version
from torch.onnx import export
from diffusers import AutoencoderKL
lowerCAmelCase :Optional[int] = version.parse(version.parse(torch.__version__).base_version) < version.parse('''1.11''')
de... | 331 |
'''simple docstring'''
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_flax_available,
is_sentencepiece_available,
is_tf_available,
is_tokenizers_available,
is_torch_available,
)
lowerCAmelCase :str = {'''configuration... | 331 | 1 |
'''simple docstring'''
import io
import os
import unicodedata
from typing import Any, Dict, List, Optional, Tuple
import sentencepiece as spm
from ...tokenization_utils import PreTrainedTokenizer
from ...utils import logging
lowerCAmelCase :Any = logging.get_logger(__name__)
lowerCAmelCase :Tup... | 331 |
'''simple docstring'''
from typing import Dict, List, Optional, Union
import numpy as np
from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict
from ...image_transforms import (
center_crop,
get_resize_output_image_size,
normalize,
rescale,
resize,
to_channel... | 331 | 1 |
'''simple docstring'''
import argparse
from copy import deepcopy
import numpy as np
from datasets import ClassLabel, DatasetDict, load_dataset
from evaluate import load
from transformers import (
AutoModelForSequenceClassification,
AutoTokenizer,
DataCollatorWithPadding,
Trainer,
TrainerCallba... | 331 |
'''simple docstring'''
from __future__ import annotations
from math import pi
# Define the Reduced Planck Constant ℏ (H bar), speed of light C, value of
# Pi and the function
lowerCAmelCase :Tuple = 1.0_5_4_5_7_1_8_1_7E-3_4 # unit of ℏ : J * s
lowerCAmelCase :Union[str, Any] = 3E8 # unit ... | 331 | 1 |
'''simple docstring'''
# A Bipartite Graph is a graph whose vertices can be divided into two independent sets,
# U and V such that every edge (u, v) either connects a vertex from U to V or a vertex
# from V to U. In other words, for every edge (u, v), either u belongs to U and v to V,
# or u belongs to V and v to U... | 331 |
'''simple docstring'''
import os
from itertools import chain
from random import randrange, shuffle
import pytest
from .sola import PokerHand
lowerCAmelCase :Tuple = (
'''4S 3H 2C 7S 5H''',
'''9D 8H 2C 6S 7H''',
'''2D 6D 9D TH 7D''',
'''TC 8C 2S JH 6C''',
'''JH 8S TH AH QH''',
'... | 331 | 1 |
'''simple docstring'''
import unittest
from transformers import is_flax_available
from transformers.testing_utils import require_flax, require_sentencepiece, require_tokenizers, require_torch, slow
if is_flax_available():
import optax
from flax.training.common_utils import onehot
from trans... | 331 |
'''simple docstring'''
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_flax_available,
is_tf_available,
is_torch_available,
)
lowerCAmelCase :Union[str, Any] = {
'''configuration_vision_encoder_decoder''': ['''VisionEncode... | 331 | 1 |
'''simple docstring'''
from __future__ import annotations
def lowerCamelCase ( lowerCAmelCase : list , lowerCAmelCase : int , lowerCAmelCase : int , lowerCAmelCase : int ):
"""simple docstring"""
__magic_name__ : List[Any] = []
__magic_name__ , __magic... | 331 |
'''simple docstring'''
from collections import UserDict
from typing import List, Union
from ..utils import (
add_end_docstrings,
is_tf_available,
is_torch_available,
is_vision_available,
logging,
requires_backends,
)
from .base import PIPELINE_INIT_ARGS, Pipeline
if is_vision_available():... | 331 | 1 |
'''simple docstring'''
from __future__ import annotations
def lowerCamelCase ( lowerCAmelCase : list[float] ):
"""simple docstring"""
__magic_name__ : Tuple = 0.00
__magic_name__ : int = 0
for resistor in resistors:
if resistor <= 0:
__magic_name... | 331 |
'''simple docstring'''
import json
import os
import re
import shutil
import tempfile
import unittest
from typing import Tuple
from transformers import AddedToken, BatchEncoding, ByTaTokenizer
from transformers.utils import cached_property, is_tf_available, is_torch_available
from ...test_tokenization_common impor... | 331 | 1 |
'''simple docstring'''
import math_equivalence # From: git+https://github.com/hendrycks/math.git
import datasets
lowerCAmelCase :List[str] = '''\
@article{hendrycksmath2021,
title={Measuring Mathematical Problem Solving With the MATH Dataset},
author={Dan Hendrycks
and Collin Burns
and Sa... | 331 |
'''simple docstring'''
import unittest
from transformers import is_torch_available
from transformers.testing_utils import require_torch
if is_torch_available():
import torch
from transformers.generation import DisjunctiveConstraint
@require_torch
class _lowerCamelCase ( unittest.TestCas... | 331 | 1 |
'''simple docstring'''
import unittest
import numpy as np
from transformers import RoFormerConfig, is_flax_available
from transformers.testing_utils import require_flax, slow
from ...test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor, random_attention_mask
if is_flax_available():
import j... | 331 |
'''simple docstring'''
import re
import tempfile
from pathlib import Path
import pytest
import yaml
from datasets.utils.readme import ReadMe
# @pytest.fixture
# def example_yaml_structure():
lowerCAmelCase :List[str] = yaml.safe_load(
'''\
name: ""
allow_empty: false
allow_empty_text: true
subse... | 331 | 1 |