code
stringlengths
87
55.2k
code_codestyle
int64
0
349
style_context
stringlengths
135
49.1k
style_context_codestyle
int64
0
349
label
int64
0
1
from ....configuration_utils import PretrainedConfig from ....utils import logging _lowerCamelCase : str = logging.get_logger(__name__) _lowerCamelCase : List[str] = { "CarlCochet/trajectory-transformer-halfcheetah-medium-v2": ( "https://huggingface.co/CarlCochet/tra...
336
import os from typing import List, Optional, Union from ...tokenization_utils import PreTrainedTokenizer from ...tokenization_utils_base import AddedToken from ...utils import logging _lowerCamelCase : Optional[Any] = logging.get_logger(__name__) _lowerCamelCase : Dict = {"voca...
336
1
import warnings from ...utils import logging from .image_processing_layoutlmva import LayoutLMvaImageProcessor _lowerCamelCase : str = logging.get_logger(__name__) class __UpperCAmelCase ( lowerCamelCase__ ): def __init__( self : Tuple, *__A :...
336
import inspect import unittest from transformers import MobileNetVaConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import ConfigTester from ...tes...
336
1
import argparse import torch from torch import nn from transformers import MaMaaaConfig, MaMaaaForConditionalGeneration def a__ ( UpperCAmelCase : List[str] ) -> str: UpperCAmelCase : Dict = [ '''encoder.version''', '''decoder.version''', '''model.encoder.ve...
336
from collections import OrderedDict from typing import Any, List, Mapping, Optional from ... import PreTrainedTokenizer, TensorType, is_torch_available from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfigWithPast, PatchingSpec from ...utils import logging _lowerCamelCase : str ...
336
1
import argparse import json from pathlib import Path import requests import torch from huggingface_hub import hf_hub_download from PIL import Image from transformers import BeitConfig, BeitForImageClassification, BeitForMaskedImageModeling, BeitImageProcessor from transformers.image_utils import PILImageResampling ...
336
# limitations under the License. # NOTE: This file is deprecated and will be removed in a future version. # It only exists so that temporarely `from diffusers.pipelines import DiffusionPipeline` works from .pipelines import DiffusionPipeline, ImagePipelineOutput # noqa: F401 from .utils import deprecate deprecat...
336
1
from __future__ import annotations import string from itertools import cycle, product from pathlib import Path _lowerCamelCase : str = ( string.ascii_letters + string.digits + string.punctuation + string.whitespace ) _lowerCamelCase : list[int] = [ord(letter) for letter in st...
336
from dataclasses import dataclass from typing import Optional, Tuple, Union import flax import jax.numpy as jnp from jax import random from ..configuration_utils import ConfigMixin, register_to_config from ..utils import BaseOutput from .scheduling_utils_flax import FlaxSchedulerMixin @flax.struct.dataclass cl...
336
1
import inspect import unittest from transformers import MobileNetVaConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import ConfigTester from ...tes...
336
import os import sys from contextlib import contextmanager # Windows only if os.name == "nt": import ctypes import msvcrt # noqa class __UpperCAmelCase ( ctypes.Structure ): # _fields is a specific attr expected by ctypes UpperCamelCase = [("""size...
336
1
import pickle import numpy as np from matplotlib import pyplot as plt class __UpperCAmelCase : def __init__( self : int, __A : Dict, __A : Any, __A : str, __A : int, __A : Tuple, __A : Union[str, Any]=0.2, __A ...
336
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_torch_available, ) _lowerCamelCase : Tuple = { "configuration_encodec": [ "ENCODEC_PRETRAINED_CONFIG_ARCHIVE_MAP", "EncodecConfig", ], "feature_extractio...
336
1
import argparse import json import os import fairseq import torch from fairseq.data import Dictionary from transformers import ( WavaVecaConfig, WavaVecaCTCTokenizer, WavaVecaFeatureExtractor, WavaVecaForCTC, WavaVecaForPreTraining, WavaVecaProcessor, logging, ) from transformers.models....
336
from __future__ import annotations def a__ ( UpperCAmelCase : int , UpperCAmelCase : int ) -> list[str]: if partitions <= 0: raise ValueError('''partitions must be a positive number!''' ) if partitions > number_of_bytes: raise ValueError('''partitions can not > number...
336
1
import time from dataclasses import dataclass from multiprocessing import Pool from unittest import TestCase from unittest.mock import patch import multiprocess import numpy as np import pytest from datasets.utils.py_utils import ( NestedDataStructure, asdict, iflatmap_unordered, map_nested, tem...
336
# Copyright 2022 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applica...
336
1
from abc import ABC, abstractmethod from argparse import ArgumentParser class __UpperCAmelCase ( lowerCamelCase__ ): @staticmethod @abstractmethod def __magic_name__ ( __A : ArgumentParser ): raise NotImplementedError() @abstractmethod ...
336
import argparse import collections import json from pathlib import Path import requests import torch import yaml from huggingface_hub import hf_hub_download from PIL import Image from transformers import ( MobileViTImageProcessor, MobileViTVaConfig, MobileViTVaForImageClassification, MobileViTVaForS...
336
1
import os import string import sys _lowerCamelCase : List[str] = 1 << 8 _lowerCamelCase : Dict = { "tab": ord("\t"), "newline": ord("\r"), "esc": 2_7, "up": 6_5 + ARROW_KEY_FLAG, "down": 6_6 + ARROW_KEY_FLAG, "right": 6_7 + ARROW_KEY_FLAG, "left": 6_8...
336
import inspect import tempfile from collections import OrderedDict, UserDict from collections.abc import MutableMapping from contextlib import ExitStack, contextmanager from dataclasses import fields from enum import Enum from typing import Any, ContextManager, List, Tuple import numpy as np from .import_utils impo...
336
1
import argparse import collections import json from pathlib import Path import requests import torch import yaml from huggingface_hub import hf_hub_download from PIL import Image from transformers import ( MobileViTImageProcessor, MobileViTVaConfig, MobileViTVaForImageClassification, MobileViTVaForS...
336
import os import unittest from transformers import LayoutLMTokenizer, LayoutLMTokenizerFast from transformers.models.layoutlm.tokenization_layoutlm import VOCAB_FILES_NAMES from transformers.testing_utils import require_tokenizers from ...test_tokenization_common import TokenizerTesterMixin @require_tokenizers...
336
1
import warnings from ...utils import logging from .image_processing_perceiver import PerceiverImageProcessor _lowerCamelCase : Union[str, Any] = logging.get_logger(__name__) class __UpperCAmelCase ( lowerCamelCase__ ): def __init__( self : Optional[i...
336
from __future__ import annotations import copy import inspect import json import math import os import tempfile import unittest from importlib import import_module import numpy as np from transformers import ViTMAEConfig from transformers.file_utils import cached_property, is_tf_available, is_vision_available from...
336
1
def a__ ( UpperCAmelCase : list[list[float]] ) -> list[list[float]]: UpperCAmelCase : list[list[float]] = [] for data in source_data: for i, el in enumerate(UpperCAmelCase ): if len(UpperCAmelCase ) < i + 1: data_lists.append([] ) data_lists[i].ap...
336
def a__ ( UpperCAmelCase : int ) -> int: UpperCAmelCase : list[list[int]] = [[0 for _ in range(UpperCAmelCase )] for _ in range(m + 1 )] for i in range(m + 1 ): UpperCAmelCase : Optional[Any] = 1 for n in range(m + 1 ): for k in range(1 , Upp...
336
1
from dataclasses import dataclass, field from typing import Optional from transformers import AutoConfig, AutoImageProcessor, AutoTokenizer, FlaxVisionEncoderDecoderModel, HfArgumentParser @dataclass class __UpperCAmelCase : UpperCamelCase = field( metadata={"""help""": """The outp...
336
from __future__ import annotations def a__ ( UpperCAmelCase : list[list[int]] ) -> bool: UpperCAmelCase : Union[str, Any] = len(UpperCAmelCase ) # We need to create solution object to save path. UpperCAmelCase : int = [[0 for _ in range(UpperCAmelCase )] fo...
336
1
_lowerCamelCase : Optional[int] = [ "DownloadConfig", "DownloadManager", "DownloadMode", "StreamingDownloadManager", ] from .download_config import DownloadConfig from .download_manager import DownloadManager, DownloadMode from .streaming_download_manager import StreamingDownloadMan...
336
import inspect import unittest from transformers import ViTHybridConfig from transformers.testing_utils import require_accelerate, require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import Config...
336
1
import unittest from transformers import MODEL_FOR_ZERO_SHOT_OBJECT_DETECTION_MAPPING, is_vision_available, pipeline from transformers.testing_utils import ( is_pipeline_test, nested_simplify, require_tf, require_torch, require_vision, slow, ) from .test_pipelines_common import ANY if is_v...
336
from __future__ import annotations from itertools import permutations from random import randint from timeit import repeat def a__ ( ) -> tuple[list[int], int]: UpperCAmelCase : str = [randint(-1_000 , 1_000 ) for i in range(10 )] UpperCAmelCase : Any = randint(-5_...
336
1
import json import os from functools import lru_cache from typing import List, Optional, Tuple import regex as re from ...tokenization_utils import AddedToken, PreTrainedTokenizer from ...utils import logging _lowerCamelCase : Tuple = logging.get_logger(__name__) _lowerCamelCase : List[st...
336
from queue import Queue from typing import TYPE_CHECKING, Optional if TYPE_CHECKING: from ..models.auto import AutoTokenizer class __UpperCAmelCase : def __magic_name__ ( self : int, __A : Dict ): raise NotImplementedError() def ...
336
1
from typing import Optional, Tuple, Union import flax import flax.linen as nn import jax import jax.numpy as jnp from flax.core.frozen_dict import FrozenDict from ..configuration_utils import ConfigMixin, flax_register_to_config from ..utils import BaseOutput from .embeddings_flax import FlaxTimestepEmbedding, Flax...
336
import numpy # List of input, output pairs _lowerCamelCase : Dict = ( ((5, 2, 3), 1_5), ((6, 5, 9), 2_5), ((1_1, 1_2, 1_3), 4_1), ((1, 1, 1), 8), ((1_1, 1_2, 1_3), 4_1), ) _lowerCamelCase : str = (((5_1_5, 2_2, 1_3), 5_5_5), ((6_1, 3_5, 4_9), 1_5_0)) _lowerCame...
336
1
from ...configuration_utils import PretrainedConfig from ...utils import logging _lowerCamelCase : Tuple = logging.get_logger(__name__) _lowerCamelCase : Optional[Any] = { "sayakpaul/vit-msn-base": "https://huggingface.co/sayakpaul/vit-msn-base/resolve/main/config.json", ...
336
def a__ ( UpperCAmelCase : List[Any] , UpperCAmelCase : Optional[int] ) -> Optional[Any]: UpperCAmelCase : List[str] = 0 UpperCAmelCase : List[Any] = len(UpperCAmelCase ) - 1 while left <= right: # avoid divided by 0 during interpolation if...
336
1
def a__ ( UpperCAmelCase : int = 4_000_000 ) -> int: UpperCAmelCase : int = [0, 1] UpperCAmelCase : int = 0 while fib[i] <= n: fib.append(fib[i] + fib[i + 1] ) if fib[i + 2] > n: break i += 1 UpperCAmelCase : str = 0 for j in...
336
import argparse import json from pathlib import Path import requests import torch from huggingface_hub import hf_hub_download from PIL import Image from transformers import BeitConfig, BeitForImageClassification, BeitForMaskedImageModeling, BeitImageProcessor from transformers.image_utils import PILImageResampling ...
336
1
import warnings from ...utils import logging from .image_processing_deit import DeiTImageProcessor _lowerCamelCase : int = logging.get_logger(__name__) class __UpperCAmelCase ( lowerCamelCase__ ): def __init__( self : Dict, *__A : Any, **...
336
import unittest import numpy as np from transformers import RobertaConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask if is_flax_available(): from transformers.mod...
336
1
def a__ ( UpperCAmelCase : int , UpperCAmelCase : int , UpperCAmelCase : int ) -> int: if exponent == 1: return base if exponent % 2 == 0: UpperCAmelCase : Union[str, Any] = _modexpt(UpperCAmelCase , exponent // 2 , UpperCAmelCase ) % mod...
336
import os from typing import List, Optional, Union from ...tokenization_utils import PreTrainedTokenizer from ...tokenization_utils_base import AddedToken from ...utils import logging _lowerCamelCase : Optional[Any] = logging.get_logger(__name__) _lowerCamelCase : Dict = {"voca...
336
1
from typing import TYPE_CHECKING # rely on isort to merge the imports from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available _lowerCamelCase : Dict = { "configuration_informer": [ "INFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "InformerConfig", ], ...
336
import inspect import unittest from transformers import MobileNetVaConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import ConfigTester from ...tes...
336
1
def a__ ( UpperCAmelCase : int ) -> int: UpperCAmelCase : list[list[int]] = [[0 for _ in range(UpperCAmelCase )] for _ in range(m + 1 )] for i in range(m + 1 ): UpperCAmelCase : Optional[Any] = 1 for n in range(m + 1 ): for k in range(1 , Upp...
336
from collections import OrderedDict from typing import Any, List, Mapping, Optional from ... import PreTrainedTokenizer, TensorType, is_torch_available from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfigWithPast, PatchingSpec from ...utils import logging _lowerCamelCase : str ...
336
1
import gc import unittest from diffusers import FlaxStableDiffusionInpaintPipeline from diffusers.utils import is_flax_available, load_image, slow from diffusers.utils.testing_utils import require_flax if is_flax_available(): import jax import jax.numpy as jnp from flax.jax_utils import replicate ...
336
# limitations under the License. # NOTE: This file is deprecated and will be removed in a future version. # It only exists so that temporarely `from diffusers.pipelines import DiffusionPipeline` works from .pipelines import DiffusionPipeline, ImagePipelineOutput # noqa: F401 from .utils import deprecate deprecat...
336
1
_lowerCamelCase : Any = [sum(int(c, 1_0) ** 2 for c in i.__str__()) for i in range(1_0_0_0_0_0)] def a__ ( UpperCAmelCase : int ) -> int: UpperCAmelCase : int = 0 while number: # Increased Speed Slightly by checking every 5 digits together. sum_of_di...
336
from dataclasses import dataclass from typing import Optional, Tuple, Union import flax import jax.numpy as jnp from jax import random from ..configuration_utils import ConfigMixin, register_to_config from ..utils import BaseOutput from .scheduling_utils_flax import FlaxSchedulerMixin @flax.struct.dataclass cl...
336
1
import argparse from collections import defaultdict def a__ ( UpperCAmelCase : int , UpperCAmelCase : Union[str, Any] , UpperCAmelCase : Optional[int] , UpperCAmelCase : List[Any] , UpperCAmelCase : Dict ) -> str: UpperCAmelCase : Any ...
336
import os import sys from contextlib import contextmanager # Windows only if os.name == "nt": import ctypes import msvcrt # noqa class __UpperCAmelCase ( ctypes.Structure ): # _fields is a specific attr expected by ctypes UpperCamelCase = [("""size...
336
1
import argparse import json import pickle from pathlib import Path import requests import torch from huggingface_hub import hf_hub_download from PIL import Image from transformers import MaskFormerConfig, MaskFormerForInstanceSegmentation, MaskFormerImageProcessor, SwinConfig from transformers.utils import logging ...
336
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_torch_available, ) _lowerCamelCase : Tuple = { "configuration_encodec": [ "ENCODEC_PRETRAINED_CONFIG_ARCHIVE_MAP", "EncodecConfig", ], "feature_extractio...
336
1
import os from typing import List, Optional, Union from ...image_processing_utils import BatchFeature from ...image_utils import ImageInput from ...processing_utils import ProcessorMixin from ...tokenization_utils_base import PaddingStrategy, PreTokenizedInput, TextInput, TruncationStrategy from ...utils import Tens...
336
from __future__ import annotations def a__ ( UpperCAmelCase : int , UpperCAmelCase : int ) -> list[str]: if partitions <= 0: raise ValueError('''partitions must be a positive number!''' ) if partitions > number_of_bytes: raise ValueError('''partitions can not > number...
336
1
def a__ ( UpperCAmelCase : int ) -> str: if number > 0: raise ValueError('''input must be a negative integer''' ) UpperCAmelCase : int = len(bin(UpperCAmelCase )[3:] ) UpperCAmelCase : List[str] = bin(abs(UpperCAmelCase ) - (1 << binary_number_length...
336
# Copyright 2022 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applica...
336
1
import argparse import json import os import fairseq import torch from fairseq.data import Dictionary from transformers import ( UniSpeechConfig, UniSpeechForCTC, UniSpeechForPreTraining, WavaVecaFeatureExtractor, WavaVecaPhonemeCTCTokenizer, WavaVecaProcessor, logging, ) logging.set_v...
336
import argparse import collections import json from pathlib import Path import requests import torch import yaml from huggingface_hub import hf_hub_download from PIL import Image from transformers import ( MobileViTImageProcessor, MobileViTVaConfig, MobileViTVaForImageClassification, MobileViTVaForS...
336
1
def a__ ( UpperCAmelCase : Any , UpperCAmelCase : List[str] , UpperCAmelCase : List[str] ) -> Any: if n == 0: return 1 elif n % 2 == 1: return (binary_exponentiation(UpperCAmelCase , n - 1 , UpperCAmelCase ) * a) % mod else: UpperCAmelCase ...
336
import inspect import tempfile from collections import OrderedDict, UserDict from collections.abc import MutableMapping from contextlib import ExitStack, contextmanager from dataclasses import fields from enum import Enum from typing import Any, ContextManager, List, Tuple import numpy as np from .import_utils impo...
336
1
import inspect import unittest from transformers import SegformerConfig, is_torch_available, is_vision_available from transformers.models.auto import get_values from transformers.testing_utils import require_torch, slow, torch_device from ...test_configuration_common import ConfigTester from ...test_modeling_common...
336
import os import unittest from transformers import LayoutLMTokenizer, LayoutLMTokenizerFast from transformers.models.layoutlm.tokenization_layoutlm import VOCAB_FILES_NAMES from transformers.testing_utils import require_tokenizers from ...test_tokenization_common import TokenizerTesterMixin @require_tokenizers...
336
1
import logging import os import sys from dataclasses import dataclass, field from typing import Optional from seqaseq_trainer import SeqaSeqTrainer from seqaseq_training_args import SeqaSeqTrainingArguments import transformers from transformers import ( AutoConfig, AutoModelForSeqaSeqLM, AutoTokenizer, ...
336
from __future__ import annotations import copy import inspect import json import math import os import tempfile import unittest from importlib import import_module import numpy as np from transformers import ViTMAEConfig from transformers.file_utils import cached_property, is_tf_available, is_vision_available from...
336
1
from __future__ import annotations def a__ ( UpperCAmelCase : list[list[int]] ) -> bool: UpperCAmelCase : Union[str, Any] = len(UpperCAmelCase ) # We need to create solution object to save path. UpperCAmelCase : int = [[0 for _ in range(UpperCAmelCase )] fo...
336
def a__ ( UpperCAmelCase : int ) -> int: UpperCAmelCase : list[list[int]] = [[0 for _ in range(UpperCAmelCase )] for _ in range(m + 1 )] for i in range(m + 1 ): UpperCAmelCase : Optional[Any] = 1 for n in range(m + 1 ): for k in range(1 , Upp...
336
1
def a__ ( UpperCAmelCase : int ) -> list[int]: if length <= 0 or not isinstance(UpperCAmelCase , UpperCAmelCase ): raise ValueError('''Length must be a positive integer.''' ) return [n * (2 * n - 1) for n in range(UpperCAmelCase )] if __name__ == "__main__": print(hexa...
336
from __future__ import annotations def a__ ( UpperCAmelCase : list[list[int]] ) -> bool: UpperCAmelCase : Union[str, Any] = len(UpperCAmelCase ) # We need to create solution object to save path. UpperCAmelCase : int = [[0 for _ in range(UpperCAmelCase )] fo...
336
1
from __future__ import annotations import os from typing import Any import requests _lowerCamelCase : Optional[int] = "https://api.github.com" # https://docs.github.com/en/free-pro-team@latest/rest/reference/users#get-the-authenticated-user _lowerCamelCase : str = BASE_URL + "/...
336
import inspect import unittest from transformers import ViTHybridConfig from transformers.testing_utils import require_accelerate, require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import Config...
336
1
from __future__ import annotations def a__ ( UpperCAmelCase : list[int] , UpperCAmelCase : int ) -> list[int]: UpperCAmelCase : Any = 0 UpperCAmelCase : Tuple = len(UpperCAmelCase ) - 1 while i < j: if nums[i] + nums[j] == target: return...
336
from __future__ import annotations from itertools import permutations from random import randint from timeit import repeat def a__ ( ) -> tuple[list[int], int]: UpperCAmelCase : str = [randint(-1_000 , 1_000 ) for i in range(10 )] UpperCAmelCase : Any = randint(-5_...
336
1
import argparse import json from pathlib import Path import requests import torch from huggingface_hub import cached_download, hf_hub_url from PIL import Image from transformers import DPTConfig, DPTForDepthEstimation, DPTForSemanticSegmentation, DPTImageProcessor from transformers.utils import logging logging.se...
336
from queue import Queue from typing import TYPE_CHECKING, Optional if TYPE_CHECKING: from ..models.auto import AutoTokenizer class __UpperCAmelCase : def __magic_name__ ( self : int, __A : Dict ): raise NotImplementedError() def ...
336
1
def a__ ( UpperCAmelCase : int ) -> int: UpperCAmelCase : str = abs(UpperCAmelCase ) UpperCAmelCase : Tuple = 0 while n > 0: res += n % 10 n //= 10 return res def a__ ( UpperCAmelCase : int ) -> int: UpperCAmelCase : ...
336
import numpy # List of input, output pairs _lowerCamelCase : Dict = ( ((5, 2, 3), 1_5), ((6, 5, 9), 2_5), ((1_1, 1_2, 1_3), 4_1), ((1, 1, 1), 8), ((1_1, 1_2, 1_3), 4_1), ) _lowerCamelCase : str = (((5_1_5, 2_2, 1_3), 5_5_5), ((6_1, 3_5, 4_9), 1_5_0)) _lowerCame...
336
1
import tempfile import unittest import numpy as np from diffusers import ( DDIMScheduler, DPMSolverMultistepScheduler, EulerAncestralDiscreteScheduler, EulerDiscreteScheduler, LMSDiscreteScheduler, OnnxStableDiffusionPipeline, PNDMScheduler, ) from diffusers.utils.testing_utils import is...
336
def a__ ( UpperCAmelCase : List[Any] , UpperCAmelCase : Optional[int] ) -> Optional[Any]: UpperCAmelCase : List[str] = 0 UpperCAmelCase : List[Any] = len(UpperCAmelCase ) - 1 while left <= right: # avoid divided by 0 during interpolation if...
336
1
from __future__ import annotations import inspect import unittest from typing import List, Tuple from transformers import RegNetConfig from transformers.testing_utils import require_tf, require_vision, slow from transformers.utils import cached_property, is_tf_available, is_vision_available from ...test_configurat...
336
import argparse import json from pathlib import Path import requests import torch from huggingface_hub import hf_hub_download from PIL import Image from transformers import BeitConfig, BeitForImageClassification, BeitForMaskedImageModeling, BeitImageProcessor from transformers.image_utils import PILImageResampling ...
336
1
from dataclasses import dataclass, field from typing import Tuple from ..utils import cached_property, is_tf_available, logging, requires_backends from .benchmark_args_utils import BenchmarkArguments if is_tf_available(): import tensorflow as tf _lowerCamelCase : int = logging.get_logger(_...
336
import unittest import numpy as np from transformers import RobertaConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask if is_flax_available(): from transformers.mod...
336
1
def a__ ( UpperCAmelCase : int ) -> bool: return number & 1 == 0 if __name__ == "__main__": import doctest doctest.testmod()
336
import os from typing import List, Optional, Union from ...tokenization_utils import PreTrainedTokenizer from ...tokenization_utils_base import AddedToken from ...utils import logging _lowerCamelCase : Optional[Any] = logging.get_logger(__name__) _lowerCamelCase : Dict = {"voca...
336
1
from __future__ import annotations import copy import inspect import json import math import os import tempfile import unittest from importlib import import_module import numpy as np from transformers import ViTMAEConfig from transformers.file_utils import cached_property, is_tf_available, is_vision_available from...
336
import inspect import unittest from transformers import MobileNetVaConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import ConfigTester from ...tes...
336
1
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_tf_available, is_torch_available, is_vision_available, ) _lowerCamelCase : List[Any] = { "configuration_convnext": ["CONVNEXT_PRETRAINED_CONFIG_ARCHIVE_MAP", "ConvNextCo...
336
from collections import OrderedDict from typing import Any, List, Mapping, Optional from ... import PreTrainedTokenizer, TensorType, is_torch_available from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfigWithPast, PatchingSpec from ...utils import logging _lowerCamelCase : str ...
336
1
def a__ ( UpperCAmelCase : int ) -> int: if not isinstance(UpperCAmelCase , UpperCAmelCase ) or number < 0: raise ValueError('''Input must be a non-negative integer''' ) UpperCAmelCase : Dict = 0 while number: # This way we arrive at next set bit (next 1) ins...
336
# limitations under the License. # NOTE: This file is deprecated and will be removed in a future version. # It only exists so that temporarely `from diffusers.pipelines import DiffusionPipeline` works from .pipelines import DiffusionPipeline, ImagePipelineOutput # noqa: F401 from .utils import deprecate deprecat...
336
1
def a__ ( UpperCAmelCase : List[Any] , UpperCAmelCase : Optional[int] ) -> Optional[Any]: UpperCAmelCase : List[str] = 0 UpperCAmelCase : List[Any] = len(UpperCAmelCase ) - 1 while left <= right: # avoid divided by 0 during interpolation if...
336
from dataclasses import dataclass from typing import Optional, Tuple, Union import flax import jax.numpy as jnp from jax import random from ..configuration_utils import ConfigMixin, register_to_config from ..utils import BaseOutput from .scheduling_utils_flax import FlaxSchedulerMixin @flax.struct.dataclass cl...
336
1
from __future__ import annotations from collections import deque from collections.abc import Iterator from dataclasses import dataclass @dataclass class __UpperCAmelCase : UpperCamelCase = 42 UpperCamelCase = 42 class __UpperCAmelCase : def __init__( ...
336
import os import sys from contextlib import contextmanager # Windows only if os.name == "nt": import ctypes import msvcrt # noqa class __UpperCAmelCase ( ctypes.Structure ): # _fields is a specific attr expected by ctypes UpperCamelCase = [("""size...
336
1
from collections import UserDict from typing import List, Union from ..utils import ( add_end_docstrings, is_tf_available, is_torch_available, is_vision_available, logging, requires_backends, ) from .base import PIPELINE_INIT_ARGS, Pipeline if is_vision_available(): from PIL import Ima...
336
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_torch_available, ) _lowerCamelCase : Tuple = { "configuration_encodec": [ "ENCODEC_PRETRAINED_CONFIG_ARCHIVE_MAP", "EncodecConfig", ], "feature_extractio...
336
1
from __future__ import annotations from typing import Any def a__ ( UpperCAmelCase : list ) -> int: if not postfix_notation: return 0 UpperCAmelCase : Tuple = {'''+''', '''-''', '''*''', '''/'''} UpperCAmelCase : list[Any] = [] for token in postfix_not...
336
from __future__ import annotations def a__ ( UpperCAmelCase : int , UpperCAmelCase : int ) -> list[str]: if partitions <= 0: raise ValueError('''partitions must be a positive number!''' ) if partitions > number_of_bytes: raise ValueError('''partitions can not > number...
336
1
import os # Precomputes a list of the 100 first triangular numbers _lowerCamelCase : Any = [int(0.5 * n * (n + 1)) for n in range(1, 1_0_1)] def a__ ( ) -> Any: UpperCAmelCase : List[Any] = os.path.dirname(os.path.realpath(UpperCAmelCase ) ) UpperCAmelCase ...
336
# Copyright 2022 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applica...
336
1
import unittest from typing import Tuple import torch from diffusers.utils import floats_tensor, randn_tensor, torch_all_close, torch_device from diffusers.utils.testing_utils import require_torch @require_torch class __UpperCAmelCase : @property def __magic_name__ ( self ...
336
import argparse import collections import json from pathlib import Path import requests import torch import yaml from huggingface_hub import hf_hub_download from PIL import Image from transformers import ( MobileViTImageProcessor, MobileViTVaConfig, MobileViTVaForImageClassification, MobileViTVaForS...
336
1
from .configuration_bert_masked import MaskedBertConfig from .modeling_bert_masked import ( MaskedBertForMultipleChoice, MaskedBertForQuestionAnswering, MaskedBertForSequenceClassification, MaskedBertForTokenClassification, MaskedBertModel, ) from .modules import *
336
import inspect import tempfile from collections import OrderedDict, UserDict from collections.abc import MutableMapping from contextlib import ExitStack, contextmanager from dataclasses import fields from enum import Enum from typing import Any, ContextManager, List, Tuple import numpy as np from .import_utils impo...
336
1
import shutil import tempfile import unittest from transformers import ( SPIECE_UNDERLINE, AddedToken, BatchEncoding, NllbTokenizer, NllbTokenizerFast, is_torch_available, ) from transformers.testing_utils import ( get_tests_dir, nested_simplify, require_sentencepiece, require...
336
import os import unittest from transformers import LayoutLMTokenizer, LayoutLMTokenizerFast from transformers.models.layoutlm.tokenization_layoutlm import VOCAB_FILES_NAMES from transformers.testing_utils import require_tokenizers from ...test_tokenization_common import TokenizerTesterMixin @require_tokenizers...
336
1
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_torch_available, ) _lowerCamelCase : Tuple = { "configuration_encodec": [ "ENCODEC_PRETRAINED_CONFIG_ARCHIVE_MAP", "EncodecConfig", ], "feature_extractio...
336
from __future__ import annotations import copy import inspect import json import math import os import tempfile import unittest from importlib import import_module import numpy as np from transformers import ViTMAEConfig from transformers.file_utils import cached_property, is_tf_available, is_vision_available from...
336
1
import csv import tweepy # Twitter API credentials _lowerCamelCase : Union[str, Any] = "" _lowerCamelCase : Tuple = "" _lowerCamelCase : Dict = "" _lowerCamelCase : Tuple = "" def a__ ( UpperCAmelCase : str ) -> None: ...
336
def a__ ( UpperCAmelCase : int ) -> int: UpperCAmelCase : list[list[int]] = [[0 for _ in range(UpperCAmelCase )] for _ in range(m + 1 )] for i in range(m + 1 ): UpperCAmelCase : Optional[Any] = 1 for n in range(m + 1 ): for k in range(1 , Upp...
336
1
import copy from typing import Dict, List, Optional from ...configuration_utils import PretrainedConfig from ...utils import logging from ..auto import CONFIG_MAPPING _lowerCamelCase : Optional[int] = { "facebook/mask2former-swin-small-coco-instance": ( "https://huggingface.co/faceboo...
336
from __future__ import annotations def a__ ( UpperCAmelCase : list[list[int]] ) -> bool: UpperCAmelCase : Union[str, Any] = len(UpperCAmelCase ) # We need to create solution object to save path. UpperCAmelCase : int = [[0 for _ in range(UpperCAmelCase )] fo...
336
1
import argparse import dataclasses import json import logging import os import shutil from typing import List, Optional import datasets from accelerate import Accelerator from datasets import load_dataset from finetuning import finetune from tqdm.auto import tqdm import transformers from transformers import AutoCon...
336
import inspect import unittest from transformers import ViTHybridConfig from transformers.testing_utils import require_accelerate, require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import Config...
336
1
from __future__ import annotations from math import pow, sqrt def a__ ( UpperCAmelCase : float , UpperCAmelCase : float , UpperCAmelCase : float ) -> dict[str, float]: if (resistance, reactance, impedance).count(0 ) != 1: raise ValueError('''One and only one arg...
336
from __future__ import annotations from itertools import permutations from random import randint from timeit import repeat def a__ ( ) -> tuple[list[int], int]: UpperCAmelCase : str = [randint(-1_000 , 1_000 ) for i in range(10 )] UpperCAmelCase : Any = randint(-5_...
336
1
from ...configuration_utils import PretrainedConfig from ...utils import logging _lowerCamelCase : List[str] = logging.get_logger(__name__) _lowerCamelCase : Optional[int] = { "naver-clova-ix/donut-base": "https://huggingface.co/naver-clova-ix/donut-base/resolve/main/config....
336
from queue import Queue from typing import TYPE_CHECKING, Optional if TYPE_CHECKING: from ..models.auto import AutoTokenizer class __UpperCAmelCase : def __magic_name__ ( self : int, __A : Dict ): raise NotImplementedError() def ...
336
1
from __future__ import annotations def a__ ( UpperCAmelCase : list[int] , UpperCAmelCase : list[int] , UpperCAmelCase : int ) -> tuple[float, list[float]]: UpperCAmelCase : str = list(range(len(UpperCAmelCase ) ) ) UpperCAmelCase : List...
336
import numpy # List of input, output pairs _lowerCamelCase : Dict = ( ((5, 2, 3), 1_5), ((6, 5, 9), 2_5), ((1_1, 1_2, 1_3), 4_1), ((1, 1, 1), 8), ((1_1, 1_2, 1_3), 4_1), ) _lowerCamelCase : str = (((5_1_5, 2_2, 1_3), 5_5_5), ((6_1, 3_5, 4_9), 1_5_0)) _lowerCame...
336
1
import collections import os import re from pathlib import Path _lowerCamelCase : int = "src/transformers" # Matches is_xxx_available() _lowerCamelCase : Union[str, Any] = re.compile(R"is\_([a-z_]*)_available()") # Catches a one-line _import_struct = {xxx} _lowerCamelCase : ...
336
def a__ ( UpperCAmelCase : List[Any] , UpperCAmelCase : Optional[int] ) -> Optional[Any]: UpperCAmelCase : List[str] = 0 UpperCAmelCase : List[Any] = len(UpperCAmelCase ) - 1 while left <= right: # avoid divided by 0 during interpolation if...
336
1
from queue import PriorityQueue from typing import Any import numpy as np def a__ ( UpperCAmelCase : dict , UpperCAmelCase : str , UpperCAmelCase : set , UpperCAmelCase : set , UpperCAmelCase : dict , UpperCAmelCase : dict , UpperCAmelCase ...
336
import argparse import json from pathlib import Path import requests import torch from huggingface_hub import hf_hub_download from PIL import Image from transformers import BeitConfig, BeitForImageClassification, BeitForMaskedImageModeling, BeitImageProcessor from transformers.image_utils import PILImageResampling ...
336
1
import warnings from ...utils import logging from .image_processing_clip import CLIPImageProcessor _lowerCamelCase : Optional[Any] = logging.get_logger(__name__) class __UpperCAmelCase ( lowerCamelCase__ ): def __init__( self : Any, *__A : ...
336
import unittest import numpy as np from transformers import RobertaConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask if is_flax_available(): from transformers.mod...
336
1
import logging import os import sys import warnings from dataclasses import dataclass, field from random import randint from typing import Optional import datasets import evaluate import numpy as np from datasets import DatasetDict, load_dataset import transformers from transformers import ( AutoConfig, Aut...
336
import os from typing import List, Optional, Union from ...tokenization_utils import PreTrainedTokenizer from ...tokenization_utils_base import AddedToken from ...utils import logging _lowerCamelCase : Optional[Any] = logging.get_logger(__name__) _lowerCamelCase : Dict = {"voca...
336
1
from collections import OrderedDict from typing import TYPE_CHECKING, Any, List, Mapping, Optional from packaging import version if TYPE_CHECKING: from ... import PreTrainedTokenizer, TensorType from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfigWithPast, PatchingSpec from ...u...
336
import inspect import unittest from transformers import MobileNetVaConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import ConfigTester from ...tes...
336
1
import gc import unittest import numpy as np import torch from diffusers import AutoencoderKL, DDIMScheduler, DiTPipeline, DPMSolverMultistepScheduler, TransformeraDModel from diffusers.utils import is_xformers_available, load_numpy, slow, torch_device from diffusers.utils.testing_utils import enable_full_determini...
336
from collections import OrderedDict from typing import Any, List, Mapping, Optional from ... import PreTrainedTokenizer, TensorType, is_torch_available from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfigWithPast, PatchingSpec from ...utils import logging _lowerCamelCase : str ...
336
1
import json import os import subprocess import unittest from ast import literal_eval import pytest from parameterized import parameterized, parameterized_class from . import is_sagemaker_available if is_sagemaker_available(): from sagemaker import Session, TrainingJobAnalytics from sagemaker.huggingface...
336
# limitations under the License. # NOTE: This file is deprecated and will be removed in a future version. # It only exists so that temporarely `from diffusers.pipelines import DiffusionPipeline` works from .pipelines import DiffusionPipeline, ImagePipelineOutput # noqa: F401 from .utils import deprecate deprecat...
336
1
import unittest import numpy as np from transformers.testing_utils import require_torch, require_vision from transformers.utils import is_torch_available, is_vision_available from ...test_image_processing_common import ImageProcessingSavingTestMixin, prepare_image_inputs if is_torch_available(): import torc...
336
from dataclasses import dataclass from typing import Optional, Tuple, Union import flax import jax.numpy as jnp from jax import random from ..configuration_utils import ConfigMixin, register_to_config from ..utils import BaseOutput from .scheduling_utils_flax import FlaxSchedulerMixin @flax.struct.dataclass cl...
336
1
import os import tempfile import unittest from pathlib import Path from transformers import AutoConfig, is_tf_available from transformers.testing_utils import require_tf if is_tf_available(): import tensorflow as tf from transformers import TensorFlowBenchmark, TensorFlowBenchmarkArguments @require...
336
import os import sys from contextlib import contextmanager # Windows only if os.name == "nt": import ctypes import msvcrt # noqa class __UpperCAmelCase ( ctypes.Structure ): # _fields is a specific attr expected by ctypes UpperCamelCase = [("""size...
336
1
import time from contextlib import contextmanager from pathlib import Path import pytest import requests from huggingface_hub.hf_api import HfApi, HfFolder _lowerCamelCase : List[Any] = "__DUMMY_TRANSFORMERS_USER__" _lowerCamelCase : int = "Dummy User" _lowerCamelCase : int...
336
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_torch_available, ) _lowerCamelCase : Tuple = { "configuration_encodec": [ "ENCODEC_PRETRAINED_CONFIG_ARCHIVE_MAP", "EncodecConfig", ], "feature_extractio...
336
1
from ..utils import DummyObject, requires_backends class __UpperCAmelCase ( metaclass=lowerCamelCase__ ): UpperCamelCase = ["""flax"""] def __init__( self : Optional[Any], *__A : List[Any], **__A : Union[str, Any] ): requires...
336
from __future__ import annotations def a__ ( UpperCAmelCase : int , UpperCAmelCase : int ) -> list[str]: if partitions <= 0: raise ValueError('''partitions must be a positive number!''' ) if partitions > number_of_bytes: raise ValueError('''partitions can not > number...
336
1
import json from typing import Iterator, List, Union from tokenizers import AddedToken, Regex, Tokenizer, decoders, normalizers, pre_tokenizers, trainers from tokenizers.implementations.base_tokenizer import BaseTokenizer from tokenizers.models import Unigram from tokenizers.processors import TemplateProcessing ...
336
# Copyright 2022 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applica...
336
1
import jax.numpy as jnp from ...utils import logging from ..ta.modeling_flax_ta import FlaxTaEncoderModel, FlaxTaForConditionalGeneration, FlaxTaModel from .configuration_mta import MTaConfig _lowerCamelCase : Optional[int] = logging.get_logger(__name__) _lowerCamelCase : Optional[Any] ...
336
import argparse import collections import json from pathlib import Path import requests import torch import yaml from huggingface_hub import hf_hub_download from PIL import Image from transformers import ( MobileViTImageProcessor, MobileViTVaConfig, MobileViTVaForImageClassification, MobileViTVaForS...
336
1
import re from filelock import FileLock try: import nltk _lowerCamelCase : Union[str, Any] = True except (ImportError, ModuleNotFoundError): _lowerCamelCase : Dict = False if NLTK_AVAILABLE: with FileLock(".lock") as lock: nltk.download("punkt", q...
336
import inspect import tempfile from collections import OrderedDict, UserDict from collections.abc import MutableMapping from contextlib import ExitStack, contextmanager from dataclasses import fields from enum import Enum from typing import Any, ContextManager, List, Tuple import numpy as np from .import_utils impo...
336
1
import unittest from transformers import MODEL_FOR_VISUAL_QUESTION_ANSWERING_MAPPING, is_vision_available from transformers.pipelines import pipeline from transformers.testing_utils import ( is_pipeline_test, nested_simplify, require_tf, require_torch, require_vision, slow, ) from .test_pipe...
336
import os import unittest from transformers import LayoutLMTokenizer, LayoutLMTokenizerFast from transformers.models.layoutlm.tokenization_layoutlm import VOCAB_FILES_NAMES from transformers.testing_utils import require_tokenizers from ...test_tokenization_common import TokenizerTesterMixin @require_tokenizers...
336
1
import logging import os from dataclasses import dataclass from typing import List, Optional, Union import tqdm from filelock import FileLock from transformers import ( BartTokenizer, BartTokenizerFast, DataProcessor, PreTrainedTokenizer, RobertaTokenizer, RobertaTokenizerFast, XLMRobert...
336
from __future__ import annotations import copy import inspect import json import math import os import tempfile import unittest from importlib import import_module import numpy as np from transformers import ViTMAEConfig from transformers.file_utils import cached_property, is_tf_available, is_vision_available from...
336
1
from .integrations import ( is_optuna_available, is_ray_available, is_sigopt_available, is_wandb_available, run_hp_search_optuna, run_hp_search_ray, run_hp_search_sigopt, run_hp_search_wandb, ) from .trainer_utils import ( HPSearchBackend, default_hp_space_optuna, default_h...
336
def a__ ( UpperCAmelCase : int ) -> int: UpperCAmelCase : list[list[int]] = [[0 for _ in range(UpperCAmelCase )] for _ in range(m + 1 )] for i in range(m + 1 ): UpperCAmelCase : Optional[Any] = 1 for n in range(m + 1 ): for k in range(1 , Upp...
336
1
def a__ ( UpperCAmelCase : str ) -> str: return " ".join(input_str.split()[::-1] ) if __name__ == "__main__": import doctest doctest.testmod()
336
from __future__ import annotations def a__ ( UpperCAmelCase : list[list[int]] ) -> bool: UpperCAmelCase : Union[str, Any] = len(UpperCAmelCase ) # We need to create solution object to save path. UpperCAmelCase : int = [[0 for _ in range(UpperCAmelCase )] fo...
336
1
import os import posixpath import uuid from dataclasses import dataclass from typing import TYPE_CHECKING, Iterable, List, Optional, Tuple, Union import numpy as np import pyarrow as pa import datasets from datasets.arrow_writer import ArrowWriter, ParquetWriter from datasets.config import MAX_SHARD_SIZE from datas...
336
import inspect import unittest from transformers import ViTHybridConfig from transformers.testing_utils import require_accelerate, require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import Config...
336
1
# this script reports modified .py files under the desired list of top-level sub-dirs passed as a list of arguments, e.g.: # python ./utils/get_modified_files.py utils src tests examples # # it uses git to find the forking point and which files were modified - i.e. files not under git won't be considered # since th...
336
from __future__ import annotations from itertools import permutations from random import randint from timeit import repeat def a__ ( ) -> tuple[list[int], int]: UpperCAmelCase : str = [randint(-1_000 , 1_000 ) for i in range(10 )] UpperCAmelCase : Any = randint(-5_...
336
1
import argparse from collections import defaultdict import yaml _lowerCamelCase : str = "docs/source/en/_toctree.yml" def a__ ( UpperCAmelCase : str ) -> List[str]: UpperCAmelCase : Tuple = defaultdict(UpperCAmelCase ) for doc in model_doc: counts[d...
336
from queue import Queue from typing import TYPE_CHECKING, Optional if TYPE_CHECKING: from ..models.auto import AutoTokenizer class __UpperCAmelCase : def __magic_name__ ( self : int, __A : Dict ): raise NotImplementedError() def ...
336
1
# limitations under the License. # NOTE: This file is deprecated and will be removed in a future version. # It only exists so that temporarely `from diffusers.pipelines import DiffusionPipeline` works from .pipelines import DiffusionPipeline, ImagePipelineOutput # noqa: F401 from .utils import deprecate deprecat...
336
import numpy # List of input, output pairs _lowerCamelCase : Dict = ( ((5, 2, 3), 1_5), ((6, 5, 9), 2_5), ((1_1, 1_2, 1_3), 4_1), ((1, 1, 1), 8), ((1_1, 1_2, 1_3), 4_1), ) _lowerCamelCase : str = (((5_1_5, 2_2, 1_3), 5_5_5), ((6_1, 3_5, 4_9), 1_5_0)) _lowerCame...
336
1
def a__ ( UpperCAmelCase : int , UpperCAmelCase : int ) -> str: if not isinstance(UpperCAmelCase , UpperCAmelCase ): raise ValueError('''iterations must be defined as integers''' ) if not isinstance(UpperCAmelCase , UpperCAmelCase ) or not number >= 1: raise ...
336
def a__ ( UpperCAmelCase : List[Any] , UpperCAmelCase : Optional[int] ) -> Optional[Any]: UpperCAmelCase : List[str] = 0 UpperCAmelCase : List[Any] = len(UpperCAmelCase ) - 1 while left <= right: # avoid divided by 0 during interpolation if...
336
1
import numpy # List of input, output pairs _lowerCamelCase : Dict = ( ((5, 2, 3), 1_5), ((6, 5, 9), 2_5), ((1_1, 1_2, 1_3), 4_1), ((1, 1, 1), 8), ((1_1, 1_2, 1_3), 4_1), ) _lowerCamelCase : str = (((5_1_5, 2_2, 1_3), 5_5_5), ((6_1, 3_5, 4_9), 1_5_0)) _lowerCame...
336
import argparse import json from pathlib import Path import requests import torch from huggingface_hub import hf_hub_download from PIL import Image from transformers import BeitConfig, BeitForImageClassification, BeitForMaskedImageModeling, BeitImageProcessor from transformers.image_utils import PILImageResampling ...
336
1
# Copyright 2022 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applica...
336
import unittest import numpy as np from transformers import RobertaConfig, is_flax_available from transformers.testing_utils import require_flax, slow from ...test_modeling_flax_common import FlaxModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask if is_flax_available(): from transformers.mod...
336
1
from math import factorial _lowerCamelCase : dict[str, int] = {str(digit): factorial(digit) for digit in range(1_0)} def a__ ( UpperCAmelCase : int ) -> int: if not isinstance(UpperCAmelCase , UpperCAmelCase ): raise TypeError('''Parameter number must be int''' ...
336
import os from typing import List, Optional, Union from ...tokenization_utils import PreTrainedTokenizer from ...tokenization_utils_base import AddedToken from ...utils import logging _lowerCamelCase : Optional[Any] = logging.get_logger(__name__) _lowerCamelCase : Dict = {"voca...
336
1
import functools import gc import inspect import torch from .imports import is_npu_available, is_xpu_available def a__ ( *UpperCAmelCase : int ) -> Optional[int]: if not isinstance(UpperCAmelCase , UpperCAmelCase ): UpperCAmelCase : Any = list(UpperCAmelCase ) fo...
336
import inspect import unittest from transformers import MobileNetVaConfig from transformers.testing_utils import require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import ConfigTester from ...tes...
336
1
def a__ ( UpperCAmelCase : str , UpperCAmelCase : str ) -> int: if len(UpperCAmelCase ) != len(UpperCAmelCase ): raise ValueError('''String lengths must match!''' ) UpperCAmelCase : Union[str, Any] = 0 for chara, chara in zip(UpperCAmelCase , UpperC...
336
from collections import OrderedDict from typing import Any, List, Mapping, Optional from ... import PreTrainedTokenizer, TensorType, is_torch_available from ...configuration_utils import PretrainedConfig from ...onnx import OnnxConfigWithPast, PatchingSpec from ...utils import logging _lowerCamelCase : str ...
336
1
from __future__ import annotations import random import unittest from transformers import TransfoXLConfig, is_tf_available from transformers.testing_utils import require_tf, slow from ...test_configuration_common import ConfigTester from ...test_modeling_tf_common import TFModelTesterMixin, ids_tensor from ...test...
336
# limitations under the License. # NOTE: This file is deprecated and will be removed in a future version. # It only exists so that temporarely `from diffusers.pipelines import DiffusionPipeline` works from .pipelines import DiffusionPipeline, ImagePipelineOutput # noqa: F401 from .utils import deprecate deprecat...
336
1
import inspect import unittest from transformers import ViTHybridConfig from transformers.testing_utils import require_accelerate, require_torch, require_vision, slow, torch_device from transformers.utils import cached_property, is_torch_available, is_vision_available from ...test_configuration_common import Config...
336
from dataclasses import dataclass from typing import Optional, Tuple, Union import flax import jax.numpy as jnp from jax import random from ..configuration_utils import ConfigMixin, register_to_config from ..utils import BaseOutput from .scheduling_utils_flax import FlaxSchedulerMixin @flax.struct.dataclass cl...
336
1
import unittest import numpy as np from transformers.testing_utils import require_torch, require_vision from transformers.utils import is_torch_available, is_vision_available from ...test_image_processing_common import ImageProcessingSavingTestMixin if is_torch_available(): import torch if is_vision_availa...
336
import os import sys from contextlib import contextmanager # Windows only if os.name == "nt": import ctypes import msvcrt # noqa class __UpperCAmelCase ( ctypes.Structure ): # _fields is a specific attr expected by ctypes UpperCamelCase = [("""size...
336
1
import cva import numpy as np class __UpperCAmelCase : def __init__( self : int, __A : float, __A : int ): if k in (0.0_4, 0.0_6): UpperCAmelCase : Tuple = k UpperCAmelCase : str = window_size else: ...
336
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_torch_available, ) _lowerCamelCase : Tuple = { "configuration_encodec": [ "ENCODEC_PRETRAINED_CONFIG_ARCHIVE_MAP", "EncodecConfig", ], "feature_extractio...
336
1
import inspect import warnings from typing import Any, Dict, Optional, Union from packaging import version def a__ ( *UpperCAmelCase : Optional[int] , UpperCAmelCase : Optional[Union[Dict, Any]] = None , UpperCAmelCase : Dict=True , UpperCAmelCase : Any=2 ) -> ...
336
from __future__ import annotations def a__ ( UpperCAmelCase : int , UpperCAmelCase : int ) -> list[str]: if partitions <= 0: raise ValueError('''partitions must be a positive number!''' ) if partitions > number_of_bytes: raise ValueError('''partitions can not > number...
336
1
import copy from ...configuration_utils import PretrainedConfig from ...utils import logging from ..auto.configuration_auto import CONFIG_MAPPING _lowerCamelCase : List[Any] = logging.get_logger(__name__) class __UpperCAmelCase ( lowerCamelCase__ ): UpperCamelCase ...
336
# Copyright 2022 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applica...
336
1
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available _lowerCamelCase : Any = { "configuration_git": ["GIT_PRETRAINED_CONFIG_ARCHIVE_MAP", "GitConfig", "GitVisionConfig"], "processing_git": ["GitProcessor"], } try: if n...
336
import argparse import collections import json from pathlib import Path import requests import torch import yaml from huggingface_hub import hf_hub_download from PIL import Image from transformers import ( MobileViTImageProcessor, MobileViTVaConfig, MobileViTVaForImageClassification, MobileViTVaForS...
336
1
from __future__ import annotations _lowerCamelCase : Optional[Any] = 1.60_21E-19 # units = C def a__ ( UpperCAmelCase : float , UpperCAmelCase : float , UpperCAmelCase : float , ) -> tuple[str, float]: if (conductivity, electron_conc, mobility).cou...
336
import inspect import tempfile from collections import OrderedDict, UserDict from collections.abc import MutableMapping from contextlib import ExitStack, contextmanager from dataclasses import fields from enum import Enum from typing import Any, ContextManager, List, Tuple import numpy as np from .import_utils impo...
336
1
import requests _lowerCamelCase : List[str] = "YOUR API KEY" def a__ ( UpperCAmelCase : str , UpperCAmelCase : str = giphy_api_key ) -> list: UpperCAmelCase : Any = '''+'''.join(query.split() ) UpperCAmelCase : List[Any] = f'''http...
336
import os import unittest from transformers import LayoutLMTokenizer, LayoutLMTokenizerFast from transformers.models.layoutlm.tokenization_layoutlm import VOCAB_FILES_NAMES from transformers.testing_utils import require_tokenizers from ...test_tokenization_common import TokenizerTesterMixin @require_tokenizers...
336
1