| python_code | repo_name | file_path |
|---|---|---|
import collections
import time
from typing import Any, Dict, List, Optional, Tuple, Union
import torch
from packaging import version
from torch import nn
from torch.utils.data import DataLoader, Dataset
from transformers.trainer_utils import EvalPrediction, PredictionOutput, speed_metrics
from transformers.utils impo... | EXA-1-master | exa/models/unilm-master/layoutlmft/layoutlmft/trainers/xfun_trainer.py |
from .funsd_trainer import FunsdTrainer
from .xfun_trainer import XfunReTrainer, XfunSerTrainer
| EXA-1-master | exa/models/unilm-master/layoutlmft/layoutlmft/trainers/__init__.py |
from typing import Any, Dict, Union
import torch
from transformers import Trainer
class FunsdTrainer(Trainer):
def _prepare_inputs(self, inputs: Dict[str, Union[torch.Tensor, Any]]) -> Dict[str, Union[torch.Tensor, Any]]:
"""
Prepare :obj:`inputs` before feeding them to the model, converting the... | EXA-1-master | exa/models/unilm-master/layoutlmft/layoutlmft/trainers/funsd_trainer.py |
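The `_prepare_inputs` docstring above is cut off mid-sentence. A minimal self-contained sketch of what such an override typically does (assuming the usual `Trainer` pattern of moving every tensor in the inputs dict onto the active device):

```python
# Sketch only: the device-moving loop assumed from the truncated docstring,
# written as a free function rather than the Trainer method.
from typing import Any, Dict, Union

import torch


def prepare_inputs(
    inputs: Dict[str, Union[torch.Tensor, Any]], device: torch.device
) -> Dict[str, Union[torch.Tensor, Any]]:
    prepared = {}
    for k, v in inputs.items():
        if isinstance(v, torch.Tensor):
            prepared[k] = v.to(device)  # tensors go to the training device
        else:
            prepared[k] = v  # non-tensor values pass through unchanged
    return prepared
```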
| EXA-1-master | exa/models/unilm-master/layoutlmft/layoutlmft/modules/__init__.py |
| EXA-1-master | exa/models/unilm-master/layoutlmft/layoutlmft/modules/decoders/__init__.py |
import copy
import torch
from torch import nn
from torch.nn import CrossEntropyLoss
class BiaffineAttention(torch.nn.Module):
"""Implements a biaffine attention operator for binary relation classification.
PyTorch implementation of the biaffine attention operator from "End-to-end neural relation
extract... | EXA-1-master | exa/models/unilm-master/layoutlmft/layoutlmft/modules/decoders/re.py |
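The `BiaffineAttention` cell truncates before the forward pass. A minimal sketch of the standard biaffine operator, f(x_1, x_2) = x_1^T U x_2 + W [x_1; x_2] + b, with `nn.Bilinear` assumed for the bilinear term:

```python
# Sketch of a biaffine attention head for binary relation classification;
# the choice of nn.Bilinear plus nn.Linear is an assumption for illustration.
import torch
from torch import nn


class BiaffineAttention(nn.Module):
    def __init__(self, in_features: int, out_features: int):
        super().__init__()
        # bilinear term: x_1^T U x_2
        self.bilinear = nn.Bilinear(in_features, in_features, out_features, bias=False)
        # linear term: W [x_1; x_2] + b
        self.linear = nn.Linear(2 * in_features, out_features, bias=True)

    def forward(self, x_1: torch.Tensor, x_2: torch.Tensor) -> torch.Tensor:
        return self.bilinear(x_1, x_2) + self.linear(torch.cat((x_1, x_2), dim=-1))
```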
# flake8: noqa
from .data_collator import DataCollatorForKeyValueExtraction
from .datasets import *
| EXA-1-master | exa/models/unilm-master/layoutlmft/layoutlmft/data/__init__.py |
import torch
from detectron2.data.detection_utils import read_image
from detectron2.data.transforms import ResizeTransform, TransformList
def normalize_bbox(bbox, size):
return [
int(1000 * bbox[0] / size[0]),
int(1000 * bbox[1] / size[1]),
int(1000 * bbox[2] / size[0]),
int(1000 ... | EXA-1-master | exa/models/unilm-master/layoutlmft/layoutlmft/data/utils.py |
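The last coordinate of `normalize_bbox` is cut off above; by symmetry with the first three it divides by the page height. A self-contained sketch of the whole helper, which rescales boxes to the 0-1000 grid LayoutLM-style position embeddings expect:

```python
# Sketch completing the truncated helper; the fourth line is inferred by
# symmetry (y1 is normalized against the page height).
def normalize_bbox(bbox, size):
    width, height = size
    return [
        int(1000 * bbox[0] / width),   # x0
        int(1000 * bbox[1] / height),  # y0
        int(1000 * bbox[2] / width),   # x1
        int(1000 * bbox[3] / height),  # y1
    ]


assert normalize_bbox([10, 20, 30, 40], (100, 200)) == [100, 100, 300, 200]
```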
from dataclasses import dataclass, field
from typing import Optional
@dataclass
class DataTrainingArguments:
"""
Arguments pertaining to what data we are going to input our model for training and eval.
"""
task_name: Optional[str] = field(default="ner", metadata={"help": "The name of the task (ner, p... | EXA-1-master | exa/models/unilm-master/layoutlmft/layoutlmft/data/data_args.py |
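Dataclasses like `DataTrainingArguments` are normally consumed through `HfArgumentParser`; a minimal runnable sketch, with the single abbreviated field standing in for the full argument set:

```python
# Sketch of the HfArgumentParser pattern these example scripts rely on;
# only one field is reproduced here.
from dataclasses import dataclass, field
from typing import Optional

from transformers import HfArgumentParser


@dataclass
class DataTrainingArguments:
    task_name: Optional[str] = field(
        default="ner", metadata={"help": "The name of the task (ner, pos...)."}
    )


(data_args,) = HfArgumentParser(DataTrainingArguments).parse_args_into_dataclasses(args=[])
print(data_args.task_name)  # -> "ner"
```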
from dataclasses import dataclass
from typing import Optional, Union
import torch
from detectron2.structures import ImageList
from transformers import PreTrainedTokenizerBase
from transformers.file_utils import PaddingStrategy
@dataclass
class DataCollatorForKeyValueExtraction:
"""
Data collator that will d... | EXA-1-master | exa/models/unilm-master/layoutlmft/layoutlmft/data/data_collator.py |
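The collator docstring truncates above. Beyond ordinary token padding, a key-value-extraction collator also has to pad the per-token bounding boxes and labels; a hedged sketch follows (field names and the -100 pad label are assumptions, and the detectron2 image batching is omitted):

```python
# Sketch only: pad input_ids/attention_mask with the tokenizer, then pad
# bbox with zero boxes and labels with -100 so the loss ignores them.
import torch


def collate(features, tokenizer, label_pad_token_id=-100):
    max_len = max(len(f["input_ids"]) for f in features)
    batch = tokenizer.pad(
        [{"input_ids": f["input_ids"], "attention_mask": f["attention_mask"]} for f in features],
        padding="max_length",
        max_length=max_len,
        return_tensors="pt",
    )
    batch["bbox"] = torch.tensor(
        [f["bbox"] + [[0, 0, 0, 0]] * (max_len - len(f["bbox"])) for f in features]
    )
    batch["labels"] = torch.tensor(
        [f["labels"] + [label_pad_token_id] * (max_len - len(f["labels"])) for f in features]
    )
    return batch
```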
| EXA-1-master | exa/models/unilm-master/layoutlmft/layoutlmft/data/datasets/__init__.py |
# Lint as: python3
import json
import logging
import os
import datasets
from layoutlmft.data.utils import load_image, merge_bbox, normalize_bbox, simplify_bbox
from transformers import AutoTokenizer
_URL = "https://github.com/doc-analysis/XFUN/releases/download/v1.0/"
_LANG = ["zh", "de", "es", "fr", "en", "it", "... | EXA-1-master | exa/models/unilm-master/layoutlmft/layoutlmft/data/datasets/xfun.py |
# coding=utf-8
import json
import os
import datasets
from layoutlmft.data.utils import load_image, normalize_bbox
logger = datasets.logging.get_logger(__name__)
_CITATION = """\
@article{Jaume2019FUNSDAD,
title={FUNSD: A Dataset for Form Understanding in Noisy Scanned Documents},
author={Guillaume Jaume and ... | EXA-1-master | exa/models/unilm-master/layoutlmft/layoutlmft/data/datasets/funsd.py |
#!/usr/bin/env python
# coding=utf-8
import logging
import os
import sys
from dataclasses import dataclass, field
from typing import Optional
import numpy as np
from datasets import ClassLabel, load_dataset, load_metric
import layoutlmft.data.datasets.xfun
import transformers
from layoutlmft.data import DataCollator... | EXA-1-master | exa/models/unilm-master/layoutlmft/examples/run_xfun_ser.py |
#!/usr/bin/env python
# coding=utf-8
import logging
import os
import sys
from dataclasses import dataclass, field
from typing import Optional
import numpy as np
from datasets import ClassLabel, load_dataset, load_metric
import layoutlmft.data.datasets.funsd
import transformers
from layoutlmft.data import DataCollato... | EXA-1-master | exa/models/unilm-master/layoutlmft/examples/run_funsd.py |
#!/usr/bin/env python
# coding=utf-8
import logging
import os
import sys
import numpy as np
from datasets import ClassLabel, load_dataset
import layoutlmft.data.datasets.xfun
import transformers
from layoutlmft import AutoModelForRelationExtraction
from layoutlmft.data.data_args import XFUNDataTrainingArguments
from... | EXA-1-master | exa/models/unilm-master/layoutlmft/examples/run_xfun_re.py |
import os
import json
import tqdm
import numpy as np
import torch
import argparse
from datasets import Dataset
from typing import List, Dict
from functools import partial
from transformers import AutoModel, AutoTokenizer, PreTrainedTokenizerFast, BatchEncoding, DataCollatorWithPadding
from transformers.modeling_output... | EXA-1-master | exa/models/unilm-master/e5/mteb_beir_eval.py |
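The E5 evaluation scripts embed text by encoding, mean-pooling over non-padding tokens, and L2-normalizing; a minimal sketch (the checkpoint name and the `query:` prefix are assumptions from E5's published usage):

```python
# Sketch of E5-style sentence embedding: masked mean pooling + normalization.
import torch
import torch.nn.functional as F
from transformers import AutoModel, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("intfloat/e5-base")  # placeholder checkpoint
model = AutoModel.from_pretrained("intfloat/e5-base")

batch = tokenizer(["query: hello world"], padding=True, truncation=True, return_tensors="pt")
with torch.no_grad():
    hidden = model(**batch).last_hidden_state         # (B, T, H)
mask = batch["attention_mask"].unsqueeze(-1)          # (B, T, 1)
emb = (hidden * mask).sum(dim=1) / mask.sum(dim=1)    # average over real tokens only
emb = F.normalize(emb, dim=-1)                        # unit-length embeddings
```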
import torch
import logging
from torch import Tensor
from typing import Mapping
def _setup_logger():
log_format = logging.Formatter("[%(asctime)s %(levelname)s] %(message)s")
logger = logging.getLogger()
logger.setLevel(logging.INFO)
console_handler = logging.StreamHandler()
console_handler.setF... | EXA-1-master | exa/models/unilm-master/e5/utils.py |
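`_setup_logger` is truncated above; assuming the console handler is simply attached to the root logger, the completed pattern is:

```python
# Sketch completing the truncated logger setup; the trailing addHandler and
# return are assumptions about the cut-off lines.
import logging


def setup_logger() -> logging.Logger:
    log_format = logging.Formatter("[%(asctime)s %(levelname)s] %(message)s")
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(log_format)
    logger.addHandler(console_handler)
    return logger
```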
import os
import torch
import torch.nn.functional as F
import tqdm
import json
import numpy as np
import argparse
from functools import partial
from torch.utils.data import DataLoader
from datasets import Dataset
from transformers import AutoModel, AutoTokenizer, DataCollatorWithPadding, PreTrainedTokenizerFast, Batch... | EXA-1-master | exa/models/unilm-master/e5/mteb_eval.py |
"""
Simple check list from AllenNLP repo: https://github.com/allenai/allennlp/blob/master/setup.py
To create the package for pypi.
1. Change the version in __init__.py and setup.py.
2. Commit these changes with the message: "Release: VERSION"
3. Add a tag in git to mark the release: "git tag VERSION -m'Adds tag VER... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/setup.py |
import torch
from torch.nn import DataParallel
from torch.cuda._utils import _get_device_index
from torch.nn.parallel._functions import Scatter
from itertools import chain
def scatter_imbalance(inputs, target_gpus, dim=0):
r"""
Slices tensors into approximately equal chunks and
distributes them across giv... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/nn/data_parallel.py |
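`scatter_imbalance` slices a batch into approximately equal chunks across GPUs; the splitting logic reduces to giving the remainder samples to the first devices, as this sketch of the chunk-size computation shows (the CUDA transfer itself is omitted):

```python
# Sketch of the chunk-size arithmetic behind an imbalanced scatter.
import torch


def imbalanced_chunk_sizes(batch_size: int, num_devices: int):
    base, rem = divmod(batch_size, num_devices)
    # the first `rem` devices get one extra sample, so sizes differ by at most 1
    return [base + (1 if i < rem else 0) for i in range(num_devices)]


x = torch.arange(10).reshape(10, 1)
chunks = torch.split(x, imbalanced_chunk_sizes(x.size(0), 4), dim=0)
assert [c.size(0) for c in chunks] == [3, 3, 2, 2]
```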
| EXA-1-master | exa/models/unilm-master/unilm-v1/src/nn/__init__.py |
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENS... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/pytorch_pretrained_bert/optimization.py |
__version__ = "0.4.0"
from .tokenization import BertTokenizer, BasicTokenizer, WordpieceTokenizer
from .modeling import (BertConfig, BertModel, BertForPreTraining, BertForMaskedLM, BertForNextSentencePrediction, BertForSequenceClassification,
BertForMultipleChoice, BertForTokenClassification, Ber... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/pytorch_pretrained_bert/__init__.py |
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENS... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/pytorch_pretrained_bert/tokenization.py |
# coding=utf-8
"""PyTorch optimization for BERT model."""
from apex.optimizers import FP16_Optimizer
class FP16_Optimizer_State(FP16_Optimizer):
def __init__(self,
init_optimizer,
static_loss_scale=1.0,
dynamic_loss_scale=False,
dynamic_loss_arg... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/pytorch_pretrained_bert/optimization_fp16.py |
# coding=utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torch
import torch.nn.functional as F
from torch.nn.modules.loss import _Loss
class LabelSmoothingLoss(_Loss):
"""
With label smoothing,
KL-divergence between q_{smoothed gr... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/pytorch_pretrained_bert/loss.py |
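`LabelSmoothingLoss` measures the KL divergence between the smoothed ground-truth distribution and the model distribution; minimizing it is equivalent, up to a constant, to cross-entropy against the smoothed targets. A minimal functional sketch, with the signature assumed:

```python
# Sketch of label smoothing: 1 - eps mass on the gold class, eps spread
# uniformly over the remaining classes, then cross-entropy against it.
import torch
import torch.nn.functional as F


def label_smoothing_loss(logits, target, smoothing=0.1, ignore_index=-100):
    n_classes = logits.size(-1)
    log_probs = F.log_softmax(logits, dim=-1)
    true_dist = torch.full_like(log_probs, smoothing / (n_classes - 1))
    safe_target = target.clamp(min=0)  # keep scatter indices valid at ignored positions
    true_dist.scatter_(-1, safe_target.unsqueeze(-1), 1.0 - smoothing)
    loss = -(true_dist * log_probs).sum(dim=-1)
    return loss[target != ignore_index].mean()
```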
"""
Utilities for working with the local dataset cache.
This file is adapted from the AllenNLP library at https://github.com/allenai/allennlp
Copyright by the AllenNLP authors.
"""
import os
import logging
import shutil
import tempfile
import json
from urllib.parse import urlparse
from pathlib import Path
from typing ... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/pytorch_pretrained_bert/file_utils.py |
# coding=utf-8
"""PyTorch BERT model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import copy
import json
import math
import logging
import tarfile
import tempfile
import shutil
import numpy as np
from scipy.stats import truncnorm
import t... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/pytorch_pretrained_bert/modeling.py |
# coding: utf8
def main():
import sys
try:
from .convert_tf_checkpoint_to_pytorch import convert_tf_checkpoint_to_pytorch
except ModuleNotFoundError:
print("pytorch_pretrained_bert can only be used from the commandline to convert TensorFlow models in PyTorch, "
"In that case, i... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/pytorch_pretrained_bert/__main__.py |
#!/usr/bin/env python
from __future__ import print_function
__author__ = 'xinya'
from bleu.bleu import Bleu
from meteor.meteor import Meteor
from rouge.rouge import Rouge
from cider.cider import Cider
from collections import defaultdict
from argparse import ArgumentParser
import string
import sys
reload(sys)
sys.setd... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/qg/eval_on_unilm_tokenized_ref.py |
#!/usr/bin/env python
from __future__ import print_function
__author__ = 'xinya'
from bleu.bleu import Bleu
from meteor.meteor import Meteor
from rouge.rouge import Rouge
from cider.cider import Cider
from collections import defaultdict
from argparse import ArgumentParser
import string
import sys
reload(sys)
sys.setd... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/qg/eval.py |
| EXA-1-master | exa/models/unilm-master/unilm-v1/src/gigaword/__init__.py |
"""BERT finetuning runner."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import logging
import glob
import json
import argparse
import math
import string
from multiprocessing import Pool, cpu_count
from tqdm import tqdm, trange
from pathlib i... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/gigaword/eval.py |
from __future__ import print_function, unicode_literals, division
import os
import re
import codecs
import platform
from subprocess import check_output
from tempfile import mkdtemp
from functools import partial
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import ConfigPars... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/gigaword/bs_pyrouge.py |
| EXA-1-master | exa/models/unilm-master/unilm-v1/src/cnndm/__init__.py |
"""BERT finetuning runner."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import logging
import glob
import json
import argparse
import math
import string
from multiprocessing import Pool, cpu_count
from tqdm import tqdm, trange
from pathlib i... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/cnndm/eval.py |
from __future__ import print_function, unicode_literals, division
import os
import re
import codecs
import platform
from subprocess import check_output
from tempfile import mkdtemp
from functools import partial
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import ConfigPars... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/cnndm/bs_pyrouge.py |
from random import randint, shuffle
from random import random as rand
import numpy as np
import torch
import torch.utils.data
def get_random_word(vocab_words):
i = randint(0, len(vocab_words)-1)
return vocab_words[i]
def batch_list_to_batch_tensors(batch):
batch_tensors = []
for x in zip(*batch):
... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/biunilm/loader_utils.py |
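`batch_list_to_batch_tensors` is truncated above; the usual pattern zips per-sample fields together and tensorizes each field. A hedged completion (the non-tensor branch is an assumption):

```python
# Sketch completing the truncated batching helper: one tensor per field.
import torch


def batch_list_to_batch_tensors(batch):
    batch_tensors = []
    for x in zip(*batch):  # x collects one field across all samples
        if isinstance(x[0], torch.Tensor):
            batch_tensors.append(torch.stack(x))
        else:
            batch_tensors.append(torch.tensor(x, dtype=torch.long))
    return batch_tensors
```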
"""BERT finetuning runner."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import logging
import glob
import argparse
import math
from tqdm import tqdm, trange
import numpy as np
import torch
from torch.utils.data import DataLoader, RandomSampl... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/biunilm/decode_seq2seq.py |
| EXA-1-master | exa/models/unilm-master/unilm-v1/src/biunilm/__init__.py |
from random import randint, shuffle, choice
from random import random as rand
import math
import torch
from biunilm.loader_utils import get_random_word, batch_list_to_batch_tensors, Pipeline
# Input file format :
# 1. One sentence per line. These should ideally be actual sentences,
# not entire paragraphs or arbit... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/biunilm/seq2seq_loader.py |
import pickle
import math
import argparse
import glob
from pathlib import Path
from tqdm import tqdm
import unicodedata
from pytorch_pretrained_bert.tokenization import BertTokenizer
def read_traces_from_file(file_name):
with open(file_name, "rb") as fin:
meta = pickle.load(fin)
num_samples = met... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/biunilm/gen_seq_from_trace.py |
"""BERT finetuning runner."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import logging
import glob
import math
import json
import argparse
import random
from pathlib import Path
from tqdm import tqdm, trange
import numpy as np
import torch
f... | EXA-1-master | exa/models/unilm-master/unilm-v1/src/biunilm/run_seq2seq.py |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import subprocess
import sys
from setuptools import setup, find_packages, Extension
from setuptools import E... | EXA-1-master | exa/models/unilm-master/edgelm/setup.py |
#!/usr/bin/env python3 -u
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Legacy entry point. Use fairseq_cli/train.py or fairseq-train instead.
"""
from fairseq_cli.train import cli_mai... | EXA-1-master | exa/models/unilm-master/edgelm/train.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""isort:skip_file"""
import functools
import importlib
dependencies = [
"dataclasses",
"hydra",
"numpy",
"omegaconf",
"... | EXA-1-master | exa/models/unilm-master/edgelm/hubconf.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import io
import os
import string
import tempfile
import unittest
import torch
from fairseq import tokenizer
from fairseq.data import Diction... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_dictionary.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import functools
import unittest
from typing import Any, Dict, Sequence
import fairseq
import fairseq.options
import fairseq.tasks
import tor... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_roberta.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import torch
from fairseq import utils
class TestUtils(unittest.TestCase):
def test_convert_padding_direction(self):
... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_utils.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from copy import deepcopy
from dataclasses import dataclass
from typing import Optional
import torch
from fairseq.models.ema ... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_ema.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from fairseq.data import iterators
class TestIterators(unittest.TestCase):
def test_counting_iterator_index(self, ref=N... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_iterators.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from typing import Dict, List
import tests.utils as test_utils
import torch
from fairseq import utils
from fairseq.data impor... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_noising.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import tempfile
import unittest
import torch
from fairseq.data.dictionary import Dictionary
from fairseq.models.lstm import L... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_lstm_jitable.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import copy
import unittest
import torch
from torch.cuda.amp import autocast, GradScaler
from fairseq.optim import build_opti... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_amp_optimizer.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import torch
from fairseq.modules.sparse_multihead_attention import SparseMultiheadAttention
class TestSparseMultiheadAtten... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_sparse_multihead_attention.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import uuid
from fairseq import metrics
class TestMetrics(unittest.TestCase):
def test_nesting(self):
with metr... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_metrics.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import contextlib
import logging
import unittest
from io import StringIO
from unittest.mock import MagicMock, patch
import torch
from fairseq... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_train.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import unittest
import tests.utils as test_utils
import torch
from fairseq.sequence_scorer import SequenceScorer
class Test... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_sequence_scorer.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
import unittest
from fairseq.dataclass.utils import convert_namespace_to_omegaconf
from fairseq.models.transformer import Tran... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_inference_dropout.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import torch
from fairseq.modules.multihead_attention import MultiheadAttention
class TestMultiheadAttention(unittest.TestC... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_multihead_attention.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import numpy as np
from fairseq.data.data_utils_fast import batch_by_size_fn
from fairseq.data.data_utils_fast import batch_b... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_data_utils.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import random
import string
import typing as tp
import unittest
from collections import Counter
from tempfile import NamedTemporaryF... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_huffman.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import contextlib
import logging
import os
import tempfile
import unittest
from io import StringIO
from unittest.mock import patch
from fairs... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_checkpoint_utils.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from unittest import mock
class TestIOPath(unittest.TestCase):
def test_no_iopath(self):
from .test_reproducibi... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_iopath.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import collections
import unittest
import numpy as np
from fairseq.data import ListDataset, ResamplingDataset
class TestResamplingDataset(u... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_resampling_dataset.py |
import os
import shutil
import tempfile
import unittest
from fairseq import options
from fairseq.dataclass.utils import convert_namespace_to_omegaconf
from fairseq.data.data_utils import raise_if_valid_subsets_unintentionally_ignored
from .utils import create_dummy_data, preprocess_lm_data, train_language_model
def ... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_valid_subset_checks.py |
| EXA-1-master | exa/models/unilm-master/edgelm/tests/__init__.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import tests.utils as test_utils
import torch
from fairseq.data import (
BacktranslationDataset,
LanguagePairDataset,... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_backtranslation_dataset.py |
import contextlib
import unittest
import tempfile
from io import StringIO
import numpy as np
from tests.utils import create_dummy_data, preprocess_lm_data, train_language_model
try:
from pyarrow import plasma
from fairseq.data.plasma_utils import PlasmaView, PlasmaStore
PYARROW_AVAILABLE = True
except I... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_plasma_utils.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import contextlib
import logging
import json
import os
import random
import sys
import tempfile
import unittest
from io import StringIO
from t... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_binaries.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import sys
import unittest
import torch
from fairseq.token_generation_constraints import *
def tensorize(constraints: List[List[int]]) -> t... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_constraints.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import copy
import unittest
import tests.utils as test_utils
import torch
from fairseq.criterions.cross_entropy import CrossE... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_label_smoothing.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import json
import os
import random
import sys
from io import StringIO
import torch
import torch.nn.functional as F
from fair... | EXA-1-master | exa/models/unilm-master/edgelm/tests/utils.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
import unittest
from typing import Sequence
from fairseq.data import LanguagePairDataset, ListDataset, RoundRobinZipDatasets
f... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_dataset.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import torch
import torch.nn as nn
from fairseq.modules import ConvTBC
class TestConvTBC(unittest.TestCase):
def test_c... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_convtbc.py |
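`ConvTBC` is fairseq's time x batch x channel convolution; the same computation can be expressed with stock `nn.Conv1d` by permuting layouts, which is essentially what this test verifies. Shapes here are illustrative:

```python
# Sketch: a TBC-layout convolution via nn.Conv1d and two permutes.
import torch
from torch import nn

conv = nn.Conv1d(in_channels=8, out_channels=16, kernel_size=3, padding=1)
x_tbc = torch.randn(20, 4, 8)         # (time, batch, channels)
x_bct = x_tbc.permute(1, 2, 0)        # -> (batch, channels, time) for Conv1d
y_tbc = conv(x_bct).permute(2, 0, 1)  # back to (time, batch, channels)
assert y_tbc.shape == (20, 4, 16)
```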
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from collections import OrderedDict
import torch
from fairseq.data import LanguagePairDataset, TokenBlockDataset
from fairseq... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_multi_corpus_dataset.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import copy
import logging
import unittest
import torch
from fairseq.optim.fp16_optimizer import FP16Optimizer, MemoryEfficie... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_fp16_optimizer.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import logging
import unittest
import torch
from fairseq.optim.adam import FairseqAdam
from fairseq.optim.fp16_optimizer impo... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_memory_efficient_fp16.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import tests.utils as test_utils
import torch
from fairseq.data import TokenBlockDataset
class TestTokenBlockDataset(unitte... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_token_block_dataset.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import tempfile
import unittest
import math
import numpy as np
import tests.utils as test_utils
import torch
from fairseq im... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_sequence_generator.py |
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import shutil
import tempfile
import unittest
from typing import Optional
class TestFileChunker(unittest.TestCase):
_tmpdir: Optional[str] = None
_tmpfile: Optional[str] = Non... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_file_chunker_utils.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from argparse import ArgumentParser
from dataclasses import dataclass, field
from fairseq.dataclass import FairseqDataclass
f... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_dataclass_utils.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import torch
from fairseq.data import LanguagePairDataset, TokenBlockDataset
from fairseq.data.concat_dataset import ConcatDa... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_concat_dataset.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import tempfile
import unittest
from pathlib import Path
from typing import Any, Dict, Sequence
import fairseq.data.indexed_dataset as indexe... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_online_backtranslation.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import torch
from fairseq.data import MonolingualDataset
from fairseq.tasks.language_modeling import LanguageModelingTask, La... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_lm_context_window.py |
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import shutil
import sys
import tempfile
import unittest
from typing import Optional
from unittest.mock import MagicMock
class TestFileIO(unittest.TestCase):
_tmpdir: Optional[st... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_file_io.py |
import argparse
import unittest
from typing import Any, Dict, Sequence
import torch
from fairseq.models import transformer
from tests.test_roberta import FakeTask
def mk_sample(tok: Sequence[int] = None, batch_size: int = 2) -> Dict[str, Any]:
if not tok:
tok = [10, 11, 12, 13, 14, 15, 2]
batch = t... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_transformer.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import contextlib
import json
import os
import tempfile
import unittest
from io import StringIO
import torch
from . import test_binaries
c... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_reproducibility.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import torch
from fairseq.data import Dictionary
from fairseq.modules import CharacterTokenEmbedder
class TestCharacterToke... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_character_token_embedder.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import collections
import os
import shutil
import tempfile
import unittest
import numpy as np
import torch
from scripts.average_checkpoints i... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_average_checkpoints.py |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import tempfile
import unittest
import torch
from fairseq.data.dictionary import Dictionary
from fairs... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_export.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import torch
import torch.nn as nn
from fairseq.modules.checkpoint_activations import checkpoint_wrapper
from torch.utils.che... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_activation_checkpointing.py |
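`checkpoint_wrapper` builds on the `torch.utils.checkpoint` primitive: activations inside the wrapped module are discarded in the forward pass and recomputed during backward, trading compute for memory. A minimal sketch (module and shapes illustrative):

```python
# Sketch of activation checkpointing with the stock PyTorch primitive.
import torch
from torch import nn
from torch.utils.checkpoint import checkpoint

block = nn.Sequential(nn.Linear(16, 16), nn.ReLU(), nn.Linear(16, 16))
x = torch.randn(4, 16, requires_grad=True)
y = checkpoint(block, x, use_reentrant=False)  # recompute activations in backward
y.sum().backward()
```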
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from collections import OrderedDict
import numpy as np
import torch
from fairseq.data import LanguagePairDataset, TokenBlockD... | EXA-1-master | exa/models/unilm-master/edgelm/tests/test_multi_corpus_sampled_dataset.py |
| EXA-1-master | exa/models/unilm-master/edgelm/tests/gpu/__init__.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from copy import deepcopy
from dataclasses import dataclass
from typing import Optional
import torch
from fairseq.models.ema ... | EXA-1-master | exa/models/unilm-master/edgelm/tests/gpu/test_ema_gpu.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import contextlib
import logging
import json
import os
import tempfile
import unittest
from io import StringIO
import torch
from fairseq impo... | EXA-1-master | exa/models/unilm-master/edgelm/tests/gpu/test_binaries_gpu.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import torch
from torch import nn
from fairseq.distributed import ModuleProxyWrapper
from .utils import objects_are_equal
... | EXA-1-master | exa/models/unilm-master/edgelm/tests/distributed/test_module_proxy_wrapper.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import functools
import random
import unittest
from multiprocessing import Manager
import torch
import torch.nn as nn
from fa... | EXA-1-master | exa/models/unilm-master/edgelm/tests/distributed/test_bmuf.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import functools
import sys
import unittest
import torch
from fairseq.distributed import utils as dist_utils
from .utils import objects_are... | EXA-1-master | exa/models/unilm-master/edgelm/tests/distributed/test_utils.py |