| python_code (string, 0-992k chars) | repo_name (string, 8-46 chars) | file_path (string, 5-162 chars) |
|---|---|---|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import glob
import argparse
from utils.dedup import deup
import sys
WORKDIR_ROOT = os.environ.get('WORKDIR_ROOT', None)
if WORKD... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/multilingual/data_scripts/dedup_all.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import argparse
import pandas as pd
import sys
WORKDIR_ROOT = os.environ.get('WORKDIR_ROOT', None)
if WORKDIR_ROOT is None or n... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/multilingual/data_scripts/check_valid_test_overlaps.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os, sys
import subprocess
import re
from subprocess import check_call, check_output
WORKDIR_ROOT = os.environ.get('WORKDIR_ROOT', Non... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/multilingual/data_scripts/check_iswlt_test_data.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import itertools
import os
import csv
from collections import defaultdict
from six.moves import zip
import io
import wget
import sys
from su... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/multilingual/data_scripts/download_ted_and_extract.py |
from typing import NamedTuple, List
from urllib.parse import urlparse
import os, sys
import subprocess
from subprocess import check_call, check_output
import glob
import wget
import re
import multiprocessing as mp
from functools import partial
import pathlib
from collections import OrderedDict
WORKDIR_ROOT = os.envir... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/multilingual/data_scripts/download_wmt19_and_before.py |
import os, sys
import glob, itertools
import pandas as pd
WORKDIR_ROOT = os.environ.get('WORKDIR_ROOT', None)
if WORKDIR_ROOT is None or not WORKDIR_ROOT.strip():
    print('please specify your working directory root in OS environment variable WORKDIR_ROOT. Exiting...')
sys.exit(-1)
def load_langs(path):
... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/multilingual/data_scripts/remove_valid_test_in_train.py |
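Several of these multilingual data scripts repeat the same WORKDIR_ROOT guard shown in full above. A minimal reusable version of that pattern (the `require_workdir_root` helper name is hypothetical, not part of the dataset) might look like:

```python
import os
import sys

def require_workdir_root():
    # Read the working directory root from the environment and abort
    # with a clear message if it is missing or blank.
    root = os.environ.get('WORKDIR_ROOT', None)
    if root is None or not root.strip():
        print('please specify your working directory root in '
              'OS environment variable WORKDIR_ROOT. Exiting...')
        sys.exit(-1)
    return root
```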
import shutil
import os, sys
from subprocess import check_call, check_output
import glob
import argparse
import shutil
import pathlib
import itertools
def call_output(cmd):
print(f"Executing: {cmd}")
ret = check_output(cmd, shell=True)
print(ret)
return ret
def call(cmd):
print(cmd)
check_call... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/multilingual/data_scripts/binarize.py |
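The `call` helper in binarize.py is cut off above; judging from `call_output`, a plausible completion (an assumption, not the verbatim file) is:

```python
from subprocess import check_call, check_output

def call_output(cmd):
    # Run a shell command and return its captured stdout (as bytes).
    print(f"Executing: {cmd}")
    ret = check_output(cmd, shell=True)
    print(ret)
    return ret

def call(cmd):
    # Run a shell command, raising CalledProcessError on a non-zero exit.
    print(cmd)
    check_call(cmd, shell=True)
```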
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import glob
import argparse
from utils.dedup import deup
import sys
WORKDIR_ROOT = os.environ.get('WORKDIR_ROOT', None)
if WORKDI... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/multilingual/data_scripts/check_self_overlaps.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#!/bin/python
import fasttext
from multiprocessing import Pool
import contextlib
import sys
import argparse
from functools import partial
im... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/multilingual/data_scripts/utils/fasttext_multi_filter.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
def deup(src_file, tgt_file, src_file_out, tgt_file_out):
seen = set()
dup_count = 0
with open(src_file, encodin... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/multilingual/data_scripts/utils/dedup.py |
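The `deup` function above is truncated at its file handling. A minimal sketch of the parallel-corpus dedup it implements (the original may normalize or hash pairs differently) could be:

```python
def deup(src_file, tgt_file, src_file_out, tgt_file_out):
    # Copy parallel src/tgt lines, skipping any (src, tgt) pair seen before.
    seen = set()
    dup_count = 0
    with open(src_file, encoding='utf-8') as fsrc, \
            open(tgt_file, encoding='utf-8') as ftgt, \
            open(src_file_out, 'w', encoding='utf-8') as fs_out, \
            open(tgt_file_out, 'w', encoding='utf-8') as ft_out:
        for s, t in zip(fsrc, ftgt):
            key = (s.strip(), t.strip())
            if key in seen:
                dup_count += 1
                continue
            seen.add(key)
            fs_out.write(s)
            ft_out.write(t)
    print(f'removed {dup_count} duplicated pairs')
    return dup_count
```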
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from . import rxf_src # noqa
| EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/rxf/__init__.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from . import label_smoothed_cross_entropy_r3f, sentence_prediction_r3f # noqa
| EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/rxf/rxf_src/__init__.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
import torch
import torch.nn.functional as F
from fairseq import utils
from fairseq.criterions import FairseqCriterion, register_... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/rxf/rxf_src/sentence_prediction_r3f.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
import torch
import torch.nn.functional as F
from fairseq import metrics, utils
from fairseq.criterions import FairseqCriterion, ... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/rxf/rxf_src/label_smoothed_cross_entropy_r3f.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from . import transformer_xl_model, truncated_bptt_lm_task # noqa
| EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/truncated_bptt/__init__.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
import os
from dataclasses import dataclass, field
from typing import List, Optional, Tuple
import torch
from fairseq import d... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/truncated_bptt/truncated_bptt_lm_task.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
from dataclasses import dataclass, field
from typing import Dict, List, Optional
import torch
from fairseq.dataclass import Fa... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/truncated_bptt/transformer_xl_model.py |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Helper script to pre-compute embeddings for a flashlight (previously called wav2letter++) dataset
"""
import argpa... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/wav2vec/wav2vec_featurize.py |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Data pre-processing: build vocabularies and binarize training data.
"""
import argparse
import glob
import os
impor... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/wav2vec/wav2vec_manifest.py |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Helper script to pre-compute embeddings for a flashlight (previously called wav2letter++) dataset
"""
import argpa... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/wav2vec/libri_labels.py |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Helper script to pre-compute embeddings for a flashlight (previously called wav2letter++) dataset
"""
import argpa... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/wav2vec/vq-wav2vec_featurize.py |
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import contextlib
import sys
from collections import Counter
from multiprocessing imp... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/roberta/multiprocessing_bpe_encoder.py |
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import json
import os
import re
class InputExample:
def __init__(self, paragrap... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/roberta/preprocess_RACE.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import json
from functools import lru_cache
def convert_sentence_to_json(sentence):
if "_" in sentence:
prefix, rest = sentence.... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/roberta/wsc/wsc_utils.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
import torch
import torch.nn.functional as F
from fairseq import utils
from fairseq.criterions import LegacyFairseqCriterion, reg... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/roberta/wsc/wsc_criterion.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from . import wsc_criterion # noqa
from . import wsc_task # noqa
| EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/roberta/wsc/__init__.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import json
import os
import tempfile
import numpy as np
import torch
import torch.nn.functional as F
from fairseq import utils
from fairseq.... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/roberta/wsc/wsc_task.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from . import commonsense_qa_task # noqa
| EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/roberta/commonsense_qa/__init__.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import json
import os
import numpy as np
import torch
from fairseq.data import (
Dictionary,
IdDataset,
ListDataset,
NestedDi... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/roberta/commonsense_qa/commonsense_qa_task.py |
#!/usr/bin/env python3 -u
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import fileinput
import sacremoses
def main():
parser = argparse.ArgumentParser(description="... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/megatron_11b/detok.py |
#!/usr/bin/env python3 -u
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Translate pre-processed data with a trained model.
"""
import numpy as np
import torch
from fairseq import check... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/criss/save_encoder.py |
#!/usr/bin/env python3 -u
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import glob
from subprocess import check_call
try:
import faiss
has_faiss = True
except Imp... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/criss/mining/mine.py |
#!/usr/bin/env python3 -u
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import glob
import numpy as np
DIM = 1024
def compute_dist(source_embs, target_embs, k=5, return... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/criss/sentence_retrieval/encoder_analysis.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import torch
from fairseq.search import Search
class NoisyChannelBeamSearch(Search):
def __init__(self, tgt_dict):
super().__in... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/fast_noisy_channel/noisy_channel_beam_search.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from . import noisy_channel_translation # noqa
from . import noisy_channel_sequence_generator # noqa
from . import noisy_channel_beam_search... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/fast_noisy_channel/__init__.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Dict, List, Optional
import math
import numpy as np
import torch
import torch.nn.functional as F
from torch import Tensor... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/fast_noisy_channel/noisy_channel_sequence_generator.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from fairseq.tasks.translation import TranslationTask
from fairseq.tasks.language_modeling import LanguageModelingTask
from fairseq import che... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/fast_noisy_channel/noisy_channel_translation.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import os
import os.path as op
from collections import namedtuple
from multiprocessing import cpu_count
from typing import Li... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/byte_level_bpe/get_bitext.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the r... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/examples/byte_level_bpe/gru_transformer.py |
#!/usr/bin/env python
"""Helper script to compare two argparse.Namespace objects."""
from argparse import Namespace # noqa
def main():
ns1 = eval(input("Namespace 1: "))
ns2 = eval(input("Namespace 2: "))
def keys(ns):
ks = set()
for k in dir(ns):
if not k.startswith("_"):
... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/scripts/compare_namespaces.py |
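compare_namespaces.py breaks off inside its `keys` helper. A self-contained, hedged completion of the comparison logic follows (the `compare` wrapper is hypothetical; the original reads both namespaces via `eval(input(...))`, which is only safe for trusted, interactively pasted input):

```python
from argparse import Namespace

def keys(ns):
    # Collect the public attribute names of a Namespace.
    return {k for k in dir(ns) if not k.startswith("_")}

def compare(ns1, ns2):
    k1, k2 = keys(ns1), keys(ns2)
    print("Only in ns1:", sorted(k1 - k2))
    print("Only in ns2:", sorted(k2 - k1))
    for k in sorted(k1 & k2):
        v1, v2 = getattr(ns1, k), getattr(ns2, k)
        if v1 != v2:
            print(f"{k}: {v1!r} != {v2!r}")

compare(Namespace(lr=0.1, seed=1), Namespace(lr=0.2, seed=1))
```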
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Split a large file into a train and valid set while respecting document
boundaries. Documents should be separated by... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/scripts/split_train_valid_docs.py |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Use this script in order to build symmetric alignments for your translation
dataset.
This script depends on fast_align and mosesdecoder too... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/scripts/build_sym_alignment.py |
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import absolute_import, division, print_function, unicode_literals
import argparse
... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/scripts/spm_decode.py |
EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/scripts/__init__.py | |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import os
import re
import shutil
import sys
pt_regexp = re.compile(r"checkpoint(\d+|_\d+_\d+|_[a-z]+... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/scripts/rm_pt.py |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Count the number of documents and average number of lines and tokens per
document in a large file. Documents should ... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/scripts/count_docs.py |
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import absolute_import, division, print_function, unicode_literals
import argparse
i... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/scripts/spm_encode.py |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Split a large file into shards while respecting document boundaries. Documents
should be separated by a single empty... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/scripts/shard_docs.py |
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
impor... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/scripts/spm_train.py |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import collections
import os
import re
import torch
from fairseq.file_io import PathManager
def aver... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/scripts/average_checkpoints.py |
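average_checkpoints.py is cut off at `def aver...`. Checkpoint averaging itself is simple; a self-contained sketch of the core idea (the `average_model_states` name is hypothetical; fairseq's real function also merges the non-model checkpoint fields and reads via PathManager):

```python
import torch

def average_model_states(paths):
    # Elementwise mean of the 'model' state dicts from several checkpoints.
    avg = None
    for p in paths:
        state = torch.load(p, map_location='cpu')['model']
        if avg is None:
            avg = {k: v.float().clone() for k, v in state.items()}
        else:
            for k, v in state.items():
                avg[k] += v.float()
    return {k: v / len(paths) for k, v in avg.items()}
```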
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
from fairseq.data import Dictionary, data_utils, indexed_dataset
def get_parser():
parser = argp... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/scripts/read_binarized.py |
#!/usr/bin/env python3
#
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import sys
"""Reads in a fairseq output file, and verifies that the constraints
(C- lines) are present in the outpu... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/scripts/constraints/validate.py |
#!/usr/bin/env python3
#
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""Extracts random constraints from reference files."""
import argparse
import random
import sys
from sacrebleu imp... | EXA-1-master | exa/models/unilm-master/decoding/IAD/fairseq/scripts/constraints/extract.py |
# --------------------------------------------------------
# BEIT: BERT Pre-Training of Image Transformers (https://arxiv.org/abs/2106.08254)
# Github source: https://github.com/microsoft/unilm/tree/master/beit
# Copyright (c) 2021 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# By Hangbo Bao
# B... | EXA-1-master | exa/models/unilm-master/beit2/engine_for_finetuning.py |
"""
Originally inspired by impl at https://github.com/zhunzhong07/Random-Erasing, Apache 2.0
Copyright Zhun Zhong & Liang Zheng
Hacked together by / Copyright 2020 Ross Wightman
Modified by Hangbo Bao, for generating the masked position for visual image transformer
"""
# ----------------------------------------------... | EXA-1-master | exa/models/unilm-master/beit2/masking_generator.py |
# --------------------------------------------------------
# BEIT: BERT Pre-Training of Image Transformers (https://arxiv.org/abs/2106.08254)
# Github source: https://github.com/microsoft/unilm/tree/master/beit
# Copyright (c) 2021 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# By Hangbo Bao
# B... | EXA-1-master | exa/models/unilm-master/beit2/transforms.py |
# --------------------------------------------------------
# BEiT v2: Masked Image Modeling with Vector-Quantized Visual Tokenizers (https://arxiv.org/abs/2208.06366)
# Github source: https://github.com/microsoft/unilm/tree/master/beitv2
# Copyright (c) 2022 Microsoft
# Licensed under The MIT License [see LICENSE for d... | EXA-1-master | exa/models/unilm-master/beit2/engine_for_pretraining.py |
# --------------------------------------------------------
# BEiT v2: Masked Image Modeling with Vector-Quantized Visual Tokenizers (https://arxiv.org/abs/2208.06366)
# Github source: https://github.com/microsoft/unilm/tree/master/beitv2
# Copyright (c) 2022 Microsoft
# Licensed under The MIT License [see LICENSE for d... | EXA-1-master | exa/models/unilm-master/beit2/modeling_pretrain.py |
# --------------------------------------------------------
# BEiT v2: Masked Image Modeling with Vector-Quantized Visual Tokenizers (https://arxiv.org/abs/2208.06366)
# Github source: https://github.com/microsoft/unilm/tree/master/beitv2
# Copyright (c) 2022 Microsoft
# Licensed under The MIT License [see LICENSE for d... | EXA-1-master | exa/models/unilm-master/beit2/modeling_vqkd.py |
# --------------------------------------------------------
# BEiT v2: Masked Image Modeling with Vector-Quantized Visual Tokenizers (https://arxiv.org/abs/2208.06366)
# Github source: https://github.com/microsoft/unilm/tree/master/beitv2
# Copyright (c) 2022 Microsoft
# Licensed under The MIT License [see LICENSE for d... | EXA-1-master | exa/models/unilm-master/beit2/norm_ema_quantizer.py |
# --------------------------------------------------------
# BEIT: BERT Pre-Training of Image Transformers (https://arxiv.org/abs/2106.08254)
# Github source: https://github.com/microsoft/unilm/tree/master/beit
# Copyright (c) 2021 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# By Hangbo Bao
# B... | EXA-1-master | exa/models/unilm-master/beit2/datasets.py |
# --------------------------------------------------------
# BEiT v2: Masked Image Modeling with Vector-Quantized Visual Tokenizers (https://arxiv.org/abs/2208.06366)
# Github source: https://github.com/microsoft/unilm/tree/master/beitv2
# Copyright (c) 2022 Microsoft
# Licensed under The MIT License [see LICENSE for d... | EXA-1-master | exa/models/unilm-master/beit2/run_class_finetuning.py |
# --------------------------------------------------------
# BEIT: BERT Pre-Training of Image Transformers (https://arxiv.org/abs/2106.08254)
# Github source: https://github.com/microsoft/unilm/tree/master/beit
# Copyright (c) 2021 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# By Hangbo Bao
# M... | EXA-1-master | exa/models/unilm-master/beit2/dataset_folder.py |
# --------------------------------------------------------
# BEiT v2: Masked Image Modeling with Vector-Quantized Visual Tokenizers (https://arxiv.org/abs/2208.06366)
# Github source: https://github.com/microsoft/unilm/tree/master/beitv2
# Copyright (c) 2022 Microsoft
# Licensed under The MIT License [see LICENSE for d... | EXA-1-master | exa/models/unilm-master/beit2/run_vqkd_training.py |
# --------------------------------------------------------
# BEiT v2: Masked Image Modeling with Vector-Quantized Visual Tokenizers (https://arxiv.org/abs/2208.06366)
# Github source: https://github.com/microsoft/unilm/tree/master/beitv2
# Copyright (c) 2022 Microsoft
# Licensed under The MIT License [see LICENSE for d... | EXA-1-master | exa/models/unilm-master/beit2/utils.py |
# --------------------------------------------------------
# BEiT v2: Masked Image Modeling with Vector-Quantized Visual Tokenizers (https://arxiv.org/abs/2208.06366)
# Github source: https://github.com/microsoft/unilm/tree/master/beitv2
# Copyright (c) 2022 Microsoft
# Licensed under The MIT License [see LICENSE for d... | EXA-1-master | exa/models/unilm-master/beit2/test_get_code.py |
# --------------------------------------------------------
# BEiT v2: Masked Image Modeling with Vector-Quantized Visual Tokenizers (https://arxiv.org/abs/2208.06366)
# Github source: https://github.com/microsoft/unilm/tree/master/beitv2
# Copyright (c) 2022 Microsoft
# Licensed under The MIT License [see LICENSE for d... | EXA-1-master | exa/models/unilm-master/beit2/run_beitv2_pretraining.py |
# --------------------------------------------------------
# BEiT v2: Masked Image Modeling with Vector-Quantized Visual Tokenizers (https://arxiv.org/abs/2208.06366)
# Github source: https://github.com/microsoft/unilm/tree/master/beitv2
# Copyright (c) 2022 Microsoft
# Licensed under The MIT License [see LICENSE for d... | EXA-1-master | exa/models/unilm-master/beit2/engine_for_vqkd.py |
all_wnids = ['n01440764', 'n01443537', 'n01484850', 'n01491361', 'n01494475', 'n01496331', 'n01498041', 'n01514668', 'n01514859', 'n01518878', 'n01530575', 'n01531178', 'n01532829', 'n01534433', 'n01537544', 'n01558993', 'n01560419', 'n01580077', 'n01582220', 'n01592084', 'n01601694', 'n01608432', 'n01614925', 'n016163... | EXA-1-master | exa/models/unilm-master/beit2/imagenet_a_r_indices.py |
# --------------------------------------------------------
# BEiT v2: Masked Image Modeling with Vector-Quantized Visual Tokenizers (https://arxiv.org/abs/2208.06366)
# Github source: https://github.com/microsoft/unilm/tree/master/beitv2
# Copyright (c) 2022 Microsoft
# Licensed under The MIT License [see LICENSE for d... | EXA-1-master | exa/models/unilm-master/beit2/modeling_finetune.py |
# --------------------------------------------------------
# BEIT: BERT Pre-Training of Image Transformers (https://arxiv.org/abs/2106.08254)
# Github source: https://github.com/microsoft/unilm/tree/master/beit
# Copyright (c) 2021 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# By Hangbo Bao
# B... | EXA-1-master | exa/models/unilm-master/beit2/optim_factory.py |
from .dino import *
from .clip import * | EXA-1-master | exa/models/unilm-master/beit2/vqkd_teacher/__init__.py |
# --------------------------------------------------------
# BEiT v2: Masked Image Modeling with Vector-Quantized Visual Tokenizers (https://arxiv.org/abs/2208.06366)
# Github source: https://github.com/microsoft/unilm/tree/master/beitv2
# Copyright (c) 2022 Microsoft
# Licensed under The MIT License [see LICENSE for d... | EXA-1-master | exa/models/unilm-master/beit2/vqkd_teacher/dino.py |
from .clip import *
from .model import *
| EXA-1-master | exa/models/unilm-master/beit2/vqkd_teacher/clip/__init__.py |
from collections import OrderedDict
from typing import Tuple, Union
import math
import numpy as np
import torch
import torch.nn.functional as F
from torch import nn
import pdb
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1):
super().__init__()
# all ... | EXA-1-master | exa/models/unilm-master/beit2/vqkd_teacher/clip/model.py |
import hashlib
import os
import urllib
import warnings
from typing import Any, Union, List
from pkg_resources import packaging
import torch
from PIL import Image
from torchvision.transforms import Compose, Resize, CenterCrop, ToTensor, Normalize
from tqdm import tqdm
from .model import build_model
from .simple_tokeni... | EXA-1-master | exa/models/unilm-master/beit2/vqkd_teacher/clip/clip.py |
import gzip
import html
import os
from functools import lru_cache
import ftfy
import regex as re
@lru_cache()
def default_bpe():
return os.path.join(os.path.dirname(os.path.abspath(__file__)), "bpe_simple_vocab_16e6.txt.gz")
@lru_cache()
def bytes_to_unicode():
"""
Returns list of utf-8 byte and a corr... | EXA-1-master | exa/models/unilm-master/beit2/vqkd_teacher/clip/simple_tokenizer.py |
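The `bytes_to_unicode` docstring is truncated above. CLIP's simple_tokenizer reuses the widely published GPT-2 byte-to-unicode table; a sketch of that standard implementation:

```python
from functools import lru_cache

@lru_cache()
def bytes_to_unicode():
    # Map all 256 byte values to printable unicode characters so BPE can
    # operate on arbitrary bytes without control/whitespace characters.
    bs = (list(range(ord("!"), ord("~") + 1))
          + list(range(ord("¡"), ord("¬") + 1))
          + list(range(ord("®"), ord("ÿ") + 1)))
    cs = bs[:]
    n = 0
    for b in range(2 ** 8):
        if b not in bs:
            bs.append(b)
            cs.append(2 ** 8 + n)
            n += 1
    return dict(zip(bs, (chr(c) for c in cs)))
```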
import argparse
import os
import mmcv
import torch
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmcv.runner import get_dist_info, init_dist, load_checkpoint
from mmcv.utils import DictAction
from mmseg.apis import multi_gpu_test, single_gpu_test
from mmseg.datasets import build_dataloader,... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/tools/test.py |
import argparse
import copy
import os
import os.path as osp
import time
import mmcv
import mmcv_custom
import torch
from mmcv.runner import init_dist
from mmcv.utils import Config, DictAction, get_git_hash
from mmseg import __version__
from mmseg.apis import set_random_seed
from mmcv_custom import train_segmentor
fro... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/tools/train.py |
import json
from mmcv.runner import OPTIMIZER_BUILDERS, DefaultOptimizerConstructor
from mmcv.runner import get_dist_info
def get_num_layer_for_vit(var_name, num_max_layer):
if var_name in ("backbone.cls_token", "backbone.mask_token", "backbone.pos_embed"):
return 0
elif var_name.startswith("backbone.... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/mmcv_custom/layer_decay_optimizer_constructor.py |
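`get_num_layer_for_vit` is truncated after its first branch. A plausible completion, matching the layer-wise lr-decay grouping BEiT describes (treat the exact branch order as an assumption):

```python
def get_num_layer_for_vit(var_name, num_max_layer):
    # Group 0: embeddings; group i+1: transformer block i;
    # last group: everything else (e.g. the head).
    if var_name in ("backbone.cls_token", "backbone.mask_token", "backbone.pos_embed"):
        return 0
    elif var_name.startswith("backbone.patch_embed"):
        return 0
    elif var_name.startswith("backbone.blocks"):
        layer_id = int(var_name.split('.')[2])
        return layer_id + 1
    else:
        return num_max_layer - 1
```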
import random
import warnings
import numpy as np
import torch
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmcv.runner import build_optimizer, build_runner
from mmseg.core import DistEvalHook, EvalHook
from mmseg.datasets import build_dataloader, build_dataset
from mmseg.utils import get_r... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/mmcv_custom/train_api.py |
import mmcv
import numpy as np
from mmseg.datasets.builder import PIPELINES
@PIPELINES.register_module()
class SETR_Resize(object):
"""Resize images & seg.
This transform resizes the input image to some scale. If the input dict
contains the key "scale", then the scale in the input dict is used,
othe... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/mmcv_custom/resize_transform.py |
# Copyright (c) Open-MMLab. All rights reserved.
import io
import os
import os.path as osp
import pkgutil
import time
import warnings
from collections import OrderedDict
from importlib import import_module
from tempfile import TemporaryDirectory
import torch
import torchvision
from torch.optim import Optimizer
from to... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/mmcv_custom/checkpoint.py |
# -*- coding: utf-8 -*-
from .checkpoint import load_checkpoint
from .layer_decay_optimizer_constructor import LayerDecayOptimizerConstructor
from .resize_transform import SETR_Resize
from .apex_runner.optimizer import DistOptimizerHook
from .train_api import train_segmentor
__all__ = ['load_checkpoint', 'LayerDecayO... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/mmcv_custom/__init__.py |
# Copyright (c) Open-MMLab. All rights reserved.
import os.path as osp
import platform
import shutil
import torch
from torch.optim import Optimizer
import mmcv
from mmcv.runner import RUNNERS, IterBasedRunner
from .checkpoint import save_checkpoint
try:
import apex
except ImportError:
print('apex is not installed')
@R... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/mmcv_custom/apex_runner/apex_iter_based_runner.py |
# Copyright (c) Open-MMLab. All rights reserved.
import os.path as osp
import time
from tempfile import TemporaryDirectory
import torch
from torch.optim import Optimizer
import mmcv
from mmcv.parallel import is_module_wrapper
from mmcv.runner.checkpoint import weights_to_cpu, get_state_dict
try:
import apex
exce... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/mmcv_custom/apex_runner/checkpoint.py |
# Copyright (c) Open-MMLab. All rights reserved.
from .checkpoint import save_checkpoint
from .apex_iter_based_runner import IterBasedRunnerAmp
__all__ = [
'save_checkpoint', 'IterBasedRunnerAmp',
]
| EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/mmcv_custom/apex_runner/__init__.py |
from mmcv.runner import OptimizerHook, HOOKS
try:
import apex
except ImportError:
print('apex is not installed')
@HOOKS.register_module()
class DistOptimizerHook(OptimizerHook):
"""Optimizer hook for distributed training."""
def __init__(self, update_interval=1, grad_clip=None, coalesce=True, bucket_size_mb=-1, ... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/mmcv_custom/apex_runner/optimizer.py |
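The `DistOptimizerHook` row cuts off inside `__init__`. Its `update_interval` argument points to gradient accumulation; a hedged sketch of how such a hook typically implements it (based on the common mmcv-custom pattern, not verified against this file):

```python
from mmcv.runner import HOOKS, OptimizerHook

@HOOKS.register_module()
class DistOptimizerHook(OptimizerHook):
    """Optimizer hook that accumulates gradients over update_interval iters."""

    def __init__(self, update_interval=1, grad_clip=None, coalesce=True, bucket_size_mb=-1):
        self.grad_clip = grad_clip
        self.coalesce = coalesce
        self.bucket_size_mb = bucket_size_mb
        self.update_interval = update_interval

    def after_train_iter(self, runner):
        # Scale the loss so accumulated gradients average over the interval.
        runner.outputs['loss'] /= self.update_interval
        runner.outputs['loss'].backward()
        if self.every_n_iters(runner, self.update_interval):
            if self.grad_clip is not None:
                self.clip_grads(runner.model.parameters())
            runner.optimizer.step()
            runner.optimizer.zero_grad()
```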
# yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook', by_epoch=False),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
dist_params = dict(backend='nccl')
log_level = 'INFO'
load_from = None
resume_from = None
workflow = [('train', 1)]
cudnn_benchmark = True... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/configs/_base_/default_runtime.py |
# dataset settings
dataset_type = 'ADE20KDataset'
data_root = 'data/ade/ADEChallengeData2016'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
crop_size = (640, 640)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', reduce_zero_labe... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/configs/_base_/datasets/ade20k_640x640.py |
# dataset settings
dataset_type = 'ADE20KDataset'
data_root = 'data/ade/ADEChallengeData2016'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
crop_size = (512, 512)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', reduce_zero_labe... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/configs/_base_/datasets/ade20k.py |
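The `img_norm_cfg` values in both ADE20K configs are the standard ImageNet mean/std rescaled from [0, 1] to [0, 255]; a quick check:

```python
imagenet_mean = [0.485, 0.456, 0.406]
imagenet_std = [0.229, 0.224, 0.225]
print([round(m * 255, 3) for m in imagenet_mean])  # [123.675, 116.28, 103.53]
print([round(s * 255, 3) for s in imagenet_std])   # [58.395, 57.12, 57.375]
```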
# --------------------------------------------------------
# BEIT: BERT Pre-Training of Image Transformers (https://arxiv.org/abs/2106.08254)
# Github source: https://github.com/microsoft/unilm/tree/master/beit
# Copyright (c) 2021 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# By Hangbo Bao
# B... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/configs/_base_/models/upernet_beit.py |
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0005)
optimizer_config = dict()
# learning policy
lr_config = dict(policy='poly', power=0.9, min_lr=1e-4, by_epoch=False)
# runtime settings
runner = dict(type='IterBasedRunner', max_iters=160000)
checkpoint_config = dict(by_epoch=False, int... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/configs/_base_/schedules/schedule_160k.py |
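Both schedule configs use mmcv's 'poly' learning-rate policy. A sketch of the per-iteration rate it yields (mirroring the publicly documented PolyLrUpdaterHook formula; treat as illustrative):

```python
def poly_lr(base_lr, cur_iter, max_iters, power=0.9, min_lr=1e-4):
    # Decay from base_lr toward min_lr with exponent `power`.
    coeff = (1 - cur_iter / max_iters) ** power
    return (base_lr - min_lr) * coeff + min_lr

print(poly_lr(0.01, 0, 160000))       # 0.01 at the first iteration
print(poly_lr(0.01, 160000, 160000))  # 1e-4 at the last iteration
```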
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0005)
optimizer_config = dict()
# learning policy
lr_config = dict(policy='poly', power=0.9, min_lr=1e-4, by_epoch=False)
# runtime settings
runner = dict(type='IterBasedRunner', max_iters=320000)
checkpoint_config = dict(by_epoch=False, int... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/configs/_base_/schedules/schedule_320k.py |
# --------------------------------------------------------
# BEIT: BERT Pre-Training of Image Transformers (https://arxiv.org/abs/2106.08254)
# Github source: https://github.com/microsoft/unilm/tree/master/beit
# Copyright (c) 2021 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# By Hangbo Bao
# B... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/configs/beit/upernet/upernet_beit_base_12_512_slide_160k_21ktoade20k.py |
# --------------------------------------------------------
# BEIT: BERT Pre-Training of Image Transformers (https://arxiv.org/abs/2106.08254)
# Github source: https://github.com/microsoft/unilm/tree/master/beit
# Copyright (c) 2021 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# By Hangbo Bao
# B... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/configs/beit/upernet/upernet_beit_large_24_512_slide_160k_21ktoade20k.py |
# --------------------------------------------------------
# BEIT: BERT Pre-Training of Image Transformers (https://arxiv.org/abs/2106.08254)
# Github source: https://github.com/microsoft/unilm/tree/master/beit
# Copyright (c) 2021 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# By Hangbo Bao
# B... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/configs/beit/upernet/upernet_beit_large_24_512_slide_160k_ade20k.py |
# --------------------------------------------------------
# BEIT: BERT Pre-Training of Image Transformers (https://arxiv.org/abs/2106.08254)
# Github source: https://github.com/microsoft/unilm/tree/master/beit
# Copyright (c) 2021 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# By Hangbo Bao
# B... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/configs/beit/upernet/upernet_beit_base_12_512_slide_160k_ade20k.py |
# --------------------------------------------------------
# BEIT: BERT Pre-Training of Image Transformers (https://arxiv.org/abs/2106.08254)
# Github source: https://github.com/microsoft/unilm/tree/master/beit
# Copyright (c) 2021 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# By Hangbo Bao
# B... | EXA-1-master | exa/models/unilm-master/beit2/semantic_segmentation/backbone/beit.py |
import random
import numpy as np
import torch
import os
import shutil
# import logging
import sys
# def set_logging(args):
# '''
# Set logger for recording
# '''
# logging.basicConfig(filename="./output/{}/log.txt".format(args.exp_name), level=logging.INFO,
# format='[%(asctime... | EXA-1-master | exa/models/unilm-master/xdoc/fine_tuning/websrc/util.py |