Dataset columns (name, dtype, observed range):

  repo             stringlengths   2 to 99
  file             stringlengths   13 to 225
  code             stringlengths   0 to 18.3M
  file_length      int64           0 to 18.3M
  avg_line_length  float64         0 to 1.36M
  max_line_length  int64           0 to 4.26M
  extension_type   stringclasses   1 value
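The length columns appear to be simple per-file character statistics: for example, the one-line file Vita-CLIP-main/training/__init__.py ("#!/usr/bin/env python", 21 characters) reports 21 for file_length, avg_line_length, and max_line_length alike. Below is a minimal sketch of how such per-file statistics could be derived; this is an assumption about how the columns were computed, not the dataset's documented pipeline, and the helper name file_stats is made up here.

```python
# Hypothetical helper: recomputes the numeric columns for one source file.
# Assumption: file_length counts characters, and the line statistics are
# taken over the file split on newlines.
from pathlib import Path


def file_stats(path: str) -> dict:
    text = Path(path).read_text(encoding="utf-8")
    lines = text.splitlines() or [""]            # guard against empty files
    lengths = [len(line) for line in lines]
    return {
        "file": path,
        "file_length": len(text),                 # total character count
        "avg_line_length": sum(lengths) / len(lengths),
        "max_line_length": max(lengths),
        "extension_type": Path(path).suffix.lstrip("."),  # e.g. "py"
    }


if __name__ == "__main__":
    # e.g. the single-line __init__.py above would yield 21 / 21.0 / 21 / "py"
    print(file_stats("Vita-CLIP-main/training/__init__.py"))
```

Whether newline characters count toward file_length or toward the per-line averages is not stated in the schema, so exact values may differ slightly from this sketch.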
MAX-Toxic-Comment-Classifier
MAX-Toxic-Comment-Classifier-master/config.py
# # Copyright 2018-2019 IBM Corp. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law...
file_length: 1,432 | avg_line_length: 30.844444 | max_line_length: 93 | extension_type: py
MAX-Toxic-Comment-Classifier
MAX-Toxic-Comment-Classifier-master/core/bert_pytorch.py
# # Copyright 2018-2019 IBM Corp. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law...
file_length: 8,564 | avg_line_length: 43.378238 | max_line_length: 112 | extension_type: py
MAX-Toxic-Comment-Classifier
MAX-Toxic-Comment-Classifier-master/core/model.py
# # Copyright 2018-2019 IBM Corp. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law...
file_length: 4,848 | avg_line_length: 39.07438 | max_line_length: 116 | extension_type: py
MAX-Toxic-Comment-Classifier
MAX-Toxic-Comment-Classifier-master/core/__init__.py
# # Copyright 2018-2019 IBM Corp. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law...
file_length: 603 | avg_line_length: 36.75 | max_line_length: 74 | extension_type: py
MAX-Toxic-Comment-Classifier
MAX-Toxic-Comment-Classifier-master/api/metadata.py
# # Copyright 2018-2019 IBM Corp. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law...
file_length: 907 | avg_line_length: 32.62963 | max_line_length: 74 | extension_type: py
MAX-Toxic-Comment-Classifier
MAX-Toxic-Comment-Classifier-master/api/__init__.py
# # Copyright 2018-2019 IBM Corp. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law...
file_length: 740 | avg_line_length: 36.05 | max_line_length: 74 | extension_type: py
MAX-Toxic-Comment-Classifier
MAX-Toxic-Comment-Classifier-master/api/predict.py
# # Copyright 2018-2019 IBM Corp. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law...
file_length: 4,453 | avg_line_length: 42.666667 | max_line_length: 125 | extension_type: py
MAX-Toxic-Comment-Classifier
MAX-Toxic-Comment-Classifier-master/tests/test.py
# # Copyright 2018-2019 IBM Corp. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law...
file_length: 5,738 | avg_line_length: 30.190217 | max_line_length: 118 | extension_type: py
Vita-CLIP
Vita-CLIP-main/training/VitaCLIP_text_encoder.py
import torch import torch.nn as nn import copy from collections import OrderedDict from typing import Union, List from pkg_resources import packaging from VitaCLIP_text_encoder_utils import SimpleTokenizer as _Tokenizer class QuickGELU(nn.Module): def forward(self, x: torch.Tensor): return x * torch.sigm...
file_length: 11,343 | avg_line_length: 37.454237 | max_line_length: 137 | extension_type: py
Vita-CLIP
Vita-CLIP-main/training/checkpoint.py
#!/usr/bin/env python import argparse import os import torch import torch.distributed as dist def setup_arg_parser(parser: argparse.ArgumentParser): parser.add_argument('--checkpoint_dir', type=str, help='checkpoint output path') parser.add_argument('--auto_resume', action='store_tru...
file_length: 3,710 | avg_line_length: 35.742574 | max_line_length: 125 | extension_type: py
Vita-CLIP
Vita-CLIP-main/training/VitaCLIP_vision_encoder.py
from typing import Tuple import numpy as np from einops import rearrange import torch import torch.nn as nn import torch.nn.functional as F from operator import mul from functools import reduce import math from VitaCLIP_vision_encoder_utils import QuickGELU, LayerNorm, TransformerEncoderLayer, ImagePatchEmbed2D c...
file_length: 4,541 | avg_line_length: 34.209302 | max_line_length: 129 | extension_type: py
Vita-CLIP
Vita-CLIP-main/training/VitaCLIP_text_encoder_utils.py
import gzip import html import os from functools import lru_cache import ftfy import regex as re @lru_cache() def default_bpe(): return os.path.join(os.path.dirname(os.path.abspath(__file__)), "bpe_simple_vocab_16e6.txt.gz") @lru_cache() def bytes_to_unicode(): """ Returns list of utf-8 byte and a corr...
file_length: 4,628 | avg_line_length: 33.804511 | max_line_length: 144 | extension_type: py
Vita-CLIP
Vita-CLIP-main/training/VitaCLIP_model.py
#!/usr/bin/env python from typing import Tuple import numpy as np import torch import torch.nn as nn from VitaCLIP_vision_encoder import CLIPVisionEncoder from VitaCLIP_text_encoder import CLIPTextEncoder, TextPromptLearner class VitaCLIP(nn.Module): def __init__( self, # load weights ...
file_length: 5,576 | avg_line_length: 32.8 | max_line_length: 100 | extension_type: py
Vita-CLIP
Vita-CLIP-main/training/__init__.py
#!/usr/bin/env python
file_length: 21 | avg_line_length: 21 | max_line_length: 21 | extension_type: py
Vita-CLIP
Vita-CLIP-main/training/VitaCLIP_vision_encoder_utils.py
#!/usr/bin/env python from collections import OrderedDict from typing import Tuple import torch import torch.nn as nn from operator import mul from functools import reduce import math ''' QuickGELU and LayerNorm w/ fp16 from official CLIP repo (https://github.com/openai/CLIP/blob/3b473b0e682c091a9e53623eebc1ca16573...
file_length: 7,573 | avg_line_length: 34.064815 | max_line_length: 133 | extension_type: py
Vita-CLIP
Vita-CLIP-main/training/train.py
#!/usr/bin/env python import argparse from datetime import datetime import builtins import torch import torch.distributed as dist import sys sys.path.append('./') import video_dataset import checkpoint from VitaCLIP_model import VitaCLIP from collections import OrderedDict def setup_print(is_master: bool): ""...
file_length: 13,336 | avg_line_length: 42.161812 | max_line_length: 120 | extension_type: py
Vita-CLIP
Vita-CLIP-main/video_dataset/dataloader.py
#!/usr/bin/env python import argparse from typing import Dict import torch import torch.distributed as dist from .dataset import VideoDataset, DummyDataset def setup_arg_parser(parser: argparse.ArgumentParser): parser.add_argument('--train_list_path', type=str, help='path to training dat...
file_length: 6,717 | avg_line_length: 41.518987 | max_line_length: 138 | extension_type: py
Vita-CLIP
Vita-CLIP-main/video_dataset/transform.py
#!/usr/bin/env python3 # Originate from: https://github.com/facebookresearch/SlowFast/blob/fee19d699c49a81f33b890c5ff592bbb11aa5c54/slowfast/datasets/transform.py # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. import logging import math import numpy as np # import cv2 import random import tor...
file_length: 27,344 | avg_line_length: 33.18125 | max_line_length: 139 | extension_type: py
Vita-CLIP
Vita-CLIP-main/video_dataset/dataset.py
#!/usr/bin/env python import os, sys from typing import Optional import av import io import numpy as np import torch from torchvision import transforms from .transform import create_random_augment, random_resized_crop class VideoDataset(torch.utils.data.Dataset): def __init__( self, list_path: str, dat...
file_length: 7,185 | avg_line_length: 36.821053 | max_line_length: 115 | extension_type: py
Vita-CLIP
Vita-CLIP-main/video_dataset/__init__.py
#!/usr/bin/env python from .dataloader import setup_arg_parser, create_train_loader, create_val_loader
file_length: 103 | avg_line_length: 33.666667 | max_line_length: 80 | extension_type: py
Vita-CLIP
Vita-CLIP-main/video_dataset/random_erasing.py
#!/usr/bin/env python # Originates from: https://github.com/facebookresearch/SlowFast/blob/fee19d699c49a81f33b890c5ff592bbb11aa5c54/slowfast/datasets/random_erasing.py # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. """ This implementation is based on https://github.com/rwightman/pytorch-image...
file_length: 7,056 | avg_line_length: 37.353261 | max_line_length: 145 | extension_type: py
Vita-CLIP
Vita-CLIP-main/video_dataset/rand_augment.py
#!/usr/bin/env python # Originates from: https://github.com/facebookresearch/SlowFast/blob/fee19d699c49a81f33b890c5ff592bbb11aa5c54/slowfast/datasets/rand_augment.py # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. """ This implementation is based on https://github.com/rwightman/pytorch-image-m...
file_length: 16,366 | avg_line_length: 29.478585 | max_line_length: 143 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/tools/simmc_dataset.py
import json import pdb import re import string import torch from nltk.tokenize import WordPunctTokenizer from torch.utils.data import Dataset """ The dialog intents have the shapes: DA:<DIALOG_ACT>:<ACTIVITY>:<OBJECT> or DA:<DIALOG_ACT>:<ACTIVITY>:<OBJECT>.<attribute> Examples: DA:INFORM:GET:CLOTHING.embellishment ...
file_length: 22,029 | avg_line_length: 47.955556 | max_line_length: 193 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/tools/dialogue_visualizer.py
import pdb from simmc_dataset import SIMMCDataset dataset = SIMMCDataset(data_path='data/simmc_fashion/dev/fashion_dev_dials.json', metadata_path='data/simmc_fashion/fashion_metadata.json') printed=False for dial_id, dial in dataset.id2dialog.items(): coref_map = dial['dialogue_coref_map'...
file_length: 1,388 | avg_line_length: 38.685714 | max_line_length: 103 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/tools/embed_metadata.py
import argparse import json import pdb import re import string import numpy as np from nltk.tokenize import WordPunctTokenizer from simmc_dataset import SIMMCDatasetForResponseGeneration # for single embedding FIELDS_TO_EMBED = ['type', 'color', 'embellishments', 'pattern', 'brand'] FIELD2STR = SIMMCDatasetForRespon...
file_length: 5,973 | avg_line_length: 32.943182 | max_line_length: 99 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_response_generation/preprocessing.py
import argparse import datetime import math import os import pdb import random import sys import time import numpy as np import torch from torch.utils.data import DataLoader sys.path.append('.') from config import special_toks from tools.simmc_dataset import SIMMCDatasetForResponseGeneration from transformers import...
file_length: 16,410 | avg_line_length: 42.762667 | max_line_length: 160 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_response_generation/config.py
""" class SIMMCFashionConfig(): #? not used _FASHION_ACTION_NO = 5 _FASHION_ATTRS_NO = 33 """ model_conf = { 'dropout_prob': 0.5, 'freeze_bert': True, 'n_decoders': 2, 'decoder_heads': 6 #todo must be a perfect divisor of 768 } special_toks = { 'pad_token': '[PAD]', 'start_token':...
file_length: 573 | avg_line_length: 18.793103 | max_line_length: 61 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_response_generation/eval.py
import argparse import json import os import pdb import sys import time import string import torch from torch.utils.data import DataLoader sys.path.append('.') from config import special_toks, train_conf from dataset import FastDataset from models import BlindStatelessLSTM, MultiAttentiveTransformer from tools.simmc...
file_length: 8,110 | avg_line_length: 32.795833 | max_line_length: 133 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_response_generation/train.py
import argparse import datetime import json import math import os import pdb import pickle import random import sys import time import numpy as np import torch from torch.utils.data import DataLoader from config import model_conf, special_toks, train_conf from dataset import FastDataset from models import BlindStatel...
file_length: 15,568 | avg_line_length: 42.733146 | max_line_length: 186 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_response_generation/dataset/processed_dataset.py
import pdb import random import numpy as np import torch from torch.utils.data import Dataset torch.backends.cudnn.benchmark = True torch.backends.cudnn.enabled = True class FastDataset(Dataset): """Dataset with preprocessed data for response generation subtask self.data.keys() = dict_keys(['dial_ids',...
file_length: 4,071 | avg_line_length: 43.26087 | max_line_length: 165 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_response_generation/dataset/__init__.py
from .processed_dataset import FastDataset
file_length: 42 | avg_line_length: 42 | max_line_length: 42 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_response_generation/models/bert.py
import pdb import torch import torch.nn as nn from transformers import BertConfig, BertModel class BertEncoder(nn.Module): def __init__(self, pretrained, freeze=False): super(BertEncoder, self).__init__() configuration = BertConfig() self.bert = BertModel(config=configuration).from_pretr...
file_length: 728 | avg_line_length: 29.375 | max_line_length: 79 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_response_generation/models/embednets.py
import pdb import numpy as np import torch import torch.nn as nn from spellchecker import SpellChecker class ItemEmbeddingNetwork(nn.Module): """Base class for word embedding layer initialization and weights loading Args: nn (torch.nn.Module): inherits from torch.nn.Module """ def __init__(s...
file_length: 5,715 | avg_line_length: 39.828571 | max_line_length: 150 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_response_generation/models/old_encoder.py
import math import numpy as np import torch import torch.nn as nn import torch.nn.functional as F from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence class SingleEncoder(nn.Module): def __init__(self, input_size, hidden_size, dropout_prob, ...
file_length: 10,368 | avg_line_length: 36.981685 | max_line_length: 140 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_response_generation/models/decoder.py
import math import pdb import numpy as np import torch import torch.nn as nn import torch.nn.functional as F #the value with which to mask the attention # DO NOT USE '-INF' BECAUSE IT WILL GENERATE NaN AFTER SOFTMAX FOR PADDING ROWS (filled with all 0's) _MASKING_VALUE=-1e30 class Decoder(nn.Module): def __init...
file_length: 17,844 | avg_line_length: 42.630807 | max_line_length: 168 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_response_generation/models/matransformer.py
import math import pdb import numpy as np import torch import torch.nn.functional as F from torch import nn from .embednets import ItemEmbeddingNetwork, WordEmbeddingNetwork from .decoder import Decoder from .old_encoder import SingleEncoder from .bert import BertEncoder from transformers import BertTokenizer #todo r...
file_length: 18,570 | avg_line_length: 51.312676 | max_line_length: 155 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_response_generation/models/__init__.py
from .blindstateless import BlindStatelessLSTM from .matransformer import MultiAttentiveTransformer
file_length: 99 | avg_line_length: 49 | max_line_length: 52 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_response_generation/models/blindstateless.py
import pdb import numpy as np import torch import torch.nn.functional as F from torch import nn from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence from .embednets import WordEmbeddingNetwork class BlindStatelessLSTM(nn.Module): """Implementation of a blind and stateless LSTM for ac...
file_length: 7,099 | avg_line_length: 42.82716 | max_line_length: 156 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_response_generation/utilities/simmc_utilities.py
import os import sys import matplotlib.pyplot as plt class Logger(object): def __init__(self, log_path): self.terminal = sys.stdout self.log = open(log_path, "a") def write(self, message): self.terminal.write(message) self.log.write(message) def flush(self): #...
file_length: 2,737 | avg_line_length: 37.027778 | max_line_length: 161 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_response_generation/utilities/__init__.py
from .simmc_utilities import Logger, plotting_loss from .dataparallelv2 import DataParallelV2
file_length: 93 | avg_line_length: 46 | max_line_length: 50 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_response_generation/utilities/dataparallelv2.py
from torch.nn.parallel._functions import Scatter from torch.nn.parallel import DataParallel import torch import math # This code was copied from torch.nn.parallel and adapted for DataParallel to chunk lists instead of duplicating them # (this is really all this code is here for) def scatter(inputs, target_gpus, dim=...
file_length: 2,498 | avg_line_length: 41.355932 | max_line_length: 117 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_action_prediction/preprocessing.py
import argparse import datetime import math import os import pdb import random import sys import time import numpy as np import torch from torch.utils.data import DataLoader import sys sys.path.append('.') from config import TrainConfig from tools.simmc_dataset import SIMMCDatasetForActionPrediction class Collate...
file_length: 8,706 | avg_line_length: 38.220721 | max_line_length: 117 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_action_prediction/config.py
class TrainConfig(): _SEED = 240797 _LEARNING_RATE = 1e-3 _WEIGHT_DECAY = 0#1e-3 _PAD_TOKEN = '[PAD]' _UNK_TOKEN = '[UNK]' _CHECKPOINT_FOLDER = 'mm_action_prediction/checkpoints'
file_length: 205 | avg_line_length: 19.6 | max_line_length: 59 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_action_prediction/eval.py
import argparse import json import os import pdb import sys import torch from torch.utils.data import DataLoader sys.path.append('.') from config import TrainConfig from dataset import FastDataset from models import BlindStatefulLSTM, BlindStatelessLSTM, MMStatefulLSTM from tools.simmc_dataset import SIMMCDatasetFor...
file_length: 7,874 | avg_line_length: 35.971831 | max_line_length: 120 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_action_prediction/train.py
import argparse import datetime import math import os import pdb import random import sys import time import numpy as np import torch from torch.utils.data import DataLoader from config import TrainConfig from models import BlindStatefulLSTM, BlindStatelessLSTM, MMStatefulLSTM from utilities import Logger, plotting_l...
file_length: 13,757 | avg_line_length: 44.556291 | max_line_length: 146 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_action_prediction/dataset/processed_dataset.py
import numpy as np import torch from torch.utils.data import Dataset import pdb class FastDataset(Dataset): """Dataset with preprocessed data for response generation subtask self.data.keys() = dict_keys(['dial_ids', 'turns', 'utterances', 'histories', 'actions', 'attribut...
file_length: 2,625 | avg_line_length: 39.4 | max_line_length: 113 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_action_prediction/dataset/__init__.py
from .processed_dataset import FastDataset
file_length: 42 | avg_line_length: 42 | max_line_length: 42 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_action_prediction/models/embednets.py
import pdb import numpy as np import torch import torch.nn as nn from spellchecker import SpellChecker class ItemEmbeddingNetwork(nn.Module): """Base class for word embedding layer initialization and weights loading Args: nn (torch.nn.Module): inherits from torch.nn.Module """ def __init__(s...
file_length: 4,777 | avg_line_length: 35.753846 | max_line_length: 124 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_action_prediction/models/__init__.py
from .blindstateless import BlindStatelessLSTM from .blindstateful import BlindStatefulLSTM from .mmstateful import MMStatefulLSTM
file_length: 130 | avg_line_length: 42.666667 | max_line_length: 46 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_action_prediction/models/blindstateless.py
import pdb import numpy as np import torch import torch.nn.functional as F from torch import nn from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence from .embednets import WordEmbeddingNetwork class BlindStatelessLSTM(nn.Module): """Implementation of a blind and stateless LSTM for ac...
file_length: 6,218 | avg_line_length: 42.795775 | max_line_length: 156 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_action_prediction/models/mmstateful.py
import pdb import torch import torch.nn.functional as F from torch import nn from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence from .embednets import WordEmbeddingNetwork, ItemEmbeddingNetwork import numpy as np def get_positional_embeddings(n_position, emb_dim): """Create positional em...
file_length: 15,354 | avg_line_length: 45.814024 | max_line_length: 154 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_action_prediction/models/blindstateful.py
import pdb import torch import torch.nn.functional as F from torch import nn from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence from .embednets import WordEmbeddingNetwork class BlindStatefulLSTM(nn.Module): _HIDDEN_SIZE = 300 def __init__(self, word_embeddings_path, word2id, num_ac...
file_length: 11,586 | avg_line_length: 45.163347 | max_line_length: 154 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_action_prediction/utilities/simmc_utilities.py
import os import sys import matplotlib.pyplot as plt class Logger(object): def __init__(self, log_path): self.terminal = sys.stdout self.log = open(log_path, "a") def write(self, message): self.terminal.write(message) self.log.write(message) def flush(self): #t...
file_length: 3,765 | avg_line_length: 40.384615 | max_line_length: 165 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_action_prediction/utilities/__init__.py
from .simmc_utilities import *
file_length: 30 | avg_line_length: 30 | max_line_length: 30 | extension_type: py
dstc9-SIMMC
dstc9-SIMMC-master/mm_action_prediction/utilities/action_evaluation.py
"""Script evaluates action prediction along with attributes. Author(s): Satwik Kottur """ from absl import app, flags import collections import json import numpy as np FLAGS = flags.FLAGS flags.DEFINE_string( "action_json_path", "data/furniture_api_calls.json", "Ground truth API calls" ) flags.DEFINE_string( ...
file_length: 4,901 | avg_line_length: 36.419847 | max_line_length: 89 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/translate.py
# -*- coding: utf-8 -*- import logging import torch import os from beaver.data import build_dataset from beaver.infer import beam_search from beaver.model import NMTModel from beaver.utils import parseopt, get_device, calculate_bleu logging.basicConfig(format="%(asctime)s - %(message)s", level=logging.INFO) opt = pa...
file_length: 2,389 | avg_line_length: 33.142857 | max_line_length: 111 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/train.py
# -*- coding: utf-8 -*- import logging import torch import torch.cuda from beaver.data import build_dataset from beaver.infer import beam_search from beaver.loss import WarmAdam, LabelSmoothingLoss from beaver.model import NMTModel from beaver.utils import Saver from beaver.utils import calculate_bleu from beaver.uti...
file_length: 5,407 | avg_line_length: 42.264 | max_line_length: 176 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/tools/model_average.py
# -*- coding: utf-8 -*- import os import torch import sys def main(): if len(sys.argv) != 3: print("python model_average.py model_path n") exit() model_path = sys.argv[1] n = int(sys.argv[2]) # last n model to be averaged fs = [os.path.join(model_path, f) for f in os.listdir(model...
file_length: 941 | avg_line_length: 29.387097 | max_line_length: 114 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/tools/build_vocab.py
# -*- coding: utf-8 -*- import sys import collections log = sys.stderr.write def main(): size = int(sys.argv[1]) counter = collections.Counter() for line in sys.stdin: counter.update(line.strip().split()) items = counter.most_common() for word, _ in items[:size]: print(word) t...
file_length: 635 | avg_line_length: 23.461538 | max_line_length: 66 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/__init__.py
# -*- coding: utf-8 -*-
file_length: 24 | avg_line_length: 11.5 | max_line_length: 23 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/loss/optimizers.py
# -*- coding: utf-8 -*- import torch.nn as nn import torch.optim as optim class WarmAdam(object): def __init__(self, params, lr, hidden_size, warm_up, n_step): self.original_lr = lr self.n_step = n_step self.hidden_size = hidden_size self.warm_up_step = warm_up self.optimi...
file_length: 1,529 | avg_line_length: 36.317073 | max_line_length: 87 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/loss/__init__.py
# -*- coding: utf-8 -*- from beaver.loss.optimizers import WarmAdam, LabelSmoothingLoss
file_length: 90 | avg_line_length: 17.2 | max_line_length: 63 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/utils/saver.py
import json import torch import os import datetime class Saver(object): def __init__(self, opt): self.ckpt_names = [] self.model_path = opt.model_path + datetime.datetime.now().strftime("-%y%m%d-%H%M%S") self.max_to_keep = opt.max_to_keep os.mkdir(self.model_path) with op...
file_length: 1,626 | avg_line_length: 38.682927 | max_line_length: 122 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/utils/parseopt.py
# -*- coding: utf-8 -*- import argparse import json def common_opts(parser): parser.add_argument("-vocab", type=str, nargs="*", help="Vocab file") parser.add_argument("-batch_size", type=int, default=8192, help="Batch size") parser.add_argument("-beam_size", type=int, default=4, help="Beam size") pa...
file_length: 3,366 | avg_line_length: 42.166667 | max_line_length: 122 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/utils/rouge.py
# -*- coding: utf-8 -*- def get_ngrams(n, text): ngram_set = set() text_length = len(text) max_index_ngram = text_length - n for i in range(max_index_ngram + 1): ngram_set.add(tuple(text[i:i+n])) return ngram_set def rouge_n(evaluated_sentences, reference_sentences, n=2): #默认rouge_2 ...
file_length: 2,152 | avg_line_length: 35.491525 | max_line_length: 86 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/utils/__init__.py
# -*- coding: utf-8 -*- import torch.cuda from beaver.utils.metric import calculate_bleu, file_bleu from beaver.utils.saver import Saver def get_device(): if torch.cuda.is_available(): return torch.device('cuda') else: return torch.device('cpu') def printing_opt(opt): return "\n".join(...
file_length: 405 | avg_line_length: 21.555556 | max_line_length: 105 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/utils/metric.py
import os import re import subprocess import tempfile from beaver.utils.rouge import F_1 def calculate_bleu(hypotheses, references, lowercase=False): hypothesis_file = tempfile.NamedTemporaryFile(mode="w", encoding="UTF-8", delete=False) hypothesis_file.write("\n".join(hypotheses) + "\n") hypothesis_file...
file_length: 1,745 | avg_line_length: 35.375 | max_line_length: 108 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/data/field.py
# -*- coding: utf-8 -*- from typing import List import torch EOS_TOKEN = "<eos>" BOS_TOKEN = "<bos>" UNK_TOKEN = "<unk>" PAD_TOKEN = "<pad>" class Field(object): def __init__(self, bos: bool, eos: bool, pad: bool, unk: bool): self.bos_token = BOS_TOKEN if bos else None self.eos_token = EOS_TOKEN...
file_length: 2,418 | avg_line_length: 25.582418 | max_line_length: 115 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/data/utils.py
# -*- coding: utf-8 -*- from beaver.data.dataset import SumTransDataset from beaver.data.field import Field def build_dataset(opt, data_path, vocab_path, device, train=True): source_path = data_path[0] summary_cn_path = data_path[1] summary_en_path = data_path[2] source_field = Field(unk=True, pad=T...
file_length: 1,387 | avg_line_length: 37.555556 | max_line_length: 116 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/data/dataset.py
# -*- coding: utf-8 -*- import random from collections import namedtuple from typing import Dict import torch from beaver.data.field import Field Batch = namedtuple("Batch", ['source', 'summary_cn', 'summary_en', 'batch_size']) Example = namedtuple("Example", ['source', 'summary_cn', 'summary_en']) class SumTrans...
file_length: 2,812 | avg_line_length: 35.532468 | max_line_length: 121 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/data/__init__.py
# -*- coding: utf-8 -*- from beaver.data.utils import build_dataset
file_length: 69 | avg_line_length: 16.5 | max_line_length: 43 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/infer/beam.py
# -*- coding: utf-8 -*- import torch class Beam(object): def __init__(self, beam_size, pad, bos, eos, device, lp): self.size = beam_size self.alpha = lp self.scores = torch.full([beam_size], -1e20).float().to(device) self.scores[0] = 0. self.hypotheses = torch.full([1, ...
file_length: 1,652 | avg_line_length: 32.06 | max_line_length: 118 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/infer/__init__.py
# -*- coding: utf-8 -*- from beaver.infer.translator import beam_search
file_length: 74 | avg_line_length: 14 | max_line_length: 47 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/infer/translator.py
# -*- coding: utf-8 -*- import torch from beaver.infer.beam import Beam def beam_search(opt, model, src, fields): batch_size = src.size(0) beam_size = opt.beam_size encoder = model.encoder src = src.repeat(1, beam_size).view(batch_size * beam_size, -1) src_pad = src.eq(fields["source"].pad_id)...
file_length: 2,185 | avg_line_length: 33.698413 | max_line_length: 122 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/model/embeddings.py
# -*- coding: utf-8 -*- import math import torch import torch.nn as nn def positional_encoding(dim, max_len=5000): pe = torch.zeros(max_len, dim) position = torch.arange(0, max_len).unsqueeze(1) div_term = torch.exp((torch.arange(0, dim, 2, dtype=torch.float) * -(math.log(10000.0) / dim))) pe[:, 0::...
file_length: 1,313 | avg_line_length: 31.04878 | max_line_length: 110 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/model/transformer.py
# -*- coding: utf-8 -*- import math import torch import torch.nn as nn class FeedForward(nn.Module): def __init__(self, hidden_size, inner_size, dropout): super(FeedForward, self).__init__() self.linear_in = nn.Linear(hidden_size, inner_size, bias=False) self.linear_out = nn.Linear(inner_...
file_length: 6,591 | avg_line_length: 35.622222 | max_line_length: 120 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/model/__init__.py
# -*- coding: utf-8 -*- from beaver.model.nmt_model import NMTModel
file_length: 70 | avg_line_length: 13.2 | max_line_length: 43 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-2task/beaver/model/nmt_model.py
# -*- coding: utf-8 -*- from typing import Dict import torch import torch.nn as nn from beaver.model.embeddings import Embedding from beaver.model.transformer import Decoder, Encoder class Generator(nn.Module): def __init__(self, hidden_size: int, tgt_vocab_size: int): self.vocab_size = tgt_vocab_size ...
file_length: 4,603 | avg_line_length: 39.743363 | max_line_length: 100 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/translate.py
# -*- coding: utf-8 -*- import logging import torch import os from beaver.data import build_dataset from beaver.infer import beam_search from beaver.model import NMTModel from beaver.utils import parseopt, get_device, calculate_bleu logging.basicConfig(format="%(asctime)s - %(message)s", level=logging.INFO) opt = pa...
file_length: 2,194 | avg_line_length: 30.811594 | max_line_length: 111 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/train.py
# -*- coding: utf-8 -*- import logging import torch import torch.cuda from beaver.data import build_dataset from beaver.infer import beam_search from beaver.loss import WarmAdam, LabelSmoothingLoss from beaver.model import NMTModel from beaver.utils import Saver from beaver.utils import calculate_bleu from beaver.uti...
file_length: 3,303 | avg_line_length: 32.714286 | max_line_length: 102 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/tools/model_average.py
# -*- coding: utf-8 -*- import os import torch import sys def main(): if len(sys.argv) != 3: print("python model_average.py model_path n") exit() model_path = sys.argv[1] n = int(sys.argv[2]) # last n model to be averaged fs = [os.path.join(model_path, f) for f in os.listdir(model...
file_length: 941 | avg_line_length: 29.387097 | max_line_length: 114 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/tools/build_vocab.py
# -*- coding: utf-8 -*- import sys import collections log = sys.stderr.write def main(): size = int(sys.argv[1]) counter = collections.Counter() for line in sys.stdin: counter.update(line.strip().split()) items = counter.most_common() for word, _ in items[:size]: print(word) t...
file_length: 635 | avg_line_length: 23.461538 | max_line_length: 66 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/__init__.py
# -*- coding: utf-8 -*-
file_length: 24 | avg_line_length: 11.5 | max_line_length: 23 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/loss/optimizers.py
# -*- coding: utf-8 -*- import torch.nn as nn import torch.optim as optim class WarmAdam(object): def __init__(self, params, lr, hidden_size, warm_up, n_step): self.original_lr = lr self.n_step = n_step self.hidden_size = hidden_size self.warm_up_step = warm_up self.optimi...
file_length: 1,529 | avg_line_length: 36.317073 | max_line_length: 87 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/loss/__init__.py
# -*- coding: utf-8 -*- from beaver.loss.optimizers import WarmAdam, LabelSmoothingLoss
file_length: 90 | avg_line_length: 17.2 | max_line_length: 63 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/utils/saver.py
import json import torch import os import datetime class Saver(object): def __init__(self, opt): self.ckpt_names = [] self.model_path = opt.model_path + datetime.datetime.now().strftime("-%y%m%d-%H%M%S") self.max_to_keep = opt.max_to_keep os.mkdir(self.model_path) with op...
file_length: 1,063 | avg_line_length: 34.466667 | max_line_length: 113 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/utils/parseopt.py
# -*- coding: utf-8 -*- import argparse import json def common_opts(parser): parser.add_argument("-vocab", type=str, nargs="*", help="Vocab file") parser.add_argument("-batch_size", type=int, default=8192, help="Batch size") parser.add_argument("-beam_size", type=int, default=4, help="Beam size") pa...
file_length: 3,249 | avg_line_length: 41.763158 | max_line_length: 114 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/utils/__init__.py
# -*- coding: utf-8 -*- import torch.cuda from beaver.utils.metric import calculate_bleu, file_bleu from beaver.utils.saver import Saver def get_device(): if torch.cuda.is_available(): return torch.device('cuda') else: return torch.device('cpu') def printing_opt(opt): return "\n".join(...
file_length: 405 | avg_line_length: 21.555556 | max_line_length: 105 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/utils/metric.py
import os import re import subprocess import tempfile def calculate_bleu(hypotheses, references, lowercase=False): hypothesis_file = tempfile.NamedTemporaryFile(mode="w", encoding="UTF-8", delete=False) hypothesis_file.write("\n".join(hypotheses) + "\n") hypothesis_file.close() reference_file = tempfi...
file_length: 1,328 | avg_line_length: 39.272727 | max_line_length: 108 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/data/field.py
# -*- coding: utf-8 -*- from typing import List import torch EOS_TOKEN = "<eos>" BOS_TOKEN = "<bos>" UNK_TOKEN = "<unk>" PAD_TOKEN = "<pad>" class Field(object): def __init__(self, bos: bool, eos: bool, pad: bool, unk: bool): self.bos_token = BOS_TOKEN if bos else None self.eos_token = EOS_TOKEN...
file_length: 2,466 | avg_line_length: 25.815217 | max_line_length: 115 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/data/utils.py
# -*- coding: utf-8 -*- from beaver.data.dataset import TranslationDataset from beaver.data.field import Field def build_dataset(opt, data_path, vocab_path, device, train=True): src = data_path[0] tgt = data_path[1] src_field = Field(unk=True, pad=True, bos=False, eos=False) tgt_field = Field(unk=Tr...
file_length: 1,083 | avg_line_length: 31.848485 | max_line_length: 108 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/data/dataset.py
# -*- coding: utf-8 -*- import random from collections import namedtuple from typing import Dict import torch from beaver.data.field import Field Batch = namedtuple("Batch", ['src', 'tgt', 'batch_size']) Example = namedtuple("Example", ['src', 'tgt']) class TranslationDataset(object): def __init__(self, ...
file_length: 2,164 | avg_line_length: 29.069444 | max_line_length: 89 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/data/__init__.py
# -*- coding: utf-8 -*- from beaver.data.utils import build_dataset
file_length: 69 | avg_line_length: 16.5 | max_line_length: 43 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/infer/beam.py
# -*- coding: utf-8 -*- import torch class Beam(object): def __init__(self, beam_size, pad, bos, eos, device, lp): self.size = beam_size self.alpha = lp self.scores = torch.full([beam_size], -1e20).float().to(device) self.scores[0] = 0. self.hypotheses = torch.full([1, ...
file_length: 1,652 | avg_line_length: 32.06 | max_line_length: 118 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/infer/__init__.py
# -*- coding: utf-8 -*- from beaver.infer.translator import beam_search
file_length: 74 | avg_line_length: 14 | max_line_length: 47 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/infer/translator.py
# -*- coding: utf-8 -*- import torch from beaver.infer.beam import Beam def beam_search(opt, model, src, fields): batch_size = src.size(0) beam_size = opt.beam_size device = src.device num_words = model.generator.vocab_size encoder = model.encoder decoder = model.decoder generator = mod...
file_length: 1,903 | avg_line_length: 32.403509 | max_line_length: 98 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/model/embeddings.py
# -*- coding: utf-8 -*- import math import torch import torch.nn as nn def positional_encoding(dim, max_len=5000): pe = torch.zeros(max_len, dim) position = torch.arange(0, max_len).unsqueeze(1) div_term = torch.exp((torch.arange(0, dim, 2, dtype=torch.float) * -(math.log(10000.0) / dim))) pe[:, 0::...
file_length: 1,313 | avg_line_length: 31.04878 | max_line_length: 110 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/model/transformer.py
# -*- coding: utf-8 -*- import math import torch import torch.nn as nn class FeedForward(nn.Module): def __init__(self, hidden_size, inner_size, dropout): super(FeedForward, self).__init__() self.linear_in = nn.Linear(hidden_size, inner_size, bias=False) self.linear_out = nn.Linear(inner_...
file_length: 6,591 | avg_line_length: 35.622222 | max_line_length: 120 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/model/__init__.py
# -*- coding: utf-8 -*- from beaver.model.nmt_model import NMTModel
file_length: 70 | avg_line_length: 13.2 | max_line_length: 43 | extension_type: py
NCLS-Corpora
NCLS-Corpora-master/code/beaver-base/beaver/model/nmt_model.py
# -*- coding: utf-8 -*- from typing import Dict import torch import torch.nn as nn from beaver.model.embeddings import Embedding from beaver.model.transformer import Decoder, Encoder class Generator(nn.Module): def __init__(self, hidden_size: int, tgt_vocab_size: int): self.vocab_size = tgt_vocab_size ...
file_length: 3,231 | avg_line_length: 34.516484 | max_line_length: 100 | extension_type: py