add checking of console running for tqdm · flypythoncom/ner-bert@c08a105 · GitHub

Commit c08a105

Author: Ubuntu
Commit message: add checking of console running for tqdm
1 parent 077cbd8 · commit c08a105

File tree

3 files changed, +20 -1 lines changed

modules/data/bert_data.py
modules/train/train.py
modules/utils/utils.py

modules/data/bert_data.py

Lines changed: 5 additions & 0 deletions
@@ -1,9 +1,11 @@
 from torch.utils.data import DataLoader
 from modules.data import tokenization
+from modules.utils.utils import ipython_info
 import torch
 import pandas as pd
 import numpy as np
 from tqdm._tqdm_notebook import tqdm_notebook
+from tqdm import tqdm
 
 
 class InputFeatures(object):
@@ -276,6 +278,9 @@ def __init__(self, train_dl, valid_dl, tokenizer, label2idx, max_seq_len=424,
     @classmethod
     def create(cls,
                train_path, valid_path, vocab_file, batch_size=16, cuda=True, is_cls=False, data_type="bert_cased", max_seq_len=424):
+        if ipython_info():
+            global tqdm_notebook
+            tqdm_notebook = tqdm
         if data_type == "bert_cased":
             do_lower_case = False
             fn = get_bert_data_loaders
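The patched create classmethod keeps the module-level tqdm_notebook import and simply rebinds that name at runtime when ipython_info() reports a console run, so the notebook widget bar is only used inside Jupyter. Below is a minimal, self-contained sketch of that pattern, not the repo's exact code: build_features and the try/except import fallback are illustrative assumptions.

import __main__

from tqdm import tqdm
try:
    from tqdm.notebook import tqdm as tqdm_notebook   # newer tqdm releases
except ImportError:
    from tqdm._tqdm_notebook import tqdm_notebook     # path used in this repo


def ipython_info():
    # True for a plain script/console run: that __main__ has a __file__
    # attribute, while the __main__ of an IPython/Jupyter kernel does not.
    return hasattr(__main__, '__file__')


def build_features(items):
    # Illustrative stand-in for the data-loading entry point.
    global tqdm_notebook
    if ipython_info():
        # Outside a notebook, fall back to the terminal bar so the HTML
        # widget output of tqdm_notebook is never emitted.
        tqdm_notebook = tqdm
    return [item for item in tqdm_notebook(items, desc="building features")]


if __name__ == '__main__':
    build_features(range(10))

Because the rebinding sits inside the entry point rather than at import time, the switch only happens once the data loaders are actually built.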

modules/train/train.py

Lines changed: 9 additions & 1 deletion
@@ -1,15 +1,20 @@
 from tqdm._tqdm_notebook import tqdm_notebook
+from tqdm import tqdm
 from sklearn_crfsuite.metrics import flat_classification_report
 import logging
 import torch
 from modules.utils.plot_metrics import get_mean_max_metric
-from modules.train.clr import CyclicLR
+from modules.utils.utils import ipython_info
 from torch.optim import Adam
 from .optimization import BertAdam
 
 
 logging.basicConfig(level=logging.INFO)
 
+if __name__ == '__main__':
+    print(1)
+    tqdm_notebook = tqdm
+
 
 def train_step(dl, model, optimizer, lr_scheduler=None, clip=None, num_epoch=1):
     model.train()
@@ -132,6 +137,9 @@ def predict(dl, model, id2label, id2cls=None):
 class NerLearner(object):
     def __init__(self, model, data, best_model_path, lr=0.001, betas=list([0.8, 0.9]), clip=5,
                  verbose=True, sup_labels=None, t_total=-1, warmup=0.1, weight_decay=0.01):
+        if ipython_info():
+            global tqdm_notebook
+            tqdm_notebook = tqdm
         self.model = model
         self.optimizer = BertAdam(model, lr, t_total=t_total, b1=betas[0], b2=betas[1], max_grad_norm=clip)
         self.optimizer_defaults = dict(model=model, lr=lr, warmup=warmup, t_total=t_total, schedule='warmup_linear',
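train.py applies the same per-instance check in NerLearner.__init__ and swaps the CyclicLR import for ipython_info in the same hunk. It also adds a module-level if __name__ == '__main__': guard (the print(1) looks like a debugging leftover); that guard fires only when train.py is executed directly, not when the module is imported. A small, hypothetical demo of how such a guard behaves, not code from this repo:

# progress_demo.py - hypothetical demo of the __main__ guard, not repo code
from tqdm import tqdm
try:
    from tqdm.notebook import tqdm as tqdm_notebook
except ImportError:
    from tqdm._tqdm_notebook import tqdm_notebook

if __name__ == '__main__':
    # Runs only under `python progress_demo.py`; an `import progress_demo`
    # (for example from a notebook) skips this block and keeps the notebook bar.
    tqdm_notebook = tqdm
    for _ in tqdm_notebook(range(3), desc="demo"):
        pass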

modules/utils/utils.py

Lines changed: 6 additions & 0 deletions
@@ -1,3 +1,9 @@
+import sys
+import __main__ as main
+
+
+def ipython_info():
+    return hasattr(main, '__file__')
 
 
 def bert_labels2tokens(dl, labels):
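The new helper relies on a common heuristic: a __main__ module produced by a normal script or console run has a __file__ attribute, while the __main__ created by an IPython/Jupyter kernel does not, so despite its name ipython_info() returns True for console runs (the added import sys is unused in this hunk). A sketch of the same check alongside a hypothetical, more explicit notebook test based on IPython's shell class; the running_in_notebook helper is an assumption, not part of this repo:

import __main__ as main


def ipython_info():
    # True for a plain console/script run: that __main__ carries __file__,
    # while the __main__ injected by an IPython/Jupyter kernel does not.
    return hasattr(main, '__file__')


def running_in_notebook():
    # Hypothetical complementary check via IPython's own API, for cases
    # where the __file__ heuristic alone is not conclusive.
    try:
        from IPython import get_ipython
    except ImportError:
        return False
    shell = get_ipython()
    return shell is not None and shell.__class__.__name__ == 'ZMQInteractiveShell'


if __name__ == '__main__':
    print('console run:', ipython_info(), '| notebook:', running_in_notebook())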

0 commit comments
