Skip to content

Commit 763aa6a

Browse files
committed
Fixed bug where NER weights were not being updated
1 parent 4b145c5 commit 763aa6a

File tree

2 files changed

+4
-3
lines changed

2 files changed

+4
-3
lines changed

main.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -153,7 +153,7 @@ def custom_batch_collation(x):
153153
seg_loss = FocalLoss(ignore_index=0, gamma=2)
154154

155155
bert_param_optimizer = list(model.transformer.named_parameters())
156-
ner_fc_param_optimizer = list(model.plain_ner.named_parameters())
156+
ner_fc_param_optimizer = list(model.ner.named_parameters())
157157
no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']
158158

159159
if not BASELINE:

model.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,8 +19,9 @@ def __init__(self, config):
1919
pretrained_model_name_or_path="roberta-base", cache_dir=MODEL_CACHE_DIR, config=config)
2020
self.dropout = nn.Dropout()
2121
self.relu = nn.ReLU(True)
22-
self.plain_ner = nn.Linear(config.hidden_size, len(LABEL_BIO))
23-
if not BASELINE:
22+
if BASELINE:
23+
self.ner = nn.Linear(config.hidden_size, len(LABEL_BIO))
24+
else:
2425
self.boundary_encoder = nn.LSTM(bidirectional=True, input_size=config.hidden_size, hidden_size=LSTM_HIDDEN, batch_first=True)
2526
self.boundary_decoder = nn.LSTM(bidirectional=False, input_size=LSTM_HIDDEN*2, hidden_size=LSTM_HIDDEN, batch_first=True)
2627
self.boundary_biaffine = BoundaryBiaffine(LSTM_HIDDEN, LSTM_HIDDEN*2, len(BOUNDARY_LABEL_UNIDIRECTION))

0 commit comments

Comments (0)