
Commit c5f74dd

generatedunixname89002005232357 authored and facebook-github-bot committed
Revert D71058400
Summary: This diff reverts D71058400, which broke spna/bender_predictor:bender_predictor_lib-type-checking - main (T218443607).

Differential Revision: D71478648

fbshipit-source-id: 6a2017f32fd219a19ea6719fd083dff79034a1a6
1 parent 53ced64 · commit c5f74dd

File tree

1 file changed: 6 additions, 6 deletions

mmf/modules/hf_layers.py

Lines changed: 6 additions & 6 deletions
@@ -8,8 +8,7 @@
 from torch import nn, Tensor
 
 try:
-    from transformers.modeling_utils import PreTrainedModel
-    from transformers.models.bert.modeling_bert import (
+    from transformers3.modeling_bert import (
         BertAttention,
         BertEmbeddings,
         BertEncoder,
@@ -19,17 +18,17 @@
         BertSelfAttention,
         BertSelfOutput,
     )
-    from transformers.models.roberta.modeling_roberta import (
+    from transformers3.modeling_roberta import (
         RobertaAttention,
         RobertaEmbeddings,
         RobertaEncoder,
         RobertaLayer,
         RobertaModel,
         RobertaSelfAttention,
     )
+    from transformers3.modeling_utils import PreTrainedModel
 except ImportError:
-    from transformers.modeling_utils import PreTrainedModel
-    from transformers.models.bert.modeling_bert import (
+    from transformers.modeling_bert import (
         BertAttention,
         BertEmbeddings,
         BertEncoder,
@@ -39,14 +38,15 @@
         BertSelfAttention,
         BertSelfOutput,
     )
-    from transformers.models.roberta.modeling_roberta import (
+    from transformers.modeling_roberta import (
         RobertaAttention,
         RobertaEmbeddings,
         RobertaEncoder,
         RobertaLayer,
         RobertaModel,
         RobertaSelfAttention,
     )
+    from transformers.modeling_utils import PreTrainedModel
 
 
 patch_functions = [
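
For context, the change restores a guarded-import fallback: prefer the pinned transformers3 distribution, and fall back to the upstream transformers package (with its pre-4.x flat module layout, e.g. transformers.modeling_bert) when transformers3 is not installed. Below is a minimal, self-contained sketch of the same try/except import pattern; ujson and the stdlib json are illustrative stand-ins, not names from this diff.

# Prefer an optional drop-in library; fall back to the stdlib if it is
# missing. This mirrors the try/except import structure in hf_layers.py.
try:
    import ujson as json  # optional C-backed JSON codec, if installed
except ImportError:
    import json  # stdlib fallback, always available

# Call sites use the single name "json" regardless of which import won.
print(json.dumps({"ok": True}))

The design point is that the fallback is resolved once at import time, so every downstream call site binds to one name and needs no per-call version checks.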
