    model = Model(inputs=[input_ids, attention_mask], outputs=out)
    # compile with Adam at a small fine-tuning learning rate and track AUC
    model.compile(
            Adam(learning_rate=3e-5), loss=loss, metrics=[tf.keras.metrics.AUC()]
    )

    return model

# load the pretrained BERT weights and build the model inside the strategy scope
with strategy.scope():
    transformer_layer = transformers.TFBertModel.from_pretrained("bert-base-uncased")
    model = build_model(transformer_layer, max_len=MAX_LEN)
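Only the tail of build_model is shown above; the input layers and the BERT call are defined earlier in the listing. As a rough, self-contained sketch of what that upstream part typically looks like (the dropout rate and the "binary_crossentropy" default for loss are assumptions here, not values taken from the book), consistent with the layer names in the summary below:

import tensorflow as tf
import transformers
from tensorflow.keras.layers import Dense, Dropout, Input
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam

MAX_LEN = 220  # sequence length that appears in the summary output

def build_model(transformer, max_len=MAX_LEN, loss="binary_crossentropy"):
    # integer inputs: token ids and the matching attention mask
    input_ids = Input(shape=(max_len,), dtype=tf.int32, name="input_word_ids")
    attention_mask = Input(shape=(max_len,), dtype=tf.int32, name="attention_mask")

    # last hidden states from BERT: (batch, max_len, 768)
    sequence_output = transformer(input_ids, attention_mask=attention_mask)[0]

    # keep only the [CLS] position (the strided-slice layer in the summary)
    cls_token = sequence_output[:, 0, :]
    x = Dropout(0.2)(cls_token)              # dropout rate is an assumption
    out = Dense(1, activation="sigmoid")(x)  # single sigmoid output unit

    model = Model(inputs=[input_ids, attention_mask], outputs=out)
    model.compile(
            Adam(learning_rate=3e-5), loss=loss, metrics=[tf.keras.metrics.AUC()]
    )
    return model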

>>> model.summary()
Model: "model_1"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_word_ids (InputLayer)  [(None, 220)]             0         
_________________________________________________________________
tf_bert_model_1 (TFBertModel ((None, 220, 768), (None, 109482240 
_________________________________________________________________
tf_op_layer_strided_slice_1  [(None, 768)]             0         
_________________________________________________________________
dropout_75 (Dropout)         (None, 768)               0         
_________________________________________________________________
dense_1 (Dense)              (None, 1)                 769       
=================================================================
Total params: 109,483,009
Trainable params: 109,483,009
Non-trainable params: 0
_________________________________________________________________
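The parameter counts can be verified by hand: dense_1 contributes 768 weights (one per BERT hidden unit) plus one bias, and adding those 769 parameters to the 109,482,240 of TFBertModel reproduces the total. A quick check:

bert_params = 109_482_240          # TFBertModel parameters from the summary
dense_params = 768 * 1 + 1         # Dense(1) on a 768-dim [CLS] vector: weights + bias
print(bert_params + dense_params)  # 109483009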