In [7]:
# Show the ground-truth labels of the test set for comparison with predictions
actual_labels = y_test.values
print('Actual value: ', actual_labels)
Actual value: [1 0]
In [8]:
# Evaluate the trained model on the held-out test set
from sklearn.metrics import classification_report, confusion_matrix, accuracy_score

pred = classifier.predict(X_test)

# Per-class precision / recall / F1, followed by the confusion matrix and overall accuracy
print(classification_report(y_test, pred))
print('Confusion Matrix: \n', confusion_matrix(y_test, pred))
print()
print('Accuracy: ', accuracy_score(y_test, pred))
precision recall f1-score support 0 0.00 0.00 0.00 1 1 0.50 1.00 0.67 1 accuracy 0.50 2 macro avg 0.25 0.50 0.33 2 weighted avg 0.25 0.50 0.33 2 Confusion Matrix: [[0 1] [0 1]] Accuracy: 0.5