main_face.py
# Swap in whichever model you want to use
from model_face import NN, ResNet_50, BottleneckBlock
import cv2
from PIL import ImageFont, ImageDraw, Image
import numpy as np
import torch
import torchvision.transforms as transforms
import matplotlib.pyplot as plt
import os
import pandas as pd
import sys

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Declare the model to use
NN = NN().to(device)
# Evaluation mode, not training
NN.eval()

# Load a previously trained checkpoint; write the epoch number to load in place of '66'.
path = './checkpoint/model_weights_NN/weights_epoch_' + '66' + '.pth.tar'
checkpoint = torch.load(path, map_location=device)
NN.load_state_dict(checkpoint['NN'])
# Sample pixel string from the FER2013 dataset (indices 10, 15, 22, 24 are alternatives);
# kept for reference, not used in the prediction below.
test = pd.read_csv("./data/fer2013.csv")["pixels"][22]  # 10, 15, 22, 24
test = test.split(' ')
test = np.array(list(map(int, test)), 'float32')
test = test.reshape([48, 48])

# Load and preprocess the test image: grayscale, 48x48, histogram equalization, scale to [0, 1]
img = cv2.imread("./test_image1.png")
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
img = cv2.resize(img, (48, 48))
img = cv2.equalizeHist(img)
img = np.array(img, 'float32')
face_frame = img / 255

# Shape the input as (batch, channel, height, width) = (1, 1, 48, 48)
input_face = torch.tensor(face_frame)
input_face = input_face.to(device)
input_face = input_face.reshape(1, 1, 48, 48)
def get_label_emotion(label):
    """
    Return the emotion name string corresponding to a label value.

    Parameters
    ----------
    label : int
        emotion label number

    Returns
    -------
    str
        name of the emotion corresponding to the label number
    """
    data = {0: 'Angry',
            1: 'Disgust',
            2: 'Fear',
            3: 'Happy',
            4: 'Sad',
            5: 'Surprise',
            6: 'Neutral'}
    return data[label]
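
# For example, with the mapping above, get_label_emotion(3) returns 'Happy'.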

# Run the network on the face and turn the logits into a 7-dimensional emotion probability vector
softmax = torch.nn.Softmax(dim=-1)
emotion_vec = NN(input_face.float()).squeeze()
vision_vec = softmax(emotion_vec)
vision_vec = vision_vec.cpu().detach().numpy()  # .reshape(-1,1)

vision_label_for_Qn = np.argmax(vision_vec)
vision_emotion_label = get_label_emotion(np.argmax(vision_vec))
print(vision_emotion_label)
vision_percentage = np.max(vision_vec)
# Fixed 2-D coordinates for each emotion in the arousal-valence plane
Happy_vec = np.array([0.6, 0.85])
Surprise_vec = np.array([0.05, 0.5])
anger_vec = np.array([-0.6, 0.85])
disgust_vec = np.array([-0.3, 0.45])
fear_vec = np.array([-0.62, 0.1])
sad_vec = np.array([-0.7, -0.2])
neutral_vec = np.array([0, 0])


def emotion_to_A_V_vec(vision_vec):
    # Weighted sum of the emotion coordinates, weighted by the predicted class probabilities
    A_V_vec = vision_vec[0]*anger_vec + vision_vec[1]*disgust_vec + vision_vec[2]*fear_vec +\
        vision_vec[3]*Happy_vec + vision_vec[4]*sad_vec + vision_vec[5]*Surprise_vec +\
        vision_vec[6]*neutral_vec
    return A_V_vec
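
# Minimal usage sketch (emotion_to_A_V_vec is not otherwise called in this script):
# collapse the predicted probability vector into a single arousal-valence point.
A_V_point = emotion_to_A_V_vec(vision_vec)
print(A_V_point)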

######################################
# Load the MMD dataset and rescale arousal and valence with x*2 - 1 (maps [0, 1] to [-1, 1])
data_PATH = "./MMD/"
df1 = pd.read_csv(data_PATH + "MMD.csv")
print(df1)

df2 = df1[['arousal', 'valence']]
a = df2['arousal'] * 2 - 1
b = df2['valence'] * 2 - 1
df4 = pd.concat([b, a], axis=1)

val = np.array(df4['valence'])
ener = np.array(df4['arousal'])
table1 = pd.DataFrame({
    'valence': val,
    'arousal': ener
})
# Example thresholds for "happy":
# valence_min = 0.58
# valence_max = 0.62
# energy_min = 0.8
# energy_max = 0.9

# Keep only rows whose valence and arousal fall inside the bounds passed on the command line:
# sys.argv[1] < valence < sys.argv[2] and sys.argv[3] < arousal < sys.argv[4]
table2 = table1.loc[(table1['valence'] > float(sys.argv[1])) & (table1['valence'] < float(sys.argv[2])) & (
    table1['arousal'] > float(sys.argv[3])) & (table1['arousal'] < float(sys.argv[4]))]
print(table2)

# Save the corresponding rows of the original dataset
index_list = list(table2.index)
df5 = df1.loc[index_list]
df5.to_csv('./filtered_MMD/MMD.csv', index=False)
print(sys.argv)
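
# Example invocation, using the "happy" thresholds commented out above
# (valence between 0.58 and 0.62, arousal between 0.8 and 0.9):
#   python main_face.py 0.58 0.62 0.8 0.9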