"""
Created on Tue Jul 27 10:18:46 2021

@author: hm
"""

import time
import traceback
import statistics as st
from threading import Thread

import cv2
import matplotlib.pyplot as plt
import matplotlib.patheffects as pe
import numpy as np
import pandas as pd
import psutil
from PIL import Image
from torchvision import transforms

from utils import faceMTCNN, embedResnet, DeepPixBiSExtractor

plt.ioff()  # figures are only saved to disk, never shown interactively

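# Real-time face verification demo:
#   - faceMTCNN detects and crops the face in each frame,
#   - embedResnet turns the crop into an embedding that is compared against a
#     stored face bank (hm.csv),
#   - DeepPixBiS scores liveness on a tight crop of the detected face,
# while a background thread logs CPU usage, later plotted under every frame of
# the exported demo video (demo.avi).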
demo_length = 100  # number of frames to process before the demo stops
detector = faceMTCNN(period=1)
verifier = embedResnet(period=3)

liveness = DeepPixBiSExtractor(scoring_method='combined',  # one of: 'pixel_mean', 'binary', 'combined'
                               model_file='Pretrained_models/OULU_Protocol_1_model_0_0.pth')

tensor2pil = transforms.ToPILImage(mode='RGB')

# Shared state between the main loop and the CPU-monitoring thread
cpu_hist = []  # instantaneous CPU usage samples
cpu_ma1 = []   # short moving average
cpu_ma2 = []   # long moving average
stamp = []     # sample timestamps in seconds
stop_threads = False
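
# Background sampler: appends instantaneous CPU usage and two moving averages
# (windows of ma_win1 and ma_win2 samples) to the lists above until stop_threads
# is set by the main script.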
def cpu_usage(delay=.1, ma_win1=10, ma_win2=100):
    """Poll psutil every `delay` seconds and record usage plus moving averages."""
    t = 0
    while True:
        cusage = psutil.cpu_percent()
        cpu_hist.append(cusage)
        cpu_ma1.append(st.mean(cpu_hist[-ma_win1:]))
        cpu_ma2.append(st.mean(cpu_hist[-ma_win2:]))
        stamp.append(t)
        t += delay
        time.sleep(delay)
        if stop_threads:
            print('kill hist thread')
            break

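# Overlay text settings and verification thresholds: a frame counts as VERIFIED
# when the minimum embedding distance to the face bank is below th1 and the mean
# distance is below th2.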
font = cv2.FONT_HERSHEY_SIMPLEX
bottomLeftCornerOfText = (15, 35)
fontScale = 1
lineType = 2

wait = .2   # seconds to sleep between frame reads
th1 = .85
th2 = 1.1
i = 0       # frame counter

ss = 100
thread = Thread(target=cpu_usage)
thread.start()

w, h = (1280 // 2, 720 // 2)
fps = 1
buffer = np.ones((w, w, 3)) * 255
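# Grab frames through a GStreamer pipeline (5 fps, 640x480, BGR); the commented
# cv2.VideoCapture(0) line below is a plain-webcam fallback.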
vid = cv2.VideoCapture("""autovideosrc
                          ! videoconvert
                          ! video/x-raw, framerate=5/1,
                            width=640, height=480, format=BGR
                          ! appsink""")
# vid = cv2.VideoCapture(0)
while True:
    ret, first_frame = vid.read()
    time.sleep(wait)
    if ret:
        break
print('got first frame!')
first_frame = cv2.resize(first_frame, (w, h))
fourcc = cv2.VideoWriter_fourcc(*'XVID')
h, w = tuple(first_frame.shape[:2])
# cv2.namedWindow('res', cv2.WINDOW_NORMAL)
# cv2.resizeWindow('res', w, w + h)
video = cv2.VideoWriter('demo.avi', fourcc, 5, (w, w + h))  # camera frame on top, CPU plot below
facebank = pd.read_csv('hm.csv', header=None)  # reference embeddings of the enrolled face
verify = False
last_verify = None  # initialised here so the first frames cannot hit an undefined name
norm_min = 1.0
norm_mean = 1.0

print('begin real-time process')
# vid.set(cv2.CAP_PROP_FRAME_WIDTH, w)
# vid.set(cv2.CAP_PROP_FRAME_HEIGHT, h)
# vid.set(cv2.CAP_PROP_FPS, fps)
tracker = cv2.TrackerKCF_create()
init_tracker = False
tight = None         # last tight face crop (CHW, 224x224) fed to the liveness model
liveness_score = 0
live_turn = 0
sequence = []        # annotated frames, written to the video after the loop
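
# Per-frame loop: read a frame, detect the face, embed and compare it against the
# face bank, periodically score liveness on the tight crop, draw the decision on
# the frame, and buffer the annotated frame in `sequence` for the offline video pass.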
while True:
    time.sleep(wait)
    ret, frame = vid.read()
    if ret:
        # shrink to the working resolution and convert BGR -> RGB
        frame = cv2.cvtColor(cv2.resize(frame, (w, h)), cv2.COLOR_BGR2RGB)
        res = frame.copy()   # copy that receives the drawn overlay
        orig = frame.copy()  # untouched copy used for the tight liveness crop
        verify_status = 'NOT VERIFIED'
        # print(f'{i}-frame, time:{stamp[-1]}s')
        img = Image.fromarray(frame)
        i += 1
        if i % 10 == 0:
            print('i:\t', i)
        if i >= demo_length:
            print('end of demo!')
            break
        # detection
        try:
            img_cropped, boxes = detector.schedule(img)
        except Exception as e:
            traceback.print_exc()
            print("error:", e)
            print('\n')
            sequence.append(frame)
            continue
        if img_cropped is not None:
            verifier.last_face = img_cropped
            # verification: distances between the embedding and every face-bank entry
            embeddings = verifier.schedule()
            if embeddings is not None:
                last_verify = None
                diff = facebank - embeddings
                norm = np.linalg.norm(diff, axis=1)
                norm_min = np.around(norm.min(), 3)
                norm_mean = np.around(norm.mean(), 3)

                live_turn += 1
                # liveness: score the tight crop on every second embedding update
                if tight is not None and live_turn >= 2:
                    time.sleep(1.5)
                    liveness_score = liveness(tight)
                    live_turn = 0

                if norm.min() < th1 and norm.mean() < th2:
                    verify = True
                else:
                    verify = False
            else:
                # verifier skipped this frame; if the last decision was negative,
                # force a fresh detection + verification instead of waiting
                last_verify = verify
                if not last_verify:
                    try:
                        img_cropped, boxes = detector.detect(img)
                    except Exception as e:
                        traceback.print_exc()
                        print("error:", e)
                        print('\n')
                        sequence.append(frame)
                        continue
                    try:
                        print(type(img_cropped))
                        embeddings = verifier.verify(img_cropped)
                        last_verify = None
                        diff = facebank - embeddings
                        norm = np.linalg.norm(diff, axis=1)
                        norm_min = np.around(norm.min(), 3)
                        norm_mean = np.around(norm.mean(), 3)
                    except Exception:
                        continue
                    if norm.min() < th1 and norm.mean() < th2:
                        verify = True
                    else:
                        verify = False

        # visualize verification status; colours are RGB here:
        # green = verified this frame, yellow = verified on an earlier frame,
        # red = not verified
        if last_verify is not None:
            if verify:
                verify_status = f'VERIFIED min:{norm_min}, mean:{norm_mean}'
                color = (255, 255, 0)
            else:
                verify_status = f'NOT VERIFIED min:{norm_min}, mean:{norm_mean}'
                color = (255, 0, 0)
        else:
            if verify:
                verify_status = f'VERIFIED min:{norm_min}, mean:{norm_mean}'
                color = (0, 255, 0)
            else:
                verify_status = f'NOT VERIFIED min:{norm_min}, mean:{norm_mean}'
                color = (255, 0, 0)

        if boxes is not None:
            # draw the detection box, refresh the liveness crop and the tracker
            x1, y1, x2, y2 = np.array(boxes[0]).astype(int)
            tight_crop = orig[y1:y2, x1:x2]
            tight = cv2.resize(tight_crop, (224, 224)).transpose(2, 0, 1)  # HWC -> CHW for the liveness model
            res = cv2.rectangle(res, (x1, y1), (x2, y2), color, 2)
            w_face, h_face = (x2 - x1), (y2 - y1)
            if not init_tracker:
                ok = tracker.init(frame, (x1, y1, w_face, h_face))
                init_tracker = ok
            else:
                tracker = cv2.TrackerKCF_create()
                ok = tracker.init(frame, (x1, y1, w_face, h_face))

        elif init_tracker:
            # no detection this frame: fall back to the KCF tracker's estimate
            ok, bbox = tracker.update(frame)
            x1, y1, w_face, h_face = np.array(bbox).astype(int)
            res = cv2.rectangle(res, (x1, y1), (x1 + w_face, y1 + h_face), color, 2)

        res = cv2.putText(res, verify_status,
                          bottomLeftCornerOfText,
                          font,
                          fontScale,
                          color,
                          lineType)

        res = cv2.putText(res, f'liveness: {liveness_score:.3f}',
                          (15, 35 + 35),
                          font,
                          fontScale,
                          color,
                          lineType)

        sequence.append(res)

    else:
        break


plt.close()
# cv2.destroyAllWindows()

stop_threads = True  # ask the CPU-monitoring thread to exit
print('last stamp:', stamp[-1])
print(len(stamp), len(sequence))
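
# Offline composition pass: re-plot the CPU-usage history (raw samples plus both
# moving averages) over a growing window for every stored frame, stack the plot
# under the frame, and write the result to demo.avi.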
fig = plt.figure()
ss = 0
for r, res in enumerate(sequence):
    # visualize cpu usage up to (roughly) this frame
    if ss < 100:
        ss += 1
    plt.plot(stamp[r:r + ss], cpu_hist[r:r + ss], linewidth=1, alpha=0.2)
    plt.plot(stamp[r:r + ss], cpu_ma1[r:r + ss], linewidth=1, c='black', alpha=0.7, linestyle='--')
    plt.plot(stamp[r:r + ss], cpu_ma2[r:r + ss], linewidth=2, c='#38a7ab',
             path_effects=[pe.Stroke(linewidth=2, foreground='g'), pe.Normal()])
    plt.title('CPU Usage')
    plt.savefig('buffer.jpg')
    buffer = cv2.imread('buffer.jpg')
    buffer = cv2.resize(buffer, (w, w))
    output = np.vstack((res[:, :, ::-1], buffer))  # RGB -> BGR frame on top, CPU plot below
    video.write(output)
    fig.clear()

video.release()
vid.release()