Converting Predicted Values to Decimal

This post is part of the notes I kept while working on my graduation project, moved over to the blog. Accordingly, only the write-ups that were actually applied to complete the project are being migrated here.

To see the project as a whole, please follow the link below!

AI-Based Personal Blood Pressure Monitoring System - Converting Predicted Values to Decimal

1. Why this was applied

  • I understood that the higher the accuracy, the closer the predicted Gray code gets to the actual systolic and diastolic blood pressure.
  • However, it is hard to tell at a glance exactly what those values are, so I added this post-processing step to convert them to decimal.

2. Code

A. Gray code matching rate

  • Shows how accurately the deep learning model's predicted Y data matches the ground-truth Y data, per sample (a standalone usage sketch follows the output below).
def matching_per(self, real_data, predict_data):
    # The true and predicted Gray codes must have the same bit length.
    if len(real_data[0]) != len(predict_data[0]):
        return None
    else:
        real_data_len = len(real_data[0])
        per_list = []

        for i, single_list in enumerate(real_data):
            # Count how many bits of this sample were predicted correctly.
            true_count = 0
            for j, element in enumerate(single_list):
                if element == predict_data[i][j]:
                    true_count = true_count + 1
            per = true_count / real_data_len   # matching rate for this sample
            per_list.append(per)

        return per_list
matching per :  [0.75, 0.75, 1.0, 0.9375, 0.6875, 0.75, 1.0, 0.875, 1.0, 0.75, 1.0,
                                 1.0, 0.75, 0.9375, 0.8125, 1.0, 1.0, 0.875, 1.0, 1.0, 1.0, 1.0, 0.75,
                                 1.0, 0.75, 0.9375, 1.0, 1.0, 1.0, 1.0, 0.8125, 0.625, 0.8125, 0.8125,
                                 0.75, 1.0, 1.0, 0.75, 0.9375, 0.6875, 1.0, 0.8125, 1.0, 0.875, 0.6875,
                                 1.0, 0.75, 0.8125, 0.8125, 1.0, 0.9375, 0.75, 0.9375, 0.8125, 0.875,
                                 0.9375, 1.0, 1.0, 0.8125, 0.75, 0.625, 1.0, 1.0, 0.875, 1.0, 1.0,
                                 1.0, 1.0, 1.0, 1.0, 1.0, 0.9375, 1.0, 1.0, 1.0, 0.9375, 0.5625, 1.0,
                                 1.0, 1.0, 1.0, 1.0, 0.875, 1.0, 1.0, 0.8125, 1.0, 1.0, 1.0, 0.625,
                                 1.0, 0.6875, 0.9375, 0.75, 1.0, 0.8125, 1.0, 1.0, 1.0, 0.875, 1.0,
                                 0.5625, 0.75, 0.6875, 1.0, 0.9375, 1.0, 1.0, 1.0, 0.6875, 0.75, 0.875,
                                 0.875, 1.0, 1.0, 1.0, 0.9375, 0.875, 1.0, 1.0, 1.0, 1.0, 0.75, 0.875,
                                 0.6875, 0.6875, 0.6875, 0.625, 1.0, 1.0, 1.0, 0.75, 1.0, 0.9375, 0.875,
                                 1.0, 0.625, 0.9375, 1.0, 1.0, 0.6875, 1.0, 1.0, 0.75, 1.0, 1.0, 0.8125,
                                 0.6875, 0.75, 0.6875, 1.0, 0.9375, 1.0, 0.6875, 1.0, 0.9375, 1.0, 1.0,
                                 1.0, 1.0, 1.0, 1.0, 0.8125, 1.0, 1.0, 0.75, 0.75, 0.875, 1.0, 1.0, 1.0,
                                 1.0, 0.625, 1.0, 1.0, 0.625, 0.75, 0.8125, 1.0, 1.0, 0.8125, 1.0, 1.0,
                                 1.0, 1.0, 1.0, 1.0, 0.75, 0.9375, 1.0, 1.0, 1.0, 0.875, 1.0, 0.5625,
                                 0.6875, 0.625, 0.9375, 1.0, 0.9375, 0.75, 1.0, 0.875]
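For reference, here is a minimal usage sketch of matching_per. The class name Postprocessor, the single 16-bit sample, and the assumption that the model's raw outputs have already been rounded to 0/1 bits are all illustrative and not part of the original code.

# Standalone sketch; "Postprocessor" is a placeholder class name and the
# 16-bit sample below is made up for illustration.
class Postprocessor:
    def matching_per(self, real_data, predict_data):
        if len(real_data[0]) != len(predict_data[0]):
            return None
        per_list = []
        for real_code, pred_code in zip(real_data, predict_data):
            true_count = sum(r == p for r, p in zip(real_code, pred_code))
            per_list.append(true_count / len(real_data[0]))
        return per_list

# One sample with 16 Gray code bits; the last two bits are predicted wrong.
Y_test  = [[0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1]]
predict = [[0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0]]

print("matching per : ", Postprocessor().matching_per(Y_test, predict))  # [0.875]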

B. Y_test, predict: Gray code → decimal conversion

def binary_to_dec(self, binary_code):
    # Convert each binary code (a list of 0/1 ints) into a decimal integer.
    dec_list = []

    for code in binary_code:
        code = list(map(str, code))
        binary_sum = "0b" + "".join(code)
        dec_list.append(int(binary_sum, 2))

    return dec_list

def binary_code(self, gray_code_list):
    # Split each 16-bit Gray code into the diastolic half (first 8 bits)
    # and the systolic half (last 8 bits), then convert each half to binary.
    BP_D_binary_code = []
    BP_S_binary_code = []

    for i, gray_code in enumerate(gray_code_list):
        BP_D = []
        BP_S = []
        for j, code in enumerate(gray_code):
            if j < 8:
                BP_D.append(code)
            else:
                BP_S.append(code)
        BP_D_binary_code.append(self.gray_to_binary(BP_D))
        BP_S_binary_code.append(self.gray_to_binary(BP_S))

    return BP_D_binary_code, BP_S_binary_code

def gray_to_binary(self, gray_code):
    # Gray → binary: the MSB is copied as-is, and each following binary bit
    # is the XOR of the previous binary bit and the current Gray bit.
    binary_code = []
    x_bit = 0

    for i, bit in enumerate(gray_code):
        if i == 0:
            binary_code.append(bit)
            x_bit = bit
        else:
            x_bit = x_bit ^ bit
            binary_code.append(x_bit)

    return binary_code
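In the original class these methods are presumably chained as binary_code(...) on the Gray code list, followed by binary_to_dec(...) on each returned half. Below is a compact standalone sketch of the same conversion for a single 16-bit code, with the first 8 bits taken as the diastolic (BP_D) Gray code and the last 8 as the systolic (BP_S) Gray code; the helper names and the sample bits are made up for illustration.

# Standalone sketch (no class); the sample Gray code is illustrative only.
def gray_to_binary(gray_code):
    binary_code = []
    x_bit = 0
    for i, bit in enumerate(gray_code):
        x_bit = bit if i == 0 else x_bit ^ bit  # copy MSB, then XOR-chain
        binary_code.append(x_bit)
    return binary_code

def to_dec(bits):
    return int("".join(map(str, bits)), 2)

sample = [0, 1, 1, 0, 0, 1, 0, 1,    # Gray code of the diastolic value
          0, 1, 0, 0, 1, 1, 0, 1]    # Gray code of the systolic value

BP_D = to_dec(gray_to_binary(sample[:8]))   # -> 70
BP_S = to_dec(gray_to_binary(sample[8:]))   # -> 118
print(BP_D, BP_S)                           # 70 118, i.e. 118/70 mmHg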
==== Y values ====

BP_D_Y_dec
         [74, 92, 73, 65, 67, 71, 63, 73, 73, 64, 63, 73, 63, 73, 74, 83, 70, 70, 71, 75,
            78, 63, 73, 70, 80, 65, 64, 70, 88, 70, 73, 83, 74, 74, 70, 78, 92, 90, 77, 71,
            63, 73, 90, 85, 76, 70, 90, 74, 74, 85, 80, 71, 74, 67, 73, 65, 64, 75, 76, 92,
            83, 70, 59, 70, 70, 83, 92, 88, 88, 76, 70, 70, 59, 65, 71, 71, 80, 67, 70, 70,
            73, 59, 63, 70, 70, 76, 85, 92, 70, 64, 73, 67, 71, 71, 80, 73, 88, 78, 73, 70,
            76, 63, 85, 64, 70, 67, 85, 75, 63, 67, 70, 73, 67, 64, 63, 80, 70, 85, 70, 88, 
            73, 92, 71, 73, 63, 63, 70, 83, 77, 83, 64, 85, 63, 73, 78, 76, 83, 70, 70, 78, 
            76, 73, 59, 70, 92, 73, 65, 76, 90, 76, 88, 73, 63, 74, 70, 65, 65, 92, 59, 85, 
            64, 59, 59, 63, 59, 90, 78, 78, 67, 71, 92, 75, 67, 80, 70, 63, 73, 74, 64, 83, 
            76, 64, 92, 71, 70, 65, 70, 70, 80, 59, 85, 80, 63, 78, 73, 70, 77, 73, 80, 73, 
            90, 70, 77]

BP_S_Y_dec 
         [116, 132, 107, 118, 110, 115, 105, 125, 107, 108, 101, 121, 105, 125, 116, 134, 
            110, 131, 115, 127, 126, 105, 107, 131, 122, 118, 108, 131, 142, 98, 107, 134, 
            116, 116, 118, 126, 132, 124, 120, 115, 105, 125, 124, 124, 123, 110, 124, 116, 
            116, 124, 122, 115, 116, 110, 107, 118, 108, 127, 123, 132, 134, 110, 114, 131, 
            98, 134, 132, 142, 142, 123, 110, 131, 114, 118, 115, 115, 122, 110, 131, 110, 
            121, 114, 101, 98, 98, 123, 124, 132, 98, 108, 125, 110, 115, 115, 122, 125, 
            142, 126, 107, 131, 123, 101, 124, 108, 110, 110, 124, 127, 101, 110, 110, 125, 
            110, 108, 105, 122, 118, 124, 118, 142, 121, 132, 115, 121, 101, 101, 118, 134, 
            120, 134, 108, 124, 105, 125, 126, 123, 134, 118, 110, 126, 123, 121, 114, 118, 
            132, 107, 118, 123, 124, 123, 142, 125, 105, 116, 98, 118, 118, 132, 114, 124, 
            108, 114, 114, 101, 114, 124, 126, 126, 110, 115, 132, 127, 110, 122, 110, 101, 
            107, 116, 108, 134, 123, 108, 132, 115, 118, 118, 118, 110, 122, 114, 124, 122, 
            101, 126, 121, 131, 120, 125, 122, 125, 124, 110, 120]

==== Predicted values ====

BP_D_dec
         [73, 90, 73, 70, 63, 70, 63, 73, 73, 70, 63, 73, 70, 73, 73, 83, 70, 73, 71, 75, 
            78, 63, 70, 70, 78, 70, 64, 70, 88, 70, 73, 85, 73, 73, 71, 78, 92, 92, 76, 65, 
            63, 70, 90, 91, 67, 70, 92, 73, 73, 85, 80, 70, 73, 67, 70, 70, 64, 75, 67, 90, 
            85, 70, 59, 73, 70, 83, 92, 88, 88, 76, 70, 73, 59, 65, 71, 68, 70, 67, 70, 70, 
            73, 59, 51, 70, 70, 67, 85, 92, 70, 70, 73, 67, 68, 70, 80, 70, 88, 78, 73, 73, 
            76, 77, 92, 65, 70, 67, 85, 75, 63, 63, 63, 73, 67, 64, 63, 80, 71, 91, 70, 88, 
            73, 92, 70, 70, 76, 76, 64, 85, 77, 83, 64, 83, 63, 73, 73, 76, 85, 65, 70, 78, 
            67, 73, 59, 71, 92, 73, 73, 63, 92, 63, 88, 73, 63, 70, 70, 70, 65, 92, 59, 85, 
            64, 59, 56, 63, 59, 92, 73, 79, 67, 71, 92, 75, 76, 80, 70, 76, 70, 73, 64, 83, 
            67, 64, 92, 71, 70, 65, 70, 63, 80, 59, 85, 80, 48, 78, 59, 78, 63, 73, 80, 73, 
            92, 70, 76]

BP_S_dec
         [131, 124, 107, 118, 101, 118, 105, 131, 107, 114, 101, 121, 110, 124, 124, 134, 
            110, 124, 115, 127, 126, 105, 98, 131, 126, 118, 108, 131, 142, 98, 98, 124, 
            124, 124, 115, 126, 132, 132, 120, 118, 105, 131, 124, 124, 105, 110, 132, 124, 
            124, 124, 121, 118, 116, 101, 106, 118, 108, 127, 101, 124, 124, 110, 114, 124,
            98, 134, 132, 142, 142, 123, 110, 131, 114, 118, 115, 115, 131, 110, 131, 110, 
            121, 114, 101, 98, 98, 101, 124, 132, 98, 118, 125, 123, 115, 118, 122, 131, 142, 
            126, 107, 124, 123, 120, 123, 118, 110, 105, 124, 127, 101, 101, 105, 131, 106, 
            108, 105, 122, 118, 124, 118, 142, 121, 132, 118, 118, 123, 123, 115, 124, 120, 
            134, 108, 132, 105, 124, 125, 123, 124, 118, 110, 126, 105, 121, 114, 115, 132, 
            107, 121, 101, 132, 101, 142, 124, 105, 131, 98, 118, 118, 132, 114, 124, 108, 
            114, 118, 101, 114, 132, 131, 121, 110, 115, 132, 127, 123, 122, 110, 120, 98, 
            124, 108, 134, 101, 108, 132, 115, 118, 118, 118, 105, 121, 114, 124, 122, 102, 
            126, 114, 122, 102, 124, 122, 124, 132, 110, 123]
