Downloading and installing AutoML EfficientDet

!git clone --depth 1 https://github.com/google/automl

 

!cd /content/automl/efficientdet; pip install -r requirements.txt

 

!nvidia-smi

Thu Dec  9 11:13:02 2021       
+-----------------------------------------------------------------------------+
| NVIDIA-SMI 495.44       Driver Version: 460.32.03    CUDA Version: 11.2     |
|-------------------------------+----------------------+----------------------+
| GPU  Name        Persistence-M| Bus-Id        Disp.A | Volatile Uncorr. ECC |
| Fan  Temp  Perf  Pwr:Usage/Cap|         Memory-Usage | GPU-Util  Compute M. |
|                               |                      |               MIG M. |
|===============================+======================+======================|
|   0  Tesla K80           Off  | 00000000:00:04.0 Off |                    0 |
| N/A   49C    P8    29W / 149W |      0MiB / 11441MiB |      0%      Default |
|                               |                      |                  N/A |
+-------------------------------+----------------------+----------------------+
                                                                               
+-----------------------------------------------------------------------------+
| Processes:                                                                  |
|  GPU   GI   CI        PID   Type   Process name                  GPU Memory |
|        ID   ID                                                   Usage      |
|=============================================================================|
|  No running processes found                                                 |
+-----------------------------------------------------------------------------+

 

import os
import sys
import tensorflow.compat.v1 as tf

sys.path.append('/content/automl/efficientdet')

import hparams_config
from tf2 import anchors
from model_inspect import ModelInspector
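 

As a quick sanity check that the cloned modules load, we can pull up a default model configuration (hparams_config.get_efficientdet_config comes with the automl repo; the exact config fields may vary by version):

# Load the default EfficientDet-D0 configuration from the cloned repo.
config = hparams_config.get_efficientdet_config('efficientdet-d0')
print(config.image_size)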

Downloading the ESRI Object Detection Challenge dataset and examining the files

  • Annotations use the same XML format as PASCAL VOC (see the quick check after the download cell below)

!mkdir -p /content/poolncar
!wget https://github.com/chulminkw/DLCV/releases/download/1.0/swimming_pool_and_car.zip
!unzip swimming_pool_and_car.zip -d /content/poolncar > /dev/null 2>&1
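 

To confirm that the annotations really follow the PASCAL VOC XML layout, we can print the start of one label file (just a quick check; the path follows the extraction layout above):

# Quick peek at one annotation file to verify the PASCAL VOC-style structure.
with open('/content/poolncar/training_data/training_data/labels/000000000.xml') as f:
  print(f.read()[:800])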

 

import glob
import pandas as pd
import xml.etree.ElementTree as ET
import math

# In this dataset the two object classes are labeled simply '1' and '2'.
CLASSES = ['1', '2']

# Parse a single annotation XML file and return the object class names and bounding boxes.
def get_bboxes_from_xml(xml_file):
  # Parse the XML into an ElementTree and walk its object elements.
  tree = ET.parse(xml_file)
  root = tree.getroot()
  bboxes = []
  bbox_names = []

  # root.iter() always returns an iterator (never None), so no existence check is needed.
  for obj in root.iter('object'):
    bbox_name = obj.find('name').text
    if bbox_name not in CLASSES:
      continue

    xmlbox = obj.find('bndbox')
    # The coordinates may include decimal fractions; pixel coordinates are integers,
    # so convert with ceil, nudging each value up slightly.
    x1 = math.ceil(float(xmlbox.find('xmin').text))
    y1 = math.ceil(float(xmlbox.find('ymin').text))
    x2 = math.ceil(float(xmlbox.find('xmax').text))
    y2 = math.ceil(float(xmlbox.find('ymax').text))
    if x1 == x2 or y1 == y2:
      continue
    bboxes.append([x1, y1, x2, y2])
    bbox_names.append(bbox_name)

  return bbox_names, bboxes

bbox_names, bboxes = get_bboxes_from_xml('/content/poolncar/training_data/training_data/labels/000000000.xml')
print('class name per object:', bbox_names)
print('bbox list per object:', bboxes)

# class name per object: ['1', '1', '1', '1']
# bbox list per object: [[59, 153, 70, 164], [11, 206, 22, 217], [41, 0, 51, 4], [47, 42, 58, 53]]
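 

glob and pandas were imported above but not used yet; a natural follow-up is to aggregate every annotation into one DataFrame and save it as a CSV. The sketch below only assumes the cells above; the column names and output path are my own choice:

ANNO_DIR = '/content/poolncar/training_data/training_data/labels'

# Collect one record per bounding box across all annotation files.
records = []
for xml_file in sorted(glob.glob(os.path.join(ANNO_DIR, '*.xml'))):
  names, boxes = get_bboxes_from_xml(xml_file)
  for name, (x1, y1, x2, y2) in zip(names, boxes):
    records.append({'file': os.path.basename(xml_file), 'class': name,
                    'x1': x1, 'y1': y1, 'x2': x2, 'y2': y2})

anno_df = pd.DataFrame(records)
anno_df.to_csv('/content/poolncar/annotations.csv', index=False)
anno_df.head()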

 

import matplotlib.pyplot as plt
import cv2

img_rgb = cv2.cvtColor(cv2.imread('/content/poolncar/training_data/training_data/images/000000002.jpg'), cv2.COLOR_BGR2RGB)

bbox_names, bboxes = get_bboxes_from_xml('/content/poolncar/training_data/training_data/labels/000000002.xml')
for box_name, box in zip(bbox_names, bboxes):
  cv2.rectangle(img_rgb, (box[0], box[1]), (box[2], box[3]), color=(0, 255, 0), thickness=1)
  cv2.putText(img_rgb, box_name, (int(box[0]), int(box[1] - 7)), cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 1)

plt.figure(figsize=(12, 12))
plt.imshow(img_rgb)

 

import os
ANNO_DIR = '/content/poolncar/training_data/training_data/labels'
file_list = os.listdir(ANNO_DIR)
xml_files = sorted([file for file in file_list if file.endswith('.xml')])
print('xml file list:', xml_files)
print('number of xml files:', len(xml_files))

# xml file list: ['000000000.xml', '000000001.xml', '000000002.xml', ...]  (output truncated; the files continue sequentially through at least '000003551.xml')
'000003552.xml', '000003553.xml', '000003554.xml', '000003555.xml', '000003556.xml', '000003557.xml', '000003558.xml', '000003559.xml', '000003560.xml', '000003561.xml', '000003562.xml', '000003563.xml', '000003564.xml', '000003565.xml', '000003566.xml', '000003567.xml', '000003568.xml', '000003569.xml', '000003570.xml', '000003571.xml', '000003572.xml', '000003573.xml', '000003574.xml', '000003575.xml', '000003576.xml', '000003577.xml', '000003578.xml', '000003579.xml', '000003580.xml', '000003581.xml', '000003582.xml', '000003583.xml', '000003584.xml', '000003585.xml', '000003586.xml', '000003587.xml', '000003588.xml', '000003589.xml', '000003590.xml', '000003591.xml', '000003592.xml', '000003593.xml', '000003594.xml', '000003595.xml', '000003596.xml', '000003597.xml', '000003598.xml', '000003599.xml', '000003600.xml', '000003601.xml', '000003602.xml', '000003603.xml', '000003604.xml', '000003605.xml', '000003606.xml', '000003607.xml', '000003608.xml', '000003609.xml', '000003610.xml', '000003611.xml', '000003612.xml', '000003613.xml', '000003614.xml', '000003615.xml', '000003616.xml', '000003617.xml', '000003618.xml', '000003619.xml', '000003620.xml', '000003621.xml', '000003622.xml', '000003623.xml', '000003624.xml', '000003625.xml', '000003626.xml', '000003627.xml', '000003628.xml', '000003629.xml', '000003630.xml', '000003631.xml', '000003632.xml', '000003633.xml', '000003634.xml', '000003635.xml', '000003636.xml', '000003637.xml', '000003638.xml', '000003639.xml', '000003640.xml', '000003641.xml', '000003642.xml', '000003643.xml', '000003644.xml', '000003645.xml', '000003646.xml', '000003647.xml', '000003648.xml', '000003649.xml', '000003650.xml', '000003651.xml', '000003652.xml', '000003653.xml', '000003654.xml', '000003655.xml', '000003656.xml', '000003657.xml', '000003658.xml', '000003659.xml', '000003660.xml', '000003661.xml', '000003662.xml', '000003663.xml', '000003664.xml', '000003665.xml', '000003666.xml', '000003667.xml', '000003668.xml', '000003669.xml', '000003670.xml', '000003671.xml', '000003672.xml', '000003673.xml', '000003674.xml', '000003675.xml', '000003676.xml', '000003677.xml', '000003678.xml', '000003679.xml', '000003680.xml', '000003681.xml', '000003682.xml', '000003683.xml', '000003684.xml', '000003685.xml', '000003686.xml', '000003687.xml', '000003688.xml', '000003689.xml', '000003690.xml', '000003691.xml', '000003692.xml', '000003693.xml', '000003694.xml', '000003695.xml', '000003696.xml', '000003697.xml', '000003698.xml', '000003699.xml', '000003700.xml', '000003701.xml', '000003702.xml', '000003703.xml', '000003704.xml', '000003705.xml', '000003706.xml', '000003707.xml', '000003708.xml', '000003709.xml', '000003710.xml', '000003711.xml', '000003712.xml', '000003713.xml', '000003714.xml', '000003715.xml', '000003716.xml', '000003717.xml', '000003718.xml', '000003719.xml', '000003720.xml', '000003721.xml', '000003722.xml', '000003723.xml', '000003724.xml', '000003725.xml', '000003726.xml', '000003727.xml', '000003728.xml', '000003729.xml', '000003730.xml', '000003731.xml', '000003732.xml', '000003733.xml', '000003734.xml', '000003735.xml', '000003736.xml', '000003737.xml', '000003738.xml', '000003739.xml', '000003740.xml', '000003741.xml', '000003742.xml', '000003743.xml', '000003744.xml', '000003745.xml', '000003746.xml', '000003747.xml']
# number of xml files: 3748

Create a function that converts each XML annotation and its matching image into a tf.train.Example.

import xml.etree.ElementTree as ET

# Parse an xml file and return the image and object metadata as a dict.
def get_anno_dict_from_xml(xml_filepath):
  tree = ET.parse(xml_filepath)
  root = tree.getroot()
  objects = []

  filename = root.find('filename').text
  size = root.find('size')
  width = int(size.find('width').text)
  height = int(size.find('height').text)

  # Find every object Element in the file.
  for obj in root.findall('object'):
    name = obj.find('name').text
    pose = 'Unspecified'
    # truncated, difficult and occluded are absent from this dataset's XML, so defaults
    # are set for compatibility with create_pascal_tfrecord.py.
    truncated = 0
    difficult = 0
    occluded = 0

    #pose = obj.find('pose').text
    #truncated = int(obj.find('truncated').text)
    #difficult = int(obj.find('difficult').text)
    #occluded = int(obj.find('occluded').text)

    xmlbox = obj.find('bndbox')
    # Coordinates can be fractional; pixel units are integers, so convert with ceil (shifting up slightly).
    xmin = math.ceil(float(xmlbox.find('xmin').text))
    ymin = math.ceil(float(xmlbox.find('ymin').text))
    xmax = math.ceil(float(xmlbox.find('xmax').text))
    ymax = math.ceil(float(xmlbox.find('ymax').text))
    bbox = {
        'xmin': xmin, 
        'ymin': ymin, 
        'xmax': xmax, 
        'ymax': ymax
        }
    
    single_obj = {'name':name, 'pose':pose, 'truncated':truncated, 'difficult':difficult, 'occluded':occluded,
              'bndbox':bbox}
    objects.append(single_obj)

  anno_dict = {
      'folder':'training_data', 'filename':filename, 'width':width, 'height':height,
      'object':objects
  }

  return anno_dict

 

anno_dict = get_anno_dict_from_xml('/content/poolncar/training_data/training_data/labels/000000000.xml')
anno_dict

{'filename': '000000000.jpg',
 'folder': 'training_data',
 'height': 224,
 'object': [{'bndbox': {'xmax': 70, 'xmin': 59, 'ymax': 164, 'ymin': 153},
   'difficult': 0,
   'name': '1',
   'occluded': 0,
   'pose': 'Unspecified',
   'truncated': 0},
  {'bndbox': {'xmax': 22, 'xmin': 11, 'ymax': 217, 'ymin': 206},
   'difficult': 0,
   'name': '1',
   'occluded': 0,
   'pose': 'Unspecified',
   'truncated': 0},
  {'bndbox': {'xmax': 51, 'xmin': 41, 'ymax': 4, 'ymin': 0},
   'difficult': 0,
   'name': '1',
   'occluded': 0,
   'pose': 'Unspecified',
   'truncated': 0},
  {'bndbox': {'xmax': 58, 'xmin': 47, 'ymax': 53, 'ymin': 42},
   'difficult': 0,
   'name': '1',
   'occluded': 0,
   'pose': 'Unspecified',
   'truncated': 0}],
 'width': 224}
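
As an optional sanity check (a sketch, not part of the original flow), every label XML should have a matching image file; the .jpg extension is assumed from the filename seen in the output above.

import os

labels_dir = '/content/poolncar/training_data/training_data/labels'
images_dir = '/content/poolncar/training_data/training_data/images'

# Collect label files whose matching .jpg image does not exist.
missing = [f for f in os.listdir(labels_dir)
           if not os.path.exists(os.path.join(images_dir, f.replace('.xml', '.jpg')))]
print('labels without a matching image:', len(missing))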

 

# Assign unique ids to images and objects.
class Unique_Id(object):
  
  def __init__(self):
    self.image_id = 0
    self.ann_id = 0

  def get_image_id(self):
    self.image_id += 1
    return self.image_id

  def get_ann_id(self):
    self.ann_id += 1
    return self.ann_id
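
A quick sketch (not in the original flow) showing the behavior: ids are 1-based, and the two counters increment independently.

uid = Unique_Id()
print(uid.get_image_id(), uid.get_image_id())  # 1 2
print(uid.get_ann_id())                        # 1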

 

# Adapted from https://github.com/google/automl/blob/master/efficientdet/dataset/create_pascal_tfrecord.py

import hashlib
import io
import json
import os
import PIL.Image
import tensorflow as tf
from dataset import tfrecord_util

# Build a tf.train.Example from one image file PATH and one annotation XML parsed into a data dict.
def dict_to_tf_example(data, image_path, label_map_dict, unique_id, ignore_difficult_instances=False, ann_json_dict=None, debug=True):
  '''
    data: anno_dict produced by parsing one xml file into a dictionary
    image_path: absolute path of the image file mapped to that xml
    unique_id: Unique_Id object used to generate unique image and object ids
  '''
  
  # Read the JPEG image as raw binary.
  with tf.io.gfile.GFile(image_path, 'rb') as fid:
    encoded_jpg = fid.read()
  
  # Verify that the image is actually a JPEG.
  encoded_jpg_io = io.BytesIO(encoded_jpg)
  image = PIL.Image.open(encoded_jpg_io)
  if image.format != 'JPEG':
    raise ValueError('Image format not JPEG')
  
  # Create a unique, content-based key for the image.
  key = hashlib.sha256(encoded_jpg).hexdigest()

  # Generate a unique image id.
  image_id = unique_id.get_image_id()
  # Fetch the image width and height.
  width = data['width']
  height = data['height']
 
  xmins, ymins, xmaxes, ymaxes = [], [], [], []
  areas, classes, classes_texts = [], [], []
  truncated, poses, difficult_obj = [], [], []
  # If the annotation holds one or more objects,
  if 'object' in data:
    # data['object'] (from anno_dict) is a list of per-object annotation dicts; iterate over it.
    for obj in data['object']:
      difficult = bool(int(obj['difficult']))
      if ignore_difficult_instances and difficult:
        continue
      # Skip objects whose class name is not '1' or '2'.
      if obj['name'] not in label_map_dict:
        continue

      difficult_obj.append(int(difficult))
      # Normalize each coordinate by the image size and collect it in a list.
      xmins.append(float(obj['bndbox']['xmin']) / width)
      ymins.append(float(obj['bndbox']['ymin']) / height)
      xmaxes.append(float(obj['bndbox']['xmax']) / width)
      ymaxes.append(float(obj['bndbox']['ymax']) / height)
      areas.append((xmaxes[-1] - xmins[-1]) * (ymaxes[-1] - ymins[-1]))
      # Collect the class name and class id.
      classes_texts.append(obj['name'].encode('utf8'))
      classes.append(label_map_dict[obj['name']])
      # truncated and poses are unused but stored for compatibility with
      # create_pascal_tfrecord.py (difficult was already appended above, so it
      # must not be appended a second time here).
      truncated.append(int(obj['truncated']))
      poses.append(obj['pose'].encode('utf8'))

  example_dict = {'height':height, 'width':width, 'filename':data['filename'].encode('utf8'),
                  'source_id': str(image_id).encode('utf8'), 'key_sha256': key.encode('utf8'),
                  'encoded': encoded_jpg, 'format':'jpeg'.encode('utf8'),
                  'xmin':xmins, 'xmax':xmaxes, 'ymin':ymins, 'ymax':ymaxes,
                  'area':areas, 'class_text':classes_texts, 'class_label':classes,
                  'difficult':difficult_obj, 'truncated':truncated, 'poses':poses}
  if debug:
    print('example_dict:', example_dict)

  example = make_tfrecord_example(example_dict)

  return example

 


# Build a tf.train.Example from the dict passed in.
def make_tfrecord_example(example_dict):
  example = tf.train.Example(
        features=tf.train.Features(
            feature={
                'image/height': tfrecord_util.int64_feature(example_dict['height']),
                'image/width': tfrecord_util.int64_feature(example_dict['width']),
                'image/filename': tfrecord_util.bytes_feature(example_dict['filename']),
                'image/source_id': tfrecord_util.bytes_feature(example_dict['source_id']),
                'image/key/sha256': tfrecord_util.bytes_feature(example_dict['key_sha256']),
                'image/encoded': tfrecord_util.bytes_feature(example_dict['encoded']),
                'image/format': tfrecord_util.bytes_feature(example_dict['format']),
                'image/object/bbox/xmin': tfrecord_util.float_list_feature(example_dict['xmin']),
                'image/object/bbox/xmax': tfrecord_util.float_list_feature(example_dict['xmax']),
                'image/object/bbox/ymin': tfrecord_util.float_list_feature(example_dict['ymin']),
                'image/object/bbox/ymax': tfrecord_util.float_list_feature(example_dict['ymax']),
                'image/object/area': tfrecord_util.float_list_feature(example_dict['area']),
                'image/object/class/text': tfrecord_util.bytes_list_feature(example_dict['class_text']),
                'image/object/class/label': tfrecord_util.int64_list_feature(example_dict['class_label']),
                'image/object/difficult': tfrecord_util.int64_list_feature(example_dict['difficult']),
                'image/object/truncated': tfrecord_util.int64_list_feature(example_dict['truncated']),
                'image/object/view': tfrecord_util.bytes_list_feature(example_dict['poses']),
            }))
  
  return example
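
To check that an Example round-trips, it can be parsed back with the same feature keys. Below is a minimal sketch using TF2's tf.io API that decodes only a subset of the features; feature_spec and parse_example are illustrative names, not from the automl repo.

# Variable-length per-object lists are read back with VarLenFeature.
feature_spec = {
    'image/height': tf.io.FixedLenFeature([], tf.int64),
    'image/width': tf.io.FixedLenFeature([], tf.int64),
    'image/encoded': tf.io.FixedLenFeature([], tf.string),
    'image/object/bbox/xmin': tf.io.VarLenFeature(tf.float32),
    'image/object/class/label': tf.io.VarLenFeature(tf.int64),
}

def parse_example(serialized):
  return tf.io.parse_single_example(serialized, feature_spec)

# e.g. parse_example(example.SerializeToString()) once an example has been built below.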

 

# Class id 0 is reserved for the background class in the EfficientDet pipeline, so ids start at 1.
label_map_dict = {
    '1': 1,
    '2': 2
}

# Build a single Example as a test.
unique_id = Unique_Id()
data = get_anno_dict_from_xml('/content/poolncar/training_data/training_data/labels/000000000.xml')
print('## xml file converted to a data dict:', data)
image_path = '/content/poolncar/training_data/training_data/images/000000000.jpg'

example = dict_to_tf_example(data, image_path, label_map_dict, unique_id, ignore_difficult_instances=False, ann_json_dict=None)

## xml file converted to a data dict: {'folder': 'training_data', 'filename': '000000000.jpg', 'width': 224, 'height': 224, 'object': [{'name': '1', 'pose': 'Unspecified', 'truncated': 0, 'difficult': 0, 'occluded': 0, 'bndbox': {'xmin': 59, 'ymin': 153, 'xmax': 70, 'ymax': 164}}, {'name': '1', 'pose': 'Unspecified', 'truncated': 0, 'difficult': 0, 'occluded': 0, 'bndbox': {'xmin': 11, 'ymin': 206, 'xmax': 22, 'ymax': 217}}, {'name': '1', 'pose': 'Unspecified', 'truncated': 0, 'difficult': 0, 'occluded': 0, 'bndbox': {'xmin': 41, 'ymin': 0, 'xmax': 51, 'ymax': 4}}, {'name': '1', 'pose': 'Unspecified', 'truncated': 0, 'difficult': 0, 'occluded': 0, 'bndbox': {'xmin': 47, 'ymin': 42, 'xmax': 58, 'ymax': 53}}]}
example_dict: {'height': 224, 'width': 224, 'filename': b'000000000.jpg', 'source_id': b'1', 'key_sha256': b'5ec019893b646aa03ec7cacb33b9038e4cc0c41848c6ea056074c1ae29c79bc4', 'encoded': b'\xff\xd8\xff\xe0... (raw JPEG bytes omitted)', 'format': b'jpeg', 'xmin': [0.26339285714285715, 0.049107142857142856, 0.18303571428571427, 0.20982142857142858], 'xmax': [0.3125, 0.09821428571428571, 0.22767857142857142, 0.25892857142857145], 'ymin': [0.6830357142857143, 0.9196428571428571, 0.0, 0.1875], 'ymax': [0.7321428571428571, 0.96875, 0.017857142857142856, 0.23660714285714285], 'area': [0.002411511479591833, 0.002411511479591839, 0.0007971938775510205, 0.0024115114795918375], 'class_text': [b'1', b'1', b'1', b'1'], 'class_label': [1, 1, 1, 1], 'difficult': [0, 0, 0, 0], 'truncated': [0, 0, 0, 0], 'poses': [b'Unspecified', b'Unspecified', b'Unspecified', b'Unspecified']}
\xb0\xb0\xee=pB4\tm+\xfe\xd8\xce\x97\x01:\x82\xaa\x96\x0c\xea\xa9r\x9c\xc2\x99*\xa8\'ui#qu`\xb7;\x8f\xf5\x15\xfbb\x9a\xc9\xba\x0b<\xca\x7fiJ\xa9\xeb:y\xe7\xe9v\x9af\x8c\x1a}T\xcb\x1bDJiR,H6\x16\x1b\xdca\xc3\xd8\x87P\xe7\xb9\xf7\xbe\xe7\x19\xec6ij\xe4\xa7\x820\xa4$h\x96\x1aV\xfb\x9f0k\x9f\\[PU,h\x02\xa0S\xbf\x96\xdb\xe2\x08\xe6xn,k\xc6\xa7\xb3\x9a\xff\x00j\xda\xfe\x9cl\xbb-\xa3\xc8TQ\xe6\xd45\x84\xe8X<?\rt\xeel\xbb\x83}6\xb8\x1d\xf1\xcf\x92TOW_%\\\xc6V\x92i\x0b\xbb\xb1\xf31&\xe4\x9f\x8d\xf1`{y\xcc\xa4\xa8\xf6\x97\x9fE2\xca@\xa9o#\xb0kl\x00\xf5\x1e\x87\x15\xbd2\x9fw\xf1A>&\xb2\xa3O|z\xf8\x7f\x14\xc8f\x92t\x8e\xac\xe8G\x92>\x86\xcah\xe2\x9e(\xd2ZQ8U{\xb5\x8b\x1b\x93\xf5\xbe\x1e\xf2\x1a*h\xa9\x88\x92[3\xf7sn1\xce\x9e\xc6hk:\x97\xda\x05\x14u\x15\xb5\x0f\r\x14h\xd2EN\xdaQ"A\xe5\x04\x0b\x0f\xc4@\xb0\x1c\xb18\xea%\xa0\xa3\xa2@\xd0\xd2D\x18\r\x9aV\xd4o\xf3\xc7\x97\x9b\x1fl\xaa\xca\xa1+D&\xac\x8d*\xbc:x^f\xb5\xac\x89\x8dy\xa4OSN\xc9=/\x86\n\x9b\x16"\xe2\xe3\xd3\x18\xd4\xd6\xd4\\T{\xc0P\xb7\x1ab\x1b\xf3\xf0\xc0\x8c\xe6\xaef\x0c\xab\x0c\xac\x8e.\x18\x8e\xfe\xa7\x13\xb8\x8eL\xa1z\xde\x968\xba\x8a\xb2\n\x98\xbc\x8c\xba\xd4\xdf\xe4O\xe6H\xfaaF\xab%\x11\xce\xcb\xe1CQ\x1c\xf7\x02\xd7\x05o\xfd\xf3\x8b\x17\xda\x853I\x1cU\x8e\xba\xc8&6\xb8\xf9\x91\xfa\x9f\xa6*\xc4\xcf\x9a\x8a\xa6\xa9f&\xc1\x15!D7blI\xdb\xb0\xdf\x1b\x85\xbe\x05\xe5U \x8a\xd2S\xe5\xe8|i\x8a\xb2\x80nN\xcb\xb7\'\xfb\xed\x8d9\xc5L\x19\xafHR{\xae\xb8\xfc\n\xc9\x87\x88G\x95\x86\x88\xb8\x17\xbe\xd8\xd3\xd3\xdd-\xd4\xbdw\x98\x05X\x82S\x86\xdd\xcf\x964\x1e\xa4\xf78\xe8\x8e\x99\xf6}\xd3\xbd;\x94\xd2e\x95\x14p\xd7\xb4%\xa4f\x91vwk\\\xdb\xbf\x00}1\xd3j\x1c\xf2o\x14[\x7f\xa2\xb9\xfd\x9d\xa9&\xcb\xf2\x8c\xda\xa6\xa6\t"Z\x99S\xc2\x95\xe3\xb7\x8a\x00<\x1e-s\x8b\x0eTI\x8e\xa9gH\xd7\xf9Am\xfe\xd8\xc7\xac\xfa\xb3%\xa5\x91(a\x929*We\xb1\x1e\x1c"\xdbk\xdf\x8e8\xe3\x9cV\xad\x9c\xcb7QTT:\xb9*\x02H\x18\x04Xm\xb6\xe4\xec>\xf8\x9d\xc2SnE\xb1\xc9\x18\xc6\x86\n\xda\x1fv\xcee\x95[\xcb/\x17\xe6\xfe\xb8\'A,0B\xd3I!\x8d\x14l\xdc_\xfb\xf4\xc0j\x8e\xa0\xb4\xb2+,l\xd1\x92\xad \x07O\xa1\xb5\xf7&\xe3\x9d\xbe\xb8X\xcf:\x95\x1eB\xba\xb51\xd8/\xa7\xd3\r\x8e\'.E\xcb-p5\xe7\xdda\x12^\x1a\x1b\xc5u\xd2\xd3\x1f\xc4\xd7\xf4\xf4\xfe\xb8R\x96\xb5\xeaCI\xe2\x16\xdf{\xe0\rE@\x9cx\xb2?\x97\xf1*\xe2]\x1ddEv \x85[0\xc5q\xc4\xa2\xb4O,\x8d\x98\xd4\xcf\xe14\x84\xc8\t\xe4w\xc0\x9a\xca\xcdlY\xc8\xf4\x03\xd3\x1e\xe7A\xd2o\xf9}\xe3c\xb1\xec1\x16\x9e\x94\x92\x19\x8e\xa6\xe7\x14B\x0b\x91-\x9f)\x92s\xb5\xd1}=pB\x8e\x97aa\x8c\xe0\xa7\x00\x06&\xc3\xe5\x89\xa8\xc9\x12\x82\x08\x04cw\xe8\x07\xb1\xc4\x14o\xb6=(\\l,>8\xf4\xb6\xbd\xc5\xf6\xe0\x01\x89\x14\xf4\xa5\xfc\xd2\x9f\xfbF\x02\xd7\'\x1a \xa6i\x1a\xea\xbfS\xc6\t\xd2\xd3,{\x9d\xcf\xae7E\x18\xd8\x01a\xf2\xc4.\xa0\xcc\xd3*\xa0yQ\x04\x92\xda\xea\x96\xbd\xf7\x00\x9f\xcf\x02\xdbtw\x04\xca\xd9=\xd2\x8ei\xc2\x07\xf0\xd0\xb8_[\x03\x85\xa6\xeb\xaah\xa3E\xf7G\x92r\xcc\x1d\x14\xf0\x07\x18\x01\x9au\rK\xd6\xd5O$\x8f\x12\xd4B\xa28\xc1\xbf\x86\xc3I;|w\xc4\\\xa3!\xa9\xae\xcf\xa2W%\x12FR\xd2I\xc0\xbf\xaf\xd4\x81\x86\xa8kf\x1c\x9d\xe8\xff\xd9', 'format': b'jpeg', 'xmin': [0.26339285714285715, 0.049107142857142856, 0.18303571428571427, 0.20982142857142858], 'xmax': [0.3125, 0.09821428571428571, 0.22767857142857142, 0.25892857142857145], 'ymin': [0.6830357142857143, 0.9196428571428571, 0.0, 0.1875], 'ymax': [0.7321428571428571, 0.96875, 0.017857142857142856, 0.23660714285714285], 'area': [0.002411511479591833, 0.002411511479591839, 0.0007971938775510205, 0.0024115114795918375], 'class_text': [b'1', b'1', b'1', b'1'], 'class_label': [1, 1, 1, 1], 'difficult': [0, 0, 0, 0, 0, 0, 
0, 0], 'truncated': [0, 0, 0, 0], 'poses': [b'Unspecified', b'Unspecified', b'Unspecified', b'Unspecified']}
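
These normalized coordinates, class labels, and flags map one-to-one onto the TFRecord features printed below. As an illustration, here is a minimal sketch of packing such a dict into a tf.train.Example; the make_example helper and the 'image' key for the encoded bytes are assumptions for illustration, not automl's actual converter code:

import tensorflow as tf

def _bytes_list(values):
    # wrap a list of byte strings in a protobuf BytesList feature
    return tf.train.Feature(bytes_list=tf.train.BytesList(value=values))

def _float_list(values):
    return tf.train.Feature(float_list=tf.train.FloatList(value=values))

def _int64_list(values):
    return tf.train.Feature(int64_list=tf.train.Int64List(value=values))

def make_example(data):  # hypothetical helper; data is a dict like the one above
    feature = {
        'image/encoded': _bytes_list([data['image']]),   # 'image' key name is assumed
        'image/format': _bytes_list([data['format']]),
        'image/object/bbox/xmin': _float_list(data['xmin']),
        'image/object/bbox/xmax': _float_list(data['xmax']),
        'image/object/bbox/ymin': _float_list(data['ymin']),
        'image/object/bbox/ymax': _float_list(data['ymax']),
        'image/object/class/text': _bytes_list(data['class_text']),
        'image/object/class/label': _int64_list(data['class_label']),
    }
    return tf.train.Example(features=tf.train.Features(feature=feature))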

 

example

features {
  feature {
    key: "image/encoded"
    value {
      bytes_list {
        value: "\377\330\377\340\000\020JFIF\000\001\001\000\000\001\000\001\000\000\377\333\000C\000\005\003\004\004\004\003\005\004\004\004\005\005\005\006\007\014\010\007\007\007\007\017\013\013\t\014\021\017\022\022\021\017\021\021\023\026\034\027\023\024\032\025\021\021\030!\030\032\035\035\037\037\037\023\027\"$\"\036$\034\036\037\036\377\333\000C\001\005\005\005\007\006\007\016\010\010\016\036\024\021\024\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\036\377\300\000\021\010\000\340\000\340\003\001\"\000\002\021\001\003\021\001\377\304\000\035\000\000\002\002\003\001\001\001\000\000\000\000\000\000\000\000\000\005\006\004\007\002\003\010\001\000\t\377\304\000@\020\000\002\001\002\005\002\003\006\003\006\004\006\002\003\000\000\001\002\003\004\021\000\005\022!1\006A\023\"Q\007\024aq\201\2212\241\301\010\025#B\261\3603Rr\321\026$b\202\341\361\222\242\027\262\302\377\304\000\031\001\000\003\001\001\001\000\000\000\000\000\000\000\000\000\000\000\001\003\004\002\000\005\377\304\000\'\021\000\002\002\002\001\004\002\003\001\000\003\000\000\000\000\000\000\001\002\021\003!1\004\022AQ\023\"2aq3\024B\201\377\332\000\014\003\001\000\002\021\003\021\000?\000\266j\326*\272\226\222\225\220\305b\244\025\336\377\000\014y\322\316\324y\253\307+\221q\244\003\275\367\333\177\246\004\345uQ\3173\231\226]h\001K\0226\357\267\246\016\323\212F\314#h\242,|2A\027&\343\377\000x\363\227\243\245c\"Y\345%\236\373\3541\246\275\224\345U\220\202.\361\260\000\016\366\333\363\003\030\301\2469\364\350\261\357s\211O\340\225p\352\010#\374\2706r\024zr\261\341\313\353\250j\341\242\236\216\272\023\rLSJ\016\245 \202,\246\343\223\202\371m\016C\016EKA\004oKEIow\202\226\246tX\371\342\356.nI\271\276\022\215\251\347x\334\235\230\251\363Z\326\371a\212\212x\r*\250[\355\374\333\343qmi\031\223\321>\2231\242\312\n\345y\004&\2269\334\3368BF\245\330[Q\001wn7\'\2669c\366\201y:\227\256*\022\2529a\251\246U\216Q,zI7:v\007\200\245E\373\343\244\252\245\235\247\210\301JN\207\014t\001\300<q\216\177\366\343\224=/X\270\231e\207\307MbRN\266\001\230j\371m\307\330`\246\343+7\035\304\2423\254\271\251\246F\324\002\013^\346\305x\344v\347\362\306\312\014\345\241`\361\276\211\002\237;\213\372\033\017\260\301\254\376L\261(\352\241%\031\364\010\322\334\336\344\334\375\206\022\264\331\254{m\213\341\367\216\305=;GPt\026b\271\244\024\225\360MJ\357\020\325[4\326\361&\274(\3020@\n\000\267\246\301O;\234Y\201\314\220$\206!\241\316\223c\306\333~\270\347\257c\375-\236\347\271\030\254\244\234\230\250d\022\2457\212P\314w\362\336\376[\205\265\300\3751|\364\3556`\231z\323L\'\tO\025\200\221Aw 
\354K\337}\257}\261\344\365\020\214g\246]\323\311\270l\235K\014q\263\251\224\"\205!\200\271\260\347\357\215K\t\367\317\010\033#\\]\275H\342\376\233zbcG\016\2437\212M\200\324,,\017{\375\361\212\322\353\231\243\2241k\\\022\307\324}\355\204\2166EK\034\215fc\345FG\336\366\330\332\337\226!\257\205\246K\\\206`\212\254\233\003\310\330\374\27513F\271\244\360\013\204\216Ak\r:\255n7\372}\261\0004\r;\325\250\r\000abI\326\240\237\210\033\355\361\357\216\263\2219Ud\255G\224\2417:\021\322\303I\277\006\337\021\366\305}\327\324y\222gb\256J\351\022\2025O\016\2269\302\351{Z\373a\313>\243\367\232\212e\246\314^\235(\344Y\244\014\276Y\000\007c~7\376\316\023\375\247\301_\231TQU\320D\325rA%\364[\312\306\373_sq`-\305\257\361\306\243\311\231p#\365\362{\231\242mLj\3740\354\366%\242px\325\336\366\276\340\333\r\335/\\d\351\305\253\257\254V\212Yt\336e\271[\216\307\342o\276$,\271]D\024T\275I=%U\\\212\034\306\003Z=\211\261\261\267q\366\370\341\202\272\2169h4S$(\262\2511\224Qe&\372m~9\306\234\264\220\024wf\316\211\250l\352Y\251\265G\024\320\262\323J\344\006f]\357`8\324\r\201\302\327\266n\213\352\014\357\250!\241\312s\232\034\256\206k\264\362\315Q\240\263\023m+\032\335\332\304\023\351\277\303\004\272}+)3\\\322ZPa\226\242\221^:\201\272\211S\312\272\201\347\371ok\367\301\276\266\201\221\362<\326\256\177\025\226\355;\250\262\211\031T\000\237\013\206\333\376\2546\022qv\204J7\246V\371\'\262\314\207\244\352\342\254\250\257\255\315\353\2006fu\247\247\004\251\004\025\027v\004\023q\250`\205j\030(\204rH\322G\021\320\025\032\312\000\340X|-\353\202\331\376iI\016\221\034\201\346\220\217*\215[\033\200\307\341\261\027\300J\247\220\321<\250\202FE\326\310;i\344\355\336\307\362\303T\247\223\223\017\266;B\237[\346\265Y\026OO_CA\000g\220\306\315(\276\235\211\033_~;\342\255\256\317\363z\300\321\317\230\315\3413\0261\206\322\267$\223\260\370\234<{j\257\234M\036I)\263\303)bB\3308\000\200G\250\337\276*\315\304\205\232\366\'\026\340\202Q\266M\222[%;\2277$\222y\307\335\265\266\313\333\324\343\037*\255\311\337\260?\256<U\222R\024\002\305\266\265\277\246)\026\216\332\213.5\020G4u\276\t;\020\240\336\330\233\323\231mE=R,\365E\003\022\240\273\333\2676\276!\345b\267T\225\r%\242h\305\224\001\351\177^p\026\272\213:j\341V#`\024\215\000\265\211\276<\030\177Kd]\324\2640\3118+*\267\227\221\275\361=(\333\1772\201\352F\027\272ZJ\246\245\201\252\217\204Y\000\322;z\357\206\024\220\254\232[\314\017&\374`\362d\253\372\2328i\372\222x^\010\245c&\255N\355\346\276\374\002-\316%PUK\020\227\302T;\200\002\306\266Qn\304\213\376x\333\327\271{\177\305\236*\306\315\033@\033\233\000nF\347\351\201\224R\010\347\220x\364\341\234\\.\275_\376\267\303S1\"U}faQNS\304\234#mbI\030\253?i\254\272\2730\351\354\232\276*Cjr\310\325*\367r\314\252B\351\260\337\310}v\007\026&c]JT\211*&\333\220\211\267\346F+\277h9\243\277N=\0345Q\003\357\005\343\004\371\224\225`\005\257\345>k\337\035\334\324\2235\025\243\230*\022\257\336\244\206\241$Vb.\256,o\266\366<\035\361\205l\"\025X\001B\341\256t\357\270\330\213\341\241\251L5\336\361%3SJI_\342\265\206\255\256Wc\375\367\306\350:rSQ=?\200\027\300\276\271\013l-{\330\021\317`\177L]\362\244\"\213w\366k\250\206\243\"U\255v\367\257{x\325P\266\273\025\036a\330(\270\0373\365\305\326\364\342\223\304\225\356T\206\324\307\220;v\266(\317`tU4\331\324\254\3241\241\211\221\204\321\334\237\014\371m\315\205\256\r\375\001\305\371\234\2725,\202\332\231\226\307\260\033\\\377\000Ly\035K\274\255\243\324\351\267\214X\313\352\265$\256Bj\007H\017\250nmn-\275\366\304\355\n\002C+(\224_e\270$\372_\327\020r\245\245\215\3463\220\210V\366w\272\263\033\333no\307\313\004\2227j\260\342U 
\020@M\355\273\013\222{\333\364\306\006\031\322\266\266\274K\242\340\206\323v\027\337\236m\201]G\256\231b\2168\306\227\270`\244(\005I\027\260\357\270\373\342LsJ*\204\2603\370\312\3066G\007\360\335v\365\365?\014g\233\204\254\247x\331A\221\037s\312\331\205\271\377\000\343\200\200\r\315\313\324\'\217\242I`*7?\314wR\017\324b\026U\013\313C#17\027\037\213N\237O\247\030)\224\254L\0369\321\231\341\223A\363\235\201\334\333\327p~\370\331)\246\212&\216\025E\032n\010$\330\235\254w\365\307_\200\212q\364\257O\311]-d\341Z\245\2138a!\373\000=\017\365\304\212\332f\251\312%\240\202g\246q\245cu\270 \013\357\277k[\373\343\014\3331\243\247\255D\252\2215\310lR2I\013\266\346\303\004\251\247X*\331\243\2226\001t\204`Im\206\327\277\367lj\330)\013\2034\254\311*r\321Y\"T\370r\252\310\n\356nB\366\037\021\366\303\257QS.o\320\265H\221J\3654Le\201T\033\227A\251m\353\266\330\001\232!\250/R\244M\033.\242\273\202,/k\001\353o\\;eI!\206\232\242\206\231\014\025\364\301\364\352\261W\334\351\002\334\000q\270\275X\231\255\224\256KH\225\271\315m\\\254]\243\244Jx\311\364\036f?v\266\005\346\375Y\225d\376$R\327D\362\250\323\242/9\027\371~\247\323\022s\212\270\372g6\253\206\255X\370\023<\222\"y\205\2117\037\247\323\025\037JI\224\236\250\240\2238\247Y\262\377\000{\214\324\304X\200c\3245\r\267\265\257\306=\0148\373\335\370$\233\255\033:\242\252\277\253\272\203\336hh\252\345\r\032C\004zu\266\225\026\003o\251\303\247C~\317^\323\372\237D\237\270\177u\323\311\272\315\231\267\203\267\250C\347?lu\267O\303\323\371\035T\231\356[A\0255\rI\360\033\300\205n\210\024X\273\035\377\000\020\"\344\377\0008\364\303\025wU\230\2741IL\325W\225\020\272\021!\2106\305\310\330XZ\346\304\342\217\226\225D_n\366Q\375\r\373$\3441\254U\035Y\324\325Y\204\234\232z\030\374%\277pY\256O\320\014\\=7\354\357\331\347G\001\037L\364\236^+\"\001\374GQ<\347\327\314\344\2606\336\373\014V_\264?[u\016M\223\321e\271\035edU\271\205L\214\355O{\210\324\002TX\016L\213\333\261\337\t~\305\372\337\252\262\212\3728:\202g\223$\254\256x$\222\240\211&Y\210\3224\2535\366`/\267\256\370\317\332Q\356l\035\364\373Qu\njc\022\010\235mk-\267\306\252\223H\361\030\344\006\353\316\327\037;\342-#\2500\232w\250\250\006GP\300\036\374\222O\323\0323JY`\247\177\025\346\221\234\335l\001 z\034y(\251\252\344p\311k!\236\222#\026\206\032y\270\304\343X\351\031%\027\237L\"\3643\324\303\022^\022\210\314\300\0227\340}\260\3473\370\320\215-\2677\2664dS\366\231\030\226l\276\250\311}a\343m+\266\326#\365\302D\025\024\224R\275E}B\303O\037\370\222J\372U~d\355\213\017\257\226\023\222\3034\221\273\010*\024\331_I\334[\233\034sO\267\305\254\256\257\3132\232+\"\370OP\361\222w\271\000o\360\261\303\361G\275\321\231>\325a\376\244\366\261\322\220N\364\331zO_\271_\025T\"\017\210\276\344|m\205\214\367=\313\263\372y\275\324\'\276\005W\2066NE\211\'\216G\3661QTB\3206\242\345\331w\323\243cm\360\363\321\222,\231\215\005\'\201\342TT\316\220\rG~@\000m\350Gc\363\305\031pF\n\320\250\315\2753u%\034\365\202\236yh\314\232b\320de[\\\022.~\026\003\177\216$WTI\024\323=@WX\223\370b%\363i\330\355\334\357\277>\236\270\317\254\352\346\351\372\351i\225\230\310\336P\252/\243\177\245\367\365\267\007\001\262\314\334f\264+\022*\tJ\024\223H$\013\213\200=;\367\355\204(\311\256\357\001t\213\233\366t\3142\332\361\234\ni\212\326)F\000\352V\323\250\354v\354v\033\357q\363\026\276|\222OG\2514\"\020\013Y\177\027?m\355\212\027\331#\320t\365}mY\252\021\037p 
\306\256\006\243\250\0176\253\003k\203\177\211\364\027\2703\254\312x\272f\362\203\374H\364\226\036]\217sb@\371\002F\374\234G\236+\272\327\005\375,\233\205\013\364M$\236\033j2\003\253\312\266\220\250\275\315\305\315\277\\\025\241\254\2443\241\212\232W2\362/\311\265\307\250\026\322E\260\235\226\202\325i\032\314\261$\300\352mV\260<\217\314\355\362\303\236\\\224\301d\2365\327o1\"\345\211\365\037{\375q\237\003\245\311\"\202\264\003Q;\005\326\244\203\245A\322G6<\021\3061J\237~G\001\264\275\312\334p\344\035\306\303\3408\300\207\254\212\'\2262\251\3410\263\333c\246\367;\217\265\276\030\231\227K8\215\314\016\254\222j!\200\261\270,\010\371\361\200\321\304HjeZ\311\377\000\206D\214\217`T\335\254u\016~\004\357\2105\223\211g\212\031\004\212\316,\000\032T\333c~\347\327\234\022\255j\204\254J\244x\343\021\261\032\\\332\367\276\336\233\334\r\275q\036\251\343\325\245X\206.U_H p\010\365\276\370(\014R\352\256\232\226\2578\025\2613\263\006\r(6\260\277\033v\026\277\333\014\264\022i\202\2229\255eK\t,/\316\255\366\306u\212\262G#4\321]\300\322\303`\030m\301\370\221\201\253$\360\311\244\310\014\221\0370\323\260\265\270\365\034\343[fxaZ\337xWkF\316\203\361(6\266\336\277,\034\3133\210\037+\242\217^\252\232b\315\240Kk\242\r\375.7O]\316\027k\3453Tj\277\220\013\213/{w\266\025:\252s\227\327R\325\261g\216{\302\332Cy\005\257\360\346\302\377\000,\030\231\222\275\243w\266\316\231\316\372\216\266(\372G-\253\314\352\'\205`\251\212\235\013\204\334\025,x\002\335\311\304\036\205\375\225:\3132h\247\352\014\326\203%\214\033\230\343>\363(\370\035>@\177\356\305\223\354\033>\200W$5s\"KX\024\005!Ab\203M\270\346\300\235\373\014[\335{\325\025=-\323\022f\364Ya\257\230:\305\024Z\232\354\314l,\0245\375l\010\305\375>F\261\324I2F\247\260\017EdUyEEWN\324f\rV\271lb\005ycVz\204uB\262\036H\"\307e\004_\005\377\000uS\3452\255t\020@\322\310\332j\010\215C\315\251\274\2676$\260\340\002\006\334X\343<\347\304\245\352\254\2573\225\200L\312\235\251\246\362\221w_:\013X\337o\023\221\205\337k]u\225tu$\0034\206z\201W\255`\215\034\007\362\201s\337M\265\r\356>X\334m\360.T\266\007\366\205\224\315\326m\222\346\031\005b{\253\273,\223\253]\n\253l\241\211\"\337\216\342\374\0161\257/\350\314\253\244\326\244\324eI\232\327W\315\342\211\014E\204\022*\r\014\252l\307p\016\301\267\276\030\272B<\232\016\230\312\352\362\210\236,\276ZX\345\245U\333@\266\340\223b[\314\327 \2367\027\303\223\322\323O\014BH,\353v\004\255\231X\202\tSm@\330\363\244\343\273\274 \327\2212\215\250 qEI4na@t\213\213\023\363\304\332#J\034\231\314m\363\365\300H2c\035O\357\037\034\237\021.\276}\200\370c\021_I\006\240\323\255\316\333\372\343\316C\206\272\001G,S\030\343\027\215\354E\255\214k$\2117*\000#a\200\020f\360\322\226F\2209\234\r%w\337\021%\314\346zt\217\302v\271\276\263\265\261\244\002GQG\3579UZ#\263\017\007R\306\005\356\300\203\267\333\025oU\3644\331\345U\025O\212\3245\020Fc/%\200\nOpm\306,ij%hZ \251\251\225\220\334\235\301\006\307\np\3112$`\311e<Z\303\033\204\234v\200\325\351\213P\373\025\351\310\230K\231W\t\013\235\304k\3457\371\006\376\270f\351\016\211\350\334\2075\202j\n\005\226\255o\246I\224\266\222F\344\\\233\037\220\030.\222\031 
\001\356\343N\344\236~\030\002\322\317J\357T\253!\020\371\300\217vm&\366\3751\251NR[fTb\266Q\336\333\350\241\213\332\006v\363\310\305\304\236,k\030\000io0\033z\006\003\327\025--eU\024\256\364\345\320\222\016\315lY\335r\362\346}W&o\230\325\177\036f\324\350n\000\026\000.\374\354;\034.\365l9=-\007\274\322\"\263\273\013j;\251\261\337nF\307\025\341\232IE\354T\226\310=\033\233TO\3251O\230JeY\031L\310\306\301\221N\253|\266\030\274z\177\251g\250\251h\232o\022\024\201\341#\361\0130\330\017\313\234s\3154\320\305R\225\213 y\277\310\321\335I\370\357\371b\335\366;\232\323C\357q\316c\327*\020\253\370\274\304\355qn9\357\214\365pN\035\336\207t\323jt8R\325xU\000I\254\351}A\201\261S\372a\373)\250\022\323\224D\235QT\rL\266\275\373\337\213X\017\266\021+(\244\246\256\205e\027\022\250 \354v<b\300\313\202\2320\024.\205\322$!\274\250,-\333\324\237\246<\317\007\243 L\224f\242\265\227A*\207\315}\213\002\r\367?;\355\217\206e\025\034\215\034-+\315qak/`I\365\357\365\304\371\210%\343S\034\201\203\261\322\tV\034n>\000\340d\353\032\306\220\210[\306b\272\031M\300\260\276\376\235\373\363\2162L\252\252SG\034\223HI\225\006\245\220\035\312\372\214hJB\216\225\010\313\250)h\310$-\300\260?kcMS\314\371a\023\003\345 \334n@\340\377\000v\301|\256x_&\215\345\322^;\005[\003`@[\237\376$\375\360\000\007\320\320\3170\251eg\027:H\362\337\235\266\365\276\377\000,b\262\247\214tF\252\272\201\277,\177\365\2115\202\t\330\310\3611\'\222x\"\334[\267\003\020\352BF\344J,\250\246\300\334\022H\354\017\310cV\216%HR\242`T)S`\327\260\343\266\027:\236Hj(^\'\217Q\201|Q.\237\362\372\021\317\177\276\016\241Fs\3740,\267\036\226\343\345\315\261\0173h\302\274\222)]K\244\2226e\277\007\034\364p+\247se\312f\212\242\030\252\030\323\010\347\325\242\374)\022\374I [\347lt\307N\365M.f\364\321\3075\304\250?\210\312w$\\Y\215\266#\342y\307\030u\227QK\223SE\005\030%\3032;9\332\326\265\255\337m\361\320\277\262\236wG\235\364\224(\312V\266\224\265;\022Af\321b\277\312H\032\0361\310\374\'\024\341\214\242\257\303\'\314\323e\213\3556e\375\331$FH\336\256\224.cH\254\240\261\0210\022i\332\327\n\327\275\311\337\341\205_i\276\315\251z\375\341\315\213\325M?\356\343\035\002\211\017\205\033\220J\273\r\356n\302\373\332\300m\207~\245\375\331\013\321<\263R\243,\255\034\312\357vx\235J\260\2717\356\247s\333\214i\366w\233A\'O\245<k\033\373\243\2651\2200maM\224\375E\217\'\r\214\232z\020\322kb\257I\321\3267\261\356\236\206$e\251J\030Y\220\r\004\200.V\302\305\217\321\273\361\207L\206I\232\201VX\330\025\330\251\003km\272\213\376j1#4{R\312!\021\304\345HU;\r]\266\003\377\000\344\342\230\366+\222\365\356O\324u2u6m=n^\320\273A\010\234\311\252MJ\003\330\213(\3226\004\251\334q\206(m\312\314\271x\034 fl\256\032kF\022%\001F\220\016\333o\337\000\353\262\211jdp\211L\352\313\270\322}p\337\020\202(\345\214\254\005\257\267\237Q\371l\01640M[\307\240[\2678\363\355\241\302z\345\323\321I\017\374\304Q4h@\270 
\r\360H\004\251FR\341\216\253yA\003\351\211\331\375\n\032t\225!wmb\341\2751\272\222\002\300\310\352\321\251P@\2664\201@\332\372f\244JyU\030\253:\202V\367\037,\000\314\242\202\2271\227U\024\356\242B\006\247\362\200M\366\026\375p\337\324\021<\224*#\223F\227\014\244|0\0235\212y\203\221\001ud\r}\005\2006\306\220\t\024*\357\025\226\206\010\024~\r{\337\377\000\2218\003\235\256g\0324\253T\221)\006\3420T\036{\013z`\216L\363\227\221\247\232=H\002\351,.E\270\265\357\200]ihi\314\376\374\212@6TV7\036\2266\030\323\340\312{(Oi\271EH\315U\342\210xU?\304\217S\226k\\\000n~\003\363\302-u\033F\362Q\312\246@.\321\233\\\372\003\317\033\2361d\365\rkfU\002Z\250\365E\024\212`!\000b\267\340\330\237\353\205\334\302X\206K\343O\022\306\305l\316\252\026P/\267=\273\355\2121\344j\220\271\245z\022\252\304\024\351\034P\224i\013\026bO\003\200\007\307c\337\014\035\021V\260\346\017Lj<=aJ\000\326\324\372\206\337+\023\201t\253KSU\032,D\rD\246\302\374w\373_\021\3266\2031\001\223Y\022\013\000\266\'|S%\337\027\026f\017\265\251\035\rU\342\324\344TU~vx\257\033\033\236\007\026\370[\r}4tSE(\234\351pH\211\324\033oa\333\345\366\300\234\242!?GPT\300S\\\221\250\224*\202\240\216F\374m\266\t \214\311\020\217\311!\260[1\271\027\334\333\213n\006<E\350\365\330F\002%\314\236*\251#\000\\)\322\006\257\217\247{}0#4\251\236\211B\243,\261Fn\2228\355}\356\000\364\3018\332AYS\2566$A{\216\340\354O~/\201y\217\206\361\010c\032\355\261\364g<\023\371m\306\010\017`\251\361@\215\031\021\336\003\250\203\2630]@o\277f\306\374\221\021i\333K\023\250x\204\016\032\332A\034z\337\357\201\364\322UIi\314\010\036\234\2532\"\202\002\223\346;z\002~\370\235\2237\207W\356\312\246(RVV\006\304\330\223\307\337\034\000\222\300\3230\247\262\206\276\260\305\274\335\311\007\350\006#KL`\221ZI@C\376\036\260\t\270\266\367\347\177LaOY\030\223U\235gG:\031\215\313\000w\026\333\343\217&\251i\2535,\312\204\177%\374\254v\373q\216\nf\265\367Z@\032H\224\260\"\314\t\275\211\000\355\305\367\337\030gn\315J\313N\232b\276\226,\010*O\177\216\367\306s\325,s\242\023\247\305\273\013\255\2177\033\333\340q\2475\250gI\322I\025W},\255\370l\017$|p\003\242\203\366\275\003\322\365\034r\251\376\031\214i\000mq\261\376\203\002\2727\254z\233\"\315&\250\311\253\036\n\212\204d$(;\021\275\201\026\277\241\355\206\017j\265\224\231\345M\251\352\001\236\211\3327\326-\251o\312\333\236F\000\364-$\3434z\324\204\271\243\001\306\241\336\373\\\037\256=XM,;\345\036|\342\245\222\221\321\036\316k3l\327\244!\2271\251\250\226\241\265\306e\236B\355\250\236lv\026\333\355\207\037`\265\225\2249\276{\221\326\301:D\265F85\\\231t*\203\376\253\334\036\017\345\205\314\2330f\312\242\252\216\002\321\264*\341\"\033\223bm\363\306\036\317:\2678\316z\206\2135j\037w\245\202\244\245N\262uh>Q\265\215\367\"\373\177/{b<R\373\330\334\253\353GBTG<\260\307\341\030\326H\310+qp\273Z\366\027\000\357\316\225\306\211\215Do\025ML\262L\302B\t\260\026\014ma\275\254.8n\330\"\266d\006\342\333X\377\000d\201\367\030\323Y\010\232\t\025\333F\265 \275\354mm\310<\376m\213Z&N\2059\251\245\023\3248p\275\307\337\033\251L\210\200Kb\027\226\365\306\212\327H\263\0034\253\022\305k\201\244\235\366\337r~8\233MO\014\200I\250\225oK\001o\226<\306\207\331\224\301$\205\210 
\200/k\342%D\201`>V\335v[p0J4U\205\325,M\270\2664\266\206\242\363[~\347\032\320\005\332\252\271$*\202\"E\273\3600\023?\225\314\021jd\363\251\026\027\332\337\373\303\231\2025\204K\341\213\337o\216\027\272\216\205e\243\216\240\203h\245\322V;_q\353\364\301Og4*er\237yx\332\3454\203\307\224\037\220\306\316\240\242Y\243\021\274a\327`A\026R7\034a\213(\313!\212\255\014Tw\221\3172\266\242G\323O\366p^\256\225\221CN\264\261\202|\211\243U\355\30778\333t\214\256J\007\256\262\272*>\232\224\323e\202\t\"k$\241\325\000$\202,9`.v\333\024Nq<\222\327K;\202& (&\344\355\315\276\226\307Y{bJ\017\370b\365L\215P\347\370\032\335\201[s\244}\271\333\034\321\231eq\324\005\n\227\275\257\353\207\364\362Kl\306H7\300\245\227L\361V\306O\210X\255\300\357\202\206Is,\372\222\232\221I\230\310#A\035\230\222\304l>\370\221\233d\306:ox\211|\310\266$\016\006\'\373)\240\222\017h\3716\250\334\0016\253\221p\336^1K\234i\315xF\024_r\213:G\334a\311z2\216\202\221\354\220\302\250\312\303rl9\372\363\215\024\332\222 \276r\321\265\303\201\310\'\217\215\355\211}Fa\375\337$\362#2\"\337N\342\367\376\357\2009}d\315\017\273ix\321\206\245\226\366\363\036\007\034\037\237lx\261v\217Y\252\rWH\315\2260i,\300[ck.\241\265\317\320\340tQ\224\245\216kJ\223\002t\217\346#\312.\304\233\016\375\273o\217i\347B\305\252\t1\202n\235\217#\362$}\261\222\327\320x\255\0312\202\300\225\363l/{l\177\275\261\244\003YM!UBk\320Ak\371U\255\332\307}\357\353|b\262?\357\nz\260\202d~T\355\250\221c\371\213\342L\2209\216)\"\224\\\267\231\203\002\t\265\373w\370cM,\2512\242\263\217+\262)\013k\250\271\037\324\340\362\003bG\022\255Fa\005>\267k\252\006\272\370e\257s\350\177\365\2159zI#1\225t\265\274\314\333\337\275\361\272\245\342\206a\003\205\001\356\252o\303_\320\363\215z}\322\010\345\361<D*M\324s\376\307\214w\007\021kEQ\220\205c\345\272\261\275\257{\366\003\215\360/\253=\356^\230\257\212\031\"Z\201O\304\204\250+\265\317\333\357\202\324\265P\311\'\214[\375..o~\333\374\207\313\023`XeyRU\202\241t\330\257\206\246\327\354o\3751\334\001\253Ts\356SV!\231\rU\23639V Y\345\032H\262\270\027\033\330\374,\017l\034\312g\024t\316\360\030\245\r\'\205\250\250\325~\367n[\236\370\261\363.\221\351\372\334\320\326\311N\324\3146\t\033\005\216\344X[m\267\334\374\260*\267\243\246\235\2266\252\216\n\032B\026\037.\246\"\300\222m\311\271#\351\206\274\221\223\364K\0343\213\262?Hf\231\214\225\024\271{W\224\245\3615<\177\312F\373s\377\000W\307\016\265\221\254\371\2355-,\001\214H\362\204\211mk\213\013[\202n~\330_\312:z\226J\330\225\"I\010qvf\261\260m\254\242\336\240\367\305\221\022ECG\340\323 \362\200\tU\000\220>X\313j\306\264\321ot\374\325\365\031\005\024\325\"*f\320\024\3131\336\343k\215\356/o\363w\304<\3273\351\352-o\231frV5\310x\326\301\034n,Uv?[\342\263\250\31439c\2166\236U\200\r\226\377\000\205E\257\306\026\363\314\353&\313e\216\n\312\325\216Y\037L\"G\003S\037\323\024\274\357\204\211\326?l|\353\032\330 
\214\017\025ZK~\020\340\343oH\347\255UU\356\221R\235\001\001S\253\032\372\314\n|\216j\250\"O\340\2530@4\003\267\036\230]\351\272\327\206\254{\361\247MP\253\253\323I\254-\300$j\271\004n\005\307p\303{aq\302\347\211\316>\000\362T\324_\222\317\244\222Ga\342S\230\244W#B\311\252\353}\215\376[\376X\t\024\331\214\225S\305(U\2126\362\201\313o\337\031ER\217\341T\303<\212\255\267\224\3621\341\231#\314\304h\254e\227k\261\275\355s\204\326\206XEgs\001\215\227{\342\024\221\314\324\325\032\300d\331\255\351b/\371_\004=\322\245\356X*\200.l1\237\271\002\222\'\3615\024m,\247pm\360\300J\231\326\006\\\274\032\310\253JH|(\330+\\\205\263\021qc\261\374#\023\034\201\'\370\324\201\t\324uH\013.\374\355{w\302\243J\322H\014\227bI%\230\\\237\251\304\370\320I\036\231<RM\202\331\266Q\337\364\305J)!NO\310\013\332e\004=C\010\215k\032\262\2468\3328#\215HU\007\326\366\373\333\024\036uL2\312\326\244\253\262N\206\332A\354m\376\370\351\027X\351\263\252e\320\177\304P\315\332\304\013\374\271\307;~\324\331t\220\365\264se\3622B\364\321\353\320lE\200K\177\365\276:1\271\250\232\214\232V/\346\024\346\251Z$i#*l<\242\337=\376G\022\272\026\226G\366\211\223\217\032X\233\305$\265\316\213\370m\345Q\375\363\205\212\t\263*\312Y\243\232yI\247u`\344X\2246\272\334[~~8\227\323\325\263\344>\320\262Z\251\347\324\202P\272\247[\205\014t\226\267\302\367\372a\216\016\234\177\242\324\256i\263\247\363ZH\344\312\336:\207\272\037\361\t]\300\357\204qS+\277\203\357N\364\342`c\026<\213Y\276\330\261\353\221g\241}2\225\362\337V+\032\373E]40I\023\200I\324?Ly\220g\256\325\206\347\206\221h\346\331\204\326V\001M\2136\327\333\350{\372\340y\241iby\242\205\313D\001\004\201b;\337n\376o\317\032\350#ijH\023_N\253\022\303I;\377\000K\235\360o/\"\206\2303\352\324\\\352\000\352`7\007\235\273a\226,\213@\214(\3267-.\341\254\007\003\345\372\374q\021e\225\014\346S}6(\267$\330\035\367\355\370\260\307\004\006:i3:WF\211\"\020\222\313v&\366\2676\035\261\027(\312\326\266\263\335%i)\231U\303E \027qk\215#\270\343\343\276\005\235C6W\224e\324\324\262%`\247\254\250\204\031v\004\225\004\0226\371v\302\327R5:4\021\345\244ID\304\220WU\300;~W\303\275>]\014Y\034\022fPS\301VQ\026A\033\205\016TYA>\206\300a\017;\321O,\320x2\001\021#\302,\r\303\013\351\277r\017\351\353\214\307\223\2324\322e\264pRj\334\026mHQ\357a\250v\275\270\355\215\264\264\317\0221y\204\212A\272+\001q\260#\203\361\371[\020\241z\267X\344\205D:X\3506&\347~~\347\022D\262\'\361L\221\2047\361\224\250bO\303\373\347\033g \203\345\351\034l\004\254\351v`\274\334\3601\022y\343UaP\206 \253`H\337\320be\014\022\373\322\270q$A59&\326?\337\323\0233\032u\326X]\324\362mrp\002\010\242\222*,\306\t\336o\016\024\250\217\304\032\001\362\222nO\240\370\342v\177\232O\024\272\341\245X\221\201`\315q\251I68\013\233B#`T\2523F\353\271\036k\016\376\274\355\364\306\246j\232\232x\222U2\200\226-\353\353\362\336\373cP\330\254\212\266W\276\325}\243f\331udyF[9\211\364\te\223U\310$\360\007\003`/\212\2170\315\253\353\253\036\266\266\242Y\245sb\356o\177\207\3761g\347\035\031\227?R\325V\365\206i\036_\0030\222\0325u\025\265\212\024\005\262\233\210\220\366$\022{\006\343\033\272j\217 \353\032\331hs\270h:w\246\250)e)\341\0134L\007\226\322\020ZI\031\210\325{\202\006\300X[\324\306\341\216:VA$\333\331\32455F`\320\275\032\005b\002\370\315`\336\243`G\323\277\307\020s\270*_(\206Ow\206%3\010<\010\343 \\\251`\301\257\270\330\355a\202\014\021\001PL\220\215\244\201\207\231~_\355\307\241\306\330\325\233)h 
\224TR\211\242g\326\366x,\341~\266\r\301\334bHW\024j^\315]7M-5\032#\246\247F\027\027\276\n\322\344\321V\365\027\275M4\272\341\322\352\200\3546\301L\272\236\226=\224\354\303r\0162\247yW9_\0025HYl\354\307|!\014|\005\3052\206\364\333\347\210\314^9\254\n\205\371\\\343faW\035\"\231\031\357p/a{\342%%b\325\261p\024-\35450\027\303(\305\320\2255=\0255T\211\340O1Y\010\"\352\200\033\375\177L}\034\252\2002\323\306M\356\272\256m\371\333\033\272\212\002\231\305J\242\274\205\337P\021\202y\037\014i\206\212\271c\005\351Z%\354e\262\017\317\r\330\266\266B\252\226\264Mux\241\014\327%\"U\'\3527\305k\373L\345\261P{\236v\r?\374\302\270Ecml\272l\r\207}\177\226,\372\312\"\322\336j\352d\357\345b\347\362\004~xT\366\367\226\303]\354\332\235\236[\373\265d\177\306\323\260R\254\244\220x\271\013\214\313\362L\334/h\344#\3255\224\365\202h\243\021\033\215j\032\340\257\240\276%f\371\223\325\317MR\224MM\030mq\310\030\352$\357\353\267\027\307\331\226W\225\265|\313\025X\222\024\005\225\225\365\017\226\303o64W\302\321\301\241<B\221=\264\262\3567\260?\236.\212\213i\244%\350\354\334\252H\252zv\026\215\034\207\246V\027&\366*\0107\305e\236GOO\233:\306\262\205\017\276\243k|0\371\354\3134\2037\350\232\032\210\037TF%\211\231\205\216\245\026?\323\t\335f\221\307\232\350U\036 Q\254\001a|x\211T\232=\250\264\325\222\362\033o3\240`X\215D\331\271 ~xg\313V9\351\332\213\335\213\021\374FpIQ\366\034\361\277\307\003\272N\003\356Q\324K\020\003\361\206bl8\003\277\307\005\213<\031X\246\025J\2611\005\264\255\366<\334\367\000\036?\333\004\014\317\247\013\034\322Zz[{\225\225\344Y/\311\340\215\255s\206\032\214\216\206\2636J\244\233D\226\264\241\177\023\250\277~G#\177\246\023\362\331\002W\tX\205\212FPc\326n\312\033\215\257kso\351\203y\025d\362u\004\323\316\211\033(a\374\020\0020\364;\\\235\257\317|\006\003GQ\345u\223VR\300\322\004\242H\210P\302\354\244vf\355\275\270\3046\351\271E\002\250\210Me2\352\322H-\177\303\177K\001\376\330a\317\263Jv\2402\301\347\023\352E\363\332\335\317?\021c\215\364u\260\014\256L\312:i\221V6\324\247\361l\t6\037\2569\001\212\013\032\307\014K\002\205*\254^6\033\206\355\177\216#\232i\035TK\001\2101\016lE\215\210\260?ll\313\245z\252ml\030\352m\35467&\304\337\340m\267|\027\230\006\242\323SNA\n\006\253\037\302{\337\324`\335\004\206i\314\003\306MCp\010\007as\370O\256\330\201\230T\324F\351\002\226*X3xi\300\365\376\3750R<\302:\206jP\304\245;\021*\350 \023\305\370\371\214\016\315&\243Z\3634\010\350\205\005\325\217\'\342\177?\2468\343U[\013AW$,c\210\\HW}\373|{z\361\210\331,\205e\250U\217\304\177\024?\305\313r-\360\301\n\347\025\024)wXJ\272\223s\244i\270\370z_\021h\350\336\227>J\250\302\010\334\024eSp./{\367\354.pb\351\202J\342/{o\350\330\372\213\246\343\316\350)\213\347\031L!\244\210\023z\212e\335\227k\022\311\271\033\376\035^\203\025\t\237%\352\212\356\236\202\202\236.\227\211iV\014\3071\256\235\332\031\345\325c\"\252\202T\020G\224\013\013\372\013\343\243\251\235\3237Z\337\032\362(*\203\260\372b\216\366\347\3223\345\017\026m\224\322\325\034\216\242I%\360\320\023\025\014\245\201x\210\340\002\304\025=\324\250\337N=\016\232\177\364\227\376\020\346\215\375\221\323]M]\323\331$k?Q\347\211I%\274\261\313(Y\010\377\000J\rD}0\237\230\373F\366\177\2254R\376\363\202\246I\242\023\'\205\013L\366\355\2507\007n\373\343\231sZ\354\3378\254\222\266\251\252j\247\230\227y\230\023\253\222I?|i\247\312\252\245E\232J\210cG\266\237\342\336\377\000\035\2561L:X\257\311\262Yeo\361Git\027Xe\035M\225\214\306\2137_\004\310\312\"e\"K\216n\000\364 
\363\337\014\031\206q\034\024\241\262\371X4\206\336#\245\210\343\273b\221\375\225\251k\317O\346\3468c\323\035R\244l\302\347V\2375\277\372\342\373\254\310!\233&H\345\220\231[\271\260\337\020M(\315\305\017\213\356\215\263vq\003\327\345qD\325\216/bYN\347\355\21549]%<*\260_\237\251\3064\224\265\013Q\025\231\232\025\032lw\337\005\246E\215\324\021bw\3066\315P\257\324S\230\246X\322i\241R\276e\014w>\266\300\247\252\246\243\207\306\253\230$G\227\221\302\213|q\357\266L\324d=\037\230g\253\033\264\264\221\037\005B\\3\222\002\203\360\271\030\342\356\240\352\234\347<\237\307\315\353\345\250 \233\t\244\322\253\362]\200\372\001\212\260b\371\025\266\'$\251\235{\036u\220\326\326\223G\231QT\370J\004\211\035B\222.M\2706\355\214}\245\3702\373\"\316`\232\027\274\350\253\010\321}\304\212\300\374\2668\342y\353\344H\374T1\272j\266\245\337\026\237\377\000\222\363I}\223P\320\231Me]\035I:\246bUi\201[.\220lI,@\276\340.\326\301\315\3235]\201\307;\344\0102\ne4\322\313\035\354.T.\255j/\266\233w$\016{\214j\352\334\242:l\206\242w\245\232\232K\371W\220\312\033k\233\013m\365\373`\203\346p\325V<\324\023\322\254\342\234L\350\362\2024\334\206\006\366\261\000-\255\276\370\201Y\230?P\344\365\205\246C\030b\212\241l\302\333\206bM\316\327\336\330\304\\\373\223fyz.\317\331\266cY\354\346\2220\350|\031\2369\002\213\235\332\342\377\000\033\021\364\266$\346\224\360\267V\210i\342\326U\2138}\303\037P\017\317\003\377\000fM1\3731w\263\\V\275\331y\341w\371a\263=\226\222\223>\236c\03122\225_\rN\2426?\\C\233\375dz\270w\215\005(\305\032\305\"\221\034$\035\202\255\203\036\000\372\337\002\374\tg&\322B\024~ \337\346\014.7\033m\375q\233M\034\220\263O\022\260\216\322+\240\266\3676\026\365\343\030S\204\237[\264\251\340\306H`\024\222E\317#\267\257\333\031\t\205\034\021\225U\212S\030\013\251-\305\215\267\370mo\317\004e\026\211=\334\306\266\362\231\003[M\355\377\000\234\n\253y\322Y\034\230\236+X+\220\240\201\307\257\241\307\264\325\255\003\304\311\272\017\301\265\365l\007}\267\307PS3\2124\246\244.f\225\264Jn\005\212\266\373\330\341\216\231\302C\023\030\214\002H\231%\2140\272\200\t\276\334\377\000\347\000\251gI\250\020\325S\370J\037C\353\340\355\333\327\177\353\211iU\032\344\325LigB\216\001kX6\246\260;\363\244[l\000\331\356C\225\350c$\222\'\204\233\025a\276\237\227\316\326\301A\224\321\274\223T\032\223\"\225\263)k\213\332\340\016\334_o\2164ea\352\351\013\212\250\210R\261\215<\201\350q\016J/z\006:j\231\0225:\215\310\346\303b>\237c\200\3165O\224\325Sj\257H\300b\314\014k\272\250\324|\300\235\355n\333\340eE8\226\246O\031\n*\356\242\337\210\237\323c\206G\251\236\236\202M05EF\220B\226\324\034\366\275\270\333\323\021\025|hCOL\0202\200^\334\237ArpS:\201\016\251O\037\216E\225u\025[\033q\3065S\325G_J*V\235\343h\2240G\004\020A\037\336\370!_J\336\"\353EJe\271k\236\327\332\336\233\333\363\304\\\216\331\2155KG\001\247C1\214x\303f#k\354\006\333`\376\316J\321\204\301\365#\306\013\027\334\001\206\234\253\241S=\312e\242\352?\023\367]IV\226\217Y_\033I\324\272\255\300\276\376\247\004\362\n<\252\021O4\017\025T\262\002\321\272\356\252\001\334/\313\213\340\'[\373O\3132!5&_,9\206d\267R\025\357\024\'\376\2429?\001\367\030\323\310\337\002\034T7&8{4\312zB\273\331}\004\353\224\345\224\323\346\031p\216\256D\247A#;/\206\344\220.|\327\337\034\241I\354#\332\307\357\027\312\241\313\250()\"\231\222:\251\347R\035u\0330\013\251\254G\3758\355\036\231\256\242\250\311\251g\200\000\263\302\222\237\r\002\203u\007\217\256<\317j\350\341\207[_\3105~2\016\333\377\000.\370\266=K\213tG\361\246\221_{\'\351\n\356\201\351Hr\232\311\251\'\257fijf\204\000\263\261\357\275\216\302\303\216\303\016y\263\304\371
q\216z\205\216F\374*\215\250\375\360\261\325\325(\265\021KOK\246I\242\264l\016\332v7\346\367\334cnMKW\340\300j\212\317\030\216\373\203\2505\360\227\313f\270T4eF\032*$\212-r(\026\005\315\3163\253\254\234DJ)\026\276\353\261\300\254\2331j\234\306zf\213H\214\335M\266#\005\352\3324\032o\271\034c\235\234\221Y{i\310*:\273\331\336a\225\323J\360\3153F\342IX\351].\016\366\277\307\034[]\323\023\321\254\260\324\273\232\310\352\032\027\215\000d\004\r\356\367\346\376\200\217\216?D3\010\"\223%\253\021jY\004\rb\274\203m\277<s]/\260\372:\356\277\223\246k3\\\3009\213\306Iu\013\310\332ul\010#\213\375\216,\351$\351\2419\244\225\034\361\026Y\"\306 \250\236\030!.\ts\271\035\216\334\234\033\252\314(\023&\031T4\212b\212\227\302v\361\013\031\230K\254\260\330i\275\310\266\366\260\305\251\355G\331\325\'\262\352\334\2470\245\206Z\310\332Vi\222\255VU\224\245\230\013\021kr8\357\212\367\254e\243\315\263\211j\026\ndf\201\321a\201\002*\260\026\027\n\000\373|1kW\310\205/B\235]S\274\346L\256\220\323\300P\251\214\310db\010\356H\037kb>B\263=c+\312SH$\371\254M\305\254q\254xKK\013;\220\310\307XF\363\021\265\261\013\336\252)\352L\220\312\352}I\271\302\373mhb:O\366O\314&\031~g\225H\272\240\215\365\215\377\000\232\366\3751a\365\322K\377\000\021\307,0\201\251A \\\330po\371\342\207\375\237s\214\312\034\372\032:(Zh\236[O\0346\0225\303\035M\352\000<z\214_=tf\245\222\031\025\2326P\255\345\374M\316\333\177Lx\335L{s3\326\351\245x\3222\312\014R\307S\342\'\361\036\344\236E\300\374\277\276q9 z_z\225\"\216H&\002\3676*77\037{}1\037*X\346d\250\202}n\351yF\263\344\026\'\213\233zm\215\263\231\247[<\362\242\003\3740.5\237\227\247\373a#\031\016j\302h$\211#Y$\271\016H\333b@\266\330\321N\353\031II,7\261\343\315\376[z\037_\23612.\266\246r\304\351\362\231\036\332H\275\307\033\355\3751\225C\303\251e\363\025RU\0246\303\260\376\227\276\010\tY\362\323U\345\321\317M#l|=\301\3632\357\310\370lG\313|*\347\271\232\211i\341\226I\026V}\005w\261U \334\372n@\371\341\243\336b\031SP\'\214C\271!\201\033\261\273j;_\270\033\367\266\027!\313\236\247?\250.\361\030\341T\201\321\230\354\354u\233\037\227\207\371\340\306\274\201\215\375%]2\217\003\302\264\005T\026\276\372\254,~\330\225UN\324Y\243Kf]`\205p6\265\271\266 \364\204m\221\344\320e\323\314\325\255\014E\014\245n\344v\343\233m\211\271\225r\256\2210P\302\304\002E\334[\323\234g\316\203d\332\010\326&2L\003\273\022\252\214\242\303~\373z\021\214\026\276\222)\231\026\262\030\254\240 X\356uo\351\205\374\3335\255\314j\036\032I<\030PX\357`~$\375\0064e\361MN\200\025\210U\201\374RM\326\344\255\230|\000\r\267\317\034\370\nc\005|\263<p\030\003\000\307B\273~\"\273o\362\337\234\r\244Xj\342+V\2444C\224o\022\354A\275\300\355\177\2765\276a*\207\211\252`\2169\030\351mwk\r\357\360\364\267\303\020\346e\211\335\340\226S\342\002H 
\351$\213v\364\2660\215\336\210\371\265.\177\004\325\224\364\365rRQ\314\243D*B\220\254\273\213\216\001#\267\177\256\000A\323\353:\262e\361\013\350\r,\3620Q\035\271$\235\200\376\367\301\\\323\250|\311\024\t4\357\033xa\030\025P\240\213\215]\273\366\343\013\375C\324ncX\000UE\334A\t!\024\372\372\223\36178\247\034d\370%\234\242\271\344\275\275\216\327\232\337f9\004\350\272\255F\221\267\304\257\224\377\000L1fYuE}\004\210ce\324\266\006\344s\267\353\204\177\331\302|\342\223\331\215\r\036eM5/\206\322xj\361\224fR\332\201\265\270:\260\373U\343I\033\035e\216\344\337\267\337\014\232Jn\211`\355\003\222\202I)\350\341\250h\224\323*\205*\026\344\000\007\251\354\006\t2\304\221\221\275\210\266\002AZ\023\'\257\221\'G\253\212\241\324\013\362\243I\002\303\340N\377\000,E\245\317\'\252\312\336QE1\220\002\000\267\'\341\202\371\002\013\345\262\323\032\246D\2203n-\202u>\037\203q\031\'\261\302\215\004\322\303Z$j\037\t\033rX\200A\373\340\362fhe\360\315\\M\265\364(\271\306[\nDj\010\353\243\226C\341\206\214\334\022\317m\217\3761L\373O\366\251\'OfT\362T\345\364\260\327$F$\235\254\263\024\006\304\002\243W\036\245o\362\305\335\357\364\357\034\220\242\226-\267\224\357\362\307\'{u\350L\367\250=\243KS\024\271u$mM\035\205ej+\235+\245\230Gr\366\272\237\345\354q\254.\347OF\232J6c\027\265h3\254\333\367m\0058\210\325!\r\'\271\201\'\027>p\305\217\317\t\035A\221\326S\326\211\356e\205\315\301v\363_r}M\360g\247\275\236O\223W\245Zu=,\325\016\216\276\005<\023.\240\025\211\032\244U\277\341\355\317\003|\035\216jSUO\022C\342x\204\243x\212Y\203[\220}0\314\231>9}I\344\233+z>\237\232\256U\227\300\'\306mJ\030yI\277\027\355\307\347\205\314\353+\367<\325\351\245(\204Z\340\\\351\270\006\307\027]U\024Y]$\254\301\344\231\001\225V=\225\276\276\273m\351\212\3071\313k\345\314\016cU\031[\267\213-\316\263`H$\375\260\314=Gs~\214\270\320\345\373-T2{I\024\242%a%4\227\324t\225\260\033\376_\236:\037\257i\301\226\n\224\210\310\333\256\223\275\3069\333\330\264\264Yo\265\232z\210\353\242}pJ\031T\352$\225\331V\335\356moA\216\215\352\363+\322$\2216\222\200\332\367\266\366\035\261\037X\357*\177\243\323\351?\314\022_+\202\262?v\274\026O\342x~U\334s\346\027\343|I\227Q1<\022\244\261\376$,A_\375\341v\256\232\251j\016\237\031KiYU\315\365\221m\2567\323m\260^\236\023\r\"\323\222\n\245\354T\361\330\201\204.\007\313\223*\221<lk#t2\310F\241\334\337aak\001\276#\370sx1\177\232W\017\341h\277k\022Oa\306$\304eXT4cJ\224\t\275\370\344\037\240\030%\025<SM\2564(\301\277\031\033\177c\034\000(\207\370\340\\\251r\005\200\277\246\343\345\211\035\023H\362\345\202\264!+U3\324\235\271Bm\031\371\351\n1\263\2502\350\3512J\36720t\214\245;\016<Y\010H\366\377\000[\001\267\256\n\305%-=,\031]\013\2154\350\2602\240\271[\000\000\'\261\301\277\250<\221\363\212\270\350i\212\321\241y\200\334\021\334\03609r\351\252\274*\274\306FW 
\205A\261\343\372`\353P\303G<U\005\354\354\267:\267\n;\375w\030\201UW\342\027u`\250\005\213Z\377\000#\216\213\244\006\233d\'\202\\\272\252)\013!6\362\261\363\002-}6\372s\3521\262&5q\211\003G\036\245\002g\021\334\237B;oq\261\370\3426z\320\302\261\325H\304.\233\336M\3247\256\374a/7\352z\361W\340\345\232\022\215\224\353\225\2564\2228Q\337\347\215,n\\\003\275G\223vmUM\226V\253I2\317%\256\031\374\336!\275\356\300\356\r\306>\255\352\tV&idt\225\224r4\225\342\326\035\277\256\023\335\340\367\2432+\311Q{x\262\233\260\371z}1\204\213#1y]\257~Y\257\212\241\323\256Y4\363\372\010U\346\225\025$\2421Q\210\212\2507v\271\306\240\301Wm\261\036I\300%G\231\2751Ta\350\236R:\263\331\013\317\231t&AU\357Q\35041+\252\002^\3524\233\372n\247\017RQ\301\032\206f\225\325\266#\215\361T~\312r4\236\313\251\346\322\353$5\022\303\245\207\226\305\203\017\247\233\234[\315u:\330jF\330\217\323\347\210rR\233F\341n(^\367d5\306\231h\324\300\317t{\222H\357kpF\027\343\313}\331\r<\006G\322\333\263\036M\360\345\014U\024\325\362H\013x2\000V\342\300[\277\373\342.h\324\324\276\363S,g\303\210<\214b]W\002\347`9\371`;\r\013\323\301-2\352*ea\275\207|\006\275Kg\204\244F%\323\303\016\330k\312\352\350\363\356\235\245\3162\320\346\232\252=q\227[5\256F\343\267\030\362\0322\223\n\202ot\270\026\337\214w\007rc\221e\213\010\001cU\356m\337\025\037\267\231i\362\217j}-]W\014~\345\230?\272U3\222\000\217P\324o\177I\337|]\024Nc\250\270$+\021{\3741\316\337\265\270\232\2670X\235\343\211a4\357\013\310\341Q\024\211\222Ss\307\230E\371`\340\373d\337\220\313Pc\007\264\025\351\312N\261\351\005\310\346\203\367tR\307#%3\207I\001\232\314]\256E\200\325n9>\226\305SE\325\0315^eW\323\256\262C]\004\256\214%\210\"\226RA\365\267\037\014V\324\271\231\245\244k\347\361B\254\267\321\030vbW\200\010[\016v\334\014i\353\232\372w\353\334\3138\241\226H\236\256qW\026\213\371\004\240?\342\330\337\314{b\311t\212I&\366N\347m\272,\272\212\232Y\032\0327\314i\336\260\003\242\026$\311\307ao\207\367{\341g;\251\271\222\010\321\274\313\244\263\013\022\273\336\303\215\317\246\370C\246\256\314%\314\305LU\022{\301o<\317-\211\270\261\271\'\323l7T\301P(\243i\364\226t]\256\031I\334\021q\333\353\204\313\002\304\323l\350\375\230G\330y\212\223\332\266^\261@e\2742*\242.\345\264\236{\016>\226\307Rg\316\242\205P\240\275\354\r\266\037\037\226\330\345\177cu\236\345\355s*p\343D\241\342\326\026\367\272\235\205\276\"\330\351\334\363T\260F\222k\261\270\262\355\177\206\'\353?5\374=\036\221\375_\364\\\251\202]\231\014DF\244\274*\244\201\311\275\357\361\304\264\232\022\224\360^\005i\023Y\n.y<\333\267\364\306\002\246\027iQ\034ER\312\314\214\302\353\333\313\361\004\214a\225\350\2373a)i<M\330 \260$\002oq\307$[\345\211\223+q\260\305\0240\276\245eVt\270\000\033\330\372\332\370\'M\024T\264\240\265\302\257\t|E\023Re\332\333LI\334\022\005\257\375\372\343\t\263L\276\n\2245\365q\202\340\025\266\372\211\355\371`7fi.HY\367\274\326WeY\\\021\007\361\252\215D\250\307}\021)p~\222x_|e\005L9d\225\246\264\304\222\310\345\342\263]\264v$v m\3648^\316\372\251\037\253\232\242\2069[\335(\322\231t\251\335\344:\330[\375\"3\214)2j\256\240\251z\272\360\320\226\362\252\253\220t\361}\270\303j\222\261{l\233\233\365\005\\\363\322KB\242\246\205\246T\226]B\303\343k\361\265\276\270)Y\230eIH\365P\230\243p\2271\201m\3767\304\032\237\334\0351\224\255,\363MQ2\257\360\351\201\261Qo\346$m\365\355\333\276*\374\3538J\272\271=\325\004j\315}(M\227\341\206b\306\344\377\000B\262dQA\236\256\352)+\352\036I\0268cA\246(ck\250\377\000\250\372\2220\252\322OS 
g;Z\300|1\344Q\336B\3237<c)\\Z\311\262\216\376\230\272\020Q\322#\224\234\214\364\244[\213_\343\214%\225N\354~w\304Y*\354t\247\230\372\333\030\301\033;jo18r\217\261m\231\273\264\217\245v__\\n\246\247:\270\304\212JC\370\255lN\t\034B\347s\215Z@\331\327]94p\345kMN\261\306\"\272(D\340\r\207\345l\025\222F\013\252I\032\307\2606\030]|\333/\310\372R\277\250\263H\375\336\222\2304\223\003\370\224qa\352I\332\337\034sg]\376\320}I\236\306i\372n\236L\252\210\352O\0220d\231\377\000\356\260\013\317o\276<\354x2M\353\201\262\311\030\235G]^\253[C\036\250\326)e\010\340\215\310;l{z\337\003s\376\242\312\362c\242\252PK\267\207\032\215\314\215\240\261\003\327`o\216&\247\315z\3272\253I \254\317*gg\001O\275\260vbx\002\367\'\345\203\271\007V\346\322\347\371uf{Y[,\224\025\013\014\361\314\344\276\233\220\340\336\346\366\362\237\\S\377\000\r\245\315\212\371\227\243\261r\006J\\\216\216\221cX\326(\200:E\205\355s\267\314\234F\2374Jr\210\322\306\240\263 \'\323\266\026:K?\375\371\220\317\234CK,T\342\242H\241\014l]T\333Q\007\215\357\2664UCS\231J\360\274\272cG\014\000^>\270\215\247m1\311\241\322\t\221a\361@\006\342\367\365\305m\355V\034\2737\246\256\226\247-\246\314V*s\031J\204m\037\211$\336\304_\374?\\<\320*\255\014t\304\311u[\026=\360#\252i!\375\321S\010\001\025\320\033\023\351\267\364\'\nZ\220\305\301\312\375M\232V\303\010\207$\241\312\262\270\357`\264\364\021+\223\353\257Ia\367\305S\231E_P\363VU3\274\200\213\227;\236\177A\213\237<\201\251\332\2425@\322\206+\270$l}1[\347\215.\211i$\201C\304K\031@ \266\347\261\371\343\320\351\362>\010\345b\255\022H\265\nQ\013\270`\025m{\234X\265\024\362\247I\345\363\314\214\004\215*\277\237\314,\023\361\017\373\273b\'F\344\350[\306\2365.\010:]n\246\343n~\026\333\014]^t\364\306V\210\240\204\252\234\265\232\326\272\305kv\365\373c\263d\356\222C1\305\325\201=\230I\340{W\310\256\023\374k*\001`\240\251\337s\365\307Rg\023\264t\254\205R@\366\000\333P\003\2361\316\276\303:s\367\307\264\001\230>\221\016W\t\235\302\033\352s\345Q\317\304\237\246:\0171\270\244Q\255\224^\373m{b.\255\2475^\217C\246ME\377\000A-\023\027\216G\215Ymk\240\267\177C\2024M\026]MhL\254I\002\347s\277\246\001\3455\236\341$\324\3655\206y\201\032\025\330\233-\254>\270%H\376\363T*\222Vf+\245E\354\024o\307\373\342j*\356\260\235T\320\245)q\020\252f$\030\312\203\261\026\'\177Bp\252\231uNe_s*\314\360\rB \000@\033\275\217\317\006hhX,\320V\261+#\330+\261%W\236G\351\266!\346y\346Y\220)\243\246\2442LI\001C\220\221\256\373\263\177@7\371ru\024\370F$\322\333&\307\223\345yRTVL\354\321TT\231]\231\005\321\312()\267\240Qo\200\300\236\245\353H\250 \222\227%f\211\017\342\231\317\231\277\322?\224m\317?,-\346}]\233\325e\362\320\265@\367i\035du\021\200\245\200\260 r-s\372\337\t\271\204\316M\345s\271\330\177\277\307\025\342\301n\344I\2235*D\212\254\312\2470\225\335\230\351-\311\345\261\242\353\020\272\2171\304?\033\177-\325q\256J\233\002\251b\307\276-\214|\0227\262d\365\n\006\2476>\230\210\362<\240\377\000*\366\307\221@\362\035O\271\365\304\310\251\200;\361\206(\321\223],:\230\000,pZ\232\024As\316#+,[\216?<n\214\263rl\017\030,\004\246\234~\025\037\370\307\221\254\2225\206\367\357\333\031\323R\226 
\260\260\356=pB4\tm+\376\330\316\227\001:\202\252\226\014\352\251r\234\302\231*\250\'ui#qu`\267;\217\365\025\373b\232\311\272\013<\312\177iJ\251\353:y\347\351v\232f\214\032}T\313\033DJiR,H6\026\033\334a\303\330\207P\347\271\367\276\347\031\3546ij\344\247\2020\244$h\226\032V\373\2370k\237\\[PU,h\002\240S\277\226\333\342\010\346xn,k\306\247\263\232\377\000j\332\376\234l\273-\243\310TQ\346\3245\204\350X<?\rt\356l\273\203}6\270\035\361\317\222TOW_%\\\306V\222i\013\273\261\3631&\344\237\215\361`{y\314\244\250\366\227\237E2\312@\251o#\260kl\000\365\036\207\025\2752\237w\361A>&\262\243O|z\370\177\024\310f\222t\216\254\350G\222>\206\312h\342\236(\322ZQ8U{\265\213\033\223\365\276\036\362\032*h\251\210\222[3\367sn1\316\236\306hk:\227\332\005\024u\025\265\017\r\024h\322EN\332Q\"A\345\004\013\017\304@\260\034\2618\352%\240\243\242@\320\322D\030\r\232V\324o\363\307\227\233\037l\252\312\241+D&\254\215*\274:x^f\265\254\211\215y\244OSN\311=/\206\n\233\026\"\342\343\323\030\324\326\324\\T{\300P\267\032b\033\363\360\300\214\346\256f\014\253\014\254\216.\030\216\376\247\023\270\216L\241z\336\2268\272\212\262\n\230\274\214\272\324\337\344O\346H\372aF\253%\021\316\313\341CQ\034\367\002\327\005o\375\363\213\027\332\2053I\034U\216\272\310&6\270\371\221\372\237\246*\304\317\232\212\246\251f&\301\025!D7blI\333\260\337\033\205\276\005\345U \212\322S\345\350|i\212\262\200nN\313\267\'\373\355\2159\305L\031\257HR{\256\270\374\n\311\207\210G\225\206\210\270\027\276\330\323\323\335-\324\275w\230\005X\202S\206\335\317\2264\036\244\3678\350\216\231\366}\323\275;\224\322e\225\024p\327\264%\244f\221vwk\\\333\277\000}1\323j\034\362o\024[\177\242\271\375\235\251&\313\362\214\332\246\246\t\"Z\231S\302\225\343\267\212\000<\036-s\213\016TI\216\251gH\327\371Am\376\330\307\254\372\263%\245\221(a\2229*We\261\036\034\"\333k\337\2168\343\234V\255\234\3137QTT:\271*\002H\030\004Xm\266\344\354>\370\235\302SnE\261\311\030\306\206\n\332\037v\316e\225[\313/\027\346\376\270\'A,0B\323I!\215\024l\334_\373\364\300j\216\240\264\262+,l\321\222\255 \007O\241\265\367&\343\235\276\270X\317:\225\036B\272\2651\330/\247\323\r\216\'.E\313-p5\347\335a\022^\032\033\305u\322\323\037\304\327\364\364\376\270R\226\265\352CI\342\026\337{\340\rE@\234x\262?\227\361*\342]\035dEv \205[0\305q\304\242\264O,\215\230\324\317\3414\204\310\t\344w\300\232\312\315lY\310\364\003\323\036\347A\322o\371}\343c\261\3541\026\236\224\222\031\216\246\347\024B\013\221-\237)\222s\265\321}=pB\216\227aa\214\340\247\000\006&\303\345\211\250\311\022\202\010\004cw\350\007\261\304\024o\266=(\\l,>8\364\266\275\305\366\340\001\211\024\364\245\374\322\237\373F\002\327\'\032 \246i\032\352\277S\306\t\322\323,{\235\317\2567E\030\330\001a\362\304.\240\314\323*\240yQ\004\222\332\352\226\275\367\000\237\317\002\333tw\004\312\331=\322\216i\302\007\360\320\270_[\003\205\246\353\252h\243E\367G\222r\314\035\024\360\007\030\001\232u\rK\326\325O$\217\022\324B\2428\301\277\206\303I;|w\304\\\243!\251\256\317\242W%\022FR\322I\300\277\257\324\201\206\250kf\034\235\350\377\331"
      }
    }
  }
  feature {
    key: "image/filename"
    value {
      bytes_list {
        value: "000000000.jpg"
      }
    }
  }
  feature {
    key: "image/format"
    value {
      bytes_list {
        value: "jpeg"
      }
    }
  }
  feature {
    key: "image/height"
    value {
      int64_list {
        value: 224
      }
    }
  }
  feature {
    key: "image/key/sha256"
    value {
      bytes_list {
        value: "5ec019893b646aa03ec7cacb33b9038e4cc0c41848c6ea056074c1ae29c79bc4"
      }
    }
  }
  feature {
    key: "image/object/area"
    value {
      float_list {
        value: 0.002411511493846774
        value: 0.002411511493846774
        value: 0.0007971939048729837
        value: 0.002411511493846774
      }
    }
  }
  feature {
    key: "image/object/bbox/xmax"
    value {
      float_list {
        value: 0.3125
        value: 0.0982142835855484
        value: 0.2276785671710968
        value: 0.2589285671710968
      }
    }
  }
  feature {
    key: "image/object/bbox/xmin"
    value {
      float_list {
        value: 0.2633928656578064
        value: 0.0491071417927742
        value: 0.1830357164144516
        value: 0.2098214328289032
      }
    }
  }
  feature {
    key: "image/object/bbox/ymax"
    value {
      float_list {
        value: 0.7321428656578064
        value: 0.96875
        value: 0.01785714365541935
        value: 0.2366071492433548
      }
    }
  }
  feature {
    key: "image/object/bbox/ymin"
    value {
      float_list {
        value: 0.6830357313156128
        value: 0.9196428656578064
        value: 0.0
        value: 0.1875
      }
    }
  }
  feature {
    key: "image/object/class/label"
    value {
      int64_list {
        value: 1
        value: 1
        value: 1
        value: 1
      }
    }
  }
  feature {
    key: "image/object/class/text"
    value {
      bytes_list {
        value: "1"
        value: "1"
        value: "1"
        value: "1"
      }
    }
  }
  feature {
    key: "image/object/difficult"
    value {
      int64_list {
        value: 0
        value: 0
        value: 0
        value: 0
        value: 0
        value: 0
        value: 0
        value: 0
      }
    }
  }
  feature {
    key: "image/object/truncated"
    value {
      int64_list {
        value: 0
        value: 0
        value: 0
        value: 0
      }
    }
  }
  feature {
    key: "image/object/view"
    value {
      bytes_list {
        value: "Unspecified"
        value: "Unspecified"
        value: "Unspecified"
        value: "Unspecified"
      }
    }
  }
  feature {
    key: "image/source_id"
    value {
      bytes_list {
        value: "1"
      }
    }
  }
  feature {
    key: "image/width"
    value {
      int64_list {
        value: 224
      }
    }
  }
}
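
Going the other way, here is a hedged sketch of parsing this serialized Example back with tf.io.parse_single_example, using the feature keys visible in the dump above:

import tensorflow as tf

# feature spec mirroring the keys printed above; variable-length object
# fields come back as SparseTensors
feature_description = {
    'image/encoded': tf.io.FixedLenFeature([], tf.string),
    'image/format': tf.io.FixedLenFeature([], tf.string),
    'image/height': tf.io.FixedLenFeature([], tf.int64),
    'image/width': tf.io.FixedLenFeature([], tf.int64),
    'image/object/bbox/xmin': tf.io.VarLenFeature(tf.float32),
    'image/object/bbox/xmax': tf.io.VarLenFeature(tf.float32),
    'image/object/bbox/ymin': tf.io.VarLenFeature(tf.float32),
    'image/object/bbox/ymax': tf.io.VarLenFeature(tf.float32),
    'image/object/class/label': tf.io.VarLenFeature(tf.int64),
}

def parse_example(serialized):
    parsed = tf.io.parse_single_example(serialized, feature_description)
    image = tf.io.decode_jpeg(parsed['image/encoded'], channels=3)
    return image, parsed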

Training on custom data with AutoML EfficientDet

- AutoML EfficientDet consists of well-structured modules that add the latest performance-improvement techniques on top of the EfficientDet model

- Many settings are configured via config, but tutorials/documentation for this are scarce

- Training/validation data must be built as tfrecords

 

 

TFRecords overview

- A data format designed for faster access to very large numbers of (mostly unstructured) items such as images, audio, video, and text.

- Packs many separate files on the file system into one large file in protobuf (protocol buffers) format, cutting down random I/O and improving access performance.

  -- A hard disk needs a random I/O operation per file, and even when files are small, each read costs a full seek.

  => Bundling the files into one tfrecord keeps reads sequential, so the disk movement stays small.

  => Normally an image is read with imread and decoded into a numpy array; in a tfrecord it is stored still compressed (encoded).

  => This fixes the case where the GPU cannot be kept busy because I/O does not keep up.

- Introduced so TensorFlow can raise the CPU-to-GPU data transfer rate and speed up training.

(Parallel file I/O alone can often raise the transfer rate enough to get a similar speed-up, though.)

- Drawbacks: an unintuitive API and difficult debugging.

   => So it is typically used for very large datasets where training speed matters; a minimal write/read round trip is sketched below.
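A minimal write/read round trip, assuming nothing beyond TensorFlow itself (the file name and feature keys are purely illustrative):

import tensorflow as tf

# Write: serialize one tf.train.Example into a tfrecord file.
with tf.io.TFRecordWriter('sample.tfrecord') as writer:
  example = tf.train.Example(features=tf.train.Features(feature={
      'image/filename': tf.train.Feature(bytes_list=tf.train.BytesList(value=[b'000000000.jpg'])),
      'image/height': tf.train.Feature(int64_list=tf.train.Int64List(value=[224])),
  }))
  writer.write(example.SerializeToString())

# Read: stream the records back sequentially (one pass, no per-file random I/O).
feature_spec = {
    'image/filename': tf.io.FixedLenFeature([], tf.string),
    'image/height': tf.io.FixedLenFeature([], tf.int64),
}
for raw in tf.data.TFRecordDataset('sample.tfrecord'):
  parsed = tf.io.parse_single_example(raw, feature_spec)
  print(parsed['image/filename'].numpy(), parsed['image/height'].numpy())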

 

 

Structure of a TFRecord

tfrecord #1 : examples #1–#250, each holding an image file and its annotation

Where before one image and one annotation file had to be read at a time, the examples are now read straight through sequentially.

 

Understanding protobuf

Like json, xml, and avro, a data format that includes schema information, used for data transfer and storage (serialization).

xml also carries schema information, but a lot of work is spent on its text formatting.

 

 

Data pipeline of a plain file-based deep learning model

- CPU side: source (image files, annotation files) => DataGenerator/Sequence (numpy arrays) => iterator (holds one batch of data)

- GPU side: fit_generator(), vectorization into tensors, parallel processing

=> Training data cannot be handed from CPU to GPU fast enough, so GPU utilization drops and training takes long.

 

tfrecord-based deep learning data pipeline => removes the CPU bottleneck

- CPU side: tfrecord => tf.data (tensorized) => iterator

- GPU side: model training with fit_generator()

 

- tfrecords cut file I/O time substantially, so more data reaches the GPU, raising GPU utilization and training speed; see the tf.data sketch below.

- Even with plain files, loading with parallel I/O on, say, 8 CPU cores before handing off to the GPU raises utilization enough that training time is about the same as with tfrecords.

- TPUs, however, handle data at higher volume and bandwidth, so there tfrecords give a real reduction in training time.
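As a sketch of the tfrecord side of such a pipeline (the glob pattern and batch size are borrowed from the training setup below; AUTOTUNE lets tf.data choose the parallelism):

import tensorflow as tf

files = tf.data.Dataset.list_files('/content/tfrecord/train/pascal-*.tfrecord')
dataset = (files
           .interleave(tf.data.TFRecordDataset,
                       num_parallel_calls=tf.data.AUTOTUNE)  # parallel shard reads on the CPU
           .batch(8)                                         # the iterator holds one batch at a time
           .prefetch(tf.data.AUTOTUNE))                      # overlap CPU loading with GPU compute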

 

 

 

!git clone --depth 1 https://github.com/google/automl

 

!cd /content/automl/efficientdet; pip install -r requirements.txt

 

!nvidia-smi

Wed Dec  8 11:02:34 2021       
+-----------------------------------------------------------------------------+
| NVIDIA-SMI 495.44       Driver Version: 460.32.03    CUDA Version: 11.2     |
|-------------------------------+----------------------+----------------------+
| GPU  Name        Persistence-M| Bus-Id        Disp.A | Volatile Uncorr. ECC |
| Fan  Temp  Perf  Pwr:Usage/Cap|         Memory-Usage | GPU-Util  Compute M. |
|                               |                      |               MIG M. |
|===============================+======================+======================|
|   0  Tesla K80           Off  | 00000000:00:04.0 Off |                    0 |
| N/A   72C    P8    34W / 149W |      0MiB / 11441MiB |      0%      Default |
|                               |                      |                  N/A |
+-------------------------------+----------------------+----------------------+
                                                                               
+-----------------------------------------------------------------------------+
| Processes:                                                                  |
|  GPU   GI   CI        PID   Type   Process name                  GPU Memory |
|        ID   ID                                                   Usage      |
|=============================================================================|
|  No running processes found                                                 |
+-----------------------------------------------------------------------------+

 

import os
import sys
import tensorflow.compat.v1 as tf

sys.path.append('/content/automl/efficientdet')

import hparams_config
from tf2 import anchors
from model_inspect import ModelInspector

Downloading the PASCAL VOC 2007 dataset

# Download the dataset from the YOLO mirror site; the official PASCAL site is hard to reach.
!wget http://pjreddie.com/media/files/VOCtrainval_06-Nov-2007.tar
!tar -xvf VOCtrainval_06-Nov-2007.tar > /dev/null 2>&1

--2021-12-08 11:03:44--  http://pjreddie.com/media/files/VOCtrainval_06-Nov-2007.tar
Resolving pjreddie.com (pjreddie.com)... 128.208.4.108
Connecting to pjreddie.com (pjreddie.com)|128.208.4.108|:80... connected.
HTTP request sent, awaiting response... 301 Moved Permanently
Location: https://pjreddie.com/media/files/VOCtrainval_06-Nov-2007.tar [following]
--2021-12-08 11:03:45--  https://pjreddie.com/media/files/VOCtrainval_06-Nov-2007.tar
Connecting to pjreddie.com (pjreddie.com)|128.208.4.108|:443... connected.
HTTP request sent, awaiting response... 200 OK
Length: 460032000 (439M) [application/octet-stream]
Saving to: ‘VOCtrainval_06-Nov-2007.tar’

VOCtrainval_06-Nov- 100%[===================>] 438.72M  21.1MB/s    in 22s     

2021-12-08 11:04:07 (20.4 MB/s) - ‘VOCtrainval_06-Nov-2007.tar’ saved [460032000/460032000]

 

!ls -lia /content/VOCdevkit/VOC2007/Annotations/*.xml| wc -l

#  5011

Converting the training and validation data to tfrecords

  • google/automl/efficientdet/dataset/create_pascal_tfrecord.py
  • create_pascal_tfrecord.py converts the XML-format annotations to tfrecords.
  • It reads train.txt from the ImageSets directory and converts the matching xml and image files into train tfrecords; val.txt is handled the same way for the validation tfrecords.
  • About 2,500 image/xml pairs each for train and val, written 100 per tfrecord file.
!mkdir -p /content/tfrecord/train
!mkdir -p /content/tfrecord/val

# train, val
# In --output_path=/content/tfrecord/train/pascal, the directory is /content/tfrecord/train/ and the trailing 'pascal' is the prefix for the tfrecord file names.
!cd /content/automl/efficientdet; PYTHONPATH="/content/automl/efficientdet:$PYTHONPATH" python dataset/create_pascal_tfrecord.py  \
    --data_dir=/content/VOCdevkit --year=VOC2007 --set=train --output_path=/content/tfrecord/train/pascal # 'pascal' is the file-name prefix

!cd /content/automl/efficientdet; PYTHONPATH="/content/automl/efficientdet:$PYTHONPATH" python dataset/create_pascal_tfrecord.py  \
    --data_dir=/content/VOCdevkit --year=VOC2007 --set=val --output_path=/content/tfrecord/val/pascal
                                      # --year picks the VOC version; --set is looked up in ImageSets/Main
# 100 examples are written per shard; the image and its annotation are stored together.


I1208 11:06:21.520007 139845113943936 create_pascal_tfrecord.py:254] Writing to output directory: /content/tfrecord/train
I1208 11:06:21.531928 139845113943936 create_pascal_tfrecord.py:287] Reading from PASCAL VOC2007 dataset.
I1208 11:06:21.532043 139845113943936 create_pascal_tfrecord.py:292] On image 0 of 2501
I1208 11:06:21.681272 139845113943936 create_pascal_tfrecord.py:292] On image 100 of 2501
I1208 11:06:21.820690 139845113943936 create_pascal_tfrecord.py:292] On image 200 of 2501
I1208 11:06:21.942792 139845113943936 create_pascal_tfrecord.py:292] On image 300 of 2501
I1208 11:06:22.065514 139845113943936 create_pascal_tfrecord.py:292] On image 400 of 2501
I1208 11:06:22.190493 139845113943936 create_pascal_tfrecord.py:292] On image 500 of 2501
I1208 11:06:22.317059 139845113943936 create_pascal_tfrecord.py:292] On image 600 of 2501
I1208 11:06:22.437084 139845113943936 create_pascal_tfrecord.py:292] On image 700 of 2501
I1208 11:06:22.564532 139845113943936 create_pascal_tfrecord.py:292] On image 800 of 2501
I1208 11:06:22.681631 139845113943936 create_pascal_tfrecord.py:292] On image 900 of 2501
I1208 11:06:22.803892 139845113943936 create_pascal_tfrecord.py:292] On image 1000 of 2501
I1208 11:06:22.930249 139845113943936 create_pascal_tfrecord.py:292] On image 1100 of 2501
I1208 11:06:23.092439 139845113943936 create_pascal_tfrecord.py:292] On image 1200 of 2501
I1208 11:06:23.220940 139845113943936 create_pascal_tfrecord.py:292] On image 1300 of 2501
I1208 11:06:23.388720 139845113943936 create_pascal_tfrecord.py:292] On image 1400 of 2501
I1208 11:06:23.519618 139845113943936 create_pascal_tfrecord.py:292] On image 1500 of 2501
I1208 11:06:23.643929 139845113943936 create_pascal_tfrecord.py:292] On image 1600 of 2501
I1208 11:06:23.772373 139845113943936 create_pascal_tfrecord.py:292] On image 1700 of 2501
I1208 11:06:23.902039 139845113943936 create_pascal_tfrecord.py:292] On image 1800 of 2501
I1208 11:06:24.024939 139845113943936 create_pascal_tfrecord.py:292] On image 1900 of 2501
I1208 11:06:24.143962 139845113943936 create_pascal_tfrecord.py:292] On image 2000 of 2501
I1208 11:06:24.280410 139845113943936 create_pascal_tfrecord.py:292] On image 2100 of 2501
I1208 11:06:24.428011 139845113943936 create_pascal_tfrecord.py:292] On image 2200 of 2501
I1208 11:06:24.549917 139845113943936 create_pascal_tfrecord.py:292] On image 2300 of 2501
I1208 11:06:24.677992 139845113943936 create_pascal_tfrecord.py:292] On image 2400 of 2501
I1208 11:06:24.799202 139845113943936 create_pascal_tfrecord.py:292] On image 2500 of 2501
I1208 11:06:29.488929 140697936627584 create_pascal_tfrecord.py:254] Writing to output directory: /content/tfrecord/val
I1208 11:06:29.501246 140697936627584 create_pascal_tfrecord.py:287] Reading from PASCAL VOC2007 dataset.
I1208 11:06:29.501367 140697936627584 create_pascal_tfrecord.py:292] On image 0 of 2510
I1208 11:06:29.630910 140697936627584 create_pascal_tfrecord.py:292] On image 100 of 2510
I1208 11:06:29.753052 140697936627584 create_pascal_tfrecord.py:292] On image 200 of 2510
I1208 11:06:29.876389 140697936627584 create_pascal_tfrecord.py:292] On image 300 of 2510
I1208 11:06:29.998543 140697936627584 create_pascal_tfrecord.py:292] On image 400 of 2510
I1208 11:06:30.122147 140697936627584 create_pascal_tfrecord.py:292] On image 500 of 2510
I1208 11:06:30.238847 140697936627584 create_pascal_tfrecord.py:292] On image 600 of 2510
I1208 11:06:30.364139 140697936627584 create_pascal_tfrecord.py:292] On image 700 of 2510
I1208 11:06:30.498464 140697936627584 create_pascal_tfrecord.py:292] On image 800 of 2510
I1208 11:06:30.622997 140697936627584 create_pascal_tfrecord.py:292] On image 900 of 2510
I1208 11:06:30.817349 140697936627584 create_pascal_tfrecord.py:292] On image 1000 of 2510
I1208 11:06:30.955173 140697936627584 create_pascal_tfrecord.py:292] On image 1100 of 2510
I1208 11:06:31.090512 140697936627584 create_pascal_tfrecord.py:292] On image 1200 of 2510
I1208 11:06:31.228125 140697936627584 create_pascal_tfrecord.py:292] On image 1300 of 2510
I1208 11:06:31.364045 140697936627584 create_pascal_tfrecord.py:292] On image 1400 of 2510
I1208 11:06:31.507602 140697936627584 create_pascal_tfrecord.py:292] On image 1500 of 2510
I1208 11:06:31.642575 140697936627584 create_pascal_tfrecord.py:292] On image 1600 of 2510
I1208 11:06:31.772114 140697936627584 create_pascal_tfrecord.py:292] On image 1700 of 2510
I1208 11:06:31.898878 140697936627584 create_pascal_tfrecord.py:292] On image 1800 of 2510
I1208 11:06:32.020886 140697936627584 create_pascal_tfrecord.py:292] On image 1900 of 2510
I1208 11:06:32.145466 140697936627584 create_pascal_tfrecord.py:292] On image 2000 of 2510
I1208 11:06:32.268044 140697936627584 create_pascal_tfrecord.py:292] On image 2100 of 2510
I1208 11:06:32.395010 140697936627584 create_pascal_tfrecord.py:292] On image 2200 of 2510
I1208 11:06:32.536167 140697936627584 create_pascal_tfrecord.py:292] On image 2300 of 2510
I1208 11:06:32.706319 140697936627584 create_pascal_tfrecord.py:292] On image 2400 of 2510
I1208 11:06:32.842475 140697936627584 create_pascal_tfrecord.py:292] On image 2500 of 2510
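As a quick sanity check on the generated shards (a sketch; the totals should match the 2501 train / 2510 val images in the logs above):

import glob
import tensorflow as tf

for split in ['train', 'val']:
  shards = sorted(glob.glob('/content/tfrecord/{}/pascal-*.tfrecord'.format(split)))
  num_records = sum(1 for _ in tf.data.TFRecordDataset(shards))
  print(split, len(shards), 'shards,', num_records, 'records')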

Setting up the train config.

  • The various training settings are stored in a config. The model used is efficientdet-d0.
# Use a Google Drive directory for the weight files saved every epoch.
# Mount Google Drive for access.
import os, sys 
from google.colab import drive 

drive.mount('/content/gdrive')

# Link the Google Drive directory with a soft link.
!ln -s /content/gdrive/My\ Drive/ /mydrive
!ls /mydrive
!mkdir -p /mydrive/model_trained


Mounted at /content/gdrive

 

config = hparams_config.get_detection_config('efficientdet-d0')
print(config)
# mmdetection ships default values that you then update;
# automl fills in the default config directly.

act_type: swish
alpha: 0.25
anchor_scale: 4.0
apply_bn_for_resampling: true
aspect_ratios:
- 1.0
- 2.0
- 0.5
autoaugment_policy: null
backbone_config: null
backbone_name: efficientnet-b0
box_class_repeats: 3
box_loss_weight: 50.0
ckpt_var_scope: null
clip_gradients_norm: 10.0
conv_after_downsample: false
conv_bn_act_pattern: false
data_format: channels_last
dataset_type: null
delta: 0.1
drop_remainder: true
first_lr_drop_epoch: 200.0
fpn_cell_repeats: 3
fpn_config: null
fpn_name: null
fpn_num_filters: 64
fpn_weight_method: null
gamma: 1.5
grad_checkpoint: false
grid_mask: false
heads:
- object_detection
image_size: 512
img_summary_steps: null
input_rand_hflip: true
iou_loss_type: null
iou_loss_weight: 1.0
is_training_bn: true
jitter_max: 2.0
jitter_min: 0.1
label_map: null
label_smoothing: 0.0
learning_rate: 0.08
loss_scale: null
lr_decay_method: cosine
lr_warmup_epoch: 1.0
lr_warmup_init: 0.008
map_freq: 5
max_instances_per_image: 100
max_level: 7
mean_rgb:
- 123.675
- 116.28
- 103.53
min_level: 3
mixed_precision: false
model_optimizations: {}
momentum: 0.9
moving_average_decay: 0.9998
name: efficientdet-d0
nms_configs:
    iou_thresh: null
    max_nms_inputs: 0
    max_output_size: 100
    method: gaussian
    pyfunc: false
    score_thresh: 0.0
    sigma: null
num_classes: 90
num_epochs: 300
num_scales: 3
optimizer: sgd
poly_lr_power: 0.9
positives_momentum: null
regenerate_source_id: false
sample_image: null
save_freq: epoch
scale_range: false
second_lr_drop_epoch: 250.0
seg_num_classes: 3
separable_conv: true
skip_crowd_during_training: true
skip_mismatch: true
stddev_rgb:
- 58.395
- 57.120000000000005
- 57.375
strategy: null
survival_prob: null
target_size: null
tflite_max_detections: 100
use_keras_model: true
var_freeze_expr: null
verbose: 1
weight_decay: 4.0e-05

 

class TRAIN_CFG:
  model_name = 'efficientdet-d0' # efficientdet model name
  strategy = '' # training strategy for TPU, multiple GPUs, or a single GPU
  model_dir = '/mydrive/model_trained' # where trained models are saved; the ModelCheckpoint callback writes here during training
  pretrained_ckpt = '/content/efficientdet-d0' # pretrained model to download
  hparams = 'num_classes=20,moving_average_decay=0,mixed_precision=true' # several overrides written in one string
  use_xla = False
  use_fake_data = False
  batch_size = 8
  eval_samples = 5000 # number of evaluation images
  steps_per_execution = 1 # used when ModelCheckpoint's save_freq is set to a number
  num_examples_per_epoch = 2500 # examples applied per epoch (one record is called an example)
  num_epochs = 15 # number of epochs
  train_file_pattern = '/content/tfrecord/train/pascal-*.tfrecord' # glob pattern for the training tfrecords
  val_file_pattern = '/content/tfrecord/val/pascal-*.tfrecord' # glob pattern for the validation tfrecords
  val_json_file = None # optional coco validation json
  mode = 'traineval' # train only, or train and eval together ('traineval')
   
  # These all end up in config later; they are arguments carried over from the CLI flags.
  
  num_cores = 2 # on TPU this would be 8
  tpu = None
  gcp_project = None
  tpu_zone = None
  eval_master = ''
  eval_name = None
  tf_random_seed = 2021
  profile = False
  debug = False

 

from tf2.train import setup_model
import hparams_config

import utils
from tf2 import tfmot
from tf2 import train_lib
from tf2 import util_keras

config = hparams_config.get_detection_config(TRAIN_CFG.model_name)
config.override(TRAIN_CFG.hparams)

steps_per_epoch = TRAIN_CFG.num_examples_per_epoch // TRAIN_CFG.batch_size

if tf.config.list_physical_devices('GPU'):
  ds_strategy = tf.distribute.OneDeviceStrategy('device:GPU:0')
else:
  ds_strategy = tf.distribute.OneDeviceStrategy('device:CPU:0')

print(ds_strategy)

# steps_per_execution applies when ModelCheckpoint's save_freq is set to a number. num_epochs and steps_per_epoch are set again later in model.fit(), but values must be provided here for now.
params = dict(
      profile=TRAIN_CFG.profile,
      mode = TRAIN_CFG.mode,
      model_name=TRAIN_CFG.model_name,
      steps_per_execution=TRAIN_CFG.steps_per_execution,
      num_epochs = TRAIN_CFG.num_epochs,
      model_dir=TRAIN_CFG.model_dir,
      steps_per_epoch=steps_per_epoch,
      strategy=TRAIN_CFG.strategy,
      batch_size=TRAIN_CFG.batch_size,
      tf_random_seed=TRAIN_CFG.tf_random_seed,
      debug=TRAIN_CFG.debug,
      val_json_file=TRAIN_CFG.val_json_file,
      eval_samples=TRAIN_CFG.eval_samples,
      num_shards=ds_strategy.num_replicas_in_sync
      )

config.override(params, True)




# Convert image_size to a tuple: 512 becomes (512, 512), '1920x880' becomes (1920, 880).
config.image_size = utils.parse_image_size(config.image_size)
print(config)

<tensorflow.python.distribute.one_device_strategy.OneDeviceStrategyV1 object at 0x7f12d2237990>
act_type: swish
alpha: 0.25
anchor_scale: 4.0
apply_bn_for_resampling: true
aspect_ratios:
- 1.0
- 2.0
- 0.5
autoaugment_policy: null
backbone_config: null
backbone_name: efficientnet-b0
batch_size: 8
box_class_repeats: 3
box_loss_weight: 50.0
ckpt_var_scope: null
clip_gradients_norm: 10.0
conv_after_downsample: false
conv_bn_act_pattern: false
data_format: channels_last
dataset_type: null
debug: false
delta: 0.1
drop_remainder: true
eval_samples: 5000
first_lr_drop_epoch: 200.0
fpn_cell_repeats: 3
fpn_config: null
fpn_name: null
fpn_num_filters: 64
fpn_weight_method: null
gamma: 1.5
grad_checkpoint: false
grid_mask: false
heads:
- object_detection
image_size: !!python/tuple
- 512
- 512
img_summary_steps: null
input_rand_hflip: true
iou_loss_type: null
iou_loss_weight: 1.0
is_training_bn: true
jitter_max: 2.0
jitter_min: 0.1
label_map: null
label_smoothing: 0.0
learning_rate: 0.08
loss_scale: null
lr_decay_method: cosine
lr_warmup_epoch: 1.0
lr_warmup_init: 0.008
map_freq: 5
max_instances_per_image: 100
max_level: 7
mean_rgb:
- 123.675
- 116.28
- 103.53
min_level: 3
mixed_precision: true
mode: traineval
model_dir: /mydrive/model_trained
model_name: efficientdet-d0
model_optimizations: {}
momentum: 0.9
moving_average_decay: 0
name: efficientdet-d0
nms_configs:
    iou_thresh: null
    max_nms_inputs: 0
    max_output_size: 100
    method: gaussian
    pyfunc: false
    score_thresh: 0.0
    sigma: null
num_classes: 20
num_epochs: 15
num_scales: 3
num_shards: 1
optimizer: sgd
poly_lr_power: 0.9
positives_momentum: null
profile: false
regenerate_source_id: false
sample_image: null
save_freq: epoch
scale_range: false
second_lr_drop_epoch: 250.0
seg_num_classes: 3
separable_conv: true
skip_crowd_during_training: true
skip_mismatch: true
stddev_rgb:
- 58.395
- 57.120000000000005
- 57.375
steps_per_epoch: 312
steps_per_execution: 1
strategy: ''
survival_prob: null
target_size: null
tf_random_seed: 2021
tflite_max_detections: 100
use_keras_model: true
val_json_file: null
var_freeze_expr: null
verbose: 1
weight_decay: 4.0e-05

 

Creating the model

  • Create the EfficientDet d0 model based on the config.
  • Download the COCO-pretrained file, then load that checkpoint's weights into the newly created d0 model.
import utils
from tf2 import tfmot
from tf2 import train_lib
from tf2 import util_keras
# Do not run the lines below on a P100 GPU. On a V100, set mixed_precision with mixed_float16.
#precision = utils.get_precision(config.strategy, config.mixed_precision)
#policy = tf.keras.mixed_precision.Policy(precision)
#tf.keras.mixed_precision.set_global_policy(policy)

 

MODEL = 'efficientdet-d0' 

def download(m):
  if m not in os.listdir():
    !wget https://storage.googleapis.com/cloud-tpu-checkpoints/efficientdet/coco/{m}.tar.gz
    !tar zxf {m}.tar.gz
  ckpt_path = os.path.join(os.getcwd(), m)
  return ckpt_path

# Download checkpoint.
ckpt_path = download(MODEL)
print('Use model in {}'.format(ckpt_path))



--2021-12-08 11:08:54--  https://storage.googleapis.com/cloud-tpu-checkpoints/efficientdet/coco/efficientdet-d0.tar.gz
Resolving storage.googleapis.com (storage.googleapis.com)... 74.125.133.128, 74.125.140.128, 108.177.15.128, ...
Connecting to storage.googleapis.com (storage.googleapis.com)|74.125.133.128|:443... connected.
HTTP request sent, awaiting response... 200 OK
Length: 28994253 (28M) [application/octet-stream]
Saving to: ‘efficientdet-d0.tar.gz’

efficientdet-d0.tar 100%[===================>]  27.65M   177MB/s    in 0.2s    

2021-12-08 11:08:54 (177 MB/s) - ‘efficientdet-d0.tar.gz’ saved [28994253/28994253]

Use model in /content/efficientdet-d0

The COCO-pretrained model's class count must be changed to num_classes=20; the downloaded checkpoint actually carries 90 classes.

 

from tf2 import train_lib
from tf2 import train

# Create an efficientdet d0 model with 20 classes. 
model = train_lib.EfficientDetNetTrain(config=config)
model = train.setup_model(model, config)

# If a pretrained checkpoint exists, load its weights into the model, excluding the classification layer:
# for transfer learning, the COCO classification head must not be carried over.
# Read the latest checkpoint and restore it with exclude_layers=['class_net'].
#class TRAIN_CFG: pretrained_ckpt = '/content/efficientdet-d0' 
if TRAIN_CFG.pretrained_ckpt:
  ckpt_path = tf.train.latest_checkpoint(TRAIN_CFG.pretrained_ckpt)
  util_keras.restore_ckpt(
      model,
      ckpt_path,
      config.moving_average_decay,
      exclude_layers=['class_net'])
  
train.init_experimental(config)

model.summary()



/content/automl/efficientdet/utils.py:23: UserWarning: `layer.updates` will be removed in a future version. This property should not be used in TensorFlow 2.0, as `updates` are applied automatically.
  from tensorflow.python.tpu import tpu_function  # pylint:disable=g-direct-tensorflow-import
/content/automl/efficientdet/utils.py:255: UserWarning: `layer.updates` will be removed in a future version. This property should not be used in TensorFlow 2.0, as `updates` are applied automatically.
  for u in self.updates:
WARNING:absl:Shape mismatch: class_net/class-predict/pointwise_kernel
WARNING:absl:Shape mismatch: class_net/class-predict/bias
Model: ""
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 efficientnet-b0 (Model)     multiple                  3634844   
                                                                 
 resample_p6 (ResampleFeatur  multiple                 20800     
 eMap)                                                           
                                                                 
 resample_p7 (ResampleFeatur  multiple                 0         
 eMap)                                                           
                                                                 
 fpn_cells (FPNCells)        multiple                  179321    
                                                                 
 class_net (ClassNet)        multiple                  30324     
                                                                 
 box_net (BoxNet)            multiple                  20964     
                                                                 
=================================================================
Total params: 3,886,253
Trainable params: 3,839,117
Non-trainable params: 47,136
_________________________________________________________________

Creating the train and validation datasets and running training.

  • A get_dataset() function builds the datasets for training and validation.
'''
class TRAIN_CFG:
  train_file_pattern = '/content/tfrecord/train/pascal-*.tfrecord' # glob pattern for the training tfrecords
  val_file_pattern = '/content/tfrecord/val/pascal-*.tfrecord' # glob pattern for the validation tfrecords
'''

import dataloader

def get_dataset(is_training, config):
  # If is_training is True use TRAIN_CFG.train_file_pattern, otherwise val_file_pattern.
  file_pattern = (
    TRAIN_CFG.train_file_pattern
    if is_training else TRAIN_CFG.val_file_pattern) # picks train or val among the generated tfrecords
  if not file_pattern:
    raise ValueError('No matching files.')

  return dataloader.InputReader( # feeds the dataset matching the pattern into the model
    file_pattern,
    is_training=is_training,
    use_fake_data=TRAIN_CFG.use_fake_data,
    max_instances_per_image=config.max_instances_per_image,
    debug=TRAIN_CFG.debug)(
        config.as_dict())
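To confirm the pipeline works end to end, one batch can be pulled out. A sketch only: the exact element structure (images plus a labels dict) comes from automl's dataloader and may differ between repo versions.

train_dataset = get_dataset(True, config)
images, labels = next(iter(train_dataset))
print(images.shape)  # expected (8, 512, 512, 3) for efficientdet-d0 with batch_size=8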

 

import pandas as pd

#  Read train.txt and val.txt to get the train and val image counts.
train_df = pd.read_csv('/content/VOCdevkit/VOC2007/ImageSets/Main/train.txt', sep=' ', 
                       header=None, names=['file_id'], dtype={'file_id':str})
val_df = pd.read_csv('/content/VOCdevkit/VOC2007/ImageSets/Main/val.txt', sep=' ', 
                       header=None, names=['file_id'], dtype={'file_id':str})

train_images_num = train_df.shape[0]
val_images_num = val_df.shape[0]
print(train_images_num, val_images_num)

train_df.head()

2501 2510
file_id
0	000012
1	000017
2	000023
3	000026
4	000032

 

 

import tensorflow as tf
from tf2 import train_lib
from tf2 import train

# Function that builds the model from config and loads the pretrained weights.
# To switch models, just edit the config.
def get_efficientdet_model(config):
  model = train_lib.EfficientDetNetTrain(config=config)
  model = train.setup_model(model, config)

  if TRAIN_CFG.pretrained_ckpt:
    ckpt_path = tf.train.latest_checkpoint(TRAIN_CFG.pretrained_ckpt)
    util_keras.restore_ckpt(
        model,
        ckpt_path,
        config.moving_average_decay,
        exclude_layers=['class_net'])
    
  train.init_experimental(config)
  return model

model = get_efficientdet_model(config)
model.summary()

/content/automl/efficientdet/utils.py:23: UserWarning: `layer.updates` will be removed in a future version. This property should not be used in TensorFlow 2.0, as `updates` are applied automatically.
  from tensorflow.python.tpu import tpu_function  # pylint:disable=g-direct-tensorflow-import
/content/automl/efficientdet/utils.py:255: UserWarning: `layer.updates` will be removed in a future version. This property should not be used in TensorFlow 2.0, as `updates` are applied automatically.
  for u in self.updates:
WARNING:absl:Not found efficientnet-b0/stem_1/conv2d_1/kernel in /content/efficientdet-d0/model
WARNING:absl:Not found efficientnet-b0/stem_1/tpu_batch_normalization/gamma in /content/efficientdet-d0/model
WARNING:absl:Not found efficientnet-b0/stem_1/tpu_batch_normalization/beta in /content/efficientdet-d0/model
WARNING:absl:Shape mismatch: class_net/class-predict/pointwise_kernel
WARNING:absl:Shape mismatch: class_net/class-predict/bias
WARNING:absl:Not found efficientnet-b0/stem_1/tpu_batch_normalization/moving_mean in /content/efficientdet-d0/model
WARNING:absl:Not found efficientnet-b0/stem_1/tpu_batch_normalization/moving_variance in /content/efficientdet-d0/model
Model: ""
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 efficientnet-b0 (Model)     multiple                  3634844   
                                                                 
 resample_p6 (ResampleFeatur  multiple                 20800     
 eMap)                                                           
                                                                 
 resample_p7 (ResampleFeatur  multiple                 0         
 eMap)                                                           
                                                                 
 fpn_cells (FPNCells)        multiple                  179321    
                                                                 
 class_net (ClassNet)        multiple                  30324     
                                                                 
 box_net (BoxNet)            multiple                  20964     
                                                                 
=================================================================
Total params: 3,886,253
Trainable params: 3,839,117
Non-trainable params: 47,136
_________________________________________________________________

 

from tf2 import train
import numpy as np

# Ignore the steps_per_epoch and num_epochs already set in config and set them again here.
# steps_per_epoch = training image count // batch_size; val_steps_per_epoch = validation image count // batch_size
tr_steps_per_epoch = train_images_num//config.batch_size
val_steps_per_epoch = val_images_num//config.batch_size
print('tr_steps_per_epoch:', tr_steps_per_epoch, 'val_steps_per_epoch:', val_steps_per_epoch)

# Build the validation dataset when config.mode is traineval or eval.
val_dataset = get_dataset(False, config) if 'eval' in config.mode else None
# Callbacks are created as configured: ModelCheckpoint runs every epoch, COCO evaluation every 5 epochs
# (config.save_freq = 'epoch'; config.map_freq = 5).
# One epoch takes about 3m30s on a P100, so choose the number of epochs accordingly.
model.fit(
    get_dataset(True, config), # train dataset
    epochs=15, 
    steps_per_epoch=tr_steps_per_epoch ,
    callbacks=train_lib.get_callbacks(config.as_dict(), val_dataset),
    validation_data=val_dataset,
    validation_steps=val_steps_per_epoch)

tf.keras.backend.clear_session()

tr_steps_per_epoch: 312 val_steps_per_epoch: 313
Epoch 1/15
/content/automl/efficientdet/utils.py:23: UserWarning: `layer.updates` will be removed in a future version. This property should not be used in TensorFlow 2.0, as `updates` are applied automatically.
  from tensorflow.python.tpu import tpu_function  # pylint:disable=g-direct-tensorflow-import
/content/automl/efficientdet/utils.py:255: UserWarning: `layer.updates` will be removed in a future version. This property should not be used in TensorFlow 2.0, as `updates` are applied automatically.
  for u in self.updates:
  312/312 [==============================] - ETA: 0s - det_loss: 129.3025 - cls_loss: 129.0228 - box_loss: 0.0056 - reg_l2_loss: 0.0954 - loss: 129.3978 - learning_rate: 0.0090 - gradient_norm: 3.3109
Epoch 00001: saving model to /mydrive/model_trained/ckpt-1
312/312 [==============================] - 450s 1s/step - det_loss: 128.8929 - cls_loss: 128.6131 - box_loss: 0.0056 - reg_l2_loss: 0.0954 - loss: 128.9883 - learning_rate: 0.0090 - gradient_norm: 3.3069 - val_det_loss: 0.9879 - val_cls_loss: 0.6648 - val_box_loss: 0.0065 - val_reg_l2_loss: 0.0955 - val_loss: 1.0834
Epoch 2/15
312/312 [==============================] - ETA: 0s - det_loss: 0.8750 - cls_loss: 0.6585 - box_loss: 0.0043 - reg_l2_loss: 0.0956 - loss: 0.9706 - learning_rate: 0.0097 - gradient_norm: 2.6721
Epoch 00002: saving model to /mydrive/model_trained/ckpt-2
312/312 [==============================] - 387s 1s/step - det_loss: 0.8749 - cls_loss: 0.6584 - box_loss: 0.0043 - reg_l2_loss: 0.0956 - loss: 0.9705 - learning_rate: 0.0097 - gradient_norm: 2.6714 - val_det_loss: 0.8318 - val_cls_loss: 0.5513 - val_box_loss: 0.0056 - val_reg_l2_loss: 0.0957 - val_loss: 0.9275
Epoch 3/15
312/312 [==============================] - ETA: 0s - det_loss: 0.7677 - cls_loss: 0.5700 - box_loss: 0.0040 - reg_l2_loss: 0.0958 - loss: 0.8634 - learning_rate: 0.0092 - gradient_norm: 2.7775
Epoch 00003: saving model to /mydrive/model_trained/ckpt-3
312/312 [==============================] - 401s 1s/step - det_loss: 0.7678 - cls_loss: 0.5701 - box_loss: 0.0040 - reg_l2_loss: 0.0958 - loss: 0.8636 - learning_rate: 0.0092 - gradient_norm: 2.7790 - val_det_loss: 0.7571 - val_cls_loss: 0.4841 - val_box_loss: 0.0055 - val_reg_l2_loss: 0.0959 - val_loss: 0.8529

Performing inference with the trained model file.

import hparams_config

infer_config = hparams_config.get_efficientdet_config('efficientdet-d0')
print(infer_config)


act_type: swish
alpha: 0.25
anchor_scale: 4.0
apply_bn_for_resampling: true
aspect_ratios:
- 1.0
- 2.0
- 0.5
autoaugment_policy: null
backbone_config: null
backbone_name: efficientnet-b0
box_class_repeats: 3
box_loss_weight: 50.0
ckpt_var_scope: null
clip_gradients_norm: 10.0
conv_after_downsample: false
conv_bn_act_pattern: false
data_format: channels_last
dataset_type: null
delta: 0.1
drop_remainder: true
first_lr_drop_epoch: 200.0
fpn_cell_repeats: 3
fpn_config: null
fpn_name: null
fpn_num_filters: 64
fpn_weight_method: null
gamma: 1.5
grad_checkpoint: false
grid_mask: false
heads:
- object_detection
image_size: 512
img_summary_steps: null
input_rand_hflip: true
iou_loss_type: null
iou_loss_weight: 1.0
is_training_bn: true
jitter_max: 2.0
jitter_min: 0.1
label_map: null
label_smoothing: 0.0
learning_rate: 0.08
loss_scale: null
lr_decay_method: cosine
lr_warmup_epoch: 1.0
lr_warmup_init: 0.008
map_freq: 5
max_instances_per_image: 100
max_level: 7
mean_rgb:
- 123.675
- 116.28
- 103.53
min_level: 3
mixed_precision: false
model_optimizations: {}
momentum: 0.9
moving_average_decay: 0.9998
name: efficientdet-d0
nms_configs:
    iou_thresh: null
    max_nms_inputs: 0
    max_output_size: 100
    method: gaussian
    pyfunc: false
    score_thresh: 0.0
    sigma: null
num_classes: 90
num_epochs: 300
num_scales: 3
optimizer: sgd
poly_lr_power: 0.9
positives_momentum: null
regenerate_source_id: false
sample_image: null
save_freq: epoch
scale_range: false
second_lr_drop_epoch: 250.0
seg_num_classes: 3
separable_conv: true
skip_crowd_during_training: true
skip_mismatch: true
stddev_rgb:
- 58.395
- 57.120000000000005
- 57.375
strategy: null
survival_prob: null
target_size: null
tflite_max_detections: 100
use_keras_model: true
var_freeze_expr: null
verbose: 1
weight_decay: 4.0e-05

 

infer_config = hparams_config.get_efficientdet_config('efficientdet-d0')
# update specific config entries
infer_config.model_name = 'efficientdet-d0'
infer_config.model_dir = '/mydrive/model_trained'
# infer_config.num_classes must be changed to 20 to match the fine-tuned model.
infer_config.num_classes = 20
infer_config.is_training_bn = False
infer_config.nms_configs.score_thresh = 0.4
infer_config.nms_configs.max_output_size = 100
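The class-count and batch-norm overrides can also be written as the csv-style k=v string that Config.override() accepts elsewhere in this notebook; a sketch only (the nested nms settings are still assigned directly, since string support for nested keys is not assumed here):

infer_config = hparams_config.get_efficientdet_config('efficientdet-d0')
infer_config.override('num_classes=20,is_training_bn=false')  # csv-style k=v overrides
infer_config.nms_configs.score_thresh = 0.4
infer_config.nms_configs.max_output_size = 100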

 

import inference
from tf2 import efficientdet_keras # the keras folder was renamed to tf2, so 'from keras import ...' no longer applies

model = efficientdet_keras.EfficientDetModel(config=infer_config)
model.build((None, None, None, 3))
print('#### checkpoint name:', tf.train.latest_checkpoint(infer_config.model_dir))
model.load_weights(tf.train.latest_checkpoint(infer_config.model_dir))
model.summary()


WARNING:tensorflow:From /usr/local/lib/python3.7/dist-packages/tensorflow/python/ops/parallel_for/pfor.py:2382: calling gather (from tensorflow.python.ops.array_ops) with validate_indices is deprecated and will be removed in a future version.
Instructions for updating:
The `validate_indices` argument has no effect. Indices are always validated on CPU and never validated on GPU.
WARNING:tensorflow:From /usr/local/lib/python3.7/dist-packages/tensorflow/python/ops/parallel_for/pfor.py:2382: calling gather (from tensorflow.python.ops.array_ops) with validate_indices is deprecated and will be removed in a future version.
Instructions for updating:
The `validate_indices` argument has no effect. Indices are always validated on CPU and never validated on GPU.
WARNING:tensorflow:Using a while_loop for converting ResizeBilinear
WARNING:tensorflow:Using a while_loop for converting ResizeBilinear
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/base_layer.py:1331: UserWarning: `layer.updates` will be removed in a future version. This property should not be used in TensorFlow 2.0, as `updates` are applied automatically.
  warnings.warn('`layer.updates` will be removed in a future version. '
WARNING:tensorflow:Using a while_loop for converting NonMaxSuppressionV5
WARNING:tensorflow:Using a while_loop for converting NonMaxSuppressionV5
#### checkpoint name: /mydrive/model_trained/ckpt-15
Model: ""
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
efficientnet-b0 (Model)      multiple                  3634844   
_________________________________________________________________
resample_p6 (ResampleFeature multiple                  20800     
_________________________________________________________________
resample_p7 (ResampleFeature multiple                  0         
_________________________________________________________________
fpn_cells (FPNCells)         multiple                  179321    
_________________________________________________________________
class_net (ClassNet)         multiple                  30324     
_________________________________________________________________
box_net (BoxNet)             multiple                  20964     
=================================================================
Total params: 3,886,253
Trainable params: 3,839,117
Non-trainable params: 47,136
_________________________________________________________________

 

import time

class ExportModel(tf.Module):

  def __init__(self, model):
    super().__init__()
    self.model = model

  @tf.function
  def f(self, imgs):
    # wrap inference in a tf.function; post_mode='global' applies global NMS postprocessing
    return self.model(imgs, training=False, post_mode='global')

export_model = ExportModel(model)
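Because the call is wrapped in a tf.Module with a @tf.function, it can also be exported as a SavedModel if desired. A sketch, with a hypothetical export path:

tf.saved_model.save(
    export_model, '/content/exported_d0',
    signatures=export_model.f.get_concrete_function(
        tf.TensorSpec(shape=(None, None, None, 3), dtype=tf.uint8)))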

 

!mkdir -p /content/data
!wget -O ./data/beatles01.jpg https://raw.githubusercontent.com/chulminkw/DLCV/master/data/image/beatles01.jpg
!wget -O ./data/baseball01.jpg https://raw.githubusercontent.com/chulminkw/DLCV/master/data/image/baseball01.jpg

 

import cv2
import matplotlib.pyplot as plt
import numpy as np

img = cv2.cvtColor(cv2.imread('/content/data/beatles01.jpg'), cv2.COLOR_BGR2RGB)
imgs= img[np.newaxis, ...]

start_time = time.time()
boxes, scores, classes, valid_len = export_model.f(imgs)

print('elapsed time:', time.time() - start_time)

# elapsed time: 0.021811485290527344

 

labels_to_names =  {1:'aeroplane', 2:'bicycle', 3:'bird', 4:'boat', 5:'bottle', 6:'bus', 7:'car',
               8:'cat', 9:'chair', 10:'cow', 11:'diningtable', 12:'dog', 13:'horse',
               14:'motorbike', 15:'person', 16:'pottedplant', 17:'sheep', 18:'sofa', 19:'train',
               20:'tvmonitor'}

 

def get_detected_img(export_model, img_array, is_print=True):   
  # automl efficientdet returns bbox coordinates already in original-image coordinates, so no separate scaling is needed.
  '''
  height = img_array.shape[0]
  width = img_array.shape[1]
  '''
  # cv2.rectangle() draws directly into the array passed to it, so make a separate copy to draw on.
  draw_img = img_array.copy()

  # colors for the bounding-box border and the caption text
  green_color=(0, 255, 0)
  red_color=(0, 0, 255)

  # convert the cv2 numpy image array to a tensor
  img_tensor = tf.convert_to_tensor(img_array, dtype=tf.uint8)[tf.newaxis, ...]
  #img_tensor = tf.convert_to_tensor(img_array, dtype=tf.float32)[tf.newaxis, ...]

  # run inference
  start_time = time.time()
  # the automl efficientdet model returns boxes, scores, classes, and the number of detections, each as a tensor
  boxes, scores, classes, valid_len = export_model.f(img_tensor)
  # convert the tensors to numpy for visualization
  boxes = boxes.numpy()
  scores = scores.numpy()
  classes = classes.numpy()
  valid_len = valid_len.numpy()
  
  # iterate over the detected objects; scores come out in descending order,
  # and detections below the nms score_thresh were already filtered out by the model
  for i in range(valid_len[0]):
    score = scores[0, i]
    
    box = boxes[0, i]

    ''' **** note ******
    box is in (ymin, xmin, ymax, xmax) order, in original-image coordinates. '''
    left = box[1]
    top = box[0] 
    right = box[3] 
    bottom = box[2] 

    # extract the class id and map it to a class name
    class_id = classes[0, i]
    caption = "{}: {:.4f}".format(labels_to_names[class_id], score)
    print(caption)
    # cv2.rectangle() draws the box into draw_img; position arguments must be integers.
    cv2.rectangle(draw_img, (int(left), int(top)), (int(right), int(bottom)), color=green_color, thickness=2)
    cv2.putText(draw_img, caption, (int(left), int(top - 5)), cv2.FONT_HERSHEY_SIMPLEX, 0.4, red_color, 1)

  if is_print:
    print('Detection time:', round(time.time() - start_time, 2), 'seconds')

  return draw_img

 

import cv2
img_array = cv2.cvtColor(cv2.imread('/content/data/beatles01.jpg'), cv2.COLOR_BGR2RGB)

draw_img = get_detected_img(export_model, img_array, is_print=True)
plt.figure(figsize=(16, 16))
plt.imshow(draw_img)


person: 0.9398
person: 0.9376
person: 0.8987
person: 0.8970
car: 0.7241
car: 0.5073
Detection time: 0.02 seconds

 


 Training on custom data with AutoML EfficientDet

- Training/validation data must be prepared as tfrecords.

   => Converting the files to tfrecord makes training fast, at the cost of extra complexity and harder debugging.

- A converter utility from PASCAL VOC to tfrecord is provided (create_pascal_tfrecord.py, used above).

 

 


When reloading the weights of the pretrained last checkpoint with load_weights(), the runtime must be restarted.

  • To make that easy, all of the preceding logic is consolidated into the single cell below.
import os
import sys
import tensorflow.compat.v1 as tf
import numpy as np

sys.path.append('/content/automl/efficientdet')

import hparams_config
from tf2 import anchors # the keras folder was renamed to tf2
from model_inspect import ModelInspector

class INFER_CFG:
  model_name = 'efficientdet-d0' # efficientdet model name
  model_dir = '/content/efficientdet-d0' # directory holding the pretrained checkpoint
  hparams = '' # csv-style k=v pairs or a yaml file

config = hparams_config.get_efficientdet_config(INFER_CFG.model_name)
config.is_training_bn = False
# Readjust config.image_size to the original image size, passed as a 'widthxheight' string.
config.image_size = '1920x1280'
config.nms_configs.score_thresh = 0.4
config.nms_configs.max_output_size = 100
config.override(INFER_CFG.hparams)

import inference
from tf2 import efficientdet_keras # the keras folder was renamed to tf2

model = efficientdet_keras.EfficientDetModel(config=config)
model.build((None, None, None, 3))
print('#### checkpoint name:', tf.train.latest_checkpoint(INFER_CFG.model_dir))
# When reloading the last pretrained checkpoint's weights with load_weights(), restart the runtime first.
model.load_weights(tf.train.latest_checkpoint(INFER_CFG.model_dir))
model.summary()

class ExportModel(tf.Module):

  def __init__(self, model):
    super().__init__()
    self.model = model

  @tf.function
  def f(self, imgs):
    return self.model(imgs, training=False, post_mode='global')

export_model = ExportModel(model)

WARNING:tensorflow:Using a while_loop for converting ResizeBilinear
WARNING:tensorflow:Using a while_loop for converting ResizeBilinear
/content/automl/efficientdet/utils.py:23: UserWarning: `layer.updates` will be removed in a future version. This property should not be used in TensorFlow 2.0, as `updates` are applied automatically.
  from tensorflow.python.tpu import tpu_function  # pylint:disable=g-direct-tensorflow-import
/content/automl/efficientdet/utils.py:255: UserWarning: `layer.updates` will be removed in a future version. This property should not be used in TensorFlow 2.0, as `updates` are applied automatically.
  for u in self.updates:
WARNING:tensorflow:Using a while_loop for converting NonMaxSuppressionV5
WARNING:tensorflow:Using a while_loop for converting NonMaxSuppressionV5
#### checkpoint name: /content/efficientdet-d0/model
WARNING:tensorflow:From /usr/local/lib/python3.7/dist-packages/tensorflow/python/training/tracking/util.py:1345: NameBasedSaverStatus.__init__ (from tensorflow.python.training.tracking.util) is deprecated and will be removed in a future version.
Instructions for updating:
Restoring a name-based tf.train.Saver checkpoint using the object-based restore API. This mode uses global names to match variables, and so is somewhat fragile. It also adds new restore ops to the graph each time it is called when graph building. Prefer re-encoding training checkpoints in the object-based format: run save() on the object-based saver (the same one this message is coming from) and use that checkpoint in the future.
WARNING:tensorflow:From /usr/local/lib/python3.7/dist-packages/tensorflow/python/training/tracking/util.py:1345: NameBasedSaverStatus.__init__ (from tensorflow.python.training.tracking.util) is deprecated and will be removed in a future version.
Instructions for updating:
Restoring a name-based tf.train.Saver checkpoint using the object-based restore API. This mode uses global names to match variables, and so is somewhat fragile. It also adds new restore ops to the graph each time it is called when graph building. Prefer re-encoding training checkpoints in the object-based format: run save() on the object-based saver (the same one this message is coming from) and use that checkpoint in the future.
Model: ""
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 efficientnet-b0 (Model)     multiple                  3634844   
                                                                 
 resample_p6 (ResampleFeatur  multiple                 20800     
 eMap)                                                           
                                                                 
 resample_p7 (ResampleFeatur  multiple                 0         
 eMap)                                                           
                                                                 
 fpn_cells (FPNCells)        multiple                  179321    
                                                                 
 class_net (ClassNet)        multiple                  71274     
                                                                 
 box_net (BoxNet)            multiple                  20964     
                                                                 
=================================================================
Total params: 3,927,203
Trainable params: 3,880,067
Non-trainable params: 47,136
_________________________________________________________________

 

# On a P100: ~74 ms for a 1920x1280 image, ~27 ms at 512x512; on a V100: ~24 ms at 512x512.
# The first call below is far slower because it includes the one-time graph tracing of the tf.function.
import time
import cv2 

img = cv2.cvtColor(cv2.imread('/content/data/img01.png'), cv2.COLOR_BGR2RGB)
imgs= img[np.newaxis, ...]

start_time = time.time()
boxes, scores, classes, valid_len = export_model.f(imgs)

print('elapsed time:', time.time() - start_time)

/content/automl/efficientdet/utils.py:23: UserWarning: `layer.updates` will be removed in a future version. This property should not be used in TensorFlow 2.0, as `updates` are applied automatically.
  from tensorflow.python.tpu import tpu_function  # pylint:disable=g-direct-tensorflow-import
/content/automl/efficientdet/utils.py:255: UserWarning: `layer.updates` will be removed in a future version. This property should not be used in TensorFlow 2.0, as `updates` are applied automatically.
  for u in self.updates:
elapsed time: 9.74593186378479

 

labels_to_names = {1:'person',2:'bicycle',3:'car',4:'motorcycle',5:'airplane',6:'bus',7:'train',8:'truck',9:'boat',10:'traffic light',
                    11:'fire hydrant',12:'street sign',13:'stop sign',14:'parking meter',15:'bench',16:'bird',17:'cat',18:'dog',19:'horse',20:'sheep',
                    21:'cow',22:'elephant',23:'bear',24:'zebra',25:'giraffe',26:'hat',27:'backpack',28:'umbrella',29:'shoe',30:'eye glasses',
                    31:'handbag',32:'tie',33:'suitcase',34:'frisbee',35:'skis',36:'snowboard',37:'sports ball',38:'kite',39:'baseball bat',40:'baseball glove',
                    41:'skateboard',42:'surfboard',43:'tennis racket',44:'bottle',45:'plate',46:'wine glass',47:'cup',48:'fork',49:'knife',50:'spoon',
                    51:'bowl',52:'banana',53:'apple',54:'sandwich',55:'orange',56:'broccoli',57:'carrot',58:'hot dog',59:'pizza',60:'donut',
                    61:'cake',62:'chair',63:'couch',64:'potted plant',65:'bed',66:'mirror',67:'dining table',68:'window',69:'desk',70:'toilet',
                    71:'door',72:'tv',73:'laptop',74:'mouse',75:'remote',76:'keyboard',77:'cell phone',78:'microwave',79:'oven',80:'toaster',
                    81:'sink',82:'refrigerator',83:'blender',84:'book',85:'clock',86:'vase',87:'scissors',88:'teddy bear',89:'hair drier',90:'toothbrush',
                    91:'hair brush'}

 

def get_detected_img(export_model, img_array, is_print=True):   
  # automl efficientdet returns bbox coordinates already in original-image coordinates, so no separate scaling is needed.
  '''
  height = img_array.shape[0]
  width = img_array.shape[1]
  '''
  # cv2.rectangle() draws directly into the array passed to it, so make a separate copy to draw on.
  draw_img = img_array.copy()

  # colors for the bounding-box border and the caption text
  green_color=(0, 255, 0)
  red_color=(0, 0, 255)

  # convert the cv2 numpy image array to a tensor
  img_tensor = tf.convert_to_tensor(img_array, dtype=tf.uint8)[tf.newaxis, ...]
  #img_tensor = tf.convert_to_tensor(img_array, dtype=tf.float32)[tf.newaxis, ...]

  # run inference
  start_time = time.time()
  # the automl efficientdet model returns boxes, scores, classes, and the number of detections, each as a tensor
  boxes, scores, classes, valid_len = export_model.f(img_tensor)
  # convert the tensors to numpy for visualization
  boxes = boxes.numpy()
  scores = scores.numpy()
  classes = classes.numpy()
  valid_len = valid_len.numpy()
  
  # iterate over the detected objects; scores come out in descending order,
  # and detections below the nms score_thresh were already filtered out by the model
  for i in range(valid_len[0]):
    score = scores[0, i]
    
    box = boxes[0, i]

    ''' **** note ******
    box is in (ymin, xmin, ymax, xmax) order, in original-image coordinates. '''
    left = box[1]
    top = box[0] 
    right = box[3] 
    bottom = box[2] 

    # extract the class id and map it to a class name
    class_id = classes[0, i]
    caption = "{}: {:.4f}".format(labels_to_names[class_id], score)
    print(caption)
    # cv2.rectangle() draws the box into draw_img; position arguments must be integers.
    cv2.rectangle(draw_img, (int(left), int(top)), (int(right), int(bottom)), color=green_color, thickness=2)
    cv2.putText(draw_img, caption, (int(left), int(top - 5)), cv2.FONT_HERSHEY_SIMPLEX, 0.4, red_color, 1)

  if is_print:
    print('Detection time:', round(time.time() - start_time, 2), 'seconds')

  return draw_img

 

!wget -O ./data/beatles01.jpg https://raw.githubusercontent.com/chulminkw/DLCV/master/data/image/beatles01.jpg
!wget -O ./data/baseball01.jpg https://raw.githubusercontent.com/chulminkw/DLCV/master/data/image/baseball01.jpg

 

import cv2
import matplotlib.pyplot as plt

img_array = cv2.cvtColor(cv2.imread('/content/data/img01.png'), cv2.COLOR_BGR2RGB)

draw_img = get_detected_img(export_model, img_array, is_print=True)
plt.figure(figsize=(16, 16))
plt.imshow(draw_img)

 

import cv2
import matplotlib.pyplot as plt

img_array = cv2.cvtColor(cv2.imread('/content/data/beatles01.jpg'), cv2.COLOR_BGR2RGB)

draw_img = get_detected_img(export_model, img_array, is_print=True)
plt.figure(figsize=(16, 16))
plt.imshow(draw_img)

person: 0.9743
person: 0.9432
person: 0.9181
person: 0.8508
car: 0.7775
car: 0.7682
car: 0.7188
person: 0.7122
car: 0.7111
car: 0.6500
car: 0.6117
car: 0.5698
car: 0.5567
car: 0.5252
Detection time: 5.42 seconds

 

 

!git clone --depth 1 https://github.com/google/automl

 

!cd /content/automl/efficientdet; pip install -r requirements.txt

 

 

import tensorflow as tf
print(tf.__version__)

# 2.7.0

 

!nvidia-smi

Tue Nov 30 09:31:28 2021       
+-----------------------------------------------------------------------------+
| NVIDIA-SMI 495.44       Driver Version: 460.32.03    CUDA Version: 11.2     |
|-------------------------------+----------------------+----------------------+
| GPU  Name        Persistence-M| Bus-Id        Disp.A | Volatile Uncorr. ECC |
| Fan  Temp  Perf  Pwr:Usage/Cap|         Memory-Usage | GPU-Util  Compute M. |
|                               |                      |               MIG M. |
|===============================+======================+======================|
|   0  Tesla K80           Off  | 00000000:00:04.0 Off |                    0 |
| N/A   36C    P8    28W / 149W |      0MiB / 11441MiB |      0%      Default |
|                               |                      |                  N/A |
+-------------------------------+----------------------+----------------------+
                                                                               
+-----------------------------------------------------------------------------+
| Processes:                                                                  |
|  GPU   GI   CI        PID   Type   Process name                  GPU Memory |
|        ID   ID                                                   Usage      |
|=============================================================================|
|  No running processes found                                                 |
+-----------------------------------------------------------------------------+

 

Setting the library path for running efficientdet.

import os
import sys
import tensorflow.compat.v1 as tf

 

sys.path.append('/content/automl/efficientdet')

 

# If the library path /content/automl/efficientdet is set correctly, the imports below should succeed.
import hparams_config
from tf2 import anchors # the keras folder was renamed to tf2
from model_inspect import ModelInspector

Downloading the efficientdet-d0 model pretrained on COCO data

MODEL = 'efficientdet-d0' 

def download(m):
  if m not in os.listdir():
    !wget https://storage.googleapis.com/cloud-tpu-checkpoints/efficientdet/coco/{m}.tar.gz
    !tar zxf {m}.tar.gz
  ckpt_path = os.path.join(os.getcwd(), m)
  return ckpt_path

# Download checkpoint.
ckpt_path = download(MODEL)
print('Use model in {}'.format(ckpt_path))

Use model in /content/efficientdet-d0

 

!mkdir ./data
!wget -O ./data/img01.png https://user-images.githubusercontent.com/11736571/77320690-099af300-6d37-11ea-9d86-24f14dc2d540.png

 

import cv2
import matplotlib.pyplot as plt

image_array = cv2.cvtColor(cv2.imread('/content/data/img01.png'), cv2.COLOR_BGR2RGB)
print(image_array.shape)

plt.figure(figsize=(12, 12))
plt.imshow(image_array)

# (1080, 1920, 3)

Environment setup for inference with the pretrained efficientdet model

  • Configure the model through an hparams_config.Config object.
class INFER_CFG:
  model_name = 'efficientdet-d0' # efficientdet model name
  model_dir = '/content/efficientdet-d0' # directory holding the pretrained checkpoint
  hparams = '' # csv-style k=v pairs or a yaml file

 

import numpy as np
from PIL import Image
import tensorflow as tf

import hparams_config
import inference
from tf2 import efficientdet_keras # the keras folder was renamed to tf2

 

# Check efficientdet-d0's default config.
config = hparams_config.get_efficientdet_config(INFER_CFG.model_name)
print('config type:', type(config))
print(config)


# the Config object: model parameters and other settings
config type: <class 'hparams_config.Config'> 
act_type: swish
alpha: 0.25
anchor_scale: 4.0
apply_bn_for_resampling: true
aspect_ratios:
- 1.0
- 2.0
- 0.5
autoaugment_policy: null
backbone_config: null
backbone_name: efficientnet-b0
box_class_repeats: 3
box_loss_weight: 50.0
ckpt_var_scope: null
clip_gradients_norm: 10.0
conv_after_downsample: false
conv_bn_act_pattern: false
data_format: channels_last
dataset_type: null
delta: 0.1
drop_remainder: true
first_lr_drop_epoch: 200.0
fpn_cell_repeats: 3
fpn_config: null
fpn_name: null
fpn_num_filters: 64
fpn_weight_method: null
gamma: 1.5
grad_checkpoint: false
grid_mask: false
heads:
- object_detection
image_size: 512
img_summary_steps: null
input_rand_hflip: true
iou_loss_type: null
iou_loss_weight: 1.0
is_training_bn: true
jitter_max: 2.0
jitter_min: 0.1
label_map: null
label_smoothing: 0.0
learning_rate: 0.08
loss_scale: null
lr_decay_method: cosine
lr_warmup_epoch: 1.0
lr_warmup_init: 0.008
map_freq: 5
max_instances_per_image: 100
max_level: 7
mean_rgb:
- 123.675
- 116.28
- 103.53
min_level: 3
mixed_precision: false
model_optimizations: {}
momentum: 0.9
moving_average_decay: 0.9998
name: efficientdet-d0
nms_configs:
    iou_thresh: null
    max_nms_inputs: 0
    max_output_size: 100
    method: gaussian
    pyfunc: false
    score_thresh: 0.0
    sigma: null
num_classes: 90
num_epochs: 300
num_scales: 3
optimizer: sgd
poly_lr_power: 0.9
positives_momentum: null
regenerate_source_id: false
sample_image: null
save_freq: epoch
scale_range: false
second_lr_drop_epoch: 250.0
seg_num_classes: 3
separable_conv: true
skip_crowd_during_training: true
skip_mismatch: true
stddev_rgb:
- 58.395
- 57.120000000000005
- 57.375
strategy: null
survival_prob: null
target_size: null
tflite_max_detections: 100
use_keras_model: true
var_freeze_expr: null
verbose: 1
weight_decay: 4.0e-05

 

# update specific config entries
config.is_training_bn = False # batch-normalization flag; False because this is inference, not training
#config.image_size = '1920x1280'
config.nms_configs.score_thresh = 0.4 # NMS score threshold: detections scoring 0.4 or below are dropped
config.nms_configs.max_output_size = 100

config.override(INFER_CFG.hparams)

Creating the pretrained model and running inference

  • Create an EfficientDetModel with config as the argument.
  • Load the weights from the downloaded pretrained weight file into the model with model.load_weights().
INFER_CFG.model_dir

/content/efficientdet-d0

 

import inference
from tf2 import efficientdet_keras # the keras folder was renamed to tf2

# an empty model, with random weights
model = efficientdet_keras.EfficientDetModel(config=config)
model.build((None, None, None, 3)) # batch size, height, and width are left unspecified in the input layer; channels is 3
print('#### checkpoint name:', tf.train.latest_checkpoint(INFER_CFG.model_dir))
# fill in the weights with load_weights
model.load_weights(tf.train.latest_checkpoint(INFER_CFG.model_dir))
model.summary()

Model: ""
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 efficientnet-b0 (Model)     multiple                  3634844   # backbone: EfficientNet-B0
                                                                 
 resample_p6 (ResampleFeatur  multiple                 20800     # neck: BiFPN
 eMap)                                                           
                                                                 
 resample_p7 (ResampleFeatur  multiple                 0         
 eMap)                                                           
                                                                 
 fpn_cells (FPNCells)        multiple                  179321    
                                                                 
 class_net (ClassNet)        multiple                  71274     # head
                                                                 
 box_net (BoxNet)            multiple                  20964     
                                                                 
=================================================================
Total params: 3,927,203
Trainable params: 3,880,067
Non-trainable params: 47,136
_________________________________________________________________

Input

from PIL import Image
import cv2

# the image can be a 4-D numpy array or a Tensor.  
imgs = [np.array(Image.open('/content/data/img01.png'))]
imgs = tf.convert_to_tensor(imgs, dtype=tf.uint8)

### a numpy array also works as the model's image input, as shown below. 
''' 
img = cv2.cvtColor(cv2.imread('/content/data/img01.png'), cv2.COLOR_BGR2RGB)
imgs= img[np.newaxis, ...]
boxes, scores, classes, valid_len = model(imgs, training=False, post_mode='global')
'''
print()

 

import time

# run inference and measure the elapsed time. 
start_time = time.time()
boxes, scores, classes, valid_len = model(imgs, training=False, post_mode='global')
print('elapsed time:', time.time() - start_time)


/content/automl/efficientdet/utils.py:255: UserWarning: `layer.updates` will be removed in a future version. This property should not be used in TensorFlow 2.0, as `updates` are applied automatically.
  for u in self.updates:
elapsed time: 7.936372518539429

Examine the inference results and visualize them with the API

  • Feeding an image tensor to the inference model returns tensors only: bounding box coordinates, confidence scores, class id values, and the number of valid detections.
  • Since max_instances_per_image is set to 100 in the config, the inference result always holds detection results for 100 objects.
  • valid_len is the number of meaningful detections among them (the array entries at indices 0 through valid_len-1 are the meaningful results).
  • Pass the returned values to inference.visualize_image() for visualization; see the slicing sketch below.
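
For example, slicing out only the meaningful detections looks like this (a minimal sketch reusing the boxes, scores, classes, and valid_len tensors returned above):

# keep only the meaningful detections: indices 0 .. valid_len-1
num_valid = int(valid_len[0])
valid_boxes = boxes[0, :num_valid].numpy()                  # (num_valid, 4), ymin/xmin/ymax/xmax in pixels
valid_scores = scores[0, :num_valid].numpy()                # (num_valid,)
valid_classes = classes[0, :num_valid].numpy().astype(int)  # (num_valid,)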
boxes

<tf.Tensor: shape=(1, 100, 4), dtype=float32, numpy=
array([[[ 898.7691   , 1250.5381   , 1078.944    , 1593.3597   ],
        [ 485.65512  , 1178.6895   ,  613.15283  , 1317.9205   ],
        [ 620.43     , 1313.1947   ,  905.7822   , 1625.1599   ],
        [ 870.51105  ,  490.04422  , 1078.4515   ,  778.78973  ],
        [ 505.97702  , 1356.5035   ,  636.8839   , 1533.2777   ],
        [ 628.0882   ,  211.97603  ,  842.73444  ,  485.74365  ],
        [ 648.43634  , 1553.6306   ,  911.2138   , 1901.6282   ],
        [ 875.3995   ,    3.3257744, 1039.7441   ,  182.6283   ],
        [ 925.4872   ,  301.89096  , 1075.1144   ,  460.31177  ],
        [ 619.0373   ,  873.7107   ,  927.9857   , 1069.7871   ],
        [ 351.75012  ,  582.6734   ,  511.38934  ,  738.4224   ],
        [ 690.2022   , 1238.6897   ,  860.37756  , 1316.6832   ],
        [ 677.63     , 1087.3639   ,  835.46027  , 1159.2014   ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ],
        [ 677.3427   , 1864.105    ,  759.21936  , 1864.105    ]]],
      dtype=float32)>

→ Whether or not that many objects exist, the model pads out all 100 max_instances_per_image slots, so duplicate values appear

valid_len is 13

 

print(valid_len.numpy())
boxes.shape, scores.shape, classes.shape

# [13]
# (TensorShape([1, 100, 4]), TensorShape([1, 100]), TensorShape([1, 100]))

 

scores

<tf.Tensor: shape=(1, 100), dtype=float32, numpy=
array([[0.84579366, 0.6978561 , 0.68892914, 0.6842166 , 0.67942005,
        0.6492443 , 0.61395675, 0.484145  , 0.44828248, 0.43981278,
        0.41531536, 0.41527689, 0.41297233, 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ,
        0.        , 0.        , 0.        , 0.        , 0.        ]],
      dtype=float32)>

There can be more than 13 objects in the image; only detections whose score exceeds the 0.4 threshold were kept

 

print('##bboxes:', boxes[0, :10], '\n##scores:', scores[0, :10], '\n##classes:', classes[0, :10])

##bboxes: tf.Tensor(
[[ 898.7691    1250.5381    1078.944     1593.3597   ]
 [ 485.65512   1178.6895     613.15283   1317.9205   ]
 [ 620.43      1313.1947     905.7822    1625.1599   ]
 [ 870.51105    490.04422   1078.4515     778.78973  ]
 [ 505.97702   1356.5035     636.8839    1533.2777   ]
 [ 628.0882     211.97603    842.73444    485.74365  ]
 [ 648.43634   1553.6306     911.2138    1901.6282   ]
 [ 875.3995       3.3257744 1039.7441     182.6283   ]
 [ 925.4872     301.89096   1075.1144     460.31177  ]
 [ 619.0373     873.7107     927.9857    1069.7871   ]], shape=(10, 4), dtype=float32) 
##scores: tf.Tensor(
[0.84579366 0.6978561  0.68892914 0.6842166  0.67942005 0.6492443
 0.61395675 0.484145   0.44828248 0.43981278], shape=(10,), dtype=float32) 
##classes: tf.Tensor([3. 3. 3. 3. 3. 3. 3. 4. 4. 3.], shape=(10,), dtype=float32)

 

!mkdir -p /content/data_output

 

for i, img in enumerate(imgs):
  length = valid_len[i]
  
  img = inference.visualize_image(
      img,
      boxes[i].numpy()[:length],
      classes[i].numpy().astype(int)[:length],  # np.int is deprecated; the builtin int works
      scores[i].numpy()[:length],
      label_map=config.label_map,
      min_score_thresh=config.nms_configs.score_thresh,
      max_boxes_to_draw=config.nms_configs.max_output_size)
  
  output_image_path = os.path.join('/content/data_output', str(i) + '.jpg')
  Image.fromarray(img).save(output_image_path)
  print('writing annotated image to %s' % output_image_path)
  
  writing annotated image to /content/data_output/0.jpg

 

Improve inference performance with static graph mode (non-eager mode)

  • Create an ExportModel class that uses @tf.function so inference runs in static graph mode
  • At inference time, call the @tf.function-decorated method of ExportModel.
import time

class ExportModel(tf.Module):

  def __init__(self, model):
    super().__init__()
    self.model = model

  @tf.function
  def f(self, imgs):
    #model(imgs, training=False, post_mode='global')
    return self.model(imgs, training=False, post_mode='global')

export_model = ExportModel(model)

 

# about 74ms for a 1920x1280 image on a P100, about 24ms for a 512x512 image on a V100

start_time = time.time()
boxes, scores, classes, valid_len = export_model.f(imgs)

print('elapsed time:', time.time() - start_time)

/content/automl/efficientdet/utils.py:23: UserWarning: `layer.updates` will be removed in a future version. This property should not be used in TensorFlow 2.0, as `updates` are applied automatically.
  from tensorflow.python.tpu import tpu_function  # pylint:disable=g-direct-tensorflow-import
/content/automl/efficientdet/utils.py:255: UserWarning: `layer.updates` will be removed in a future version. This property should not be used in TensorFlow 2.0, as `updates` are applied automatically.
  for u in self.updates:
elapsed time: 8.002159118652344
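
The elapsed time above is still around 8 seconds because the first call to a @tf.function includes graph tracing and compilation; the speedup only shows on subsequent calls with the same input shape. A minimal sketch (my addition, not from the original run) of measuring steady-state latency after a warm-up call:

# warm up once so tracing/compilation cost is excluded from the measurement
_ = export_model.f(imgs)

start_time = time.time()
for _ in range(10):
    boxes, scores, classes, valid_len = export_model.f(imgs)
print('avg elapsed time:', (time.time() - start_time) / 10)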

Create a visualization function and visualize the inference results

labels_to_names = {1:'person',2:'bicycle',3:'car',4:'motorcycle',5:'airplane',6:'bus',7:'train',8:'truck',9:'boat',10:'traffic light',
                    11:'fire hydrant',12:'street sign',13:'stop sign',14:'parking meter',15:'bench',16:'bird',17:'cat',18:'dog',19:'horse',20:'sheep',
                    21:'cow',22:'elephant',23:'bear',24:'zebra',25:'giraffe',26:'hat',27:'backpack',28:'umbrella',29:'shoe',30:'eye glasses',
                    31:'handbag',32:'tie',33:'suitcase',34:'frisbee',35:'skis',36:'snowboard',37:'sports ball',38:'kite',39:'baseball bat',40:'baseball glove',
                    41:'skateboard',42:'surfboard',43:'tennis racket',44:'bottle',45:'plate',46:'wine glass',47:'cup',48:'fork',49:'knife',50:'spoon',
                    51:'bowl',52:'banana',53:'apple',54:'sandwich',55:'orange',56:'broccoli',57:'carrot',58:'hot dog',59:'pizza',60:'donut',
                    61:'cake',62:'chair',63:'couch',64:'potted plant',65:'bed',66:'mirror',67:'dining table',68:'window',69:'desk',70:'toilet',
                    71:'door',72:'tv',73:'laptop',74:'mouse',75:'remote',76:'keyboard',77:'cell phone',78:'microwave',79:'oven',80:'toaster',
                    81:'sink',82:'refrigerator',83:'blender',84:'book',85:'clock',86:'vase',87:'scissors',88:'teddy bear',89:'hair drier',90:'toothbrush',
                    91:'hair brush'}

 

def get_detected_img(export_model, img_array, is_print=True):   
  # automl efficientdet returns bbox coordinates in original-image pixel values, so no separate scaling step is needed. 
  '''
  height = img_array.shape[0]
  width = img_array.shape[1]
  '''
  # cv2.rectangle() draws directly onto the image array passed in, so make a separate copy to draw on. 
  draw_img = img_array.copy()

  # colors for the bounding-box border and the caption text
  green_color=(0, 255, 0)
  red_color=(0, 0, 255)

  # convert the numpy image array from cv2 into a tensor
  img_tensor = tf.convert_to_tensor(img_array, dtype=tf.uint8)[tf.newaxis, ...]
  #img_tensor = tf.convert_to_tensor(img_array, dtype=tf.float32)[tf.newaxis, ...]

  # run inference with the efficientdet model. 
  start_time = time.time()
  # the automl efficientdet model returns boxes, scores, classes, and valid_len, each as a Tensor. 
  boxes, scores, classes, valid_len = export_model.f(img_tensor)
  # convert the Tensor values to numpy for visualization. 
  boxes = boxes.numpy()
  scores = scores.numpy()
  classes = classes.numpy()
  valid_len = valid_len.numpy()
  
  # iterate only over the valid detections (indices 0 .. valid_len-1) and extract their info
  for i in range(valid_len[0]):
    # scores are filled in descending order 
    score = scores[0, i]
    
    box = boxes[0, i]

    ''' **** Caution ******
    box is in ymin, xmin, ymax, xmax order, already in original-image pixel coordinates. '''
    left = box[1]
    top = box[0] 
    right = box[3] 
    bottom = box[2] 

    # extract the class id and map it to a class name
    class_id = int(classes[0, i])
    caption = "{}: {:.4f}".format(labels_to_names[class_id], score)
    print(caption)
    # cv2.rectangle() draws the box on draw_img; the position arguments must be integers.
    cv2.rectangle(draw_img, (int(left), int(top)), (int(right), int(bottom)), color=green_color, thickness=2)
    cv2.putText(draw_img, caption, (int(left), int(top - 5)), cv2.FONT_HERSHEY_SIMPLEX, 0.4, red_color, 1)

  if is_print:
    print('Detection time:', round(time.time() - start_time, 2), 'sec')

  return draw_img

 

 

import cv2
img_array = cv2.cvtColor(cv2.imread('/content/data/img01.png'), cv2.COLOR_BGR2RGB)

draw_img = get_detected_img(export_model, img_array, is_print=True)
plt.figure(figsize=(16, 16))
plt.imshow(draw_img)

car: 0.8458
car: 0.6979
car: 0.6889
car: 0.6842
car: 0.6794
car: 0.6492
car: 0.6140
motorcycle: 0.4841
motorcycle: 0.4483
car: 0.4398
bus: 0.4153
person: 0.4153
motorcycle: 0.4130
Detection time: 0.08 sec

 

!wget -O ./data/beatles01.jpg https://raw.githubusercontent.com/chulminkw/DLCV/master/data/image/beatles01.jpg
!wget -O ./data/baseball01.jpg https://raw.githubusercontent.com/chulminkw/DLCV/master/data/image/baseball01.jpg

 

import cv2
img_array = cv2.cvtColor(cv2.imread('/content/data/beatles01.jpg'), cv2.COLOR_BGR2RGB)
print(img_array.shape)

draw_img = get_detected_img(export_model, img_array, is_print=True)
plt.figure(figsize=(12, 12))
plt.imshow(draw_img)

(633, 806, 3)
/content/automl/efficientdet/utils.py:23: UserWarning: `layer.updates` will be removed in a future version. This property should not be used in TensorFlow 2.0, as `updates` are applied automatically.
  from tensorflow.python.tpu import tpu_function  # pylint:disable=g-direct-tensorflow-import
/content/automl/efficientdet/utils.py:255: UserWarning: `layer.updates` will be removed in a future version. This property should not be used in TensorFlow 2.0, as `updates` are applied automatically.
  for u in self.updates:
person: 0.9486
person: 0.9406
person: 0.9362
person: 0.8914
car: 0.6025
car: 0.5251
Detection time: 4.61 sec
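
Note the 4.61 seconds here: beatles01.jpg has a different shape from the previous image, so the @tf.function retraces its graph for the new shape. A hedged sketch of avoiding per-shape retracing by pinning the input signature (ExportModelFixedSig is a hypothetical name, not part of the original post):

class ExportModelFixedSig(tf.Module):

  def __init__(self, model):
    super().__init__()
    self.model = model

  # a fixed input signature keeps tf.function from retracing for every new image shape
  @tf.function(input_signature=[tf.TensorSpec([1, None, None, 3], tf.uint8)])
  def f(self, imgs):
    return self.model(imgs, training=False, post_mode='global')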

 


Run inference with an EfficientDet lite0 pretrained model

  • EfficientDet Lite is implemented with the automl package.
  • The input image can be a numpy array or a tensor; the dtype must be uint8.
  • Inference returns box, score, and class information, each as a Tensor.
import tensorflow as tf
import tensorflow_hub as hub
import cv2
import numpy as np

detector_automl_lite0 = hub.load("https://tfhub.dev/tensorflow/efficientdet/lite0/detection/1")

 

img_array = cv2.cvtColor(cv2.imread('/content/data/baseball01.jpg'), cv2.COLOR_BGR2RGB)
#img_array_01 = img_array[np.newaxis, ...]
img_tensor = tf.convert_to_tensor(img_array, dtype=tf.uint8)[tf.newaxis, ...]

start_time = time.time()
# run inference by passing the image to the detector model. 
#boxes, scores, classes, num_detections = detector_automl_lite0(img_array_01)
boxes, scores, classes, num_detections = detector_automl_lite0(img_tensor)

print('elapsed time:', time.time()-start_time)

# elapsed time: 2.2259414196014404

 

boxes.shape, scores.shape, classes.shape, num_detections

# (TensorShape([1, 100, 4]),
#  TensorShape([1, 100]),
#  TensorShape([1, 100]),
#  <tf.Tensor: shape=(1,), dtype=int32, numpy=array([100], dtype=int32)>)

 

# coordinates are returned as original-image pixel values, not normalized to 0~1 
print('original image shape:', img_array.shape)
boxes[0, 0:10], scores[0, :10], classes[0, :10]

original image shape: (476, 735, 3)
(<tf.Tensor: shape=(10, 4), dtype=float32, numpy=
 array([[202.11865 ,  31.700203, 445.80273 , 188.72594 ],
        [259.24213 , 174.63013 , 455.48645 , 373.06177 ],
        [127.93706 , 324.82584 , 407.09464 , 493.36212 ],
        [300.97815 , 331.1551  , 344.10986 , 374.19156 ],
        [236.80632 , 542.42914 , 261.38422 , 609.6119  ],
        [222.16089 , 659.98816 , 236.1999  , 678.62085 ],
        [227.95496 , 496.07425 , 263.47733 , 610.33655 ],
        [223.7558  , 647.2287  , 238.00728 , 661.4642  ],
        [218.31972 , 654.4792  , 226.83658 , 662.96124 ],
        [220.97037 , 494.3057  , 246.90009 , 588.9908  ]], dtype=float32)>,
 <tf.Tensor: shape=(10,), dtype=float32, numpy=
 array([0.90091765, 0.8834344 , 0.8455462 , 0.4854285 , 0.38026473,
        0.2700993 , 0.26304004, 0.25670242, 0.17502265, 0.1738919 ],
       dtype=float32)>,
 <tf.Tensor: shape=(10,), dtype=float32, numpy=array([ 1.,  1.,  1., 40., 39., 37., 39., 37., 37., 39.], dtype=float32)>)

 

labels_to_names = {1:'person',2:'bicycle',3:'car',4:'motorcycle',5:'airplane',6:'bus',7:'train',8:'truck',9:'boat',10:'traffic light',
                    11:'fire hydrant',12:'street sign',13:'stop sign',14:'parking meter',15:'bench',16:'bird',17:'cat',18:'dog',19:'horse',20:'sheep',
                    21:'cow',22:'elephant',23:'bear',24:'zebra',25:'giraffe',26:'hat',27:'backpack',28:'umbrella',29:'shoe',30:'eye glasses',
                    31:'handbag',32:'tie',33:'suitcase',34:'frisbee',35:'skis',36:'snowboard',37:'sports ball',38:'kite',39:'baseball bat',40:'baseball glove',
                    41:'skateboard',42:'surfboard',43:'tennis racket',44:'bottle',45:'plate',46:'wine glass',47:'cup',48:'fork',49:'knife',50:'spoon',
                    51:'bowl',52:'banana',53:'apple',54:'sandwich',55:'orange',56:'broccoli',57:'carrot',58:'hot dog',59:'pizza',60:'donut',
                    61:'cake',62:'chair',63:'couch',64:'potted plant',65:'bed',66:'mirror',67:'dining table',68:'window',69:'desk',70:'toilet',
                    71:'door',72:'tv',73:'laptop',74:'mouse',75:'remote',76:'keyboard',77:'cell phone',78:'microwave',79:'oven',80:'toaster',
                    81:'sink',82:'refrigerator',83:'blender',84:'book',85:'clock',86:'vase',87:'scissors',88:'teddy bear',89:'hair drier',90:'toothbrush',
                    91:'hair brush'}

 

def get_detected_img_automl(model, img_array, score_threshold, object_show_count=100, is_print=True):   
  # automl efficientdet returns bbox coordinates in original-image pixel values, so no separate scaling step is needed. 
  '''
  height = img_array.shape[0]
  width = img_array.shape[1]
  '''
  # cv2.rectangle() draws directly onto the image array passed in, so make a separate copy to draw on. 
  draw_img = img_array.copy()

  # colors for the bounding-box border and the caption text
  green_color=(0, 255, 0)
  red_color=(0, 0, 255)

  # convert the numpy image array from cv2 into a tensor
  img_tensor = tf.convert_to_tensor(img_array, dtype=tf.uint8)[tf.newaxis, ...]
  #img_tensor = tf.convert_to_tensor(img_array, dtype=tf.float32)[tf.newaxis, ...]

  # run inference with the efficientdet model. 
  start_time = time.time()
  # the automl efficientdet model returns boxes, scores, classes, and num_detections, each as a Tensor. 
  boxes, scores, classes, num_detections = model(img_tensor)
  # convert the Tensor values to numpy for visualization. 
  boxes = boxes.numpy()
  scores = scores.numpy()
  classes = classes.numpy()
  num_detections = num_detections.numpy()
  
  # iterate over the detected objects (num_detections is always 100) and extract their info,
  # showing at most object_show_count of them
  for i in range(min(num_detections[0], object_show_count)):
    # scores come in descending order; stop the loop once a score drops below score_threshold. 
    score = scores[0, i]
    if score < score_threshold:
      break
    box = boxes[0, i]

    ''' **** Caution ******
    box is in ymin, xmin, ymax, xmax order, already in original-image pixel coordinates. '''
    left = box[1]
    top = box[0] 
    right = box[3] 
    bottom = box[2] 

    # extract the class id and map it to a class name
    class_id = int(classes[0, i])
    caption = "{}: {:.4f}".format(labels_to_names[class_id], score)
    print(caption)
    # cv2.rectangle() draws the box on draw_img; the position arguments must be integers.
    cv2.rectangle(draw_img, (int(left), int(top)), (int(right), int(bottom)), color=green_color, thickness=2)
    cv2.putText(draw_img, caption, (int(left), int(top - 5)), cv2.FONT_HERSHEY_SIMPLEX, 0.4, red_color, 1)

  if is_print:
    print('Detection time:', round(time.time() - start_time, 2), 'sec')

  return draw_img

 

img_array = cv2.cvtColor(cv2.imread('/content/data/baseball01.jpg'), cv2.COLOR_BGR2RGB)
draw_img = get_detected_img_automl(detector_automl_lite0, img_array, score_threshold=0.3, object_show_count=100, is_print=True)
plt.figure(figsize=(12, 12))
plt.imshow(draw_img)

person: 0.9009
person: 0.8834
person: 0.8455
baseball glove: 0.4854
baseball bat: 0.3803
Detection time: 0.04 sec

 

img_array = cv2.cvtColor(cv2.imread('/content/data/beatles01.jpg'), cv2.COLOR_BGR2RGB)
img_tensor = tf.convert_to_tensor(img_array, dtype=tf.uint8)[tf.newaxis, ...]
draw_img = get_detected_img_automl(detector_automl_lite0, img_array, score_threshold=0.3, object_show_count=100, is_print=True)
plt.figure(figsize=(12, 12))
plt.imshow(draw_img)


person: 0.8218
person: 0.8134
person: 0.7396
person: 0.6831
car: 0.6212
car: 0.4215
car: 0.3183
Detection time: 1.72 sec

 

Run inference with the EfficientDet lite2 model.

detector_automl_lite2 = hub.load("https://tfhub.dev/tensorflow/efficientdet/lite2/detection/1")

 

img_array = cv2.cvtColor(cv2.imread('/content/data/beatles01.jpg'), cv2.COLOR_BGR2RGB)
img_tensor = tf.convert_to_tensor(img_array, dtype=tf.uint8)[tf.newaxis, ...]
draw_img = get_detected_img_automl(detector_automl_lite2, img_array, score_threshold=0.5, object_show_count=100, is_print=True)
plt.figure(figsize=(12, 12))
plt.imshow(draw_img)

person: 0.9152
person: 0.9089
person: 0.8914
person: 0.8808
car: 0.6071
car: 0.5114
Detection time: 2.73 sec

 

def do_detected_video_automl(model, input_path, output_path, score_threshold, is_print):
    
    cap = cv2.VideoCapture(input_path)

    codec = cv2.VideoWriter_fourcc(*'XVID')

    vid_size = (round(cap.get(cv2.CAP_PROP_FRAME_WIDTH)),round(cap.get(cv2.CAP_PROP_FRAME_HEIGHT)))
    vid_fps = cap.get(cv2.CAP_PROP_FPS)

    vid_writer = cv2.VideoWriter(output_path, codec, vid_fps, vid_size) 

    frame_cnt = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
    print('Total frames:', frame_cnt)

    green_color=(0, 255, 0)
    red_color=(0, 0, 255)
    while True:
        hasFrame, img_frame = cap.read()
        if not hasFrame:
            print('No more frames to process.')
            break
        # call get_detected_img_automl(); note cap.read() returns BGR frames, which are fed to the model as-is here
        img_frame = get_detected_img_automl(model, img_frame, score_threshold=score_threshold, object_show_count=100, is_print=is_print)
        
        vid_writer.write(img_frame)
    # end of while loop

    vid_writer.release()
    cap.release()

 

do_detected_video_automl(detector_automl_lite2, '/content/data/Jonh_Wick_small.mp4', './data/John_Wick_small_lite_02.mp4', 0.5, True)

Total frames: 58
car: 0.7213
car: 0.7048
person: 0.6839
car: 0.6667
car: 0.6558
car: 0.5763
Detection time: 2.18 sec
car: 0.7208
car: 0.7070
person: 0.6858
car: 0.6600
car: 0.6558
car: 0.5790
Detection time: 0.07 sec
car: 0.6072
person: 0.5958
car: 0.5877
car: 0.5727
Detection time: 0.06 sec
car: 0.6773
person: 0.6331
car: 0.5557
Detection time: 0.06 sec
car: 0.7090
person: 0.6330
car: 0.6061
car: 0.5067
Detection time: 0.06 sec
car: 0.6947
car: 0.6896
person: 0.5417
Detection time: 0.06 sec
car: 0.6893
car: 0.6833
person: 0.5335
Detection time: 0.06 sec
car: 0.7336
car: 0.7109
car: 0.6813
motorcycle: 0.5120
Detection time: 0.06 sec
car: 0.7302
car: 0.7150
motorcycle: 0.6327
car: 0.6062
Detection time: 0.06 sec
car: 0.7211
car: 0.7191
motorcycle: 0.6324
car: 0.6052
Detection time: 0.06 sec
car: 0.7077
car: 0.6678
car: 0.6344
motorcycle: 0.6321
Detection time: 0.06 sec
car: 0.7145
car: 0.7095
car: 0.6083
motorcycle: 0.6038
Detection time: 0.06 sec
car: 0.7381
car: 0.6946
motorcycle: 0.5520
car: 0.5499
Detection time: 0.06 sec
car: 0.7156
car: 0.6753
car: 0.6429
motorcycle: 0.5482
Detection time: 0.06 sec
car: 0.7285
car: 0.6851
car: 0.6462
motorcycle: 0.5571
Detection time: 0.06 sec
car: 0.7230
car: 0.6673
person: 0.6127
car: 0.5793
motorcycle: 0.5393
Detection time: 0.06 sec
car: 0.7441
car: 0.6925
motorcycle: 0.5914
person: 0.5100
Detection time: 0.07 sec
car: 0.6714
car: 0.6340
person: 0.5797
Detection time: 0.06 sec
car: 0.7087
motorcycle: 0.6122
car: 0.6025
car: 0.5611
car: 0.5445
Detection time: 0.06 sec
car: 0.7057
motorcycle: 0.6146
car: 0.5860
car: 0.5615
car: 0.5392
Detection time: 0.06 sec
car: 0.7421
motorcycle: 0.6152
car: 0.5331
car: 0.5200
car: 0.5190
Detection time: 0.06 sec
car: 0.7229
motorcycle: 0.6333
car: 0.5775
car: 0.5757
Detection time: 0.06 sec
car: 0.6589
car: 0.5912
motorcycle: 0.5899
Detection time: 0.06 sec
person: 0.7521
person: 0.6622
motorcycle: 0.5707
Detection time: 0.06 sec
person: 0.7558
person: 0.6703
motorcycle: 0.5853
Detection time: 0.06 sec
person: 0.7575
person: 0.7115
Detection time: 0.06 sec
person: 0.7646
person: 0.7391
Detection time: 0.06 sec
person: 0.7744
person: 0.7480
Detection time: 0.06 sec
person: 0.7778
car: 0.7730
horse: 0.7068
car: 0.5107
Detection time: 0.06 sec
car: 0.7756
person: 0.7676
horse: 0.7113
car: 0.5043
Detection time: 0.06 sec
car: 0.7500
person: 0.6981
person: 0.6900
car: 0.5125
Detection time: 0.06 sec
car: 0.6990
person: 0.6905
person: 0.6854
Detection time: 0.06 sec
person: 0.7262
car: 0.7237
person: 0.6961
motorcycle: 0.5003
Detection time: 0.06 sec
car: 0.7820
person: 0.7677
person: 0.6664
Detection time: 0.06 sec
car: 0.7785
person: 0.7709
person: 0.6833
Detection time: 0.06 sec
car: 0.8247
person: 0.7472
person: 0.7321
car: 0.6222
motorcycle: 0.5294
Detection time: 0.06 sec
car: 0.8342
person: 0.7343
person: 0.7152
car: 0.5990
motorcycle: 0.5350
Detection time: 0.06 sec
car: 0.8224
person: 0.7061
person: 0.7014
motorcycle: 0.5112
Detection time: 0.06 sec
car: 0.8180
person: 0.7044
person: 0.6811
car: 0.5980
Detection time: 0.06 sec
car: 0.8190
person: 0.7017
person: 0.6996
car: 0.6456
motorcycle: 0.5053
Detection time: 0.06 sec
car: 0.8029
person: 0.7463
person: 0.6732
car: 0.6710
Detection time: 0.06 sec
car: 0.7811
person: 0.7525
person: 0.7167
car: 0.7015
Detection time: 0.06 sec
car: 0.7778
person: 0.7342
car: 0.6729
person: 0.6687
motorcycle: 0.5495
Detection time: 0.06 sec
car: 0.8041
person: 0.7691
horse: 0.6278
car: 0.5558
Detection time: 0.06 sec
car: 0.8021
person: 0.7739
horse: 0.6354
car: 0.5613
Detection time: 0.06 sec
car: 0.8237
person: 0.7113
person: 0.6251
horse: 0.5672
car: 0.5561
motorcycle: 0.5362
Detection time: 0.06 sec
car: 0.8573
car: 0.6713
person: 0.5939
person: 0.5814
car: 0.5653
Detection time: 0.06 sec
car: 0.8395
car: 0.7330
person: 0.5949
person: 0.5404
horse: 0.5206
Detection time: 0.06 sec
car: 0.8135
horse: 0.6740
car: 0.6538
person: 0.6302
person: 0.5339
Detection time: 0.06 sec
car: 0.8160
horse: 0.6635
car: 0.6524
person: 0.6147
person: 0.5334
Detection time: 0.06 sec
car: 0.8571
person: 0.6551
car: 0.6094
horse: 0.5733
person: 0.5346
Detection time: 0.06 sec
car: 0.8386
car: 0.6963
horse: 0.6246
person: 0.5747
person: 0.5439
Detection time: 0.07 sec
car: 0.8146
car: 0.6012
person: 0.5879
horse: 0.5211
Detection time: 0.06 sec
car: 0.7695
car: 0.7263
person: 0.5264
horse: 0.5262
motorcycle: 0.5096
Detection time: 0.06 sec
car: 0.7618
car: 0.7197
person: 0.5401
horse: 0.5215
motorcycle: 0.5116
Detection time: 0.06 sec
car: 0.7638
car: 0.7306
person: 0.6056
car: 0.5946
person: 0.5777
Detection time: 0.07 sec
car: 0.7433
car: 0.6687
car: 0.6672
person: 0.5707
person: 0.5527
Detection time: 0.06 sec
car: 0.7775
car: 0.7218
car: 0.6586
person: 0.5992
Detection time: 0.06 sec
No more frames to process.

 

 

import tensorflow as tf
# import tensorflow_hub. 
import tensorflow_hub as hub
import matplotlib.pyplot as plt

 

print(tf.__version__)

# 2.7.0

 

!nvidia-smi

Sat Dec 25 03:17:11 2021       
+-----------------------------------------------------------------------------+
| NVIDIA-SMI 495.44       Driver Version: 460.32.03    CUDA Version: 11.2     |
|-------------------------------+----------------------+----------------------+
| GPU  Name        Persistence-M| Bus-Id        Disp.A | Volatile Uncorr. ECC |
| Fan  Temp  Perf  Pwr:Usage/Cap|         Memory-Usage | GPU-Util  Compute M. |
|                               |                      |               MIG M. |
|===============================+======================+======================|
|   0  Tesla K80           Off  | 00000000:00:04.0 Off |                    0 |
| N/A   71C    P8    34W / 149W |      0MiB / 11441MiB |      0%      Default |
|                               |                      |                  N/A |
+-------------------------------+----------------------+----------------------+
                                                                               
+-----------------------------------------------------------------------------+
| Processes:                                                                  |
|  GPU   GI   CI        PID   Type   Process name                  GPU Memory |
|        ID   ID                                                   Usage      |
|=============================================================================|
|  No running processes found                                                 |
+-----------------------------------------------------------------------------+

 

Download the image to use as input

!mkdir /content/data
!wget -O ./data/beatles01.jpg https://raw.githubusercontent.com/chulminkw/DLCV/master/data/image/beatles01.jpg

 

Download the EfficientDet d0 inference model from TF Hub and run inference.

  • Search TF Hub for the model name you want; hub.load() downloads it and loads it for use with TensorFlow
  • This downloads an EfficientDet model implemented with the Tensorflow Object Detection API
  • The loaded model can run object detection directly on the original image. The input can be a numpy array or a tensor, and must be uint8.
module_handle = "https://tfhub.dev/tensorflow/efficientdet/d0/1"
detector_model = hub.load(module_handle)

WARNING:absl:Importing a function (__inference___call___32344) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_EfficientDet-D0_layer_call_and_return_conditional_losses_97451) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_bifpn_layer_call_and_return_conditional_losses_77595) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_EfficientDet-D0_layer_call_and_return_conditional_losses_103456) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_EfficientDet-D0_layer_call_and_return_conditional_losses_93843) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_EfficientDet-D0_layer_call_and_return_conditional_losses_107064) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_bifpn_layer_call_and_return_conditional_losses_75975) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.

 

import cv2
import time
import numpy as np

img_array_np = cv2.imread('/content/data/beatles01.jpg')
img_array = img_array_np[np.newaxis, ...]
print(img_array_np.shape, img_array.shape)

start_time = time.time()
# run inference by passing the image to detector_model. 
result = detector_model(img_array)
print('elapsed time:', time.time()-start_time)

# (633, 806, 3) (1, 633, 806, 3)
# elapsed time: 13.12512469291687

 

img_tensor = tf.convert_to_tensor(img_array_np, dtype=tf.uint8)[tf.newaxis, ...]
start_time = time.time()
# run inference by passing the image to detector_model. 
result = detector_model(img_tensor)
print('elapsed time:', time.time()-start_time)

# elapsed time: 0.3127138614654541
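
Most of that ~13-second gap comes from first-call warm-up (tracing the loaded SavedModel) rather than from the numpy-versus-tensor input itself; a second call with the same numpy input should confirm this. A minimal sketch (my check, reusing img_array from above):

# second call with the same numpy input: the warm-up cost is gone
start_time = time.time()
result = detector_model(img_array)
print('elapsed time (2nd numpy call):', time.time() - start_time)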

 

# load the image as a tensor rather than a numpy array
def load_img(path):
  img = tf.io.read_file(path)
  # for png files, call decode_png() instead 
  img = tf.image.decode_jpeg(img, channels=3)
  print(img.shape, type(img))
  return img

 

import time 

# load the image in tensor form. 
img = load_img('/content/data/beatles01.jpg')
# convert the 3-D image tensor to a 4-D tensor. 
# EfficientDet d0 requires the input image to be uint8. 
converted_img = tf.image.convert_image_dtype(img, tf.uint8)[tf.newaxis, ...]

start_time = time.time()
# run inference by passing the image to detector_model. 
result = detector_model(converted_img)
print('elapsed time:', time.time()-start_time)

# (633, 806, 3) <class 'tensorflow.python.framework.ops.EagerTensor'>
# elapsed time: 0.3045518398284912

 

Check the values returned by inference

  • The inference result is returned as a dictionary; the keys can differ per object detection model, and each value is a tensor.
  • The returned bbox coordinates are scaled to 0~1 relative to the image size and come in ymin, xmin, ymax, xmax order, so take care; see the conversion sketch below
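
For example, converting the top-scoring normalized box back to pixel coordinates (a minimal sketch reusing result and img_array_np from above):

# ymin, xmin, ymax, xmax are 0~1 values; scale them by the image height and width
height, width = img_array_np.shape[:2]
ymin, xmin, ymax, xmax = result['detection_boxes'][0, 0].numpy()
left, top, right, bottom = xmin * width, ymin * height, xmax * width, ymax * height
print(left, top, right, bottom)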
# print the inference result: a dictionary whose keys differ per model, so print it to check the key values. 
print(result)

{'detection_classes': <tf.Tensor: shape=(1, 100), dtype=float32, numpy=
array([[ 1.,  1.,  1.,  1.,  3.,  3.,  8., 41.,  3.,  3.,  3.,  3.,  3.,
        41.,  8.,  3.,  3.,  3.,  3., 31., 31.,  3.,  3.,  3.,  3.,  3.,
         3.,  3., 31.,  3.,  3.,  3.,  3.,  1.,  3., 10.,  3.,  3.,  3.,
        31., 15., 32., 10.,  3., 10.,  1., 10.,  3.,  3.,  3., 31.,  1.,
        27., 10., 33.,  8.,  3.,  1., 31., 10.,  3.,  8.,  3.,  1.,  3.,
         1.,  3.,  3.,  3.,  3.,  3., 32.,  3.,  1.,  1., 31.,  3., 31.,
         1.,  3., 31., 10.,  8.,  3.,  3., 32.,  1.,  1.,  3., 10., 41.,
        31.,  1., 31., 10.,  3.,  3., 32., 31.,  3.]], dtype=float32)>, 'raw_detection_scores': <tf.Tensor: shape=(1, 49104, 90), dtype=float32, numpy=
array([[[1.4354929e-01, 3.5354502e-02, 1.3373634e-01, ...,
         3.8939363e-03, 4.5267404e-03, 7.4508404e-03],
        [5.8584111e-03, 1.5053916e-03, 5.7255975e-03, ...,
         2.8605445e-04, 7.9238508e-04, 5.8379129e-04],
        [3.8943693e-04, 1.2312026e-04, 2.9878854e-04, ...,
         1.8558223e-05, 2.8921681e-04, 6.1487677e-05],
        ...,
        [1.9985980e-03, 9.0183894e-04, 1.1327897e-03, ...,
         6.9412554e-04, 9.2282181e-04, 1.1239841e-03],
        [2.4827726e-03, 1.0236464e-03, 1.1463074e-03, ...,
         8.3448330e-04, 9.3340850e-04, 9.7718462e-04],
        [3.6730708e-03, 1.0787870e-03, 1.3733003e-03, ...,
         1.0809251e-03, 9.1082731e-04, 1.2321979e-03]]], dtype=float32)>, 'raw_detection_boxes': <tf.Tensor: shape=(1, 49104, 4), dtype=float32, numpy=
array([[[ 2.9779205e-01,  9.7853315e-01,  4.0974966e-01,  9.7853315e-01],
        [ 4.0937748e-02,  7.5187005e-02,  6.7204766e-02,  7.7955268e-02],
        [-1.1543997e-02,  1.3033487e-04,  7.8647465e-02,  5.4135054e-02],
        ...,
        [ 3.0034673e-01,  5.5941677e-01,  1.4586637e+00,  1.1626569e+00],
        [ 1.6176426e-01,  4.5902258e-01,  1.6337620e+00,  1.2052057e+00],
        [-1.5788192e-01,  2.7244717e-01,  1.8246810e+00,  1.3288105e+00]]],
      dtype=float32)>, 'detection_anchor_indices': <tf.Tensor: shape=(1, 100), dtype=float32, numpy=
array([[47257., 47320., 47347., 47284., 40095., 12485., 11912., 23426.,
        11178., 10677., 11280., 11871., 10632., 22826., 12800., 10686.,
        10623., 10056., 10047., 22085., 18745.,  9498., 10074., 10065.,
        11271., 11880.,  9489., 10101., 41415., 11262., 10614., 10668.,
        10083., 13020., 10092.,  9462., 10038.,  9480., 10641., 18304.,
        42941., 16369.,  9498., 10695.,  9453., 10569.,  9489.,  9471.,
         9462., 12875., 41128., 11166., 42426.,  9471., 42426., 11178.,
        11145.,  9462., 18764.,  9480., 10569., 11295., 10605., 47297.,
        10110., 11157., 10704., 10119., 10683., 10713., 10659., 15783.,
        12888., 13029., 10011., 41417., 10128., 22060.,  9453.,  9507.,
        41352.,  8877., 11880., 11856., 10650., 17529., 11958., 10047.,
        12330.,  7824., 42823., 41697., 11967., 17016.,  8886., 12327.,
         9453., 17520., 22051., 10578.]], dtype=float32)>, 'detection_scores': <tf.Tensor: shape=(1, 100), dtype=float32, numpy=
array([[0.94711405, 0.935974  , 0.930035  , 0.89913994, 0.625541  ,
        0.48422325, 0.3482024 , 0.31519073, 0.31252164, 0.3096477 ,
        0.2892261 , 0.26785725, 0.26200417, 0.2544667 , 0.24920423,
        0.24709295, 0.22555462, 0.22262326, 0.20144816, 0.1989274 ,
        0.19818683, 0.19184774, 0.18933022, 0.18806441, 0.17253922,
        0.16980891, 0.16840717, 0.16792467, 0.16733843, 0.16649997,
        0.16592245, 0.16406913, 0.15594032, 0.14497948, 0.144825  ,
        0.14451697, 0.14210103, 0.13976108, 0.13904284, 0.1389742 ,
        0.13756527, 0.13691278, 0.13502952, 0.13211057, 0.13042402,
        0.12915237, 0.12603028, 0.124609  , 0.12447888, 0.12250288,
        0.12192409, 0.12113374, 0.12100718, 0.11963245, 0.11917206,
        0.11773309, 0.11646152, 0.11581548, 0.11349872, 0.11340918,
        0.11095154, 0.1098927 , 0.10858452, 0.10815845, 0.10694698,
        0.10670073, 0.10592487, 0.1055002 , 0.10536031, 0.1041959 ,
        0.10418969, 0.10327245, 0.10291874, 0.10186002, 0.1013522 ,
        0.10121182, 0.10055887, 0.09975425, 0.09954076, 0.09945919,
        0.09853141, 0.09829449, 0.09807272, 0.09666188, 0.09628956,
        0.09607641, 0.09603674, 0.09379527, 0.09334304, 0.09319282,
        0.09244616, 0.09126819, 0.09113833, 0.09095921, 0.09013715,
        0.08883683, 0.08855603, 0.08836359, 0.0877557 , 0.08759023]],
      dtype=float32)>, 'detection_multiclass_scores': <tf.Tensor: shape=(1, 100, 90), dtype=float32, numpy=
array([[[9.4711405e-01, 8.6817035e-04, 3.0984138e-03, ...,
         4.7520236e-03, 5.2246830e-04, 4.5701285e-04],
        [9.3597400e-01, 6.4510002e-04, 2.2451645e-03, ...,
         3.9743381e-03, 8.1474200e-04, 5.4064585e-04],
        [9.3003500e-01, 1.2423380e-03, 4.1281143e-03, ...,
         4.1296966e-03, 1.3249756e-03, 8.3464832e-04],
        ...,
        [1.8119732e-02, 5.0535421e-03, 1.0724876e-03, ...,
         1.3032763e-02, 8.9723011e-03, 1.0205678e-02],
        [1.1448574e-02, 1.6255280e-02, 1.1670285e-02, ...,
         1.7721199e-03, 5.5407584e-03, 2.2403533e-03],
        [8.4233090e-02, 1.9779259e-02, 8.7590225e-02, ...,
         9.6069882e-04, 1.3168838e-03, 1.2544082e-03]]], dtype=float32)>, 'num_detections': <tf.Tensor: shape=(1,), dtype=float32, numpy=array([100.], dtype=float32)>, 'detection_boxes': <tf.Tensor: shape=(1, 100, 4), dtype=float32, numpy=
array([[[4.1179040e-01, 6.3390382e-02, 8.8111836e-01, 2.6741692e-01],
        [4.3304449e-01, 4.7706339e-01, 8.9250046e-01, 6.8628871e-01],
        [4.1960636e-01, 6.8270773e-01, 8.9688581e-01, 8.9498097e-01],
        [4.1164526e-01, 2.6410934e-01, 8.6583102e-01, 4.6428421e-01],
        [3.8654572e-01, 1.7934969e-01, 5.4316032e-01, 3.2028845e-01],
        [3.6050096e-01, 6.2638736e-01, 4.6446508e-01, 7.1950281e-01],
        [3.5996163e-01, 6.2458235e-01, 4.6350214e-01, 7.2000319e-01],
        [7.1983784e-01, 6.2759423e-01, 8.6870378e-01, 7.0305586e-01],
        [3.6646506e-01, 3.8801342e-01, 4.2244112e-01, 4.3741155e-01],
        [3.5229647e-01, 5.4654634e-01, 3.8833630e-01, 5.7694817e-01],
        [3.6912179e-01, 5.8222091e-01, 4.1025555e-01, 6.2288415e-01],
        [3.7102419e-01, 5.9425962e-01, 4.3720052e-01, 6.3780034e-01],
        [3.5444006e-01, 4.7450614e-01, 3.8303307e-01, 4.9580967e-01],
        [7.1531910e-01, 5.7743710e-01, 8.7339032e-01, 6.8964785e-01],
        [3.8982463e-01, 1.7769308e-01, 5.3870612e-01, 3.1275189e-01],
        [3.5579172e-01, 5.5832678e-01, 3.9027628e-01, 5.8628565e-01],
        [3.5591349e-01, 4.6058959e-01, 3.8360608e-01, 4.8057848e-01],
        [3.4581965e-01, 4.7958860e-01, 3.6583403e-01, 4.9762627e-01],
        [3.4433401e-01, 4.6549153e-01, 3.6546969e-01, 4.8306775e-01],
        [6.8952668e-01, 2.8691068e-01, 8.4429270e-01, 3.8039652e-01],
        [5.6328285e-01, 5.2353203e-01, 7.0950598e-01, 5.6712413e-01],
        [3.2798409e-01, 5.0720298e-01, 3.4583938e-01, 5.2633405e-01],
        [3.4163418e-01, 5.0696808e-01, 3.6424884e-01, 5.2613205e-01],
        [3.4342399e-01, 4.9362123e-01, 3.6436176e-01, 5.1271427e-01],
        [3.6820206e-01, 5.7783151e-01, 4.0318635e-01, 6.0513425e-01],
        [3.7084836e-01, 6.0645926e-01, 4.4600427e-01, 6.5543246e-01],
        [3.3108306e-01, 4.9567217e-01, 3.4666312e-01, 5.1192003e-01],
        [3.4755635e-01, 5.5521488e-01, 3.6801580e-01, 5.7478154e-01],
        [5.0978678e-01, 7.5387210e-01, 6.8267655e-01, 8.3977443e-01],
        [3.6682692e-01, 5.6671417e-01, 3.9800602e-01, 5.9056568e-01],
        [3.5695317e-01, 4.3974501e-01, 3.8740516e-01, 4.6231419e-01],
        [3.5100028e-01, 5.3704566e-01, 3.8353223e-01, 5.6229430e-01],
        [3.4340435e-01, 5.2385479e-01, 3.6579999e-01, 5.4249030e-01],
        [4.2547816e-01, 5.8302408e-01, 5.0048357e-01, 6.2211710e-01],
        [3.4608695e-01, 5.3925014e-01, 3.6758476e-01, 5.5907607e-01],
        [3.2927078e-01, 4.5257777e-01, 3.4853238e-01, 4.6549708e-01],
        [3.4211871e-01, 4.5176423e-01, 3.6421466e-01, 4.6783006e-01],
        [3.3232009e-01, 4.8148558e-01, 3.4683454e-01, 4.9636772e-01],
        [3.5374671e-01, 4.8738575e-01, 3.8187450e-01, 5.0869471e-01],
        [5.4104501e-01, 7.4378175e-01, 6.8199605e-01, 8.0373544e-01],
        [7.4930298e-01, 6.7912787e-04, 9.9816215e-01, 2.2687393e-01],
        [4.8811194e-01, 3.8788208e-01, 6.5886742e-01, 4.4012472e-01],
        [3.2798409e-01, 5.0720298e-01, 3.4583938e-01, 5.2633405e-01],
        [3.6192855e-01, 5.7177728e-01, 3.8971961e-01, 5.9754187e-01],
        [3.3015379e-01, 4.4123974e-01, 3.4820625e-01, 4.5039704e-01],
        [3.5280249e-01, 3.7495017e-01, 3.9356515e-01, 3.8802660e-01],
        [3.3108306e-01, 4.9567217e-01, 3.4666312e-01, 5.1192003e-01],
        [3.3070818e-01, 4.6405959e-01, 3.4772059e-01, 4.7952139e-01],
        [3.2927078e-01, 4.5257777e-01, 3.4853238e-01, 4.6549708e-01],
        [3.9903688e-01, 2.8748375e-01, 4.9881092e-01, 4.1517448e-01],
        [4.5144099e-01, 7.3607433e-01, 7.1501023e-01, 8.5484862e-01],
        [3.5879132e-01, 3.7910637e-01, 4.1088080e-01, 3.9561763e-01],
        [6.9514066e-01, 2.8370458e-01, 8.4553832e-01, 3.8235956e-01],
        [3.3070818e-01, 4.6405959e-01, 3.4772059e-01, 4.7952139e-01],
        [6.9514066e-01, 2.8370458e-01, 8.4553832e-01, 3.8235956e-01],
        [3.6646506e-01, 3.8801342e-01, 4.2244112e-01, 4.3741155e-01],
        [3.7016463e-01, 3.7109971e-01, 4.0168965e-01, 3.8679004e-01],
        [3.2927078e-01, 4.5257777e-01, 3.4853238e-01, 4.6549708e-01],
        [5.4758275e-01, 5.2163035e-01, 7.1873194e-01, 5.9703213e-01],
        [3.3232009e-01, 4.8148558e-01, 3.4683454e-01, 4.9636772e-01],
        [3.5280249e-01, 3.7495017e-01, 3.9356515e-01, 3.8802660e-01],
        [3.6760172e-01, 5.8778751e-01, 4.2486581e-01, 6.3663971e-01],
        [3.6506388e-01, 4.2663202e-01, 3.9124954e-01, 4.4209841e-01],
        [4.1215658e-01, 3.4202617e-01, 8.8318264e-01, 6.1285341e-01],
        [3.4818730e-01, 5.6928444e-01, 3.6841545e-01, 5.8634388e-01],
        [3.5158402e-01, 3.6752591e-01, 3.9787045e-01, 3.8494465e-01],
        [3.6561641e-01, 5.8981210e-01, 3.8907713e-01, 6.1613721e-01],
        [3.4828454e-01, 5.8140194e-01, 3.6721477e-01, 5.9566200e-01],
        [3.4462604e-01, 5.3514403e-01, 3.9644474e-01, 5.8370894e-01],
        [3.6365288e-01, 5.9567219e-01, 3.9053750e-01, 6.3109928e-01],
        [3.5137120e-01, 5.2337110e-01, 3.7918985e-01, 5.4226983e-01],
        [4.9033237e-01, 3.8423172e-01, 6.1302155e-01, 4.1986457e-01],
        [4.1236705e-01, 3.5832933e-01, 4.8268488e-01, 4.1150209e-01],
        [4.1843241e-01, 6.0260159e-01, 4.9375936e-01, 6.3197130e-01],
        [3.4338030e-01, 4.0502068e-01, 3.6552405e-01, 4.1651794e-01],
        [4.2186460e-01, 7.0060635e-01, 8.9403832e-01, 8.9042270e-01],
        [3.4787107e-01, 5.9927994e-01, 3.6625677e-01, 6.1748618e-01],
        [7.1091282e-01, 2.7190217e-01, 8.2108068e-01, 3.5610101e-01],
        [3.3015379e-01, 4.4123974e-01, 3.4820625e-01, 4.5039704e-01],
        [3.2861453e-01, 5.2156895e-01, 3.4669027e-01, 5.3986579e-01],
        [4.9491951e-01, 5.3556001e-01, 7.1317983e-01, 6.2238050e-01],
        [3.1718564e-01, 4.4188350e-01, 3.3180881e-01, 4.5035261e-01],
        [3.7084836e-01, 6.0645926e-01, 4.4600427e-01, 6.5543246e-01],
        [3.8386708e-01, 5.8912140e-01, 4.2566502e-01, 6.2365121e-01],
        [3.5371971e-01, 5.0196201e-01, 3.8005614e-01, 5.2139860e-01],
        [5.9429705e-01, 4.1528758e-01, 6.5035301e-01, 4.4080022e-01],
        [3.6265263e-01, 7.5203842e-01, 4.6041501e-01, 7.7500635e-01],
        [3.4433401e-01, 4.6549153e-01, 3.6546969e-01, 4.8306775e-01],
        [3.8074300e-01, 3.8487837e-01, 4.5196366e-01, 4.3506762e-01],
        [2.5742817e-01, 6.0831654e-01, 2.8681454e-01, 6.2162197e-01],
        [7.1172494e-01, 6.1941981e-01, 8.8754946e-01, 7.4794185e-01],
        [5.6854314e-01, 7.4925357e-01, 7.2917712e-01, 8.5470790e-01],
        [3.6275440e-01, 7.5717753e-01, 4.6149722e-01, 7.8934973e-01],
        [5.5307239e-01, 5.2523065e-01, 6.6746283e-01, 5.5445266e-01],
        [3.1785089e-01, 4.5512110e-01, 3.3233309e-01, 4.6439195e-01],
        [4.1034609e-01, 3.8362762e-01, 4.7151357e-01, 4.1939363e-01],
        [3.3015379e-01, 4.4123974e-01, 3.4820625e-01, 4.5039704e-01],
        [5.6086457e-01, 4.0125006e-01, 6.4354509e-01, 4.3456870e-01],
        [7.1788090e-01, 2.4195449e-01, 8.1452870e-01, 3.1889099e-01],
        [3.5509771e-01, 3.8778654e-01, 3.9057735e-01, 3.9996943e-01]]],
      dtype=float32)>}

 

print(result.keys())
# detection always returns 100 results by default (there are always 100 detected-object slots, so the tensors/arrays come in units of 100 and num_detections is 100) 
print(result['detection_boxes'].shape, result['detection_classes'].shape,  result['detection_scores'].shape, result['num_detections'])

# dict_keys(['detection_classes', 'raw_detection_scores', 'raw_detection_boxes', 'detection_anchor_indices', 'detection_scores', 'detection_multiclass_scores', 'num_detections', 'detection_boxes'])
# (1, 100, 4) (1, 100) (1, 100) tf.Tensor([100.], shape=(1,), dtype=float32)

 

# detected objects fill the arrays sequentially in descending order of detection score. 
print('#### detection_classes #####')
print(result['detection_classes'])
print('#### detection_scores #####')
print(result['detection_scores'])

#### detection_classes #####
tf.Tensor(
[[ 1.  1.  1.  1.  3.  3.  8. 41.  3.  3.  3.  3.  3. 41.  8.  3.  3.  3.
   3. 31. 31.  3.  3.  3.  3.  3.  3.  3. 31.  3.  3.  3.  3.  1.  3. 10.
   3.  3.  3. 31. 15. 32. 10.  3. 10.  1. 10.  3.  3.  3. 31.  1. 27. 10.
  33.  8.  3.  1. 31. 10.  3.  8.  3.  1.  3.  1.  3.  3.  3.  3.  3. 32.
   3.  1.  1. 31.  3. 31.  1.  3. 31. 10.  8.  3.  3. 32.  1.  1.  3. 10.
  41. 31.  1. 31. 10.  3.  3. 32. 31.  3.]], shape=(1, 100), dtype=float32)
#### detection_scores #####
tf.Tensor(
[[0.94711405 0.935974   0.930035   0.89913994 0.625541   0.48422325
  0.3482024  0.31519073 0.31252164 0.3096477  0.2892261  0.26785725
  0.26200417 0.2544667  0.24920423 0.24709295 0.22555462 0.22262326
  0.20144816 0.1989274  0.19818683 0.19184774 0.18933022 0.18806441
  0.17253922 0.16980891 0.16840717 0.16792467 0.16733843 0.16649997
  0.16592245 0.16406913 0.15594032 0.14497948 0.144825   0.14451697
  0.14210103 0.13976108 0.13904284 0.1389742  0.13756527 0.13691278
  0.13502952 0.13211057 0.13042402 0.12915237 0.12603028 0.124609
  0.12447888 0.12250288 0.12192409 0.12113374 0.12100718 0.11963245
  0.11917206 0.11773309 0.11646152 0.11581548 0.11349872 0.11340918
  0.11095154 0.1098927  0.10858452 0.10815845 0.10694698 0.10670073
  0.10592487 0.1055002  0.10536031 0.1041959  0.10418969 0.10327245
  0.10291874 0.10186002 0.1013522  0.10121182 0.10055887 0.09975425
  0.09954076 0.09945919 0.09853141 0.09829449 0.09807272 0.09666188
  0.09628956 0.09607641 0.09603674 0.09379527 0.09334304 0.09319282
  0.09244616 0.09126819 0.09113833 0.09095921 0.09013715 0.08883683
  0.08855603 0.08836359 0.0877557  0.08759023]], shape=(1, 100), dtype=float32)

 

# bounding box coordinates are returned in ymin, xmin, ymax, xmax order; y comes first and x second, so be careful. 
# coordinate values are normalized to 0~1 relative to the original image width and height. 
print('#### detection_boxes #####')
print(result['detection_boxes'])

#### detection_boxes #####
tf.Tensor(
[[[4.1179040e-01 6.3390382e-02 8.8111836e-01 2.6741692e-01]
  [4.3304449e-01 4.7706339e-01 8.9250046e-01 6.8628871e-01]
  [4.1960636e-01 6.8270773e-01 8.9688581e-01 8.9498097e-01]
  [4.1164526e-01 2.6410934e-01 8.6583102e-01 4.6428421e-01]
  [3.8654572e-01 1.7934969e-01 5.4316032e-01 3.2028845e-01]
  [3.6050096e-01 6.2638736e-01 4.6446508e-01 7.1950281e-01]
  [3.5996163e-01 6.2458235e-01 4.6350214e-01 7.2000319e-01]
  [7.1983784e-01 6.2759423e-01 8.6870378e-01 7.0305586e-01]
  [3.6646506e-01 3.8801342e-01 4.2244112e-01 4.3741155e-01]
  [3.5229647e-01 5.4654634e-01 3.8833630e-01 5.7694817e-01]
  [3.6912179e-01 5.8222091e-01 4.1025555e-01 6.2288415e-01]
  [3.7102419e-01 5.9425962e-01 4.3720052e-01 6.3780034e-01]
  [3.5444006e-01 4.7450614e-01 3.8303307e-01 4.9580967e-01]
  [7.1531910e-01 5.7743710e-01 8.7339032e-01 6.8964785e-01]
  [3.8982463e-01 1.7769308e-01 5.3870612e-01 3.1275189e-01]
  [3.5579172e-01 5.5832678e-01 3.9027628e-01 5.8628565e-01]
  [3.5591349e-01 4.6058959e-01 3.8360608e-01 4.8057848e-01]
  [3.4581965e-01 4.7958860e-01 3.6583403e-01 4.9762627e-01]
  [3.4433401e-01 4.6549153e-01 3.6546969e-01 4.8306775e-01]
  [6.8952668e-01 2.8691068e-01 8.4429270e-01 3.8039652e-01]
  [5.6328285e-01 5.2353203e-01 7.0950598e-01 5.6712413e-01]
  [3.2798409e-01 5.0720298e-01 3.4583938e-01 5.2633405e-01]
  [3.4163418e-01 5.0696808e-01 3.6424884e-01 5.2613205e-01]
  [3.4342399e-01 4.9362123e-01 3.6436176e-01 5.1271427e-01]
  [3.6820206e-01 5.7783151e-01 4.0318635e-01 6.0513425e-01]
  [3.7084836e-01 6.0645926e-01 4.4600427e-01 6.5543246e-01]
  ...
  [3.5509771e-01 3.8778654e-01 3.9057735e-01 3.9996943e-01]]], shape=(1, 100, 4), dtype=float32)

 

Visualizing the inference results on the image

# Convert every value in result to numpy.
result = {key:value.numpy() for key,value in result.items()}

 

# Mapping of COCO class ids 1 through 91 to class names.
labels_to_names = {1:'person',2:'bicycle',3:'car',4:'motorcycle',5:'airplane',6:'bus',7:'train',8:'truck',9:'boat',10:'traffic light',
                    11:'fire hydrant',12:'street sign',13:'stop sign',14:'parking meter',15:'bench',16:'bird',17:'cat',18:'dog',19:'horse',20:'sheep',
                    21:'cow',22:'elephant',23:'bear',24:'zebra',25:'giraffe',26:'hat',27:'backpack',28:'umbrella',29:'shoe',30:'eye glasses',
                    31:'handbag',32:'tie',33:'suitcase',34:'frisbee',35:'skis',36:'snowboard',37:'sports ball',38:'kite',39:'baseball bat',40:'baseball glove',
                    41:'skateboard',42:'surfboard',43:'tennis racket',44:'bottle',45:'plate',46:'wine glass',47:'cup',48:'fork',49:'knife',50:'spoon',
                    51:'bowl',52:'banana',53:'apple',54:'sandwich',55:'orange',56:'broccoli',57:'carrot',58:'hot dog',59:'pizza',60:'donut',
                    61:'cake',62:'chair',63:'couch',64:'potted plant',65:'bed',66:'mirror',67:'dining table',68:'window',69:'desk',70:'toilet',
                    71:'door',72:'tv',73:'laptop',74:'mouse',75:'remote',76:'keyboard',77:'cell phone',78:'microwave',79:'oven',80:'toaster',
                    81:'sink',82:'refrigerator',83:'blender',84:'book',85:'clock',86:'vase',87:'scissors',88:'teddy bear',89:'hair drier',90:'toothbrush',
                    91:'hair brush'}

 

import tensorflow_hub as hub

def get_detector(module_handle="https://tfhub.dev/tensorflow/efficientdet/d0/1"):
  detector = hub.load(module_handle)
  return detector

detector_model = get_detector()

WARNING:absl:Importing a function (__inference___call___32344) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_EfficientDet-D0_layer_call_and_return_conditional_losses_97451) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_bifpn_layer_call_and_return_conditional_losses_77595) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_EfficientDet-D0_layer_call_and_return_conditional_losses_103456) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_EfficientDet-D0_layer_call_and_return_conditional_losses_93843) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_EfficientDet-D0_layer_call_and_return_conditional_losses_107064) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_bifpn_layer_call_and_return_conditional_losses_75975) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.

 

import cv2
import matplotlib.pyplot as plt

img_array = cv2.cvtColor(cv2.imread('/content/data/beatles01.jpg'), cv2.COLOR_BGR2RGB)

# Bounding boxes are predicted on the scaled image, so the original image shape
# is needed to map them back to the original resolution.
height = img_array.shape[0]
width = img_array.shape[1]
# cv2.rectangle() draws directly on the array passed in, so make a separate copy for drawing.
draw_img = img_array.copy()

# Colors for the bounding-box outline and the caption text
green_color=(0, 255, 0)
red_color=(0, 0, 255)

# Convert the numpy image array produced by cv2 into a tensor
img_tensor = tf.convert_to_tensor(img_array, dtype=tf.uint8)[tf.newaxis, ...]
#img_tensor = tf.convert_to_tensor(img_array, dtype=tf.float32)[tf.newaxis, ...]

# Run inference with the pretrained model.
result = detector_model(img_tensor)
# Convert every value in result to numpy.
result = {key:value.numpy() for key,value in result.items()}

SCORE_THRESHOLD = 0.3
OBJECT_DEFAULT_COUNT = 100

# Iterate over the detected objects and extract their info. Up to 100 objects are returned.
for i in range(min(result['detection_scores'][0].shape[0], OBJECT_DEFAULT_COUNT)):
  # Scores come sorted in descending order, so stop the loop once one falls below SCORE_THRESHOLD.
  score = result['detection_scores'][0, i]
  if score < SCORE_THRESHOLD:
    break
  # Boxes are predicted in normalized coordinates, so rescale them to the original image size.
  box = result['detection_boxes'][0, i]

  ''' **** Note ******
   box is ordered ymin, xmin, ymax, xmax. '''
  left = box[1] * width
  top = box[0] * height
  right = box[3] * width
  bottom = box[2] * height

  # Extract the class id and map it to a class name
  class_id = result['detection_classes'][0, i]
  caption = "{}: {:.4f}".format(labels_to_names[class_id], score)
  print(caption)
  # cv2.rectangle() draws on draw_img in place. Coordinate arguments must be integers.
  cv2.rectangle(draw_img, (int(left), int(top)), (int(right), int(bottom)), color=green_color, thickness=2)
  cv2.putText(draw_img, caption, (int(left), int(top - 5)), cv2.FONT_HERSHEY_SIMPLEX, 0.4, red_color, 1)

plt.figure(figsize=(12, 12))
plt.imshow(draw_img)


person: 0.9484
person: 0.9401
person: 0.9359
person: 0.8954
car: 0.6267
car: 0.5109
truck: 0.3303
car: 0.3149

 

import time

def get_detected_img(model, img_array, score_threshold, object_show_count=100, is_print=True):   
  # Bounding boxes are predicted on the scaled image, so the original image shape
  # is needed to map them back to the original resolution.
  height = img_array.shape[0]
  width = img_array.shape[1]
  # cv2.rectangle() draws directly on the array passed in, so make a separate copy for drawing.
  draw_img = img_array.copy()

  # Colors for the bounding-box outline and the caption text
  green_color=(0, 255, 0)
  red_color=(0, 0, 255)

  # Convert the numpy image array produced by cv2 into a tensor
  img_tensor = tf.convert_to_tensor(img_array, dtype=tf.uint8)[tf.newaxis, ...]
  #img_tensor = tf.convert_to_tensor(img_array, dtype=tf.float32)[tf.newaxis, ...]

  # Run inference with the efficientdet model.
  start_time = time.time()
  # Inference returns a dict whose values are Tensors.
  result = model(img_tensor)
  # Convert every value in result to numpy.
  result = {key:value.numpy() for key,value in result.items()}

  # Iterate over the detected objects and extract their info. Up to 100 objects are returned.
  for i in range(min(result['detection_scores'][0].shape[0], object_show_count)):
    # Scores come sorted in descending order, so stop the loop once one falls below score_threshold.
    score = result['detection_scores'][0, i]
    if score < score_threshold:
      break
    # Boxes are predicted in normalized coordinates, so rescale them to the original image size.
    box = result['detection_boxes'][0, i]

    ''' **** Note ******
    box is ordered ymin, xmin, ymax, xmax. '''
    left = box[1] * width
    top = box[0] * height
    right = box[3] * width
    bottom = box[2] * height

    # Extract the class id and map it to a class name
    class_id = result['detection_classes'][0, i]
    caption = "{}: {:.4f}".format(labels_to_names[class_id], score)
    print(caption)
    # cv2.rectangle() draws on draw_img in place. Coordinate arguments must be integers.
    cv2.rectangle(draw_img, (int(left), int(top)), (int(right), int(bottom)), color=green_color, thickness=2)
    cv2.putText(draw_img, caption, (int(left), int(top - 5)), cv2.FONT_HERSHEY_SIMPLEX, 0.4, red_color, 1)

  if is_print:
    print('Detection time:', round(time.time() - start_time, 2), 'seconds')

  return draw_img

 

img_array = cv2.cvtColor(cv2.imread('/content/data/beatles01.jpg'), cv2.COLOR_BGR2RGB)
draw_img = get_detected_img(detector_model, img_array, score_threshold=0.4, object_show_count=100, is_print=True)

plt.figure(figsize=(12, 12))
plt.imshow(draw_img)


person: 0.9484
person: 0.9401
person: 0.9359
person: 0.8954
car: 0.6267
car: 0.5109
Detection time: 0.36 seconds

 

!wget -O ./data/baseball01.jpg https://raw.githubusercontent.com/chulminkw/DLCV/master/data/image/baseball01.jpg

 

img_array = cv2.cvtColor(cv2.imread('/content/data/baseball01.jpg'), cv2.COLOR_BGR2RGB)
draw_img = get_detected_img(detector_model, img_array, score_threshold=0.4, object_show_count=100, is_print=True)

plt.figure(figsize=(12, 12))
plt.imshow(draw_img)

person: 0.9570
person: 0.9252
person: 0.9068
baseball glove: 0.6338
baseball bat: 0.5929
Detection time: 0.34 seconds

 

Running inference with the EfficientDet D2 model

detector_model_d2 = get_detector('https://tfhub.dev/tensorflow/efficientdet/d2/1')

WARNING:absl:Importing a function (__inference_EfficientDet-D2_layer_call_and_return_conditional_losses_130857) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference___call___38449) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_EfficientDet-D2_layer_call_and_return_conditional_losses_145024) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_bifpn_layer_call_and_return_conditional_losses_99017) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_EfficientDet-D2_layer_call_and_return_conditional_losses_139687) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_EfficientDet-D2_layer_call_and_return_conditional_losses_125520) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.
WARNING:absl:Importing a function (__inference_bifpn_layer_call_and_return_conditional_losses_101605) with ops with unsaved custom gradients. Will likely fail if a gradient is requested.

 

img_array = cv2.cvtColor(cv2.imread('/content/data/baseball01.jpg'), cv2.COLOR_BGR2RGB)
draw_img = get_detected_img(detector_model_d2, img_array, score_threshold=0.4, object_show_count=100, is_print=True)

plt.figure(figsize=(12, 12))
plt.imshow(draw_img)

person: 0.9488
person: 0.9321
person: 0.8895
baseball glove: 0.8303
baseball bat: 0.6166
sports ball: 0.5037
Detection time: 7.82 seconds

 

Running video inference

!wget -O ./data/John_Wick_small.mp4 https://github.com/chulminkw/DLCV/blob/master/data/video/John_Wick_small.mp4?raw=true

 

def do_detected_video(model, input_path, output_path, score_threshold, is_print):
    
    cap = cv2.VideoCapture(input_path)

    # XVID codec for the output file
    codec = cv2.VideoWriter_fourcc(*'XVID')

    vid_size = (round(cap.get(cv2.CAP_PROP_FRAME_WIDTH)), round(cap.get(cv2.CAP_PROP_FRAME_HEIGHT)))
    vid_fps = cap.get(cv2.CAP_PROP_FPS)

    vid_writer = cv2.VideoWriter(output_path, codec, vid_fps, vid_size) 

    frame_cnt = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
    print('Total frame count:', frame_cnt)

    green_color=(0, 255, 0)
    red_color=(0, 0, 255)
    while True:
        hasFrame, img_frame = cap.read()
        if not hasFrame:
            print('No more frames to process.')
            break
        
        # Run detection on each frame and write the annotated frame to the output video.
        img_frame = get_detected_img(model, img_frame, score_threshold=score_threshold, object_show_count=100, is_print=is_print)
        
        vid_writer.write(img_frame)
    # end of while loop

    vid_writer.release()
    cap.release()

 

do_detected_video(detector_model, '/content/data/John_Wick_small.mp4', './data/John_Wick_small_02.mp4', 0.5, True)

Total frame count: 58
car: 0.7054
car: 0.6741
car: 0.6634
car: 0.6490
Detection time: 0.3 seconds
car: 0.7121
car: 0.6782
car: 0.6640
car: 0.6493
Detection time: 0.31 seconds
... (per-frame detections for the remaining frames omitted; each frame took roughly 0.3 seconds)
car: 0.8108
car: 0.7212
car: 0.7009
horse: 0.5520
Detection time: 0.29 seconds
No more frames to process.

 

 


github - google/automl

- pretrained efficientdet checkpoints

efficientdet - D0 (h5, ckpt, val, test-dev)

* ckpt = native TensorFlow checkpoint, h5 = tf.keras (TensorFlow 2) weights

 

1) Characteristics of the AutoML EfficientDet package

- Has the best-performing models among EfficientDet implementations

- Runs on both GPU and TPU

- Implemented in both native TensorFlow 2 and tf.keras

- Some bugs (they appear to come from TensorFlow itself rather than from the source code)

- The added complexity of having to build the input dataset in TFRecord format

    * utilities are provided; a minimal sketch of the record format follows below
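
To make the TFRecord requirement concrete, the sketch below serializes one image and its boxes
into a tf.train.Example, the record type a TFRecord file holds. The feature keys follow the
TF Object Detection convention and the file names are illustrative; the automl utilities may
expect slightly different keys, so treat this as a sketch rather than the package's format.

import tensorflow as tf

def make_example(image_bytes, ymins, xmins, ymaxs, xmaxs, labels):
    # Box coordinates are assumed to be normalized to [0, 1].
    feature = {
        'image/encoded': tf.train.Feature(bytes_list=tf.train.BytesList(value=[image_bytes])),
        'image/object/bbox/ymin': tf.train.Feature(float_list=tf.train.FloatList(value=ymins)),
        'image/object/bbox/xmin': tf.train.Feature(float_list=tf.train.FloatList(value=xmins)),
        'image/object/bbox/ymax': tf.train.Feature(float_list=tf.train.FloatList(value=ymaxs)),
        'image/object/bbox/xmax': tf.train.Feature(float_list=tf.train.FloatList(value=xmaxs)),
        'image/object/class/label': tf.train.Feature(int64_list=tf.train.Int64List(value=labels)),
    }
    return tf.train.Example(features=tf.train.Features(feature=feature))

# Write one example (image path and box values are illustrative).
with tf.io.TFRecordWriter('train-00000.tfrecord') as writer:
    example = make_example(open('000000000.jpg', 'rb').read(),
                           [0.1], [0.2], [0.3], [0.4], [1])
    writer.write(example.SerializeToString())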

 

TensorFlow Hub

- EfficientDet d0~d7

  * implemented with the TensorFlow Object Detection API

- efficientdet-lite0~4

  * built from AutoML EfficientDet (loading sketch below)
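
The Lite variants load from TF Hub the same way as d0~d7; a minimal sketch, assuming the
tfhub.dev handle below (check the hub page for the exact handle and version).

import tensorflow_hub as hub

# Handle follows the tfhub.dev naming scheme; the version number is an assumption.
lite_detector = hub.load('https://tfhub.dev/tensorflow/efficientdet/lite0/detection/1')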

 

 

 

 


Image-to-Image Translation with Conditional Adversarial Networks

Conditional GAN

A conditional GAN generates images given a condition.
For image-to-image translation, an image is fed to both the generator and the discriminator
as the condition during training, so the generator learns to produce an output image that
corresponds to its input image.

It can solve problems such as colorizing a grayscale image or drawing an object from a given outline.

The translation changes the image pixel by pixel.

Generator

The generator is a U-Net, in which the encoder and decoder are connected by skip connections (concatenation).
This structure is effective at preserving the context of the image; a minimal sketch follows.
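
A minimal tf.keras sketch of such a U-Net generator, assuming 256x256 RGB inputs; the layer
counts and filter sizes are illustrative, not the paper's exact architecture.

import tensorflow as tf
from tensorflow.keras import layers

def build_unet_generator(input_shape=(256, 256, 3)):
    inp = layers.Input(shape=input_shape)
    # Encoder: downsample, keeping each stage's output for the skip connections.
    skips, x = [], inp
    for filters in (64, 128, 256, 512):
        x = layers.Conv2D(filters, 4, strides=2, padding='same')(x)
        x = layers.LeakyReLU(0.2)(x)
        skips.append(x)
    # Decoder: upsample and concatenate the matching encoder output (skip connection).
    for filters, skip in zip((256, 128, 64), reversed(skips[:-1])):
        x = layers.Conv2DTranspose(filters, 4, strides=2, padding='same', activation='relu')(x)
        x = layers.Concatenate()([x, skip])
    # Final upsample back to the input resolution; tanh keeps pixel values in [-1, 1].
    out = layers.Conv2DTranspose(3, 4, strides=2, padding='same', activation='tanh')(x)
    return tf.keras.Model(inp, out)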

Discriminator

Whereas a conventional discriminator makes a single real/fake decision for the whole image,
PatchGAN splits the image into small patches and judges real/fake for each patch.
This recovers finer details in the image, on top of the global structure captured by the L1 loss.

A side benefit: because it operates on regions smaller than the whole image, it has fewer
parameters and runs faster. It also generalizes to larger images.
The combined generator objective (adversarial + L1) is sketched below.
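
A sketch of that combined generator objective, assuming the discriminator outputs logits and
the paper's lambda = 100 weighting; the function name is illustrative.

import tensorflow as tf

bce = tf.keras.losses.BinaryCrossentropy(from_logits=True)

def generator_loss(disc_fake_output, generated, target, l1_weight=100.0):
    # Adversarial term: every patch of the generated image should fool the discriminator.
    adv = bce(tf.ones_like(disc_fake_output), disc_fake_output)
    # L1 term: keep the output close to the ground truth (captures global structure).
    l1 = tf.reduce_mean(tf.abs(target - generated))
    return adv + l1_weight * l1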

PatchGAN discriminator

A 70x70 receptive-field window slides over the original image, producing a 30x30 output map;
each element of that 30x30 map predicts real/fake for its corresponding patch.

Because a convolution only detects whether a feature is present and does not capture
correlations with distant parts of the image, a single prediction over the whole image is
unreliable; predicting over image patches works better. A minimal discriminator sketch follows.
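
A minimal tf.keras sketch of a PatchGAN discriminator, assuming 256x256 inputs and conditioning
by channel concatenation; with 'same' padding the output map is 32x32 rather than the paper's
exact 30x30, so treat the sizes as illustrative.

import tensorflow as tf
from tensorflow.keras import layers

def build_patchgan_discriminator(input_shape=(256, 256, 3)):
    src = layers.Input(shape=input_shape)   # conditioning (input) image
    tgt = layers.Input(shape=input_shape)   # real or generated image
    x = layers.Concatenate()([src, tgt])    # condition by channel concatenation
    for filters in (64, 128, 256):
        x = layers.Conv2D(filters, 4, strides=2, padding='same')(x)
        x = layers.LeakyReLU(0.2)(x)
    x = layers.Conv2D(512, 4, strides=1, padding='same')(x)
    x = layers.LeakyReLU(0.2)(x)
    # One real/fake logit per patch instead of a single scalar for the whole image.
    out = layers.Conv2D(1, 4, strides=1, padding='same')(x)
    return tf.keras.Model([src, tgt], out)

disc = build_patchgan_discriminator()
print(disc.output_shape)   # (None, 32, 32, 1): one decision per patch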

 

 

import tensorflow as tf 

resnet = tf.keras.applications.ResNet152V2()
resnet.summary() # Note the Add layers: each merges the shortcut with the block output to preserve earlier results

Model: "resnet152v2"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_2 (InputLayer)            [(None, 224, 224, 3) 0                                            
__________________________________________________________________________________________________
conv1_pad (ZeroPadding2D)       (None, 230, 230, 3)  0           input_2[0][0]                    
__________________________________________________________________________________________________
conv1_conv (Conv2D)             (None, 112, 112, 64) 9472        conv1_pad[0][0]                  
__________________________________________________________________________________________________
pool1_pad (ZeroPadding2D)       (None, 114, 114, 64) 0           conv1_conv[0][0]                 
__________________________________________________________________________________________________
pool1_pool (MaxPooling2D)       (None, 56, 56, 64)   0           pool1_pad[0][0]                  
__________________________________________________________________________________________________
conv2_block1_preact_bn (BatchNo (None, 56, 56, 64)   256         pool1_pool[0][0]                 
__________________________________________________________________________________________________
conv2_block1_preact_relu (Activ (None, 56, 56, 64)   0           conv2_block1_preact_bn[0][0]     
__________________________________________________________________________________________________
conv2_block1_1_conv (Conv2D)    (None, 56, 56, 64)   4096        conv2_block1_preact_relu[0][0]   
__________________________________________________________________________________________________
conv2_block1_1_bn (BatchNormali (None, 56, 56, 64)   256         conv2_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block1_1_relu (Activation (None, 56, 56, 64)   0           conv2_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block1_2_pad (ZeroPadding (None, 58, 58, 64)   0           conv2_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block1_2_conv (Conv2D)    (None, 56, 56, 64)   36864       conv2_block1_2_pad[0][0]         
__________________________________________________________________________________________________
conv2_block1_2_bn (BatchNormali (None, 56, 56, 64)   256         conv2_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block1_2_relu (Activation (None, 56, 56, 64)   0           conv2_block1_2_bn[0][0]          
__________________________________________________________________________________________________
conv2_block1_0_conv (Conv2D)    (None, 56, 56, 256)  16640       conv2_block1_preact_relu[0][0]   
__________________________________________________________________________________________________
conv2_block1_3_conv (Conv2D)    (None, 56, 56, 256)  16640       conv2_block1_2_relu[0][0]        
__________________________________________________________________________________________________
conv2_block1_out (Add)          (None, 56, 56, 256)  0           conv2_block1_0_conv[0][0]        
                                                                 conv2_block1_3_conv[0][0]        
__________________________________________________________________________________________________
 ... (conv2_block2 through conv4_block6 omitted for brevity: every block repeats the same
pre-activation pattern of BatchNorm -> ReLU -> 1x1/3x3/1x1 convolutions, and each block's
Add layer merges the shortcut with the bottleneck output)
__________________________________________________________________________________________________
conv4_block7_preact_bn (BatchNo (None, 14, 14, 1024) 4096        conv4_block6_out[0][0]           
__________________________________________________________________________________________________
conv4_block7_preact_relu (Activ (None, 14, 14, 1024) 0           conv4_block7_preact_bn[0][0]     
__________________________________________________________________________________________________
conv4_block7_1_conv (Conv2D)    (None, 14, 14, 256)  262144      conv4_block7_preact_relu[0][0]   
__________________________________________________________________________________________________
conv4_block7_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block7_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block7_1_relu (Activation (None, 14, 14, 256)  0           conv4_block7_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block7_2_pad (ZeroPadding (None, 16, 16, 256)  0           conv4_block7_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block7_2_conv (Conv2D)    (None, 14, 14, 256)  589824      conv4_block7_2_pad[0][0]         
__________________________________________________________________________________________________
conv4_block7_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block7_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block7_2_relu (Activation (None, 14, 14, 256)  0           conv4_block7_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block7_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block7_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block7_out (Add)          (None, 14, 14, 1024) 0           conv4_block6_out[0][0]           
                                                                 conv4_block7_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block8_preact_bn (BatchNo (None, 14, 14, 1024) 4096        conv4_block7_out[0][0]           
__________________________________________________________________________________________________
conv4_block8_preact_relu (Activ (None, 14, 14, 1024) 0           conv4_block8_preact_bn[0][0]     
__________________________________________________________________________________________________
conv4_block8_1_conv (Conv2D)    (None, 14, 14, 256)  262144      conv4_block8_preact_relu[0][0]   
__________________________________________________________________________________________________
conv4_block8_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block8_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block8_1_relu (Activation (None, 14, 14, 256)  0           conv4_block8_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block8_2_pad (ZeroPadding (None, 16, 16, 256)  0           conv4_block8_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block8_2_conv (Conv2D)    (None, 14, 14, 256)  589824      conv4_block8_2_pad[0][0]         
__________________________________________________________________________________________________
conv4_block8_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block8_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block8_2_relu (Activation (None, 14, 14, 256)  0           conv4_block8_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block8_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block8_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block8_out (Add)          (None, 14, 14, 1024) 0           conv4_block7_out[0][0]           
                                                                 conv4_block8_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block9_preact_bn (BatchNo (None, 14, 14, 1024) 4096        conv4_block8_out[0][0]           
__________________________________________________________________________________________________
conv4_block9_preact_relu (Activ (None, 14, 14, 1024) 0           conv4_block9_preact_bn[0][0]     
__________________________________________________________________________________________________
conv4_block9_1_conv (Conv2D)    (None, 14, 14, 256)  262144      conv4_block9_preact_relu[0][0]   
__________________________________________________________________________________________________
conv4_block9_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block9_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block9_1_relu (Activation (None, 14, 14, 256)  0           conv4_block9_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block9_2_pad (ZeroPadding (None, 16, 16, 256)  0           conv4_block9_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block9_2_conv (Conv2D)    (None, 14, 14, 256)  589824      conv4_block9_2_pad[0][0]         
__________________________________________________________________________________________________
conv4_block9_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block9_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block9_2_relu (Activation (None, 14, 14, 256)  0           conv4_block9_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block9_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block9_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block9_out (Add)          (None, 14, 14, 1024) 0           conv4_block8_out[0][0]           
                                                                 conv4_block9_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block10_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block9_out[0][0]           
__________________________________________________________________________________________________
conv4_block10_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block10_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block10_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block10_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block10_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block10_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block10_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block10_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block10_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block10_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block10_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block10_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block10_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block10_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block10_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block10_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block10_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block10_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block10_out (Add)         (None, 14, 14, 1024) 0           conv4_block9_out[0][0]           
                                                                 conv4_block10_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block11_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block10_out[0][0]          
__________________________________________________________________________________________________
conv4_block11_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block11_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block11_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block11_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block11_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block11_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block11_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block11_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block11_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block11_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block11_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block11_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block11_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block11_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block11_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block11_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block11_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block11_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block11_out (Add)         (None, 14, 14, 1024) 0           conv4_block10_out[0][0]          
                                                                 conv4_block11_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block12_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block11_out[0][0]          
__________________________________________________________________________________________________
conv4_block12_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block12_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block12_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block12_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block12_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block12_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block12_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block12_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block12_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block12_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block12_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block12_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block12_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block12_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block12_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block12_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block12_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block12_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block12_out (Add)         (None, 14, 14, 1024) 0           conv4_block11_out[0][0]          
                                                                 conv4_block12_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block13_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block12_out[0][0]          
__________________________________________________________________________________________________
conv4_block13_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block13_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block13_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block13_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block13_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block13_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block13_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block13_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block13_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block13_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block13_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block13_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block13_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block13_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block13_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block13_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block13_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block13_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block13_out (Add)         (None, 14, 14, 1024) 0           conv4_block12_out[0][0]          
                                                                 conv4_block13_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block14_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block13_out[0][0]          
__________________________________________________________________________________________________
conv4_block14_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block14_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block14_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block14_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block14_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block14_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block14_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block14_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block14_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block14_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block14_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block14_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block14_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block14_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block14_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block14_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block14_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block14_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block14_out (Add)         (None, 14, 14, 1024) 0           conv4_block13_out[0][0]          
                                                                 conv4_block14_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block15_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block14_out[0][0]          
__________________________________________________________________________________________________
conv4_block15_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block15_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block15_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block15_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block15_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block15_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block15_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block15_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block15_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block15_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block15_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block15_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block15_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block15_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block15_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block15_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block15_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block15_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block15_out (Add)         (None, 14, 14, 1024) 0           conv4_block14_out[0][0]          
                                                                 conv4_block15_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block16_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block15_out[0][0]          
__________________________________________________________________________________________________
conv4_block16_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block16_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block16_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block16_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block16_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block16_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block16_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block16_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block16_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block16_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block16_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block16_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block16_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block16_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block16_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block16_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block16_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block16_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block16_out (Add)         (None, 14, 14, 1024) 0           conv4_block15_out[0][0]          
                                                                 conv4_block16_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block17_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block16_out[0][0]          
__________________________________________________________________________________________________
conv4_block17_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block17_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block17_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block17_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block17_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block17_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block17_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block17_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block17_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block17_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block17_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block17_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block17_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block17_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block17_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block17_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block17_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block17_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block17_out (Add)         (None, 14, 14, 1024) 0           conv4_block16_out[0][0]          
                                                                 conv4_block17_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block18_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block17_out[0][0]          
__________________________________________________________________________________________________
conv4_block18_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block18_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block18_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block18_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block18_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block18_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block18_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block18_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block18_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block18_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block18_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block18_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block18_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block18_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block18_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block18_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block18_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block18_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block18_out (Add)         (None, 14, 14, 1024) 0           conv4_block17_out[0][0]          
                                                                 conv4_block18_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block19_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block18_out[0][0]          
__________________________________________________________________________________________________
conv4_block19_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block19_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block19_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block19_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block19_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block19_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block19_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block19_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block19_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block19_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block19_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block19_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block19_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block19_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block19_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block19_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block19_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block19_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block19_out (Add)         (None, 14, 14, 1024) 0           conv4_block18_out[0][0]          
                                                                 conv4_block19_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block20_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block19_out[0][0]          
__________________________________________________________________________________________________
conv4_block20_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block20_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block20_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block20_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block20_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block20_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block20_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block20_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block20_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block20_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block20_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block20_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block20_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block20_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block20_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block20_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block20_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block20_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block20_out (Add)         (None, 14, 14, 1024) 0           conv4_block19_out[0][0]          
                                                                 conv4_block20_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block21_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block20_out[0][0]          
__________________________________________________________________________________________________
conv4_block21_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block21_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block21_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block21_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block21_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block21_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block21_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block21_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block21_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block21_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block21_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block21_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block21_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block21_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block21_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block21_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block21_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block21_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block21_out (Add)         (None, 14, 14, 1024) 0           conv4_block20_out[0][0]          
                                                                 conv4_block21_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block22_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block21_out[0][0]          
__________________________________________________________________________________________________
conv4_block22_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block22_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block22_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block22_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block22_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block22_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block22_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block22_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block22_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block22_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block22_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block22_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block22_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block22_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block22_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block22_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block22_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block22_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block22_out (Add)         (None, 14, 14, 1024) 0           conv4_block21_out[0][0]          
                                                                 conv4_block22_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block23_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block22_out[0][0]          
__________________________________________________________________________________________________
conv4_block23_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block23_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block23_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block23_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block23_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block23_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block23_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block23_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block23_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block23_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block23_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block23_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block23_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block23_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block23_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block23_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block23_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block23_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block23_out (Add)         (None, 14, 14, 1024) 0           conv4_block22_out[0][0]          
                                                                 conv4_block23_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block24_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block23_out[0][0]          
__________________________________________________________________________________________________
conv4_block24_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block24_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block24_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block24_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block24_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block24_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block24_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block24_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block24_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block24_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block24_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block24_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block24_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block24_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block24_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block24_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block24_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block24_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block24_out (Add)         (None, 14, 14, 1024) 0           conv4_block23_out[0][0]          
                                                                 conv4_block24_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block25_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block24_out[0][0]          
__________________________________________________________________________________________________
conv4_block25_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block25_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block25_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block25_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block25_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block25_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block25_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block25_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block25_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block25_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block25_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block25_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block25_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block25_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block25_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block25_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block25_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block25_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block25_out (Add)         (None, 14, 14, 1024) 0           conv4_block24_out[0][0]          
                                                                 conv4_block25_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block26_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block25_out[0][0]          
__________________________________________________________________________________________________
conv4_block26_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block26_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block26_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block26_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block26_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block26_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block26_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block26_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block26_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block26_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block26_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block26_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block26_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block26_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block26_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block26_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block26_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block26_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block26_out (Add)         (None, 14, 14, 1024) 0           conv4_block25_out[0][0]          
                                                                 conv4_block26_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block27_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block26_out[0][0]          
__________________________________________________________________________________________________
conv4_block27_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block27_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block27_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block27_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block27_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block27_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block27_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block27_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block27_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block27_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block27_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block27_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block27_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block27_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block27_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block27_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block27_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block27_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block27_out (Add)         (None, 14, 14, 1024) 0           conv4_block26_out[0][0]          
                                                                 conv4_block27_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block28_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block27_out[0][0]          
__________________________________________________________________________________________________
conv4_block28_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block28_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block28_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block28_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block28_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block28_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block28_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block28_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block28_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block28_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block28_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block28_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block28_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block28_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block28_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block28_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block28_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block28_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block28_out (Add)         (None, 14, 14, 1024) 0           conv4_block27_out[0][0]          
                                                                 conv4_block28_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block29_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block28_out[0][0]          
__________________________________________________________________________________________________
conv4_block29_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block29_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block29_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block29_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block29_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block29_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block29_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block29_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block29_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block29_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block29_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block29_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block29_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block29_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block29_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block29_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block29_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block29_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block29_out (Add)         (None, 14, 14, 1024) 0           conv4_block28_out[0][0]          
                                                                 conv4_block29_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block30_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block29_out[0][0]          
__________________________________________________________________________________________________
conv4_block30_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block30_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block30_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block30_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block30_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block30_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block30_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block30_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block30_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block30_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block30_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block30_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block30_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block30_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block30_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block30_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block30_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block30_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block30_out (Add)         (None, 14, 14, 1024) 0           conv4_block29_out[0][0]          
                                                                 conv4_block30_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block31_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block30_out[0][0]          
__________________________________________________________________________________________________
conv4_block31_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block31_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block31_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block31_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block31_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block31_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block31_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block31_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block31_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block31_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block31_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block31_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block31_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block31_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block31_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block31_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block31_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block31_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block31_out (Add)         (None, 14, 14, 1024) 0           conv4_block30_out[0][0]          
                                                                 conv4_block31_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block32_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block31_out[0][0]          
__________________________________________________________________________________________________
conv4_block32_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block32_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block32_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block32_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block32_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block32_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block32_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block32_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block32_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block32_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block32_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block32_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block32_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block32_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block32_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block32_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block32_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block32_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block32_out (Add)         (None, 14, 14, 1024) 0           conv4_block31_out[0][0]          
                                                                 conv4_block32_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block33_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block32_out[0][0]          
__________________________________________________________________________________________________
conv4_block33_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block33_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block33_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block33_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block33_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block33_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block33_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block33_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block33_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block33_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block33_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block33_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block33_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block33_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block33_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block33_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block33_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block33_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block33_out (Add)         (None, 14, 14, 1024) 0           conv4_block32_out[0][0]          
                                                                 conv4_block33_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block34_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block33_out[0][0]          
__________________________________________________________________________________________________
conv4_block34_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block34_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block34_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block34_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block34_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block34_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block34_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block34_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block34_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block34_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block34_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block34_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block34_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block34_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block34_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block34_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block34_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block34_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block34_out (Add)         (None, 14, 14, 1024) 0           conv4_block33_out[0][0]          
                                                                 conv4_block34_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block35_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block34_out[0][0]          
__________________________________________________________________________________________________
conv4_block35_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block35_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block35_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block35_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block35_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block35_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block35_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block35_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block35_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block35_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block35_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block35_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block35_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block35_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block35_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block35_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block35_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block35_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block35_out (Add)         (None, 14, 14, 1024) 0           conv4_block34_out[0][0]          
                                                                 conv4_block35_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block36_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block35_out[0][0]          
__________________________________________________________________________________________________
conv4_block36_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block36_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block36_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block36_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block36_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block36_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block36_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block36_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block36_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block36_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block36_2_conv (Conv2D)   (None, 7, 7, 256)    589824      conv4_block36_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block36_2_bn (BatchNormal (None, 7, 7, 256)    1024        conv4_block36_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block36_2_relu (Activatio (None, 7, 7, 256)    0           conv4_block36_2_bn[0][0]         
__________________________________________________________________________________________________
max_pooling2d_5 (MaxPooling2D)  (None, 7, 7, 1024)   0           conv4_block35_out[0][0]          
__________________________________________________________________________________________________
conv4_block36_3_conv (Conv2D)   (None, 7, 7, 1024)   263168      conv4_block36_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block36_out (Add)         (None, 7, 7, 1024)   0           max_pooling2d_5[0][0]            
                                                                 conv4_block36_3_conv[0][0]       
__________________________________________________________________________________________________
conv5_block1_preact_bn (BatchNo (None, 7, 7, 1024)   4096        conv4_block36_out[0][0]          
__________________________________________________________________________________________________
conv5_block1_preact_relu (Activ (None, 7, 7, 1024)   0           conv5_block1_preact_bn[0][0]     
__________________________________________________________________________________________________
conv5_block1_1_conv (Conv2D)    (None, 7, 7, 512)    524288      conv5_block1_preact_relu[0][0]   
__________________________________________________________________________________________________
conv5_block1_1_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block1_1_relu (Activation (None, 7, 7, 512)    0           conv5_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block1_2_pad (ZeroPadding (None, 9, 9, 512)    0           conv5_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block1_2_conv (Conv2D)    (None, 7, 7, 512)    2359296     conv5_block1_2_pad[0][0]         
__________________________________________________________________________________________________
conv5_block1_2_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block1_2_relu (Activation (None, 7, 7, 512)    0           conv5_block1_2_bn[0][0]          
__________________________________________________________________________________________________
conv5_block1_0_conv (Conv2D)    (None, 7, 7, 2048)   2099200     conv5_block1_preact_relu[0][0]   
__________________________________________________________________________________________________
conv5_block1_3_conv (Conv2D)    (None, 7, 7, 2048)   1050624     conv5_block1_2_relu[0][0]        
__________________________________________________________________________________________________
conv5_block1_out (Add)          (None, 7, 7, 2048)   0           conv5_block1_0_conv[0][0]        
                                                                 conv5_block1_3_conv[0][0]        
__________________________________________________________________________________________________
conv5_block2_preact_bn (BatchNo (None, 7, 7, 2048)   8192        conv5_block1_out[0][0]           
__________________________________________________________________________________________________
conv5_block2_preact_relu (Activ (None, 7, 7, 2048)   0           conv5_block2_preact_bn[0][0]     
__________________________________________________________________________________________________
conv5_block2_1_conv (Conv2D)    (None, 7, 7, 512)    1048576     conv5_block2_preact_relu[0][0]   
__________________________________________________________________________________________________
conv5_block2_1_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block2_1_relu (Activation (None, 7, 7, 512)    0           conv5_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block2_2_pad (ZeroPadding (None, 9, 9, 512)    0           conv5_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block2_2_conv (Conv2D)    (None, 7, 7, 512)    2359296     conv5_block2_2_pad[0][0]         
__________________________________________________________________________________________________
conv5_block2_2_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block2_2_relu (Activation (None, 7, 7, 512)    0           conv5_block2_2_bn[0][0]          
__________________________________________________________________________________________________
conv5_block2_3_conv (Conv2D)    (None, 7, 7, 2048)   1050624     conv5_block2_2_relu[0][0]        
__________________________________________________________________________________________________
conv5_block2_out (Add)          (None, 7, 7, 2048)   0           conv5_block1_out[0][0]           
                                                                 conv5_block2_3_conv[0][0]        
__________________________________________________________________________________________________
conv5_block3_preact_bn (BatchNo (None, 7, 7, 2048)   8192        conv5_block2_out[0][0]           
__________________________________________________________________________________________________
conv5_block3_preact_relu (Activ (None, 7, 7, 2048)   0           conv5_block3_preact_bn[0][0]     
__________________________________________________________________________________________________
conv5_block3_1_conv (Conv2D)    (None, 7, 7, 512)    1048576     conv5_block3_preact_relu[0][0]   
__________________________________________________________________________________________________
conv5_block3_1_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block3_1_relu (Activation (None, 7, 7, 512)    0           conv5_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block3_2_pad (ZeroPadding (None, 9, 9, 512)    0           conv5_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block3_2_conv (Conv2D)    (None, 7, 7, 512)    2359296     conv5_block3_2_pad[0][0]         
__________________________________________________________________________________________________
conv5_block3_2_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block3_2_relu (Activation (None, 7, 7, 512)    0           conv5_block3_2_bn[0][0]          
__________________________________________________________________________________________________
conv5_block3_3_conv (Conv2D)    (None, 7, 7, 2048)   1050624     conv5_block3_2_relu[0][0]        
__________________________________________________________________________________________________
conv5_block3_out (Add)          (None, 7, 7, 2048)   0           conv5_block2_out[0][0]           
                                                                 conv5_block3_3_conv[0][0]        
__________________________________________________________________________________________________
post_bn (BatchNormalization)    (None, 7, 7, 2048)   8192        conv5_block3_out[0][0]           
__________________________________________________________________________________________________
post_relu (Activation)          (None, 7, 7, 2048)   0           post_bn[0][0]                    
__________________________________________________________________________________________________
avg_pool (GlobalAveragePooling2 (None, 2048)         0           post_relu[0][0]                  
__________________________________________________________________________________________________
predictions (Dense)             (None, 1000)         2049000     avg_pool[0][0]                   
==================================================================================================
Total params: 60,380,648
Trainable params: 60,236,904
Non-trainable params: 143,744
__________________________________________________________________________________________________
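The listing above is the tail of the pre-activation ResNet (ResNetV2) backbone summary: each block runs BatchNorm → ReLU before its convolutions (the preact_bn / preact_relu layers) and merges the shortcut with an Add layer, for about 60.4M parameters in total. As a minimal, illustrative sketch, one conv4-style bottleneck block can be rebuilt with the Keras functional API; the use_bias settings are inferred from the parameter counts in the summary (262,144 = 1·1·1024·256 with no bias, 589,824 = 3·3·256·256 with no bias, 263,168 = 1·1·256·1024 + 1024 biases), and the function name is hypothetical, not from the Keras source.

import tensorflow as tf
from tensorflow.keras import layers

def preact_bottleneck(x, filters=256, out_filters=1024):
    # conv4_blockN_preact_bn / conv4_blockN_preact_relu in the summary above
    y = layers.BatchNormalization()(x)
    y = layers.Activation('relu')(y)
    y = layers.Conv2D(filters, 1, use_bias=False)(y)        # 1x1 reduce
    y = layers.BatchNormalization()(y)
    y = layers.Activation('relu')(y)
    y = layers.ZeroPadding2D(1)(y)                          # conv4_blockN_2_pad
    y = layers.Conv2D(filters, 3, use_bias=False)(y)        # 3x3, 'valid' after explicit pad
    y = layers.BatchNormalization()(y)
    y = layers.Activation('relu')(y)
    y = layers.Conv2D(out_filters, 1)(y)                    # 1x1 expand (with bias)
    return layers.Add()([x, y])                             # conv4_blockN_out

inp = tf.keras.Input((14, 14, 1024))
tf.keras.Model(inp, preact_bottleneck(inp)).summary()
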
inception = tf.keras.applications.InceptionV3()
inception.summary()  # note the Concatenate (mixed*) layers that merge each module's parallel branches
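
Unlike ResNet, which combines the shortcut and the residual path with an element-wise Add, InceptionV3 runs several convolution branches in parallel and merges them channel-wise with Concatenate (the mixed0, mixed1, ... layers in the output below). Here is a minimal sketch of the first such module, with branch widths read off the mixed0 block in the listing (64 + 64 + 96 + 32 = 256 output channels); the helper names conv_bn and inception_module_a are illustrative only.

import tensorflow as tf
from tensorflow.keras import layers

def conv_bn(x, filters, kernel_size):
    # Conv2D without bias, then BatchNorm and ReLU, matching the
    # conv2d_* / batch_normalization_* / activation_* triples in the summary
    x = layers.Conv2D(filters, kernel_size, padding='same', use_bias=False)(x)
    x = layers.BatchNormalization()(x)
    return layers.Activation('relu')(x)

def inception_module_a(x):
    b1 = conv_bn(x, 64, 1)                                   # 1x1 branch
    b2 = conv_bn(conv_bn(x, 48, 1), 64, 5)                   # 1x1 -> 5x5 branch
    b3 = conv_bn(conv_bn(conv_bn(x, 64, 1), 96, 3), 96, 3)   # double 3x3 branch
    bp = layers.AveragePooling2D(3, strides=1, padding='same')(x)
    bp = conv_bn(bp, 32, 1)                                  # pooled branch
    return layers.Concatenate()([b1, b2, b3, bp])            # mixed0-style merge

inp = tf.keras.Input((35, 35, 192))
tf.keras.Model(inp, inception_module_a(inp)).summary()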

Model: "inception_v3"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_3 (InputLayer)            [(None, 299, 299, 3) 0                                            
__________________________________________________________________________________________________
conv2d (Conv2D)                 (None, 149, 149, 32) 864         input_3[0][0]                    
__________________________________________________________________________________________________
batch_normalization (BatchNorma (None, 149, 149, 32) 96          conv2d[0][0]                     
__________________________________________________________________________________________________
activation (Activation)         (None, 149, 149, 32) 0           batch_normalization[0][0]        
__________________________________________________________________________________________________
conv2d_1 (Conv2D)               (None, 147, 147, 32) 9216        activation[0][0]                 
__________________________________________________________________________________________________
batch_normalization_1 (BatchNor (None, 147, 147, 32) 96          conv2d_1[0][0]                   
__________________________________________________________________________________________________
activation_1 (Activation)       (None, 147, 147, 32) 0           batch_normalization_1[0][0]      
__________________________________________________________________________________________________
conv2d_2 (Conv2D)               (None, 147, 147, 64) 18432       activation_1[0][0]               
__________________________________________________________________________________________________
batch_normalization_2 (BatchNor (None, 147, 147, 64) 192         conv2d_2[0][0]                   
__________________________________________________________________________________________________
activation_2 (Activation)       (None, 147, 147, 64) 0           batch_normalization_2[0][0]      
__________________________________________________________________________________________________
max_pooling2d_6 (MaxPooling2D)  (None, 73, 73, 64)   0           activation_2[0][0]               
__________________________________________________________________________________________________
conv2d_3 (Conv2D)               (None, 73, 73, 80)   5120        max_pooling2d_6[0][0]            
__________________________________________________________________________________________________
batch_normalization_3 (BatchNor (None, 73, 73, 80)   240         conv2d_3[0][0]                   
__________________________________________________________________________________________________
activation_3 (Activation)       (None, 73, 73, 80)   0           batch_normalization_3[0][0]      
__________________________________________________________________________________________________
conv2d_4 (Conv2D)               (None, 71, 71, 192)  138240      activation_3[0][0]               
__________________________________________________________________________________________________
batch_normalization_4 (BatchNor (None, 71, 71, 192)  576         conv2d_4[0][0]                   
__________________________________________________________________________________________________
activation_4 (Activation)       (None, 71, 71, 192)  0           batch_normalization_4[0][0]      
__________________________________________________________________________________________________
max_pooling2d_7 (MaxPooling2D)  (None, 35, 35, 192)  0           activation_4[0][0]               
__________________________________________________________________________________________________
conv2d_8 (Conv2D)               (None, 35, 35, 64)   12288       max_pooling2d_7[0][0]            
__________________________________________________________________________________________________
batch_normalization_8 (BatchNor (None, 35, 35, 64)   192         conv2d_8[0][0]                   
__________________________________________________________________________________________________
activation_8 (Activation)       (None, 35, 35, 64)   0           batch_normalization_8[0][0]      
__________________________________________________________________________________________________
conv2d_6 (Conv2D)               (None, 35, 35, 48)   9216        max_pooling2d_7[0][0]            
__________________________________________________________________________________________________
conv2d_9 (Conv2D)               (None, 35, 35, 96)   55296       activation_8[0][0]               
__________________________________________________________________________________________________
batch_normalization_6 (BatchNor (None, 35, 35, 48)   144         conv2d_6[0][0]                   
__________________________________________________________________________________________________
batch_normalization_9 (BatchNor (None, 35, 35, 96)   288         conv2d_9[0][0]                   
__________________________________________________________________________________________________
activation_6 (Activation)       (None, 35, 35, 48)   0           batch_normalization_6[0][0]      
__________________________________________________________________________________________________
activation_9 (Activation)       (None, 35, 35, 96)   0           batch_normalization_9[0][0]      
__________________________________________________________________________________________________
average_pooling2d (AveragePooli (None, 35, 35, 192)  0           max_pooling2d_7[0][0]            
__________________________________________________________________________________________________
conv2d_5 (Conv2D)               (None, 35, 35, 64)   12288       max_pooling2d_7[0][0]            
__________________________________________________________________________________________________
conv2d_7 (Conv2D)               (None, 35, 35, 64)   76800       activation_6[0][0]               
__________________________________________________________________________________________________
conv2d_10 (Conv2D)              (None, 35, 35, 96)   82944       activation_9[0][0]               
__________________________________________________________________________________________________
conv2d_11 (Conv2D)              (None, 35, 35, 32)   6144        average_pooling2d[0][0]          
__________________________________________________________________________________________________
batch_normalization_5 (BatchNor (None, 35, 35, 64)   192         conv2d_5[0][0]                   
__________________________________________________________________________________________________
batch_normalization_7 (BatchNor (None, 35, 35, 64)   192         conv2d_7[0][0]                   
__________________________________________________________________________________________________
batch_normalization_10 (BatchNo (None, 35, 35, 96)   288         conv2d_10[0][0]                  
__________________________________________________________________________________________________
batch_normalization_11 (BatchNo (None, 35, 35, 32)   96          conv2d_11[0][0]                  
__________________________________________________________________________________________________
activation_5 (Activation)       (None, 35, 35, 64)   0           batch_normalization_5[0][0]      
__________________________________________________________________________________________________
activation_7 (Activation)       (None, 35, 35, 64)   0           batch_normalization_7[0][0]      
__________________________________________________________________________________________________
activation_10 (Activation)      (None, 35, 35, 96)   0           batch_normalization_10[0][0]     
__________________________________________________________________________________________________
activation_11 (Activation)      (None, 35, 35, 32)   0           batch_normalization_11[0][0]     
__________________________________________________________________________________________________
mixed0 (Concatenate)            (None, 35, 35, 256)  0           activation_5[0][0]               
                                                                 activation_7[0][0]               
                                                                 activation_10[0][0]              
                                                                 activation_11[0][0]              
__________________________________________________________________________________________________
conv2d_15 (Conv2D)              (None, 35, 35, 64)   16384       mixed0[0][0]                     
__________________________________________________________________________________________________
batch_normalization_15 (BatchNo (None, 35, 35, 64)   192         conv2d_15[0][0]                  
__________________________________________________________________________________________________
activation_15 (Activation)      (None, 35, 35, 64)   0           batch_normalization_15[0][0]     
__________________________________________________________________________________________________
conv2d_13 (Conv2D)              (None, 35, 35, 48)   12288       mixed0[0][0]                     
__________________________________________________________________________________________________
conv2d_16 (Conv2D)              (None, 35, 35, 96)   55296       activation_15[0][0]              
__________________________________________________________________________________________________
batch_normalization_13 (BatchNo (None, 35, 35, 48)   144         conv2d_13[0][0]                  
__________________________________________________________________________________________________
batch_normalization_16 (BatchNo (None, 35, 35, 96)   288         conv2d_16[0][0]                  
__________________________________________________________________________________________________
activation_13 (Activation)      (None, 35, 35, 48)   0           batch_normalization_13[0][0]     
__________________________________________________________________________________________________
activation_16 (Activation)      (None, 35, 35, 96)   0           batch_normalization_16[0][0]     
__________________________________________________________________________________________________
average_pooling2d_1 (AveragePoo (None, 35, 35, 256)  0           mixed0[0][0]                     
__________________________________________________________________________________________________
conv2d_12 (Conv2D)              (None, 35, 35, 64)   16384       mixed0[0][0]                     
__________________________________________________________________________________________________
conv2d_14 (Conv2D)              (None, 35, 35, 64)   76800       activation_13[0][0]              
__________________________________________________________________________________________________
conv2d_17 (Conv2D)              (None, 35, 35, 96)   82944       activation_16[0][0]              
__________________________________________________________________________________________________
conv2d_18 (Conv2D)              (None, 35, 35, 64)   16384       average_pooling2d_1[0][0]        
__________________________________________________________________________________________________
batch_normalization_12 (BatchNo (None, 35, 35, 64)   192         conv2d_12[0][0]                  
__________________________________________________________________________________________________
batch_normalization_14 (BatchNo (None, 35, 35, 64)   192         conv2d_14[0][0]                  
__________________________________________________________________________________________________
batch_normalization_17 (BatchNo (None, 35, 35, 96)   288         conv2d_17[0][0]                  
__________________________________________________________________________________________________
batch_normalization_18 (BatchNo (None, 35, 35, 64)   192         conv2d_18[0][0]                  
__________________________________________________________________________________________________
activation_12 (Activation)      (None, 35, 35, 64)   0           batch_normalization_12[0][0]     
__________________________________________________________________________________________________
activation_14 (Activation)      (None, 35, 35, 64)   0           batch_normalization_14[0][0]     
__________________________________________________________________________________________________
activation_17 (Activation)      (None, 35, 35, 96)   0           batch_normalization_17[0][0]     
__________________________________________________________________________________________________
activation_18 (Activation)      (None, 35, 35, 64)   0           batch_normalization_18[0][0]     
__________________________________________________________________________________________________
mixed1 (Concatenate)            (None, 35, 35, 288)  0           activation_12[0][0]              
                                                                 activation_14[0][0]              
                                                                 activation_17[0][0]              
                                                                 activation_18[0][0]              
__________________________________________________________________________________________________
conv2d_22 (Conv2D)              (None, 35, 35, 64)   18432       mixed1[0][0]                     
__________________________________________________________________________________________________
batch_normalization_22 (BatchNo (None, 35, 35, 64)   192         conv2d_22[0][0]                  
__________________________________________________________________________________________________
activation_22 (Activation)      (None, 35, 35, 64)   0           batch_normalization_22[0][0]     
__________________________________________________________________________________________________
conv2d_20 (Conv2D)              (None, 35, 35, 48)   13824       mixed1[0][0]                     
__________________________________________________________________________________________________
conv2d_23 (Conv2D)              (None, 35, 35, 96)   55296       activation_22[0][0]              
__________________________________________________________________________________________________
batch_normalization_20 (BatchNo (None, 35, 35, 48)   144         conv2d_20[0][0]                  
__________________________________________________________________________________________________
batch_normalization_23 (BatchNo (None, 35, 35, 96)   288         conv2d_23[0][0]                  
__________________________________________________________________________________________________
activation_20 (Activation)      (None, 35, 35, 48)   0           batch_normalization_20[0][0]     
__________________________________________________________________________________________________
activation_23 (Activation)      (None, 35, 35, 96)   0           batch_normalization_23[0][0]     
__________________________________________________________________________________________________
average_pooling2d_2 (AveragePoo (None, 35, 35, 288)  0           mixed1[0][0]                     
__________________________________________________________________________________________________
conv2d_19 (Conv2D)              (None, 35, 35, 64)   18432       mixed1[0][0]                     
__________________________________________________________________________________________________
conv2d_21 (Conv2D)              (None, 35, 35, 64)   76800       activation_20[0][0]              
__________________________________________________________________________________________________
conv2d_24 (Conv2D)              (None, 35, 35, 96)   82944       activation_23[0][0]              
__________________________________________________________________________________________________
conv2d_25 (Conv2D)              (None, 35, 35, 64)   18432       average_pooling2d_2[0][0]        
__________________________________________________________________________________________________
batch_normalization_19 (BatchNo (None, 35, 35, 64)   192         conv2d_19[0][0]                  
__________________________________________________________________________________________________
batch_normalization_21 (BatchNo (None, 35, 35, 64)   192         conv2d_21[0][0]                  
__________________________________________________________________________________________________
batch_normalization_24 (BatchNo (None, 35, 35, 96)   288         conv2d_24[0][0]                  
__________________________________________________________________________________________________
batch_normalization_25 (BatchNo (None, 35, 35, 64)   192         conv2d_25[0][0]                  
__________________________________________________________________________________________________
activation_19 (Activation)      (None, 35, 35, 64)   0           batch_normalization_19[0][0]     
__________________________________________________________________________________________________
activation_21 (Activation)      (None, 35, 35, 64)   0           batch_normalization_21[0][0]     
__________________________________________________________________________________________________
activation_24 (Activation)      (None, 35, 35, 96)   0           batch_normalization_24[0][0]     
__________________________________________________________________________________________________
activation_25 (Activation)      (None, 35, 35, 64)   0           batch_normalization_25[0][0]     
__________________________________________________________________________________________________
mixed2 (Concatenate)            (None, 35, 35, 288)  0           activation_19[0][0]              
                                                                 activation_21[0][0]              
                                                                 activation_24[0][0]              
                                                                 activation_25[0][0]              
__________________________________________________________________________________________________
conv2d_27 (Conv2D)              (None, 35, 35, 64)   18432       mixed2[0][0]                     
__________________________________________________________________________________________________
batch_normalization_27 (BatchNo (None, 35, 35, 64)   192         conv2d_27[0][0]                  
__________________________________________________________________________________________________
activation_27 (Activation)      (None, 35, 35, 64)   0           batch_normalization_27[0][0]     
__________________________________________________________________________________________________
conv2d_28 (Conv2D)              (None, 35, 35, 96)   55296       activation_27[0][0]              
__________________________________________________________________________________________________
batch_normalization_28 (BatchNo (None, 35, 35, 96)   288         conv2d_28[0][0]                  
__________________________________________________________________________________________________
activation_28 (Activation)      (None, 35, 35, 96)   0           batch_normalization_28[0][0]     
__________________________________________________________________________________________________
conv2d_26 (Conv2D)              (None, 17, 17, 384)  995328      mixed2[0][0]                     
__________________________________________________________________________________________________
conv2d_29 (Conv2D)              (None, 17, 17, 96)   82944       activation_28[0][0]              
__________________________________________________________________________________________________
batch_normalization_26 (BatchNo (None, 17, 17, 384)  1152        conv2d_26[0][0]                  
__________________________________________________________________________________________________
batch_normalization_29 (BatchNo (None, 17, 17, 96)   288         conv2d_29[0][0]                  
__________________________________________________________________________________________________
activation_26 (Activation)      (None, 17, 17, 384)  0           batch_normalization_26[0][0]     
__________________________________________________________________________________________________
activation_29 (Activation)      (None, 17, 17, 96)   0           batch_normalization_29[0][0]     
__________________________________________________________________________________________________
max_pooling2d_8 (MaxPooling2D)  (None, 17, 17, 288)  0           mixed2[0][0]                     
__________________________________________________________________________________________________
mixed3 (Concatenate)            (None, 17, 17, 768)  0           activation_26[0][0]              
                                                                 activation_29[0][0]              
                                                                 max_pooling2d_8[0][0]            
__________________________________________________________________________________________________
conv2d_34 (Conv2D)              (None, 17, 17, 128)  98304       mixed3[0][0]                     
__________________________________________________________________________________________________
batch_normalization_34 (BatchNo (None, 17, 17, 128)  384         conv2d_34[0][0]                  
__________________________________________________________________________________________________
activation_34 (Activation)      (None, 17, 17, 128)  0           batch_normalization_34[0][0]     
__________________________________________________________________________________________________
conv2d_35 (Conv2D)              (None, 17, 17, 128)  114688      activation_34[0][0]              
__________________________________________________________________________________________________
batch_normalization_35 (BatchNo (None, 17, 17, 128)  384         conv2d_35[0][0]                  
__________________________________________________________________________________________________
activation_35 (Activation)      (None, 17, 17, 128)  0           batch_normalization_35[0][0]     
__________________________________________________________________________________________________
conv2d_31 (Conv2D)              (None, 17, 17, 128)  98304       mixed3[0][0]                     
__________________________________________________________________________________________________
conv2d_36 (Conv2D)              (None, 17, 17, 128)  114688      activation_35[0][0]              
__________________________________________________________________________________________________
batch_normalization_31 (BatchNo (None, 17, 17, 128)  384         conv2d_31[0][0]                  
__________________________________________________________________________________________________
batch_normalization_36 (BatchNo (None, 17, 17, 128)  384         conv2d_36[0][0]                  
__________________________________________________________________________________________________
activation_31 (Activation)      (None, 17, 17, 128)  0           batch_normalization_31[0][0]     
__________________________________________________________________________________________________
activation_36 (Activation)      (None, 17, 17, 128)  0           batch_normalization_36[0][0]     
__________________________________________________________________________________________________
conv2d_32 (Conv2D)              (None, 17, 17, 128)  114688      activation_31[0][0]              
__________________________________________________________________________________________________
conv2d_37 (Conv2D)              (None, 17, 17, 128)  114688      activation_36[0][0]              
__________________________________________________________________________________________________
batch_normalization_32 (BatchNo (None, 17, 17, 128)  384         conv2d_32[0][0]                  
__________________________________________________________________________________________________
batch_normalization_37 (BatchNo (None, 17, 17, 128)  384         conv2d_37[0][0]                  
__________________________________________________________________________________________________
activation_32 (Activation)      (None, 17, 17, 128)  0           batch_normalization_32[0][0]     
__________________________________________________________________________________________________
activation_37 (Activation)      (None, 17, 17, 128)  0           batch_normalization_37[0][0]     
__________________________________________________________________________________________________
average_pooling2d_3 (AveragePoo (None, 17, 17, 768)  0           mixed3[0][0]                     
__________________________________________________________________________________________________
conv2d_30 (Conv2D)              (None, 17, 17, 192)  147456      mixed3[0][0]                     
__________________________________________________________________________________________________
conv2d_33 (Conv2D)              (None, 17, 17, 192)  172032      activation_32[0][0]              
__________________________________________________________________________________________________
conv2d_38 (Conv2D)              (None, 17, 17, 192)  172032      activation_37[0][0]              
__________________________________________________________________________________________________
conv2d_39 (Conv2D)              (None, 17, 17, 192)  147456      average_pooling2d_3[0][0]        
__________________________________________________________________________________________________
batch_normalization_30 (BatchNo (None, 17, 17, 192)  576         conv2d_30[0][0]                  
__________________________________________________________________________________________________
batch_normalization_33 (BatchNo (None, 17, 17, 192)  576         conv2d_33[0][0]                  
__________________________________________________________________________________________________
batch_normalization_38 (BatchNo (None, 17, 17, 192)  576         conv2d_38[0][0]                  
__________________________________________________________________________________________________
batch_normalization_39 (BatchNo (None, 17, 17, 192)  576         conv2d_39[0][0]                  
__________________________________________________________________________________________________
activation_30 (Activation)      (None, 17, 17, 192)  0           batch_normalization_30[0][0]     
__________________________________________________________________________________________________
activation_33 (Activation)      (None, 17, 17, 192)  0           batch_normalization_33[0][0]     
__________________________________________________________________________________________________
activation_38 (Activation)      (None, 17, 17, 192)  0           batch_normalization_38[0][0]     
__________________________________________________________________________________________________
activation_39 (Activation)      (None, 17, 17, 192)  0           batch_normalization_39[0][0]     
__________________________________________________________________________________________________
mixed4 (Concatenate)            (None, 17, 17, 768)  0           activation_30[0][0]              
                                                                 activation_33[0][0]              
                                                                 activation_38[0][0]              
                                                                 activation_39[0][0]              
__________________________________________________________________________________________________
conv2d_44 (Conv2D)              (None, 17, 17, 160)  122880      mixed4[0][0]                     
__________________________________________________________________________________________________
batch_normalization_44 (BatchNo (None, 17, 17, 160)  480         conv2d_44[0][0]                  
__________________________________________________________________________________________________
activation_44 (Activation)      (None, 17, 17, 160)  0           batch_normalization_44[0][0]     
__________________________________________________________________________________________________
conv2d_45 (Conv2D)              (None, 17, 17, 160)  179200      activation_44[0][0]              
__________________________________________________________________________________________________
batch_normalization_45 (BatchNo (None, 17, 17, 160)  480         conv2d_45[0][0]                  
__________________________________________________________________________________________________
activation_45 (Activation)      (None, 17, 17, 160)  0           batch_normalization_45[0][0]     
__________________________________________________________________________________________________
conv2d_41 (Conv2D)              (None, 17, 17, 160)  122880      mixed4[0][0]                     
__________________________________________________________________________________________________
conv2d_46 (Conv2D)              (None, 17, 17, 160)  179200      activation_45[0][0]              
__________________________________________________________________________________________________
batch_normalization_41 (BatchNo (None, 17, 17, 160)  480         conv2d_41[0][0]                  
__________________________________________________________________________________________________
batch_normalization_46 (BatchNo (None, 17, 17, 160)  480         conv2d_46[0][0]                  
__________________________________________________________________________________________________
activation_41 (Activation)      (None, 17, 17, 160)  0           batch_normalization_41[0][0]     
__________________________________________________________________________________________________
activation_46 (Activation)      (None, 17, 17, 160)  0           batch_normalization_46[0][0]     
__________________________________________________________________________________________________
conv2d_42 (Conv2D)              (None, 17, 17, 160)  179200      activation_41[0][0]              
__________________________________________________________________________________________________
conv2d_47 (Conv2D)              (None, 17, 17, 160)  179200      activation_46[0][0]              
__________________________________________________________________________________________________
batch_normalization_42 (BatchNo (None, 17, 17, 160)  480         conv2d_42[0][0]                  
__________________________________________________________________________________________________
batch_normalization_47 (BatchNo (None, 17, 17, 160)  480         conv2d_47[0][0]                  
__________________________________________________________________________________________________
activation_42 (Activation)      (None, 17, 17, 160)  0           batch_normalization_42[0][0]     
__________________________________________________________________________________________________
activation_47 (Activation)      (None, 17, 17, 160)  0           batch_normalization_47[0][0]     
__________________________________________________________________________________________________
average_pooling2d_4 (AveragePoo (None, 17, 17, 768)  0           mixed4[0][0]                     
__________________________________________________________________________________________________
conv2d_40 (Conv2D)              (None, 17, 17, 192)  147456      mixed4[0][0]                     
__________________________________________________________________________________________________
conv2d_43 (Conv2D)              (None, 17, 17, 192)  215040      activation_42[0][0]              
__________________________________________________________________________________________________
conv2d_48 (Conv2D)              (None, 17, 17, 192)  215040      activation_47[0][0]              
__________________________________________________________________________________________________
conv2d_49 (Conv2D)              (None, 17, 17, 192)  147456      average_pooling2d_4[0][0]        
__________________________________________________________________________________________________
batch_normalization_40 (BatchNo (None, 17, 17, 192)  576         conv2d_40[0][0]                  
__________________________________________________________________________________________________
batch_normalization_43 (BatchNo (None, 17, 17, 192)  576         conv2d_43[0][0]                  
__________________________________________________________________________________________________
batch_normalization_48 (BatchNo (None, 17, 17, 192)  576         conv2d_48[0][0]                  
__________________________________________________________________________________________________
batch_normalization_49 (BatchNo (None, 17, 17, 192)  576         conv2d_49[0][0]                  
__________________________________________________________________________________________________
activation_40 (Activation)      (None, 17, 17, 192)  0           batch_normalization_40[0][0]     
__________________________________________________________________________________________________
activation_43 (Activation)      (None, 17, 17, 192)  0           batch_normalization_43[0][0]     
__________________________________________________________________________________________________
activation_48 (Activation)      (None, 17, 17, 192)  0           batch_normalization_48[0][0]     
__________________________________________________________________________________________________
activation_49 (Activation)      (None, 17, 17, 192)  0           batch_normalization_49[0][0]     
__________________________________________________________________________________________________
mixed5 (Concatenate)            (None, 17, 17, 768)  0           activation_40[0][0]              
                                                                 activation_43[0][0]              
                                                                 activation_48[0][0]              
                                                                 activation_49[0][0]              
__________________________________________________________________________________________________
conv2d_54 (Conv2D)              (None, 17, 17, 160)  122880      mixed5[0][0]                     
__________________________________________________________________________________________________
batch_normalization_54 (BatchNo (None, 17, 17, 160)  480         conv2d_54[0][0]                  
__________________________________________________________________________________________________
activation_54 (Activation)      (None, 17, 17, 160)  0           batch_normalization_54[0][0]     
__________________________________________________________________________________________________
conv2d_55 (Conv2D)              (None, 17, 17, 160)  179200      activation_54[0][0]              
__________________________________________________________________________________________________
batch_normalization_55 (BatchNo (None, 17, 17, 160)  480         conv2d_55[0][0]                  
__________________________________________________________________________________________________
activation_55 (Activation)      (None, 17, 17, 160)  0           batch_normalization_55[0][0]     
__________________________________________________________________________________________________
conv2d_51 (Conv2D)              (None, 17, 17, 160)  122880      mixed5[0][0]                     
__________________________________________________________________________________________________
conv2d_56 (Conv2D)              (None, 17, 17, 160)  179200      activation_55[0][0]              
__________________________________________________________________________________________________
batch_normalization_51 (BatchNo (None, 17, 17, 160)  480         conv2d_51[0][0]                  
__________________________________________________________________________________________________
batch_normalization_56 (BatchNo (None, 17, 17, 160)  480         conv2d_56[0][0]                  
__________________________________________________________________________________________________
activation_51 (Activation)      (None, 17, 17, 160)  0           batch_normalization_51[0][0]     
__________________________________________________________________________________________________
activation_56 (Activation)      (None, 17, 17, 160)  0           batch_normalization_56[0][0]     
__________________________________________________________________________________________________
conv2d_52 (Conv2D)              (None, 17, 17, 160)  179200      activation_51[0][0]              
__________________________________________________________________________________________________
conv2d_57 (Conv2D)              (None, 17, 17, 160)  179200      activation_56[0][0]              
__________________________________________________________________________________________________
batch_normalization_52 (BatchNo (None, 17, 17, 160)  480         conv2d_52[0][0]                  
__________________________________________________________________________________________________
batch_normalization_57 (BatchNo (None, 17, 17, 160)  480         conv2d_57[0][0]                  
__________________________________________________________________________________________________
activation_52 (Activation)      (None, 17, 17, 160)  0           batch_normalization_52[0][0]     
__________________________________________________________________________________________________
activation_57 (Activation)      (None, 17, 17, 160)  0           batch_normalization_57[0][0]     
__________________________________________________________________________________________________
average_pooling2d_5 (AveragePoo (None, 17, 17, 768)  0           mixed5[0][0]                     
__________________________________________________________________________________________________
conv2d_50 (Conv2D)              (None, 17, 17, 192)  147456      mixed5[0][0]                     
__________________________________________________________________________________________________
conv2d_53 (Conv2D)              (None, 17, 17, 192)  215040      activation_52[0][0]              
__________________________________________________________________________________________________
conv2d_58 (Conv2D)              (None, 17, 17, 192)  215040      activation_57[0][0]              
__________________________________________________________________________________________________
conv2d_59 (Conv2D)              (None, 17, 17, 192)  147456      average_pooling2d_5[0][0]        
__________________________________________________________________________________________________
batch_normalization_50 (BatchNo (None, 17, 17, 192)  576         conv2d_50[0][0]                  
__________________________________________________________________________________________________
batch_normalization_53 (BatchNo (None, 17, 17, 192)  576         conv2d_53[0][0]                  
__________________________________________________________________________________________________
batch_normalization_58 (BatchNo (None, 17, 17, 192)  576         conv2d_58[0][0]                  
__________________________________________________________________________________________________
batch_normalization_59 (BatchNo (None, 17, 17, 192)  576         conv2d_59[0][0]                  
__________________________________________________________________________________________________
activation_50 (Activation)      (None, 17, 17, 192)  0           batch_normalization_50[0][0]     
__________________________________________________________________________________________________
activation_53 (Activation)      (None, 17, 17, 192)  0           batch_normalization_53[0][0]     
__________________________________________________________________________________________________
activation_58 (Activation)      (None, 17, 17, 192)  0           batch_normalization_58[0][0]     
__________________________________________________________________________________________________
activation_59 (Activation)      (None, 17, 17, 192)  0           batch_normalization_59[0][0]     
__________________________________________________________________________________________________
mixed6 (Concatenate)            (None, 17, 17, 768)  0           activation_50[0][0]              
                                                                 activation_53[0][0]              
                                                                 activation_58[0][0]              
                                                                 activation_59[0][0]              
__________________________________________________________________________________________________
conv2d_64 (Conv2D)              (None, 17, 17, 192)  147456      mixed6[0][0]                     
__________________________________________________________________________________________________
batch_normalization_64 (BatchNo (None, 17, 17, 192)  576         conv2d_64[0][0]                  
__________________________________________________________________________________________________
activation_64 (Activation)      (None, 17, 17, 192)  0           batch_normalization_64[0][0]     
__________________________________________________________________________________________________
conv2d_65 (Conv2D)              (None, 17, 17, 192)  258048      activation_64[0][0]              
__________________________________________________________________________________________________
batch_normalization_65 (BatchNo (None, 17, 17, 192)  576         conv2d_65[0][0]                  
__________________________________________________________________________________________________
activation_65 (Activation)      (None, 17, 17, 192)  0           batch_normalization_65[0][0]     
__________________________________________________________________________________________________
conv2d_61 (Conv2D)              (None, 17, 17, 192)  147456      mixed6[0][0]                     
__________________________________________________________________________________________________
conv2d_66 (Conv2D)              (None, 17, 17, 192)  258048      activation_65[0][0]              
__________________________________________________________________________________________________
batch_normalization_61 (BatchNo (None, 17, 17, 192)  576         conv2d_61[0][0]                  
__________________________________________________________________________________________________
batch_normalization_66 (BatchNo (None, 17, 17, 192)  576         conv2d_66[0][0]                  
__________________________________________________________________________________________________
activation_61 (Activation)      (None, 17, 17, 192)  0           batch_normalization_61[0][0]     
__________________________________________________________________________________________________
activation_66 (Activation)      (None, 17, 17, 192)  0           batch_normalization_66[0][0]     
__________________________________________________________________________________________________
conv2d_62 (Conv2D)              (None, 17, 17, 192)  258048      activation_61[0][0]              
__________________________________________________________________________________________________
conv2d_67 (Conv2D)              (None, 17, 17, 192)  258048      activation_66[0][0]              
__________________________________________________________________________________________________
batch_normalization_62 (BatchNo (None, 17, 17, 192)  576         conv2d_62[0][0]                  
__________________________________________________________________________________________________
batch_normalization_67 (BatchNo (None, 17, 17, 192)  576         conv2d_67[0][0]                  
__________________________________________________________________________________________________
activation_62 (Activation)      (None, 17, 17, 192)  0           batch_normalization_62[0][0]     
__________________________________________________________________________________________________
activation_67 (Activation)      (None, 17, 17, 192)  0           batch_normalization_67[0][0]     
__________________________________________________________________________________________________
average_pooling2d_6 (AveragePoo (None, 17, 17, 768)  0           mixed6[0][0]                     
__________________________________________________________________________________________________
conv2d_60 (Conv2D)              (None, 17, 17, 192)  147456      mixed6[0][0]                     
__________________________________________________________________________________________________
conv2d_63 (Conv2D)              (None, 17, 17, 192)  258048      activation_62[0][0]              
__________________________________________________________________________________________________
conv2d_68 (Conv2D)              (None, 17, 17, 192)  258048      activation_67[0][0]              
__________________________________________________________________________________________________
conv2d_69 (Conv2D)              (None, 17, 17, 192)  147456      average_pooling2d_6[0][0]        
__________________________________________________________________________________________________
batch_normalization_60 (BatchNo (None, 17, 17, 192)  576         conv2d_60[0][0]                  
__________________________________________________________________________________________________
batch_normalization_63 (BatchNo (None, 17, 17, 192)  576         conv2d_63[0][0]                  
__________________________________________________________________________________________________
batch_normalization_68 (BatchNo (None, 17, 17, 192)  576         conv2d_68[0][0]                  
__________________________________________________________________________________________________
batch_normalization_69 (BatchNo (None, 17, 17, 192)  576         conv2d_69[0][0]                  
__________________________________________________________________________________________________
activation_60 (Activation)      (None, 17, 17, 192)  0           batch_normalization_60[0][0]     
__________________________________________________________________________________________________
activation_63 (Activation)      (None, 17, 17, 192)  0           batch_normalization_63[0][0]     
__________________________________________________________________________________________________
activation_68 (Activation)      (None, 17, 17, 192)  0           batch_normalization_68[0][0]     
__________________________________________________________________________________________________
activation_69 (Activation)      (None, 17, 17, 192)  0           batch_normalization_69[0][0]     
__________________________________________________________________________________________________
mixed7 (Concatenate)            (None, 17, 17, 768)  0           activation_60[0][0]              
                                                                 activation_63[0][0]              
                                                                 activation_68[0][0]              
                                                                 activation_69[0][0]              
__________________________________________________________________________________________________
conv2d_72 (Conv2D)              (None, 17, 17, 192)  147456      mixed7[0][0]                     
__________________________________________________________________________________________________
batch_normalization_72 (BatchNo (None, 17, 17, 192)  576         conv2d_72[0][0]                  
__________________________________________________________________________________________________
activation_72 (Activation)      (None, 17, 17, 192)  0           batch_normalization_72[0][0]     
__________________________________________________________________________________________________
conv2d_73 (Conv2D)              (None, 17, 17, 192)  258048      activation_72[0][0]              
__________________________________________________________________________________________________
batch_normalization_73 (BatchNo (None, 17, 17, 192)  576         conv2d_73[0][0]                  
__________________________________________________________________________________________________
activation_73 (Activation)      (None, 17, 17, 192)  0           batch_normalization_73[0][0]     
__________________________________________________________________________________________________
conv2d_70 (Conv2D)              (None, 17, 17, 192)  147456      mixed7[0][0]                     
__________________________________________________________________________________________________
conv2d_74 (Conv2D)              (None, 17, 17, 192)  258048      activation_73[0][0]              
__________________________________________________________________________________________________
batch_normalization_70 (BatchNo (None, 17, 17, 192)  576         conv2d_70[0][0]                  
__________________________________________________________________________________________________
batch_normalization_74 (BatchNo (None, 17, 17, 192)  576         conv2d_74[0][0]                  
__________________________________________________________________________________________________
activation_70 (Activation)      (None, 17, 17, 192)  0           batch_normalization_70[0][0]     
__________________________________________________________________________________________________
activation_74 (Activation)      (None, 17, 17, 192)  0           batch_normalization_74[0][0]     
__________________________________________________________________________________________________
conv2d_71 (Conv2D)              (None, 8, 8, 320)    552960      activation_70[0][0]              
__________________________________________________________________________________________________
conv2d_75 (Conv2D)              (None, 8, 8, 192)    331776      activation_74[0][0]              
__________________________________________________________________________________________________
batch_normalization_71 (BatchNo (None, 8, 8, 320)    960         conv2d_71[0][0]                  
__________________________________________________________________________________________________
batch_normalization_75 (BatchNo (None, 8, 8, 192)    576         conv2d_75[0][0]                  
__________________________________________________________________________________________________
activation_71 (Activation)      (None, 8, 8, 320)    0           batch_normalization_71[0][0]     
__________________________________________________________________________________________________
activation_75 (Activation)      (None, 8, 8, 192)    0           batch_normalization_75[0][0]     
__________________________________________________________________________________________________
max_pooling2d_9 (MaxPooling2D)  (None, 8, 8, 768)    0           mixed7[0][0]                     
__________________________________________________________________________________________________
mixed8 (Concatenate)            (None, 8, 8, 1280)   0           activation_71[0][0]              
                                                                 activation_75[0][0]              
                                                                 max_pooling2d_9[0][0]            
__________________________________________________________________________________________________
conv2d_80 (Conv2D)              (None, 8, 8, 448)    573440      mixed8[0][0]                     
__________________________________________________________________________________________________
batch_normalization_80 (BatchNo (None, 8, 8, 448)    1344        conv2d_80[0][0]                  
__________________________________________________________________________________________________
activation_80 (Activation)      (None, 8, 8, 448)    0           batch_normalization_80[0][0]     
__________________________________________________________________________________________________
conv2d_77 (Conv2D)              (None, 8, 8, 384)    491520      mixed8[0][0]                     
__________________________________________________________________________________________________
conv2d_81 (Conv2D)              (None, 8, 8, 384)    1548288     activation_80[0][0]              
__________________________________________________________________________________________________
batch_normalization_77 (BatchNo (None, 8, 8, 384)    1152        conv2d_77[0][0]                  
__________________________________________________________________________________________________
batch_normalization_81 (BatchNo (None, 8, 8, 384)    1152        conv2d_81[0][0]                  
__________________________________________________________________________________________________
activation_77 (Activation)      (None, 8, 8, 384)    0           batch_normalization_77[0][0]     
__________________________________________________________________________________________________
activation_81 (Activation)      (None, 8, 8, 384)    0           batch_normalization_81[0][0]     
__________________________________________________________________________________________________
conv2d_78 (Conv2D)              (None, 8, 8, 384)    442368      activation_77[0][0]              
__________________________________________________________________________________________________
conv2d_79 (Conv2D)              (None, 8, 8, 384)    442368      activation_77[0][0]              
__________________________________________________________________________________________________
conv2d_82 (Conv2D)              (None, 8, 8, 384)    442368      activation_81[0][0]              
__________________________________________________________________________________________________
conv2d_83 (Conv2D)              (None, 8, 8, 384)    442368      activation_81[0][0]              
__________________________________________________________________________________________________
average_pooling2d_7 (AveragePoo (None, 8, 8, 1280)   0           mixed8[0][0]                     
__________________________________________________________________________________________________
conv2d_76 (Conv2D)              (None, 8, 8, 320)    409600      mixed8[0][0]                     
__________________________________________________________________________________________________
batch_normalization_78 (BatchNo (None, 8, 8, 384)    1152        conv2d_78[0][0]                  
__________________________________________________________________________________________________
batch_normalization_79 (BatchNo (None, 8, 8, 384)    1152        conv2d_79[0][0]                  
__________________________________________________________________________________________________
batch_normalization_82 (BatchNo (None, 8, 8, 384)    1152        conv2d_82[0][0]                  
__________________________________________________________________________________________________
batch_normalization_83 (BatchNo (None, 8, 8, 384)    1152        conv2d_83[0][0]                  
__________________________________________________________________________________________________
conv2d_84 (Conv2D)              (None, 8, 8, 192)    245760      average_pooling2d_7[0][0]        
__________________________________________________________________________________________________
batch_normalization_76 (BatchNo (None, 8, 8, 320)    960         conv2d_76[0][0]                  
__________________________________________________________________________________________________
activation_78 (Activation)      (None, 8, 8, 384)    0           batch_normalization_78[0][0]     
__________________________________________________________________________________________________
activation_79 (Activation)      (None, 8, 8, 384)    0           batch_normalization_79[0][0]     
__________________________________________________________________________________________________
activation_82 (Activation)      (None, 8, 8, 384)    0           batch_normalization_82[0][0]     
__________________________________________________________________________________________________
activation_83 (Activation)      (None, 8, 8, 384)    0           batch_normalization_83[0][0]     
__________________________________________________________________________________________________
batch_normalization_84 (BatchNo (None, 8, 8, 192)    576         conv2d_84[0][0]                  
__________________________________________________________________________________________________
activation_76 (Activation)      (None, 8, 8, 320)    0           batch_normalization_76[0][0]     
__________________________________________________________________________________________________
mixed9_0 (Concatenate)          (None, 8, 8, 768)    0           activation_78[0][0]              
                                                                 activation_79[0][0]              
__________________________________________________________________________________________________
concatenate (Concatenate)       (None, 8, 8, 768)    0           activation_82[0][0]              
                                                                 activation_83[0][0]              
__________________________________________________________________________________________________
activation_84 (Activation)      (None, 8, 8, 192)    0           batch_normalization_84[0][0]     
__________________________________________________________________________________________________
mixed9 (Concatenate)            (None, 8, 8, 2048)   0           activation_76[0][0]              
                                                                 mixed9_0[0][0]                   
                                                                 concatenate[0][0]                
                                                                 activation_84[0][0]              
__________________________________________________________________________________________________
conv2d_89 (Conv2D)              (None, 8, 8, 448)    917504      mixed9[0][0]                     
__________________________________________________________________________________________________
batch_normalization_89 (BatchNo (None, 8, 8, 448)    1344        conv2d_89[0][0]                  
__________________________________________________________________________________________________
activation_89 (Activation)      (None, 8, 8, 448)    0           batch_normalization_89[0][0]     
__________________________________________________________________________________________________
conv2d_86 (Conv2D)              (None, 8, 8, 384)    786432      mixed9[0][0]                     
__________________________________________________________________________________________________
conv2d_90 (Conv2D)              (None, 8, 8, 384)    1548288     activation_89[0][0]              
__________________________________________________________________________________________________
batch_normalization_86 (BatchNo (None, 8, 8, 384)    1152        conv2d_86[0][0]                  
__________________________________________________________________________________________________
batch_normalization_90 (BatchNo (None, 8, 8, 384)    1152        conv2d_90[0][0]                  
__________________________________________________________________________________________________
activation_86 (Activation)      (None, 8, 8, 384)    0           batch_normalization_86[0][0]     
__________________________________________________________________________________________________
activation_90 (Activation)      (None, 8, 8, 384)    0           batch_normalization_90[0][0]     
__________________________________________________________________________________________________
conv2d_87 (Conv2D)              (None, 8, 8, 384)    442368      activation_86[0][0]              
__________________________________________________________________________________________________
conv2d_88 (Conv2D)              (None, 8, 8, 384)    442368      activation_86[0][0]              
__________________________________________________________________________________________________
conv2d_91 (Conv2D)              (None, 8, 8, 384)    442368      activation_90[0][0]              
__________________________________________________________________________________________________
conv2d_92 (Conv2D)              (None, 8, 8, 384)    442368      activation_90[0][0]              
__________________________________________________________________________________________________
average_pooling2d_8 (AveragePoo (None, 8, 8, 2048)   0           mixed9[0][0]                     
__________________________________________________________________________________________________
conv2d_85 (Conv2D)              (None, 8, 8, 320)    655360      mixed9[0][0]                     
__________________________________________________________________________________________________
batch_normalization_87 (BatchNo (None, 8, 8, 384)    1152        conv2d_87[0][0]                  
__________________________________________________________________________________________________
batch_normalization_88 (BatchNo (None, 8, 8, 384)    1152        conv2d_88[0][0]                  
__________________________________________________________________________________________________
batch_normalization_91 (BatchNo (None, 8, 8, 384)    1152        conv2d_91[0][0]                  
__________________________________________________________________________________________________
batch_normalization_92 (BatchNo (None, 8, 8, 384)    1152        conv2d_92[0][0]                  
__________________________________________________________________________________________________
conv2d_93 (Conv2D)              (None, 8, 8, 192)    393216      average_pooling2d_8[0][0]        
__________________________________________________________________________________________________
batch_normalization_85 (BatchNo (None, 8, 8, 320)    960         conv2d_85[0][0]                  
__________________________________________________________________________________________________
activation_87 (Activation)      (None, 8, 8, 384)    0           batch_normalization_87[0][0]     
__________________________________________________________________________________________________
activation_88 (Activation)      (None, 8, 8, 384)    0           batch_normalization_88[0][0]     
__________________________________________________________________________________________________
activation_91 (Activation)      (None, 8, 8, 384)    0           batch_normalization_91[0][0]     
__________________________________________________________________________________________________
activation_92 (Activation)      (None, 8, 8, 384)    0           batch_normalization_92[0][0]     
__________________________________________________________________________________________________
batch_normalization_93 (BatchNo (None, 8, 8, 192)    576         conv2d_93[0][0]                  
__________________________________________________________________________________________________
activation_85 (Activation)      (None, 8, 8, 320)    0           batch_normalization_85[0][0]     
__________________________________________________________________________________________________
mixed9_1 (Concatenate)          (None, 8, 8, 768)    0           activation_87[0][0]              
                                                                 activation_88[0][0]              
__________________________________________________________________________________________________
concatenate_1 (Concatenate)     (None, 8, 8, 768)    0           activation_91[0][0]              
                                                                 activation_92[0][0]              
__________________________________________________________________________________________________
activation_93 (Activation)      (None, 8, 8, 192)    0           batch_normalization_93[0][0]     
__________________________________________________________________________________________________
mixed10 (Concatenate)           (None, 8, 8, 2048)   0           activation_85[0][0]              
                                                                 mixed9_1[0][0]                   
                                                                 concatenate_1[0][0]              
                                                                 activation_93[0][0]              
__________________________________________________________________________________________________
avg_pool (GlobalAveragePooling2 (None, 2048)         0           mixed10[0][0]                    
__________________________________________________________________________________________________
predictions (Dense)             (None, 1000)         2049000     avg_pool[0][0]                   
==================================================================================================
Total params: 23,851,784
Trainable params: 23,817,352
Non-trainable params: 34,432
__________________________________________________________________________________________________

concatenate keeps the original values intact (it joins the structures together),
while add merges them into a single combined result.

When joining an original image with a condition, concatenate is generally the more useful choice,
because anything you would do with add can still be done after concatenating.

 

import numpy as np

a = np.array([[1, 2, 3]])
b = np.array([[1, 2, 3]])

a + b
# array([[2, 4, 6]])

c = np.concatenate((a, b))
c
# array([[1, 2, 3],
#        [1, 2, 3]])

c.sum(axis=0)
# array([2, 4, 6])
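
The same distinction holds at the layer level; a minimal sketch (assuming nothing beyond standard tf.keras.layers):

import tensorflow as tf

# two feature maps with identical shapes, as inside an Inception block
x = tf.ones((1, 8, 8, 16))
y = tf.ones((1, 8, 8, 16))

# Concatenate stacks along the channel axis: both inputs survive unchanged
print(tf.keras.layers.Concatenate()([x, y]).shape)  # (1, 8, 8, 32)

# Add sums element-wise: one output, the individual inputs are gone
print(tf.keras.layers.Add()([x, y]).shape)  # (1, 8, 8, 16)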


The goal of cGAN

Through this process, G is trained to map an input image x and noise z to an output image y.
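
For reference, this is the objective from the Pix2Pix paper: the conditional adversarial loss is combined with an L1 reconstruction term, and the λ here is the LAMBDA = 100 that appears in the code below.

L_cGAN(G, D) = E_{x,y}[log D(x, y)] + E_{x,z}[log(1 − D(x, G(x, z)))]
L_L1(G) = E_{x,y,z}[ ||y − G(x, z)||_1 ]
G* = arg min_G max_D L_cGAN(G, D) + λ · L_L1(G)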

 

Pix2Pix Implementation

import tensorflow as tf

import os
import pathlib
import time
import datetime

from matplotlib import pyplot as plt
from IPython import display
dataset_name = "facades"

_URL = f'http://efrosgans.eecs.berkeley.edu/pix2pix/datasets/{dataset_name}.tar.gz'

path_to_zip = tf.keras.utils.get_file(
    fname=f"{dataset_name}.tar.gz",
    origin=_URL,
    extract=True)

path_to_zip  = pathlib.Path(path_to_zip)

PATH = path_to_zip.parent/dataset_name
list(PATH.parent.iterdir())

[PosixPath('/Users/jihyeokjeong/.keras/datasets/imdb_word_index.json'),
 PosixPath('/Users/jihyeokjeong/.keras/datasets/mnist.npz'),
 PosixPath('/Users/jihyeokjeong/.keras/datasets/fashion-mnist'),
 PosixPath('/Users/jihyeokjeong/.keras/datasets/imdb.npz'),
 PosixPath('/Users/jihyeokjeong/.keras/datasets/facades'),
 PosixPath('/Users/jihyeokjeong/.keras/datasets/facades.tar.gz')]
sample_image = tf.io.read_file(str(PATH / 'train/1.jpg'))
sample_image = tf.io.decode_jpeg(sample_image)
print(sample_image.shape)
# (256, 512, 3)
plt.figure()
plt.imshow(sample_image)

def load(image_file):
  image = tf.io.read_file(image_file)
  image = tf.image.decode_jpeg(image)
    
  w = tf.shape(image)[1]
  w = w // 2
  input_image = image[:, w:, :]
  real_image = image[:, :w, :]

  input_image = tf.cast(input_image, tf.float32)
  real_image = tf.cast(real_image, tf.float32)

  return input_image, real_image

 

inp, re = load(str(PATH / 'train/100.jpg'))

plt.figure()
plt.imshow(inp / 255.0)
plt.figure()
plt.imshow(re / 255.0)

BUFFER_SIZE = 400
BATCH_SIZE = 1
IMG_WIDTH = 256
IMG_HEIGHT = 256
def resize(input_image, real_image, height, width):
  input_image = tf.image.resize(input_image, [height, width],
                                method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
  real_image = tf.image.resize(real_image, [height, width],
                               method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)

  return input_image, real_image

def random_crop(input_image, real_image):
  stacked_image = tf.stack([input_image, real_image], axis=0)
  cropped_image = tf.image.random_crop(
      stacked_image, size=[2, IMG_HEIGHT, IMG_WIDTH, 3])

  return cropped_image[0], cropped_image[1]

def normalize(input_image, real_image):
  input_image = (input_image / 127.5) - 1
  real_image = (real_image / 127.5) - 1

  return input_image, real_image

@tf.function()
def random_jitter(input_image, real_image):
  input_image, real_image = resize(input_image, real_image, 286, 286)

  input_image, real_image = random_crop(input_image, real_image)

  if tf.random.uniform(()) > 0.5:
    input_image = tf.image.flip_left_right(input_image)
    real_image = tf.image.flip_left_right(real_image)

  return input_image, real_image
plt.figure(figsize=(6, 6))
for i in range(4):
  rj_inp, rj_re = random_jitter(inp, re)
  plt.subplot(2, 2, i + 1)
  plt.imshow(rj_inp / 255.0)
  plt.axis('off')
plt.show()

def load_image_train(image_file):
  input_image, real_image = load(image_file)
  input_image, real_image = random_jitter(input_image, real_image)
  input_image, real_image = normalize(input_image, real_image)

  return input_image, real_image

def load_image_test(image_file):
  input_image, real_image = load(image_file)
  input_image, real_image = resize(input_image, real_image,
                                   IMG_HEIGHT, IMG_WIDTH)
  input_image, real_image = normalize(input_image, real_image)

  return input_image, real_image
train_dataset = tf.data.Dataset.list_files(str(PATH / 'train/*.jpg'))
train_dataset = train_dataset.map(load_image_train,
                                  num_parallel_calls=tf.data.AUTOTUNE)
train_dataset = train_dataset.shuffle(BUFFER_SIZE)
train_dataset = train_dataset.batch(BATCH_SIZE)
try:
  test_dataset = tf.data.Dataset.list_files(str(PATH / 'test/*.jpg'))
except tf.errors.InvalidArgumentError:
  test_dataset = tf.data.Dataset.list_files(str(PATH / 'val/*.jpg'))
test_dataset = test_dataset.map(load_image_test)
test_dataset = test_dataset.batch(BATCH_SIZE)
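
Before building the model it is worth confirming what the pipeline emits; a quick throwaway check (not part of the original listing):

for sample_inp, sample_re in train_dataset.take(1):
  # each element is a batched (input, real) pair of normalized images
  print(sample_inp.shape, sample_re.shape)
# (1, 256, 256, 3) (1, 256, 256, 3)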

 

OUTPUT_CHANNELS = 3

def downsample(filters, size, apply_batchnorm=True):
  initializer = tf.random_normal_initializer(0., 0.02)

  result = tf.keras.Sequential()
  result.add(
      tf.keras.layers.Conv2D(filters, size, strides=2, padding='same',
                             kernel_initializer=initializer, use_bias=False))  # no bias: the BatchNorm that may follow makes it redundant

  if apply_batchnorm:
    result.add(tf.keras.layers.BatchNormalization())  # BatchNorm is optional (the first encoder block skips it)

  result.add(tf.keras.layers.LeakyReLU())

  return result
down_model = downsample(3, 4)
down_result = down_model(tf.expand_dims(inp, 0))
print (down_result.shape)
# (1, 128, 128, 3)
def upsample(filters, size, apply_dropout=False):
  initializer = tf.random_normal_initializer(0., 0.02)

  result = tf.keras.Sequential()
  result.add(
    tf.keras.layers.Conv2DTranspose(filters, size, strides=2,
                                    padding='same',
                                    kernel_initializer=initializer,
                                    use_bias=False)) 
  # Why no bias: GAN training is unstable, so the block is kept simple;
  # the BatchNormalization right below also makes a separate bias redundant
  
  result.add(tf.keras.layers.BatchNormalization())
  
  # Dropout is not always needed (deep stacks can do without it),
  # so it is left as an option to keep the block reusable
  if apply_dropout:
      result.add(tf.keras.layers.Dropout(0.5))

  result.add(tf.keras.layers.ReLU())
  
  return result
up_model = upsample(3, 4)
up_result = up_model(down_result)
print (up_result.shape)
# (1, 256, 256, 3)
# Build a U-Net inside the Generator
# The U-Net is what produces the fake (translated) images
def Generator():
  inputs = tf.keras.layers.Input(shape=[256, 256, 3])

  down_stack = [
    # Keep track of the feature-map sizes so the U-Net skip connections line up
    downsample(64, 4, apply_batchnorm=False),  # (batch_size, 128, 128, 64)
    downsample(128, 4),  # (batch_size, 64, 64, 128)
    downsample(256, 4),  # (batch_size, 32, 32, 256)
    downsample(512, 4),  # (batch_size, 16, 16, 512)
    downsample(512, 4),  # (batch_size, 8, 8, 512)
    downsample(512, 4),  # (batch_size, 4, 4, 512)
    downsample(512, 4),  # (batch_size, 2, 2, 512)
    downsample(512, 4),  # (batch_size, 1, 1, 512)
  ]

  up_stack = [
    upsample(512, 4, apply_dropout=True),  # (batch_size, 2, 2, 1024)
    upsample(512, 4, apply_dropout=True),  # (batch_size, 4, 4, 1024)
    upsample(512, 4, apply_dropout=True),  # (batch_size, 8, 8, 1024)
    upsample(512, 4),  # (batch_size, 16, 16, 1024)
    upsample(256, 4),  # (batch_size, 32, 32, 512)
    upsample(128, 4),  # (batch_size, 64, 64, 256)
    upsample(64, 4),  # (batch_size, 128, 128, 128)
  ]

  initializer = tf.random_normal_initializer(0., 0.02)
  last = tf.keras.layers.Conv2DTranspose(OUTPUT_CHANNELS, 4,
                                         strides=2,
                                         padding='same',
                                         kernel_initializer=initializer,
                                         activation='tanh')  # (batch_size, 256, 256, 3)
                            # tanh instead of sigmoid: sigmoid outputs are not zero-centered,
                            # and non-zero-centered outputs can make training unstable
  x = inputs

  # Encoder: record every downsampled activation for the skip connections
  skips = []
  for down in down_stack:
    x = down(x)
    skips.append(x)

  # Drop the bottleneck output and reverse, so each decoder stage pairs with
  # the encoder output of matching resolution
  skips = reversed(skips[:-1])

  # Decoder: upsample, then concatenate the matching encoder activation
  for up, skip in zip(up_stack, skips):
    x = up(x)
    x = tf.keras.layers.Concatenate()([x, skip])

  x = last(x)

  return tf.keras.Model(inputs=inputs, outputs=x)
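
To see the U-Net and its skip connections laid out graphically, one option (assuming pydot and graphviz are installed, as in the TensorFlow Pix2Pix tutorial) is:

tf.keras.utils.plot_model(Generator(), show_shapes=True, dpi=64)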

Depending on the loss function and the training data, the same AutoEncoder / U-Net architecture can serve different purposes:
the U-Net structure is used for segmentation, for restoring an original image, or for changing the colors of an original image.

generator = Generator()

gen_output = generator(inp[tf.newaxis, ...], training=False)
plt.imshow(gen_output[0, ...])

LAMBDA = 100
# BinaryCrossentropy with from_logits=True: the discriminator's last Conv2D has no activation, so it outputs raw logits
loss_object = tf.keras.losses.BinaryCrossentropy(from_logits=True)

# One-sided label smoothing: setting the target for real data slightly below 1
# It is not used here (a sketch of it follows discriminator_loss below)
def generator_loss(disc_generated_output, gen_output, target):
  gan_loss = loss_object(tf.ones_like(disc_generated_output), disc_generated_output)

  l1_loss = tf.reduce_mean(tf.abs(target - gen_output))

  total_gen_loss = gan_loss + (LAMBDA * l1_loss)

  return total_gen_loss, gan_loss, l1_loss
def Discriminator():
  initializer = tf.random_normal_initializer(0., 0.02)

  inp = tf.keras.layers.Input(shape=[256, 256, 3], name='input_image')
  tar = tf.keras.layers.Input(shape=[256, 256, 3], name='target_image')
  
  # concatenate the conditioning input and the candidate image channel-wise
  x = tf.keras.layers.concatenate([inp, tar])  # (batch_size, 256, 256, channels*2)

  down1 = downsample(64, 4, False)(x)  # (batch_size, 128, 128, 64)
  down2 = downsample(128, 4)(down1)  # (batch_size, 64, 64, 128)
  down3 = downsample(256, 4)(down2)  # (batch_size, 32, 32, 256)

  # PatchGAN: each value of the 30x30 output judges a 70x70 patch of the input
  # (the image is judged piece by piece; the patch size was found empirically),
  # and zero padding is used to make the sizes work out
  zero_pad1 = tf.keras.layers.ZeroPadding2D()(down3)  # (batch_size, 34, 34, 256)
  conv = tf.keras.layers.Conv2D(512, 4, strides=1,
                                kernel_initializer=initializer,
                                use_bias=False)(zero_pad1)  # (batch_size, 31, 31, 512)

  batchnorm1 = tf.keras.layers.BatchNormalization()(conv)

  leaky_relu = tf.keras.layers.LeakyReLU()(batchnorm1)

  zero_pad2 = tf.keras.layers.ZeroPadding2D()(leaky_relu)  # (batch_size, 33, 33, 512)

  last = tf.keras.layers.Conv2D(1, 4, strides=1,
                                kernel_initializer=initializer)(zero_pad2)  # (batch_size, 30, 30, 1)

  return tf.keras.Model(inputs=[inp, tar], outputs=last)
discriminator = Discriminator()

disc_out = discriminator([inp[tf.newaxis, ...], gen_output], training=False)
plt.imshow(disc_out[0, ..., -1], vmin=-20, vmax=20, cmap='RdBu_r')
plt.colorbar()
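
Where does the 70x70 come from? The receptive field can be recovered by walking the recurrence r_in = r_out * stride + (kernel - stride) from the output back to the input. A small sketch (the receptive_field helper is just for illustration; the layer list mirrors the Discriminator above):

def receptive_field(layers):
  # layers: (kernel, stride) pairs listed from input to output
  r = 1
  for kernel, stride in reversed(layers):
    r = r * stride + (kernel - stride)
  return r

# down1..down3 are stride-2 4x4 convs; the last two convs are stride-1 4x4
print(receptive_field([(4, 2), (4, 2), (4, 2), (4, 1), (4, 1)]))  # 70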

def discriminator_loss(disc_real_output, disc_generated_output):
  # real images should be classified as real (target 1)
  real_loss = loss_object(tf.ones_like(disc_real_output), disc_real_output)
  
  # generated images should be classified as fake (target 0)
  generated_loss = loss_object(tf.zeros_like(disc_generated_output), disc_generated_output)

  total_disc_loss = real_loss + generated_loss

  return total_disc_loss
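
If you did want the one-sided label smoothing mentioned earlier, only the real-image targets change; a minimal sketch (the 0.9 factor and the function name are illustrative, not from the original):

def discriminator_loss_smoothed(disc_real_output, disc_generated_output,
                                smooth=0.9):
  # real targets are pulled slightly below 1; fake targets stay at 0
  real_loss = loss_object(tf.ones_like(disc_real_output) * smooth,
                          disc_real_output)
  generated_loss = loss_object(tf.zeros_like(disc_generated_output),
                               disc_generated_output)
  return real_loss + generated_loss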
generator_optimizer = tf.keras.optimizers.Adam(2e-4, beta_1=0.5)
discriminator_optimizer = tf.keras.optimizers.Adam(2e-4, beta_1=0.5)
checkpoint_dir = './training_checkpoints'
checkpoint_prefix = os.path.join(checkpoint_dir, "ckpt")
checkpoint = tf.train.Checkpoint(generator_optimizer=generator_optimizer,
                                 discriminator_optimizer=discriminator_optimizer,
                                 generator=generator,
                                 discriminator=discriminator)
def generate_images(model, test_input, tar):
  # training=True is intentional: we want batch statistics here,
  # not the moving averages accumulated during training
  prediction = model(test_input, training=True)
  plt.figure(figsize=(15, 15))

  display_list = [test_input[0], tar[0], prediction[0]]
  title = ['Input Image', 'Ground Truth', 'Predicted Image']

  for i in range(3):
    plt.subplot(1, 3, i+1)
    plt.title(title[i])
    plt.imshow(display_list[i] * 0.5 + 0.5)
    plt.axis('off')
  plt.show()
for example_input, example_target in test_dataset.take(1):
  generate_images(generator, example_input, example_target)

log_dir="logs/"

summary_writer = tf.summary.create_file_writer(
  log_dir + "fit/" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))
@tf.function
def train_step(input_image, target, step):
  # Two tapes are opened with nested `with` statements; both forward passes are
  # recorded together, and each train_step applies one discriminator update and
  # one generator update (see the context-manager demo below for the
  # enter/exit order of nested `with` blocks)
  with tf.GradientTape() as gen_tape, tf.GradientTape() as disc_tape:
    # everything executed under the tapes is recorded for differentiation
    gen_output = generator(input_image, training=True)

    disc_real_output = discriminator([input_image, target], training=True)
    disc_generated_output = discriminator([input_image, gen_output], training=True)

    gen_total_loss, gen_gan_loss, gen_l1_loss = generator_loss(disc_generated_output, gen_output, target)
    disc_loss = discriminator_loss(disc_real_output, disc_generated_output)

  generator_gradients = gen_tape.gradient(gen_total_loss,
                                          generator.trainable_variables)
  discriminator_gradients = disc_tape.gradient(disc_loss,
                                               discriminator.trainable_variables)

  generator_optimizer.apply_gradients(zip(generator_gradients,
                                          generator.trainable_variables))
  discriminator_optimizer.apply_gradients(zip(discriminator_gradients,
                                              discriminator.trainable_variables))

  with summary_writer.as_default():
    tf.summary.scalar('gen_total_loss', gen_total_loss, step=step//1000)
    tf.summary.scalar('gen_gan_loss', gen_gan_loss, step=step//1000)
    tf.summary.scalar('gen_l1_loss', gen_l1_loss, step=step//1000)
    tf.summary.scalar('disc_loss', disc_loss, step=step//1000)

As an aside, nested `with` blocks enter outer-first and exit inner-first; a runnable demo:

from contextlib import contextmanager

@contextmanager
def ctx(name):
    print(name, '__enter__')
    yield
    print(name, '__exit__')

with ctx('A'):
    with ctx('B'):
        print('X()')
    print('Y()')

# A __enter__
# B __enter__
# X()
# B __exit__
# Y()
# A __exit__

 

def fit(train_ds, test_ds, steps):
  example_input, example_target = next(iter(test_ds.take(1)))
  start = time.time()

  for step, (input_image, target) in train_ds.repeat().take(steps).enumerate():
    if (step) % 1000 == 0:
      display.clear_output(wait=True)

      if step != 0:
        print(f'Time taken for 1000 steps: {time.time()-start:.2f} sec\n')

      start = time.time()

      generate_images(generator, example_input, example_target)
      print(f"Step: {step//1000}k")

    train_step(input_image, target, step)

    if (step+1) % 10 == 0:
      print('.', end='', flush=True)

    if (step + 1) % 5000 == 0:
      checkpoint.save(file_prefix=checkpoint_prefix)
# 40,000 weight updates in total (GANs are hard to train, so the step count must be set high)
fit(train_dataset, test_dataset, steps=40000)
# Time taken for 1000 steps: 159.76 sec

for inp, tar in test_dataset.take(5):
  generate_images(generator, inp, tar)

for inp, tar in test_dataset.take(6):
  generate_images(generator, inp, tar)
