diff --git a/Main_Processor_w_Color_and_PatternAnalysis.ipynb b/Main_Processor_w_Color_and_PatternAnalysis.ipynb new file mode 100644 index 00000000..5ea28504 --- /dev/null +++ b/Main_Processor_w_Color_and_PatternAnalysis.ipynb @@ -0,0 +1,1151 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "Main_Processor_w Color and PatternAnalysis.ipynb", + "private_outputs": true, + "provenance": [], + "collapsed_sections": [], + "toc_visible": true, + "mount_file_id": "1Z5uLvjZ07jnMeZnkKJF3LIakzdGLnANy", + "authorship_tag": "ABX9TyMesYb8PifJYNnWr6S454uw", + "include_colab_link": true + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "accelerator": "GPU" + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "view-in-github", + "colab_type": "text" + }, + "source": [ + "\"Open" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "U7U9a_0wy965" + }, + "source": [ + "# **Main Process Tester - *Visual Style Builder* - Detector/Classifier Pattern**\n", + "\n", + "This script illustrates the basic skeleton of the extraction of an item's features (color and pattern) from an image that contains the item. The basic logic is:\n", + "\n", + "For each image: \n", + "- Detect where in the image the item is located by producing a detection box (sub-image) that contains the best representation of the item.\n", + "- Crop the image to isolate the item, then process the sub-image with feature ckassifiers. In this first attempt, we try to estract the best color that matches the item. \n", + "\n", + "Script Parts Sequence:\n", + "\n", + "1. Imports\n", + "2. Load detectors / classifiers / add restrictions\n", + " - In this TF Hub-based script, we can use Inception_Resnet_V2, or Mobilenet V2 (lesser accuracy, but faster). Both are trained from the Openimages V4 dataset. The detection restriction we add is to only keep results from the 'Clothing' branch of the dataset. \n", + " - The color classifier uses sampling and comparison to a standard set of colors that have a descritpion attached to them.\n", + " - The pattern classifier uses a pre-trained model to determine the pattern class. \n", + "3. Load data file\n", + " - This is a file with the same format as the one used to submit data to SAP Upscale, with columns for 'asin' and the ssociated image URLs\n", + "4. Process a chosen image:\n", + "\n", + " a. import image from URL\n", + "\n", + " b. reshape image to match detector input\n", + "\n", + " c. detect from the image possible item boxes\n", + "\n", + " d. use a criteria for selecting the box that best captures the item we want to examine\n", + " - In this tester, we use the highest scored item from a list that is common to all of them (intersection)\n", + "\n", + " e. crop the image to isolate the item.\n", + "\n", + " f. analyze the sub-image with the classifies (color and pattern in this case)\n", + " - The color is picked from the highest scored color among all images \n", + "\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "1GFTZSIKPJm-" + }, + "source": [ + "!pip install webcolors # may not be really needed in the short future\n" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "1J05TMQe2JVL" + }, + "source": [ + "" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "1RpWjl7RywLB" + }, + "source": [ + "# 1. 
imports\n", + "\n", + "# Tensorflow\n", + "import tensorflow as tf\n", + "# For running inference on the TF-Hub module for the inception model \n", + "import tensorflow_hub as hub\n", + "\n", + "# main working imports\n", + "import pandas as pd\n", + "import json\n", + "import os\n", + "#import gzip\n", + "import subprocess\n", + "import numpy as np\n", + "#from datetime import datetime\n", + "import requests\n", + "\n", + "# imnporting, displaying, and drawing into images imports\n", + "import matplotlib.pyplot as plt\n", + "import tempfile\n", + "from six.moves.urllib.request import urlopen\n", + "from six import BytesIO\n", + "\n", + "from tensorflow.keras.preprocessing import image\n", + "from keras.preprocessing.image import load_img\n", + "from keras.preprocessing.image import img_to_array\n", + "\n", + "# For drawing onto the image.\n", + "import numpy as np\n", + "from PIL import Image\n", + "from PIL import ImageColor\n", + "from PIL import ImageDraw\n", + "from PIL import ImageFont\n", + "from PIL import ImageOps\n", + "import cv2 as cv\n", + "\n", + "from sklearn.cluster import KMeans\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "import matplotlib.patches as patches\n", + "import cv2 \n", + "from google.colab.patches import cv2_imshow\n", + "import webcolors \n", + "import os\n", + "\n", + "from colorthief2 import ColorThief\n", + "\n", + "from keras.models import load_model" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "T2sA_vjAqNrW" + }, + "source": [ + "\n", + "# 2.a\n", + "# load image detection module - models from Tensorflow Hub:\n", + "module_handle = \"https://tfhub.dev/google/faster_rcnn/openimages_v4/inception_resnet_v2/1/" \n", + "# module_handle = \"https://tfhub.dev/google/openimages_v4/ssd/mobilenet_v2/1/"\n", + "\n", + "# TF Hub uses the pattern below to evaluate pre-processed images \n", + "# i.e. 
given a valid input image 'my_image', run the detector simply like this:\n", + "# result = detector('my_image')\n", + "detector = hub.load(module_handle).signatures['default']\n", + "\n", + "# define the classes that are admissible (in this case, under the main class 'Clothing' from OpenInages V4)\n", + "'''\n", + "class_inclusion_set = set([\"/m/07qxg_\", \"/m/03p3bw\", \"/m/080hkjn\", \"/m/0584n8\", \"/m/01s55n\", \n", + " \"/m/01940j\", \"/m/0zvk5\", \"/m/0hf58v5\", \"/m/0h8mhzd\", \"/m/03nfch\",\n", + " \"/m/06k2mb\", \"/m/01b638\", \"/m/02p3w7d\", \"/m/09j5n\", \"/m/0fly7\",\n", + " \"/m/07mhn\", \"/m/04tn4x\", \"/m/0nl46\", \"/m/0jyfg\", \"/m/0hnnb\", \"/m/0gjkl\",\n", + " \"/m/080hkjn\", \"/m/02h19r\", \"/m/02wbtzl\", \"/m/02jfl0\", \"/m/02fq_6\", \"/m/025rp__\",\n", + " \"/m/02dl1y\", \"/m/02_n6y\", \"/m/01rkbr\", \"/m/01r546\", \"/m/01nq26\", \"/m/01llwg\",\n", + " \"/m/01krhy\", \"/m/017ftj\", \"/m/0176mf\", \"/m/03grzl\", \"/m/0174n1\", \"/m/0463sg\",\n", + " \"/m/032b3c\", \"/m/01cmb2\", \"/m/02wv6h6\", \"/m/02h19r\", \"/m/02wbtzl\", \"/m/02jfl0\",\n", + " \"/m/02fq_6\", \"/m/025rp__\", \"/m/02dl1y\", \"/m/01xyhv\", \"/m/01xygc\", \"/m/01n4qj\",\n", + " \"/m/01krhy\", \"/m/01gmv2\", \"/m/01gkx_\", \"/m/01d40f\", \"/m/01bfm9\"])\n", + "''' \n", + "\n", + "class_inclusion_set = set([\"/m/07qxg_\", \"/m/03p3bw\", \"/m/080hkjn\", \"/m/0584n8\", \"/m/01s55n\", \n", + " \"/m/01940j\", \"/m/0zvk5\", \"/m/0hf58v5\", \"/m/0h8mhzd\", \"/m/03nfch\",\n", + " \"/m/06k2mb\", \"/m/01b638\", \"/m/02p3w7d\", \"/m/0fly7\",\n", + " \"/m/07mhn\", \"/m/04tn4x\", \"/m/0nl46\", \"/m/0jyfg\", \"/m/0hnnb\",\n", + " \"/m/080hkjn\", \"/m/02h19r\", \"/m/02wbtzl\", \"/m/02jfl0\", \"/m/02fq_6\", \"/m/025rp__\",\n", + " \"/m/02dl1y\", \"/m/02_n6y\", \"/m/01rkbr\", \"/m/01r546\", \"/m/01nq26\", \"/m/01llwg\",\n", + " \"/m/01krhy\", \"/m/017ftj\", \"/m/0176mf\", \"/m/03grzl\", \"/m/0174n1\", \"/m/0463sg\",\n", + " \"/m/032b3c\", \"/m/01cmb2\", \"/m/02wv6h6\", \"/m/02h19r\", \"/m/02wbtzl\", \"/m/02jfl0\",\n", + " \"/m/02fq_6\", \"/m/025rp__\", \"/m/02dl1y\", \"/m/01xyhv\", \"/m/01xygc\", \"/m/01n4qj\",\n", + " \"/m/01krhy\", \"/m/01gmv2\", \"/m/01gkx_\", \"/m/01d40f\", \"/m/01bfm9\"])\n" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "1knuWsNO2KLM" + }, + "source": [ + "# get ready for pattern analysis\n", + "# model = tf.keras.models.load_model('/content/drive/MyDrive/Models/pattern_predictor_feat_resnet50_dress_patterns_New_Smaller_3')\n", + "model = load_model('/content/drive/MyDrive/Models/pattern/pattern_model.h5')\n" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "anJi3oSpojxO" + }, + "source": [ + "# bring in colors dataset\n", + "# df_color_data = pd.read_csv(\"colors_rgb_vals_csv.csv\")\n", + "df_color_data = pd.read_csv(\"updated_colors_hex_rgb.csv\")\n", + "# df_color_data.head()\n", + "\n", + "# make a dict fcn\n", + "color_dict = {}\n", + "mother_color_dict = {}\n", + "color_descriptor_dict = {}\n", + "def add_to_color_dict(in_data):\n", + " color_dict[in_data[2]] = in_data[0]\n", + " mother_color_dict[in_data[0]] = in_data[6]\n", + " color_descriptor_dict[in_data[0]] = in_data[1]\n", + " return \n", + "\n", + "c = df_color_data.apply(add_to_color_dict, axis=1)\n", + "\n", + "# add black\n", + "color_dict[\"#000000\"] = \"black\"\n", + "mother_color_dict[\"black\"] = \"black\"\n", + "color_descriptor_dict[\"black\"] = \"black\"\n", + "\n", + "print(color_dict)\n", + 
"print(mother_color_dict)\n", + "print(color_descriptor_dict)\n" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "7jbR0X3ptItn" + }, + "source": [ + "# 2.b\n", + "# Color detection using K-Means \n", + "\n", + "kmeans = KMeans(n_clusters=4)\n", + "\n", + "HEX_TO_NORMALIZED_NAMES = {\n", + " \"#f5f5dc\": \"beige\",\n", + " \"#000000\": \"black\",\n", + " \"#0000ff\": \"blue\",\n", + " \"#a52a2a\": \"brown\",\n", + " \"#808080\": \"gray\",\n", + " \"#808080\": \"grey\",\n", + " \"#008000\": \"green\",\n", + " \"#ffa500\": \"orange\",\n", + " \"#ffc0cb\": \"pink\",\n", + " \"#800080\": \"purple\",\n", + " \"#ff0000\": \"red\",\n", + " \"#ffffff\": \"white\",\n", + " \"#ffff00\": \"yellow\",\n", + "}\n", + "\n", + "def closest_color(requested_color):\n", + " min_colors = {}\n", + " #for key, name in webcolors.CSS3_HEX_TO_NAMES.items():\n", + " #for key, name in webcolors.CSS3_HEX_TO_NAMES.items():\n", + " for key, name in color_dict.items(): \n", + " r_c, g_c, b_c = webcolors.hex_to_rgb(key)\n", + " rd = (r_c - requested_color[0]) ** 2\n", + " gd = (g_c - requested_color[1]) ** 2\n", + " bd = (b_c - requested_color[2]) ** 2\n", + " min_colors[(rd + gd + bd)] = name\n", + " return min_colors[min(min_colors.keys())]\n", + "\n", + "def get_color_name(requested_color):\n", + " try:\n", + " closest_name = webcolors.rgb_to_name(requested_color)\n", + " except ValueError:\n", + " closest_name = closest_color(requested_color)\n", + " return closest_name\n", + "\n", + "def detect_color(img_path):\n", + " img = cv2.imread(img_path)\n", + " height, width, dim = img.shape\n", + " img = img[int(height/4):int(3*height/4), int(width/4):int(3*width/4), :]\n", + " height, width, dim = img.shape\n", + " \n", + " img_vec = np.reshape(img, [height * width, dim] )\n", + " \n", + " #kmeans = KMeans(n_clusters=4)\n", + " kmeans.fit( img_vec )\n", + "\n", + " X_dist = kmeans.transform(img_vec)\n", + " print('-----')\n", + " print(X_dist)\n", + "\n", + " unique_l, counts_l = np.unique(kmeans.labels_, return_counts=True)\n", + " sort_ix = np.argsort(counts_l)\n", + " sort_ix = sort_ix[::-1]\n", + "\n", + " color_list = []\n", + " for cluster_center in kmeans.cluster_centers_[sort_ix]:\n", + " color_list.append(get_color_name((int(cluster_center[2]), int(cluster_center[1]), int(cluster_center[0])))) \n", + "\n", + " print('Colors for ' + img_path + ': ', color_list) \n", + "\n", + " cluster_center = kmeans.cluster_centers_[sort_ix][0]\n", + "\n", + " # return cluster_center\n", + " \n", + " return get_color_name((int(cluster_center[2]), int(cluster_center[1]), int(cluster_center[0])))\n" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "4hn9wrePi5Ve" + }, + "source": [ + "# read from main data set\n", + "df_main_data = pd.read_csv(\"main_cleaned_data_latest.csv\") \n", + "df_filter_skus = pd.read_csv(\"dresses_skus.csv\") \n" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "t7uKKAShzMcO" + }, + "source": [ + "# 3\n", + "# Read the data to be examined (csv)\n", + "# df_img_data = pd.read_csv(\"ImagesToExamine.csv\") \n", + "# df_img_data = pd.read_csv(\"WomensDreses.csv\") \n", + "\n", + "# if using the main data file, filter from df_filter_skus\n", + "\n", + "df_img_data = pd.merge(df_filter_skus, df_main_data, on='asin', how='inner') \n", + "# drop redundant column 'name'\n", + "df_img_data = df_img_data.drop(columns='name')\n", + "list(df_img_data)\n", + 
"\n", + "# drop duplicates\n", + "df_img_data = df_img_data.drop_duplicates(subset=['asin'])\n", + "\n", + "df_img_data.head" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "Nh-WukPmn-mb" + }, + "source": [ + "df_img_data.head()" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "rGDWjLssQZnj" + }, + "source": [ + "# add columns to collect analysis\n", + "df_img_data[\"img_feature_analysis\"] = \"\"\n", + "df_img_data[\"tenant_id\"] = \"\"\n", + "df_img_data[\"color\"] = \"\"\n", + "df_img_data[\"descriptor\"] = \"\"\n", + "df_img_data[\"mother_color\"] = \"\"\n", + "df_img_data[\"detected_item\"] = \"\"\n", + "df_img_data[\"confidence_color\"] = \"\"\n", + "df_img_data[\"confidence_mother_color\"] = \"\"\n", + "df_img_data[\"patterns\"] = \"\"\n", + "df_img_data[\"pattern_winner\"] = \"\"\n", + "\n", + "list(df_img_data)" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "AN11Ypqh6jLz" + }, + "source": [ + "df_img_data.head()" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "vWkyS-nHUFPA" + }, + "source": [ + "# Main detection helper functions\n", + "def display_image(image):\n", + " fig = plt.figure(figsize=(20, 15))\n", + " plt.grid(False)\n", + " plt.imshow(image)\n", + "\n", + "def download_and_resize_image(url, new_width=256, new_height=256,\n", + " display=False):\n", + " _, filename = tempfile.mkstemp(suffix=\".jpg\")\n", + " response = urlopen(url)\n", + " image_data = response.read()\n", + " image_data = BytesIO(image_data)\n", + " pil_image = Image.open(image_data)\n", + " pil_image = ImageOps.fit(pil_image, (new_width, new_height), Image.ANTIALIAS)\n", + " pil_image_rgb = pil_image.convert(\"RGB\")\n", + " pil_image_rgb.save(filename, format=\"JPEG\", quality=90)\n", + " print(\"Image downloaded to %s.\" % filename)\n", + " if display:\n", + " display_image(pil_image)\n", + " return filename\n", + "\n", + "def draw_bounding_box_on_image(image,\n", + " ymin,\n", + " xmin,\n", + " ymax,\n", + " xmax,\n", + " color,\n", + " font,\n", + " thickness=4,\n", + " display_str_list=()):\n", + " draw = ImageDraw.Draw(image)\n", + " im_width, im_height = image.size\n", + " (left, right, top, bottom) = (xmin * im_width, xmax * im_width,\n", + " ymin * im_height, ymax * im_height)\n", + " draw.line([(left, top), (left, bottom), (right, bottom), (right, top),\n", + " (left, top)],\n", + " width=thickness,\n", + " fill=color)\n", + "\n", + " display_str_heights = [font.getsize(ds)[1] for ds in display_str_list]\n", + " total_display_str_height = (1 + 2 * 0.05) * sum(display_str_heights)\n", + "\n", + " if top > total_display_str_height:\n", + " text_bottom = top\n", + " else:\n", + " text_bottom = top + total_display_str_height\n", + " # Reverse list and print from bottom to top.\n", + " for display_str in display_str_list[::-1]:\n", + " text_width, text_height = font.getsize(display_str)\n", + " margin = np.ceil(0.05 * text_height)\n", + " draw.rectangle([(left, text_bottom - text_height - 2 * margin),\n", + " (left + text_width, text_bottom)],\n", + " fill=color)\n", + " draw.text((left + margin, text_bottom - text_height - margin),\n", + " display_str,\n", + " fill=\"white\",\n", + " font=font)\n", + " text_bottom -= text_height - 2 * margin\n", + "\n", + "def draw_box(winner, path, cimage, boxes, class_names, scores, max_boxes=1, min_score=0.5):\n", + " colors = 
list(ImageColor.colormap.values())\n", + "\n", + " try:\n", + " font = ImageFont.truetype(\"/usr/share/fonts/truetype/liberation/LiberationSansNarrow-Regular.ttf\",\n", + " 25)\n", + " except IOError:\n", + " print(\"Font not found, using default font.\")\n", + " font = ImageFont.load_default()\n", + "\n", + " lidx = np.where(class_names == winner.encode(\"ascii\"))\n", + " nidx = lidx[0]\n", + " if len(scores[nidx]) == 0:\n", + " # Nothing that matches the winner class found. Exit gracefully\n", + " return [], [], \"\", 0.0\n", + "\n", + " idx = nidx[np.argmax(scores[nidx])] \n", + " cimage2 = cimage.copy()\n", + " score = scores[idx]\n", + " ymin, xmin, ymax, xmax = tuple(boxes[idx])\n", + " display_str = \"{}: {}%\".format(class_names[idx].decode(\"ascii\"),\n", + " int(100 * scores[idx]))\n", + " color = colors[hash(class_names[idx]) % len(colors)]\n", + " image_pil = Image.fromarray(np.uint8(cimage2)).convert(\"RGB\")\n", + " draw_bounding_box_on_image(\n", + " image_pil, ymin, xmin, ymax, xmax, color, font, display_str_list=[display_str])\n", + " np.copyto(cimage2, np.array(image_pil))\n", + " im_width, im_height = cimage.shape[:2]\n", + " cropped, cropped_path = crop_image(path, xmin, ymin, xmax, ymax, im_width, im_height, '', cimage)\n", + "\n", + " return cimage2, cropped, display_str, score\n", + "\n", + "def crop_image(path, xmin, ymin, xmax, ymax, im_width, im_height, save_path = '', in_image = []):\n", + " if in_image == []:\n", + " myImage = cv.imread(str(path))\n", + " else:\n", + " myImage = in_image \n", + "\n", + " (left, right, top, bottom) = (xmin * im_width, xmax * im_width, \n", + " ymin * im_height, ymax *im_height)\n", + " myImage = cv.resize(myImage,(im_width,im_height))\n", + " cropped = myImage[int(top): int(bottom), int(left): int(right)]\n", + " if len(cropped) != 0:\n", + " if save_path == '':\n", + " cropped_saved = cv2.cvtColor(cropped, cv2.COLOR_RGB2BGR) \n", + " cv.imwrite(\"img_cropped.jpg\", cropped_saved)\n", + " cropped_path = \"img_cropped.jpg\"\n", + " else:\n", + " cv.imwrite(save_path, cropped) \n", + " cropped_path = save_path\n", + " else:\n", + " cropped_path = ''\n", + " print('cropped_path: ', cropped_path)\n", + " return cropped, cropped_path\n", + "\n", + "def crop_image2(path, xmin, ymin, xmax, ymax, im_width, im_height, save_path = '', in_image = []):\n", + " if in_image == []:\n", + " myImage = cv.imread(str(path))\n", + " else:\n", + " myImage = in_image \n", + " cropped = myImage[int(xmin): int(xmax), int(ymin): int(ymax)]\n", + " if len(cropped) != 0:\n", + " if save_path == '':\n", + " cropped_path = \"img_cropped.jpg\"\n", + " else:\n", + " cropped_path = save_path\n", + " else:\n", + " cropped_path = ''\n", + " return cropped, cropped_path" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "xf2lZepZaUBT" + }, + "source": [ + "def getAverageRGBCircle(in_img, in_x, in_y, in_radius):\n", + " sum = 0\n", + " red = 0.\n", + " green = 0.\n", + " blue = 0.\n", + " for i in range(in_x - in_radius, in_x + in_radius):\n", + " for j in range(in_y - in_radius, in_y + in_radius):\n", + " sum += 1\n", + " red += in_img[i][j][0]**2\n", + " green += in_img[i][j][1]**2\n", + " blue += in_img[i][j][2]**2\n", + "\n", + " return [np.sqrt(red/sum), np.sqrt(green/sum), np.sqrt(blue/sum)] " + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "Bu4OMHXx7hyi" + }, + "source": [ + "\n", + "verbose_mode = 'Y'\n", + "\n", + "def 
detect_color_sampling(img_path, in_img = []): \n", + " NUM_SQUARES = 5\n", + " SQUARE_SIZE = 25\n", + " RADIUS = 10\n", + "\n", + " colors = list(ImageColor.colormap.values())\n", + " try:\n", + " font = ImageFont.truetype(\"/usr/share/fonts/truetype/liberation/LiberationSansNarrow-Regular.ttf\",\n", + " 55)\n", + " except IOError:\n", + " print(\"Font not found, using default font.\")\n", + " font = ImageFont.load_default()\n", + "\n", + " if in_img == []:\n", + " img = tf.io.read_file(img_path)\n", + " img = tf.image.decode_jpeg(img, channels=3)\n", + " cimage = img.numpy()\n", + " else:\n", + " cimage = in_img \n", + " height, width, dim = cimage.shape\n", + " print('in_img height and width: ', height, width)\n", + " cimage2 = cimage.copy()\n", + "\n", + " if verbose_mode == 'Y':\n", + " print('inside detect_color_sampling - image before analysis')\n", + " plt.imshow(cimage)\n", + " plt.show()\n", + "\n", + " display_str = \"\"\n", + " color = colors[10]\n", + " image_pil = Image.fromarray(np.uint8(cimage2)).convert(\"RGB\")\n", + "\n", + " near_colors_list = []\n", + " near_colors_list_mean = []\n", + " near_colors_list_median = []\n", + " near_colors_list_circular = []\n", + " near_colors_list_dominant = []\n", + "\n", + " for i in range(NUM_SQUARES): \n", + "\n", + " display_str = str(i+1)\n", + " # establish the upper left corner for the evaluation box:\n", + " xGauss = np.random.normal(0.5,0.125,1)\n", + " yGauss = np.random.normal(0.5,0.125,1)\n", + " x_pix = int(xGauss*height)\n", + " y_pix = int(yGauss*width)\n", + " x_pix_to = x_pix + SQUARE_SIZE\n", + " y_pix_to = y_pix + SQUARE_SIZE\n", + "\n", + " xmax = xGauss + SQUARE_SIZE/height\n", + " ymax = yGauss + SQUARE_SIZE/width\n", + "\n", + " draw_bounding_box_on_image(\n", + " image_pil,\n", + " xGauss,\n", + " yGauss,\n", + " xmax,\n", + " ymax,\n", + " color,\n", + " font,\n", + " display_str_list=[display_str])\n", + " np.copyto(cimage2, np.array(image_pil))\n", + "\n", + " im_width, im_height = cimage.shape[:2]\n", + "\n", + " cropped, cropped_path = crop_image2(img_path, x_pix, y_pix, x_pix_to, y_pix_to, \n", + " im_width, im_height, \"cropped_for_color.jpg\", cimage) \n", + "\n", + " mean_rgb = np.mean(cropped, axis=(0, 1))\n", + " median_rgb = np.median(cropped, axis=(0, 1))\n", + "\n", + " color_thief = ColorThief(img_path)\n", + " color_palette = color_thief.get_palette_from_image(cropped, 5, 1)\n", + " if len(color_palette) != 0:\n", + " dominant_color = color_palette[0]\n", + " else:\n", + " dominant_color = [] \n", + "\n", + " if verbose_mode == 'Y':\n", + " print('mean_rgb ', mean_rgb)\n", + " print('median_rgb ', median_rgb)\n", + " print('colorthief dominant ', dominant_color)\n", + " plt.imshow(cropped)\n", + " plt.show()\n", + "\n", + " closest_color_mean = closest_color(mean_rgb)\n", + " closest_color_median = closest_color(median_rgb)\n", + " if len(dominant_color) != 0:\n", + " closest_color_dominant = closest_color(dominant_color)\n", + " else:\n", + " closest_color_dominant = ''\n", + "\n", + " if verbose_mode == 'Y':\n", + " print('closest color_mean: ' + closest_color_mean)\n", + " print('closest_color_median: ', closest_color_median)\n", + " print('closest_color_dominant: ', closest_color_dominant)\n", + "\n", + "\n", + " near_colors_list_mean.append(closest_color_mean)\n", + " near_colors_list_median.append(closest_color_median)\n", + " if closest_color_dominant != '':\n", + " near_colors_list_dominant.append(closest_color_dominant)\n", + "\n", + " return near_colors_list_mean, near_colors_list_median, 
near_colors_list_circular, near_colors_list_dominant, cimage2 " + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "jZhCQz523CEb" + }, + "source": [ + "# 4\n", + "# Process Images\n", + "\n", + "MAX_DETECT_BOXES = 10\n", + "num_it = 0 \n", + "flag_skip_classifiers = 'N'\n", + "verbose_mode = 'Y'\n", + "\n", + "# main_results = {}\n", + "\n", + "# main function to add features to a row of input data\n", + "def add_img_features(in_params):\n", + " out_params = in_params \n", + "\n", + " global num_it\n", + " num_it += 1\n", + "\n", + " main_results = {}\n", + "\n", + " print(num_it, ' ====================================================================', in_params[0])\n", + "\n", + " if pd.isnull(in_params[6]):\n", + " print('No images to evaluate')\n", + " return out_params\n", + "\n", + " # 4.a Make a list of image urls\n", + " image_urls = in_params[6].split('|')\n", + " print(image_urls)\n", + "\n", + " img_feature_analysis = {}\n", + " patterns = set()\n", + " colors = set()\n", + " img_record = {}\n", + "\n", + " for image_url in image_urls:\n", + " print(image_url)\n", + " try:\n", + " image_path = download_and_resize_image(image_url, 640, 640)\n", + " except:\n", + " print('Skip image. Error trying to open ', image_url)\n", + " continue \n", + " if verbose_mode == 'Y':\n", + " plt.imshow(load_img(image_path))\n", + " plt.show()\n", + " print(image_path)\n", + " \n", + " img = tf.io.read_file(image_path)\n", + " img = tf.image.decode_jpeg(img, channels=3)\n", + " # 4.b Pre-process image\n", + " converted_img = tf.image.convert_image_dtype(img, tf.float32)[tf.newaxis, ...]\n", + " #start_time = time.time()\n", + " # 4.c Use the Object Detector on processed image\n", + " result = detector(converted_img)\n", + " #end_time = time.time()\n", + "\n", + " result = {key:value.numpy() for key,value in result.items()}\n", + "\n", + " img_record[image_url] = image_path\n", + " main_results[image_url] = result\n", + "\n", + " print(img_record)\n", + "\n", + " if len(main_results.keys()) == 0:\n", + " print('No valid images to evaluate')\n", + " return out_params \n", + "\n", + "\n", + " # &&& Form the detection votes dict for all images\n", + " detect_votes_all_dict = {}\n", + " weighted_detect_votes_all_dict = {}\n", + " num_images = 0\n", + " for img in main_results:\n", + " num_images += 1\n", + " cleaned_list = []\n", + " for ithclass in main_results[img]['detection_class_names']:\n", + " if ithclass.decode(\"ascii\").lower() in class_inclusion_set: \n", + " cleaned_list.append(True)\n", + " else:\n", + " cleaned_list.append(False) \n", + " \n", + " filtered_detections_for_voting = main_results[img]['detection_class_entities'][cleaned_list]\n", + " filtered_detections_for_voting = filtered_detections_for_voting[:MAX_DETECT_BOXES]\n", + " filtered_detections_scores = main_results[img]['detection_scores'][cleaned_list]\n", + " if len(filtered_detections_scores) > 0 and filtered_detections_scores[0] != 0.0:\n", + " filtered_detections_scores = filtered_detections_scores[:MAX_DETECT_BOXES]/filtered_detections_scores[0]\n", + " else:\n", + " filtered_detections_scores = filtered_detections_scores[:MAX_DETECT_BOXES] \n", + "\n", + " if verbose_mode == 'Y': \n", + " print(filtered_detections_for_voting)\n", + " print(filtered_detections_scores)\n", + "\n", + " for i, item in enumerate(filtered_detections_for_voting):\n", + " if item not in detect_votes_all_dict:\n", + " detect_votes_all_dict.setdefault(item, 1)\n", + " 
weighted_detect_votes_all_dict.setdefault(item, filtered_detections_scores[i])\n", + " else:\n", + " detect_votes_all_dict[item] += 1\n", + " weighted_detect_votes_all_dict[item] += filtered_detections_scores[i]\n", + " if verbose_mode == 'Y':\n", + " print('total raw votes: ', detect_votes_all_dict)\n", + " print('total weighted votes: ', weighted_detect_votes_all_dict)\n", + "\n", + " # majority voting\n", + " if len(weighted_detect_votes_all_dict.values()) != 0: \n", + " max_votes_items = max(weighted_detect_votes_all_dict.values())\n", + " winner_item = list(detect_votes_all_dict.keys())[list(weighted_detect_votes_all_dict.values()).index(max_votes_items)]\n", + " item_winner_label = winner_item.decode(\"ascii\") \n", + " else:\n", + " item_winner_label = 'no winner'\n", + "\n", + " print(' >>>>> winner_item: ', item_winner_label )\n", + " out_params[22] = item_winner_label\n", + "\n", + " # Begin analysis with individual classifiers \n", + "\n", + " if flag_skip_classifiers == 'Y' or item_winner_label == 'no winner':\n", + " out_params[22] = item_winner_label\n", + " return out_params\n", + "\n", + " print('**** Begin analysis with individual classifiers ****')\n", + "\n", + " image_colors_mean = {}\n", + " image_colors_median = {}\n", + " image_colors_circular = {}\n", + " image_colors_dominant = {}\n", + "\n", + " image_patterns = {}\n", + " image_patterns_full_vote = {}\n", + "\n", + " ensemble_dist = np.zeros(6)\n", + "\n", + " collected_pattern_weights ={}\n", + "\n", + " A = np.array([[]])\n", + " A = np.zeros((6))\n", + "\n", + " pattern_labels = {'0': 'floral', '1':'graphics', '2':'plaid', '3':'solid', '4':'spotted', '5':'striped'}\n", + " num_pattern_labels = len(list(pattern_labels.keys()))\n", + " print(\"num_pattern_labels: \", num_pattern_labels)\n", + "\n", + " main_color = ''\n", + " main_pattern = ''\n", + " for image_url in image_urls:\n", + " print(image_url)\n", + " if image_url in img_record.keys():\n", + " img = tf.io.read_file(img_record[image_url])\n", + " img = tf.image.decode_jpeg(img, channels=3)\n", + " else:\n", + " continue\n", + "\n", + " # 4.e\n", + " # Crop the image \n", + " image_with_boxes, cropped_img, display_str, winner_score = draw_box(item_winner_label, image_path,\n", + " img.numpy(), main_results[image_url][\"detection_boxes\"],\n", + " main_results[image_url][\"detection_class_entities\"], main_results[image_url][\"detection_scores\"])\n", + " \n", + " if image_with_boxes == []:\n", + " # something went wrong with the detection for this image. Skip\n", + " print('Detections issue. Skipping ', image_url)\n", + " continue\n", + "\n", + " if verbose_mode == 'Y':\n", + " print('original image with detection boxes')\n", + " plt.imshow(image_with_boxes)\n", + " plt.show()\n", + "\n", + " print('Detection cropped image. 
Class: ' + display_str)\n", + "\n", + " plt.imshow(cropped_img)\n", + " plt.show()\n", + "\n", + " # 4.f\n", + " # Run the cropped image through the particular feature classifiers \n", + "\n", + " img_colors_list_mean, img_colors_list_median, img_colors_list_circular, img_colors_list_dominant, img_here = detect_color_sampling(\"img_cropped.jpg\", cropped_img) \n", + "\n", + " for i_color in img_colors_list_mean:\n", + " if i_color not in image_colors_mean:\n", + " image_colors_mean.setdefault(i_color, 1)\n", + " else:\n", + " image_colors_mean[i_color] += 1\n", + "\n", + " for i_color in img_colors_list_median:\n", + " if i_color not in image_colors_median:\n", + " image_colors_median.setdefault(i_color, 1)\n", + " else:\n", + " image_colors_median[i_color] += 1\n", + "\n", + " for i_color in img_colors_list_dominant:\n", + " if i_color not in image_colors_dominant:\n", + " image_colors_dominant.setdefault(i_color, 1)\n", + " else:\n", + " image_colors_dominant[i_color] += 1\n", + "\n", + " if verbose_mode == 'Y':\n", + " print('Detection cropped image with color detection regions') \n", + " plt.imshow(img_here)\n", + " plt.show()\n", + "\n", + " # do pattern analysis\n", + " Sample_Image= tf.keras.preprocessing.image.load_img(r'img_cropped.jpg', target_size = (224, 224))\n", + " print('pppppppppppppppp - pattern analysis')\n", + " plt.imshow(Sample_Image)\n", + " plt.show()\n", + "\n", + " Sample_Image = tf.keras.preprocessing.image.img_to_array(Sample_Image)\n", + " np.shape(Sample_Image)\n", + " Sample_Image = np.expand_dims(Sample_Image, axis = 0)\n", + " Sample_Image = tf.keras.applications.resnet50.preprocess_input(Sample_Image)\n", + " \n", + " predictions = model.predict(Sample_Image)\n", + " print('Predictions:', predictions)\n", + " print('Sum of Predictions: ', np.sum(predictions))\n", + " ensemble_dist += predictions[0] \n", + "\n", + " A = np.vstack([A, predictions[0]])\n", + "\n", + " idx = np.argmax(predictions)\n", + " print(idx) \n", + " labels = {'0': 'floral', '1':'graphics', '2':'plaid', '3':'solid', '4':'spotted', '5':'striped'}\n", + " print('pattern: ', labels[str(idx)])\n", + "\n", + " img_pattern = labels[str(idx)] \n", + " if img_pattern not in image_patterns:\n", + " image_patterns.setdefault(img_pattern, predictions[0][idx])\n", + " image_patterns_full_vote.setdefault(img_pattern, 1)\n", + " else:\n", + " image_patterns[img_pattern] += predictions[0][idx]\n", + " image_patterns_full_vote[img_pattern] += 1\n", + "\n", + " B = np.delete(A, 0, 0)\n", + " print(\"COL all image weights :\", B)\n", + " print(\"COL standard mean\", B.mean(0))\n", + " print(\"COL standard sdev\", B.std(0))\n", + " diff_vals = B.mean(0) - B.std(0)\n", + " print(\"COL diff \", diff_vals)\n", + " print(\"labels\" , labels)\n", + "\n", + " print(\"image_patterns: \", image_patterns)\n", + "\n", + " neg_count = len(list(filter(lambda x: (x < 0), diff_vals))) \n", + " print(\"neg_count: \", neg_count)\n", + " if len(list(image_patterns.keys())) == 1:\n", + " out_params[25] = image_patterns \n", + " out_params[26] = list(image_patterns.keys())[0]\n", + " else: \n", + " if neg_count > num_pattern_labels/2:\n", + " out_params[25] = image_patterns \n", + " out_params[26] = \"wide-ranging\"\n", + " else: \n", + " out_params[25] = image_patterns\n", + "\n", + " max_votes_pattern_mean = max(B.mean(0))\n", + " idx = np.argmax(B.mean(0))\n", + " print(\"max_votes_pattern_mean: \", max_votes_pattern_mean)\n", + " print(idx)\n", + " print(labels[str(idx)])\n", + " winner_pattern = labels[str(idx)]\n", + 
" out_params[26] = winner_pattern\n", + "\n", + " print('Pattern Weighted Votes : ', image_patterns) \n", + " print('Pattern Weighted Votes Mean: ', np.mean(list(image_patterns.values()))) \n", + " print('Pattern Weighted Votes SD: ', np.std(list(image_patterns.values()))) \n", + "\n", + " print('Pattern Full Votes: ', image_patterns_full_vote)\n", + " print('Pattern Full Votes Mean: ', np.mean(list(image_patterns_full_vote.values()))) \n", + " print('Pattern Full Votes SD: ', np.std(list(image_patterns_full_vote.values())))\n", + "\n", + " print('Ensemble Raw Distrib: ', ensemble_dist)\n", + " print('Ensemble Raw Mean: ', np.mean(ensemble_dist))\n", + " print('Ensemble Raw SD: ', np.std(ensemble_dist))\n", + "\n", + " print('Mean: all color votes: ', image_colors_mean)\n", + " # majotity voting\n", + " max_votes_mean = max(image_colors_mean.values())\n", + " main_color_mean = list(image_colors_mean.keys())[list(image_colors_mean.values()).index(max_votes_mean)]\n", + "\n", + " if main_color_mean != '':\n", + " img_feature_analysis['mean_detailed_color'] = main_color_mean\n", + " img_feature_analysis['mean_mother_color'] = mother_color_dict[main_color_mean]\n", + "\n", + " print('Median: all color votes: ', image_colors_median)\n", + " # majotity voting\n", + " max_votes_median = max(image_colors_median.values())\n", + " main_color_median = list(image_colors_median.keys())[list(image_colors_median.values()).index(max_votes_median)]\n", + "\n", + " if main_color_median != '':\n", + " img_feature_analysis['median_detailed_color'] = main_color_median\n", + " img_feature_analysis['median_mother_color'] = mother_color_dict[main_color_median] \n", + "\n", + " print('Dominant: all color votes: ', image_colors_dominant)\n", + " max_votes_dominant = max(image_colors_dominant.values())\n", + " main_color_dominant = list(image_colors_dominant.keys())[list(image_colors_dominant.values()).index(max_votes_dominant)]\n", + "\n", + " if main_color_dominant != '': \n", + " img_feature_analysis['dominant_detailed_color'] = main_color_dominant\n", + " img_feature_analysis['dominant_mother_color'] = mother_color_dict[main_color_dominant] \n", + " num_total = 0\n", + " mother_color_counts = {}\n", + " num_mc_total = 0\n", + " for icolor in image_colors_dominant:\n", + " num_total += image_colors_dominant[icolor] \n", + " mc_list = mother_color_dict[icolor].split(' | ')\n", + " for item in mc_list:\n", + " if item not in mother_color_counts.keys():\n", + " mother_color_counts.setdefault(item, 0)\n", + " mother_color_counts[item] += 1 \n", + " num_mc_total += 1 \n", + " print(mother_color_counts)\n", + " img_feature_analysis['confidence_color'] = image_colors_dominant[main_color_dominant] / num_total \n", + " mother_color_confidence = {}\n", + " mc_list = mother_color_dict[main_color_dominant].split(' | ')\n", + " for icolor in mother_color_counts:\n", + " if icolor in mc_list:\n", + " mother_color_confidence[icolor] = mother_color_counts[icolor] / num_mc_total\n", + " img_feature_analysis['confidence_mother_color'] = mother_color_confidence\n", + "\n", + "\n", + " out_json = json.dumps(img_feature_analysis)\n", + " out_params[17] = out_json\n", + "\n", + " # tenant_id\n", + " out_params[18] = 't2'\n", + "\n", + " # color winner -- In this case, Choose dominant for now\n", + " out_params[19] = main_color_dominant\n", + " # color descriptor\n", + " out_params[20] = color_descriptor_dict[main_color_dominant]\n", + " # mother color\n", + " mother_color_list = mother_color_dict[main_color_dominant].split(' | ')\n", 
+ " out_params[21] =json.dumps(mother_color_list)\n", + "\n", + " # confidence color\n", + "\n", + " mother_color_list = mother_color_dict[main_color_dominant].split(' | ')\n", + " out_params[21] =json.dumps(mother_color_list)\n", + "\n", + " # clean up\n", + " # import os\n", + " for img_url in img_record:\n", + " img_path = img_record[img_url]\n", + " if os.path.exists(img_path):\n", + " os.remove(img_path)\n", + "\n", + " return out_params\n", + " " + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "skCQqHPlRipt" + }, + "source": [ + "\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B01E9ERCSU'].values.flatten().tolist()\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B01DW71LQO'].values.flatten().tolist() # --- maybe good -p\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B00DCCJ75M'].values.flatten().tolist() # OK example -p\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B01G7KOQG0'].values.flatten().tolist() # case to examine - wide-ranging\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B00XV1QN7Y'].values.flatten().tolist() # --- mickey mose example\n", + "\n", + "\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B00N1SZXSM'].values.flatten().tolist()\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B00HV713ME'].values.flatten().tolist()\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B00DR4918S'].values.flatten().tolist()\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B00MSC158E'].values.flatten().tolist() # <---- Blue/grey jeans\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B01HEISONU'].values.flatten().tolist()\n", + "\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B012OSER18'].values.flatten().tolist(). 
# simple one image denim\n", + "\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B00T7MYZ9O'].values.flatten().tolist() # case to examine - wide-ranging\n", + "\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B00UWWQZBE'].values.flatten().tolist() # <---- Green dress\n", + "# \n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B0015ZD6HI'].values.flatten().tolist() # bad detect bra\n", + "\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B000YFSR5G'].values.flatten().tolist() # trousers bad color detect\n", + "\n", + "# B01EJVSQ38 - dawn dress B00B6AWXYE\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B01EJVSQ38'].values.flatten().tolist() # <--- peach puff dress / pastel \n", + "\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B019Q83S6O'].values.flatten().tolist() # RED vs GREEN contrast /# case to examine - wide-ranging\n", + "\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B00B6AWXYE'].values.flatten().tolist() # bad detect - little faces\n", + "\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B00MUC7O00'].values.flatten().tolist() # bad detect - dumbell piercings \n", + "\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B00CFQZUZM'].values.flatten().tolist() # <-- easy black skirt\n", + "\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B00PJPVCTO'].values.flatten().tolist() # <--- pastel example p\n", + "#\n", + "#sample = df_img_data.loc[df_img_data['asin'] == 'B00K76MK3M'].values.flatten().tolist() # <---- blue blouse\n", + "\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B00SIZXMMC'].values.flatten().tolist() # blue solid dress\n", + "\n", + "# sample = df_img_data.loc[df_img_data['asin'] == 'B00YGQT2NK'].values.flatten().tolist() # ---- p\n", + "\n", + "sample = df_img_data.loc[df_img_data['asin'] == 'B01EW51A4S'].values.flatten().tolist() # wide ranging ex. 
p" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "BoUKlidl7rIk" + }, + "source": [ + "sample[4]" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "JgRrYOO3RzU9" + }, + "source": [ + "sampleresult = add_img_features(sample)" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "RL3wWek8SFnM" + }, + "source": [ + "str \n", + "print(sampleresult[17])\n", + "print(sampleresult[18])\n", + "print(sampleresult[19])\n", + "print(sampleresult[20])\n", + "print(sampleresult[21])\n", + "print(sampleresult[22])\n", + "\n", + "print(sampleresult[25])\n", + "print(sampleresult[26])" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "hCWrUaUeP5wu" + }, + "source": [ + "" + ], + "execution_count": null, + "outputs": [] + } + ] +} \ No newline at end of file diff --git a/Pattern_Trainer_Skeleton_TensorFlow_ClassicKeras.ipynb b/Pattern_Trainer_Skeleton_TensorFlow_ClassicKeras.ipynb new file mode 100644 index 00000000..29ce1fde --- /dev/null +++ b/Pattern_Trainer_Skeleton_TensorFlow_ClassicKeras.ipynb @@ -0,0 +1,480 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "Pattern Trainer Skeleton - TensorFlow/ClassicKeras", + "provenance": [], + "collapsed_sections": [], + "authorship_tag": "ABX9TyPyPxrihBN3vuveLyUG6fpN", + "include_colab_link": true + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + }, + "accelerator": "GPU" + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "view-in-github", + "colab_type": "text" + }, + "source": [ + "\"Open" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "8tjJcaAjwlEi" + }, + "source": [ + "Imports" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "h5axOIY-v531" + }, + "source": [ + "import tensorflow as tf\n", + "\n", + "import pandas as pd\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "import seaborn as sns\n", + "import random " + ], + "execution_count": 1, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "aMP5EXQ8wi8c", + "outputId": "58b76ce3-c3e7-4fea-de1d-b479621fde6b" + }, + "source": [ + "from google.colab import drive\n", + "drive.mount('/content/drive')" + ], + "execution_count": 4, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount(\"/content/drive\", force_remount=True).\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "yydKcYAN-UUN" + }, + "source": [ + "Transfer Learn from ResNet50 " + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "u2YwnutEwqVS" + }, + "source": [ + "# import only the base of trained model (convolution layers), i.e., --> include_top = False\n", + "\n", + "base_model = tf.keras.applications.ResNet50(weights = 'imagenet', include_top = False)" + ], + "execution_count": 5, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "IqRgHF2h-KPU" + }, + "source": [ + "print(base_model.summary())" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "le9IDR3U-xuD" + }, + "source": [ + "for i, layer in enumerate(base_model.layers):\n", + " print(i, 
layer.name)" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "VFOhyIq0-KNK" + }, + "source": [ + "x = base_model.output\n", + "x = tf.keras.layers.GlobalAveragePooling2D()(x)" + ], + "execution_count": 7, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "tg1n1MNd-KLU" + }, + "source": [ + "# construct the final dense layers for output of N classes (6 in thid case)\n", + "\n", + "x = tf.keras.layers.Dense(1024, activation = 'relu')(x)\n", + "x = tf.keras.layers.Dense(1024, activation = 'relu')(x)\n", + "x = tf.keras.layers.Dense(1024, activation = 'relu')(x)\n", + "x = tf.keras.layers.Dense(512, activation = 'relu')(x)\n", + "preds = tf.keras.layers.Dense(6, activation = 'softmax')(x)" + ], + "execution_count": 12, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "1JI8G9dAADTr" + }, + "source": [ + "model = tf.keras.models.Model(inputs = base_model.input, outputs = preds)" + ], + "execution_count": 13, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "6WIReXov-KI3" + }, + "source": [ + "# Non trainable part (base layer)\n", + "\n", + "for layer in model.layers[:175]:\n", + " layer.trainable = False" + ], + "execution_count": 14, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "juNsPKCkwqSB" + }, + "source": [ + "# Trainable part - dense NN \n", + "\n", + "for layer in model.layers[175:]:\n", + " layer.trainable = True" + ], + "execution_count": 15, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ImkvLb5jAV2X" + }, + "source": [ + "Load data for classifying N features from images \n", + "\n", + "Dir structure should be:\n", + "\n", + "\n", + " /DataDirectory/\n", + " class1/\n", + " class2/\n", + " ...\n", + " classN/\n", + "\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "EQk9GR2bAOSB" + }, + "source": [ + "data_root = (\"/content/drive/MyDrive/detect/data/smaller_class_set\")\n" + ], + "execution_count": 16, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "YAG-nRL1CKVn" + }, + "source": [ + "# Image shape down to (224, 224) to match ResNet50\n", + "\n", + "IMAGE_SHAPE = (224, 224)\n", + "TRAINING_DATA_DIR = str(data_root)" + ], + "execution_count": 17, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "AbpAyMhHDBHb" + }, + "source": [ + "datagen_kwargs = dict(preprocessing_function= tf.keras.applications.resnet50.preprocess_input, validation_split=.20)" + ], + "execution_count": 18, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "VSrjIWzADAoU", + "outputId": "3ecd9fd9-abc6-4bc8-b051-8a7e03f59b31" + }, + "source": [ + "valid_datagen = tf.keras.preprocessing.image.ImageDataGenerator(**datagen_kwargs)\n", + "\n", + "valid_generator = valid_datagen.flow_from_directory( TRAINING_DATA_DIR,\n", + " subset=\"validation\",\n", + " shuffle=True,\n", + " color_mode = 'rgb',\n", + " batch_size = 32,\n", + " class_mode = 'categorical',\n", + " target_size=IMAGE_SHAPE)\n", + "\n", + "train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(**datagen_kwargs)\n", + "\n", + "train_generator = train_datagen.flow_from_directory( TRAINING_DATA_DIR,\n", + " subset=\"training\",\n", + " shuffle=True,\n", + " color_mode = 'rgb',\n", + " batch_size = 32,\n", + " class_mode = 'categorical',\n", + " target_size=IMAGE_SHAPE)" + ], + "execution_count": 19, + "outputs": [ + { + 
"output_type": "stream", + "text": [ + "Found 3117 images belonging to 6 classes.\n", + "Found 12487 images belonging to 6 classes.\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "6if_vAS6F9tO", + "outputId": "0c4c33c1-8011-4415-85fb-8058a4269f02" + }, + "source": [ + "print (train_generator.class_indices)" + ], + "execution_count": 20, + "outputs": [ + { + "output_type": "stream", + "text": [ + "{'floral': 0, 'graphics': 1, 'plaid': 2, 'solid': 3, 'spotted': 4, 'striped': 5}\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "7vM5XuJOGUm6" + }, + "source": [ + "model.compile(optimizer = 'Adam', loss = 'categorical_crossentropy', metrics = ['accuracy'])" + ], + "execution_count": 21, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "8y1_qHN0HVk0" + }, + "source": [ + "Do the training" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "GFkCQ949HUfI", + "outputId": "77a1cb51-0491-4498-fde1-fe1fc8e78764" + }, + "source": [ + "steps_per_epoch = np.ceil(train_generator.samples/train_generator.batch_size)\n", + "val_steps_per_epoch = np.ceil(valid_generator.samples/valid_generator.batch_size)\n", + "\n", + "hist = model.fit( train_generator, \n", + " epochs=5,\n", + " #verbose=1,\n", + " steps_per_epoch=steps_per_epoch,\n", + " validation_data=valid_generator,\n", + " validation_steps=val_steps_per_epoch).history" + ], + "execution_count": 22, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Epoch 1/5\n", + "391/391 [==============================] - 4039s 10s/step - loss: 0.8911 - accuracy: 0.6793 - val_loss: 0.8055 - val_accuracy: 0.7267\n", + "Epoch 2/5\n", + "391/391 [==============================] - 70s 178ms/step - loss: 0.7523 - accuracy: 0.7265 - val_loss: 0.7257 - val_accuracy: 0.7295\n", + "Epoch 3/5\n", + "391/391 [==============================] - 69s 177ms/step - loss: 0.7024 - accuracy: 0.7468 - val_loss: 0.7292 - val_accuracy: 0.7295\n", + "Epoch 4/5\n", + "391/391 [==============================] - 70s 178ms/step - loss: 0.6612 - accuracy: 0.7697 - val_loss: 0.7176 - val_accuracy: 0.7642\n", + "Epoch 5/5\n", + "391/391 [==============================] - 69s 177ms/step - loss: 0.6029 - accuracy: 0.7939 - val_loss: 0.7060 - val_accuracy: 0.7526\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "y_4vTHkOZdwk", + "outputId": "1b03d37d-8487-4ae8-e29b-ac8495039118" + }, + "source": [ + "final_loss, final_accuracy = model.evaluate(valid_generator, steps = val_steps_per_epoch)\n", + "print(\"Final loss: {:.2f}\".format(final_loss))\n", + "print(\"Final accuracy: {:.2f}%\".format(final_accuracy * 100))" + ], + "execution_count": 26, + "outputs": [ + { + "output_type": "stream", + "text": [ + "98/98 [==============================] - 13s 131ms/step - loss: 0.7060 - accuracy: 0.7526\n", + "Final loss: 0.71\n", + "Final accuracy: 75.26%\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 566 + }, + "id": "IhBqm8zQH4Cb", + "outputId": "4969f10c-013e-492a-b1ec-a7616d007b6d" + }, + "source": [ + "plt.figure()\n", + "plt.ylabel(\"Loss (training and validation)\")\n", + "plt.xlabel(\"Training Steps\")\n", + 
"plt.ylim([0,50])\n", + "plt.plot(hist[\"loss\"])\n", + "plt.plot(hist[\"val_loss\"])\n", + "plt.figure()\n", + "plt.ylabel(\"Accuracy (training and validation)\")\n", + "plt.xlabel(\"Training Steps\")\n", + "plt.ylim([0,1])\n", + "plt.plot(hist[\"accuracy\"])\n", + "plt.plot(hist[\"val_accuracy\"])" + ], + "execution_count": 28, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "[]" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 28 + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX4AAAEKCAYAAAAVaT4rAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAaEklEQVR4nO3de7hddX3n8ffnhGC45MIl0AwBgpRCwXIzQ6EwPlzEYSi3VspQ0YJS0opWkBkrzDPPFGqfFtsioK1ACmhUUBRRMlGoSAMIIhLkDiLIrTBAEIlJuNUkn/ljrZOzz8k+56xzctbeOWd9Xs+zn70uv/Vb372S/d3r/NZv/ZZsExERzdHT7QAiIqKzkvgjIhomiT8iomGS+CMiGiaJPyKiYZL4IyIaZqM6K5f0NLACWA2ssj1X0pbANcAc4GngBNuv1hlHRET06cQZ/yG297Y9t5w/G7jZ9i7AzeV8RER0SDeaeo4FFpTTC4DjuhBDRERjqc47dyU9BbwKGLjM9nxJy2zPKNcLeLV3fsC284B5AJttttk7d9ttt9rijIiYiO65555f2J45cHmtbfzAQbafl7QNcJOkn7autG1JbX95bM8H5gPMnTvXS5YsqTnUiIiJRdIz7ZbX2tRj+/nyfSnwLWA/4CVJs8qgZgFL64whIiL6qy3xS9pM0tTeaeA9wEPAQuDkstjJwPV1xRAREeuqs6lnW+BbRTM+GwFX275R0t3A1yWdCjwDnFBjDBERMUBtid/2k8BebZa/AhxW134jImJouXM3IqJhkvgjIhomiT8iomGS+CMiGiaJPyKiYZL4IyIaJok/IqJhkvgjIhomiT8iomGS+CMiGiaJPyKiYZL4IyIaJok/IqJhkvgjIhomiT8iomGS+CMiGiaJPyKiYZL4IyIaJok/IqJhkvgjIhomiT8iomGS+CMiGiaJPyKiYZL4IyIaJok/IqJhkvgjIhomiT8iomGS+CMiGiaJPyKiYZL4IyIaJok/IqJhkvgjIhpmo+EKSNoGOBD4T8AbwEPAEttrao4tIiJqMGjil3QIcDawJXAvsBSYAhwH7CzpWuAC28s7EWhERIyNoc74jwROs/3swBWSNgKOAg4HvllTbBERUYNBE7/tTwyxbhXw7So7kDQJWAI8b/soSTsBXwO2Au4BPmD7P0YUdUREjFqVNv63Ae8F5rSWt/3XFfdxBvAoMK2c/zRwoe2vSboUOBW4ZAQxR0TEeqjSq+d64FhgFfBay2tYkmYDvw9cXs4LOBS4tiyygOKaQUREdMiwZ/zAbNtHjLL+i4C/BKaW81sBy8qmIoDngO3abShpHjAPYIcddhjl7iMiYqAqZ/w/lPQ7I61Y0lHAUtv3jDwssD3f9lzbc2fOnDmaKiIioo0qZ/wHAadIegp4CxBg23sOs92BwDGSjqToBjoNuBiYIWmj8qx/NvD8qKOPiIgRq5L4/9toKrZ9DnAOgKSDgf9p+yRJ3wCOp+jZczLFNYSIiOiQYZt6bD8DzACOLl8zymWj9UngLElPULT5X7EedUVExAhV6c55BnAacF256CuS5tv+XNWd2L4FuKWcfhLYb8SRRkTEmKjS1HMq8Lu2XwOQ9GngTqBy4o+IiA1HlV49Ala3zK8ul0VExDhU5Yz/C8Bdkr5Vzh9H2uUjIsatYRO/7c9IuoWiWyfAB23fW2tUERFRm6GGZZ5me7mkLYGny1fvui1t/7L+8CIiYqwNdcZ/NcXQy/cAblmucv7tNcYVERE1GWpY5qPK9506F05ERNRt2F49km6usiwiIsaHodr4pwCbAltL2oK+LpzTGGREzYiI2PAN1cb/Z8CZFA9Zv4e+xL8c+Kea44qIiJoM1cZ/MXCxpL8YyfAMERGxYavSj/9zkt4B7E4xvHLv8i/VGVhERNSjyiBtfwUcTJH4v0sxTPPtQBJ/RMQ4VGWsnuOBw4AXbX8Q2AuYXmtUERFRmyqJ/w3ba4BVkqYBS4Ht6w0rIiLqUmWQtiWSZgD/QtG7ZyXFsMwRETEOVbm4e3o5eamkG4Fpth+oN6yIiKjLUDdw7TvUOts/qSekiIio01Bn/BeU71OAucD9FDdx7QksAQ6oN7SIiKjDoBd3bR9i+xDgBWBf23NtvxPYB3i+UwFGRMTYqtKrZ1fbD/bO2H4I+O36QoqIiDpV6dXzgKTLga+U8ycBubgbETFOVUn8HwQ+DJxRzt8GXFJbRBERUasq3TnfBC4sXxERMc4N1Z3z67ZPkPQg/R+9CIDtPWuNLCIiajHUGX9v085RnQgkIiI6Y6jx+F8o35/pXDgREVG3oZp6VtCmiYfiJi7bnlZbVBERUZuhzvindjKQiIjojCrdOQGQtA39n8D1bC0RRURErYa9c1fSMZIeB54CbgWeBm6oOa6IiKhJlSEbPgXsD/zM9k4UT+P6Ua1RRUREbaok/l/bfgXokdRjezHFaJ0RETEOVWnjXyZpc4qhGq6StBR4rd6wIiKiLlXO+I8FXgc+DtwI/Bw4eriNJE2R9GNJ90t6WNJ55fKdJN0l6QlJ10jaeH0+QEREjEyVxP9nwCzbq2wvsP3ZsulnOG8Bh9reC9gbOELS/sCngQtt/ybwKnDqaIOPiIiRq5L4pwLfk/QDSR+VtG2Vil1YWc5OLl8GDgWuLZcvAI4bYcwREbEehk38ts+zvQfwEWAWcKuk71epXNIkSfcBS4GbKJqJltleVRZ5DthukG3nSVoiacnLL79cZXcREVFBlTP+XkuBF4FXgG2qbGB7te29gdnAfsBuVXdme375uMe5M2fOHEGYERExlCo3cJ0u6RbgZmAr4LSRDslsexmwmOIB7TMk9fYmmk2e3xsR0VFVzvi3B860vYftc20/UqViSTMlzSinNwEOBx6l+AE4vix2MnD9yMOOiIjRqvIErnNGWfcsYIGkSRQ/MF+3vUjSI8DXJP0NcC9wxSjrj4iIUag8SNtI2X4A2KfN8icp2vs
[... base64-encoded PNG data truncated: first matplotlib figure output ...]\n",
+ "text/plain": [
+ "<Figure size 432x288 with 1 Axes>"
+ ]
+ },
+ "metadata": {
+ "tags": [],
+ "needs_background": "light"
+ }
+ },
+ {
+ "output_type": "display_data",
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEKCAYAAAAfGVI8 [... base64-encoded PNG data truncated: second matplotlib figure output ...]\n",
+ "text/plain": [
+ "<Figure size 432x288 with 1 Axes>"
" + ] + }, + "metadata": { + "tags": [], + "needs_background": "light" + } + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "ia1Psmm3AUlv" + }, + "source": [ + "" + ], + "execution_count": null, + "outputs": [] + } + ] +} \ No newline at end of file