{ "cells": [ { "cell_type": "code", "execution_count": 200, "metadata": {}, "outputs": [], "source": [ "import glob\n", "from PIL import Image\n", "import pillow_heif\n", "import pandas as pd\n", "import numpy as np\n", "from torchvision.utils import save_image\n", "import torchvision.transforms.functional as fn\n", "import torchvision.transforms as transforms\n", "import matplotlib.pyplot as plt" ] }, { "cell_type": "code", "execution_count": 201, "metadata": {}, "outputs": [], "source": [ "# generates new dataset by pasting beetles and non-beetles in the same picture. \n", "#returns array of new images and coordinates\n", "#TODO: make sure that the beetles and non-beetles don't overlap with each other\n", "\n", "def generate_data(bg: list[Image], beetle_set: list[Image], num_beetles_arr: list[int], \n", " non_beetle_set: list[Image], num_non_beetles_arr: list[int], overlap: bool):\n", " set_size = len(beetle_set)\n", " set_non_size = len(non_beetle_set)\n", " set_bg_size = len(bg)\n", " sim_arr, coords_arr = [],[]\n", "\n", " for i, num_beetles in enumerate(num_beetles_arr):\n", " num_non_beetles = num_non_beetles_arr[i]\n", " bg_id = np.random.randint(0, set_bg_size)\n", " bg_temp = bg[bg_id].copy()\n", " width, height = bg_temp.size\n", " beetle_coords = []\n", " non_beetle_coords = []\n", " for j in range(num_non_beetles):\n", " # get random non_beetle image\n", " non_beetle_id = np.random.randint(0, set_non_size)\n", " non_beetle_img = non_beetle_set[non_beetle_id]\n", " \n", " # get random non_beetle rotation\n", " angle = np.random.randint(0, 360)\n", " non_beetle_img = non_beetle_img.rotate(angle, resample=Image.BICUBIC, expand=1)\n", " \n", " non_beetle_width, non_beetle_height = non_beetle_img.size\n", " \n", " # get random x,y coords to paste non-beetle\n", " x = np.random.randint(0, width - non_beetle_width)\n", " y = np.random.randint(0, height - non_beetle_height)\n", " \n", " if not overlap:\n", " is_overlapping = True\n", " while (is_overlapping):\n", " is_overlapping = False\n", " for k in range(j):\n", " _, x_, y_, width_, height_, _ = non_beetle_coords[-(k+1)]\n", " if (x_ - width_/2) < x and (x_ + width_/2) > x and (y_ - height_/2) < y and (y_ + height_/2) > y:\n", " is_overlapping = True\n", " x = np.random.randint(0, width - non_beetle_width)\n", " y = np.random.randint(0, height - non_beetle_height)\n", " break\n", "\n", " bg_temp.paste(non_beetle_img, box=(x,y), mask=non_beetle_img)\n", " \n", " #centers x and y for YOLOv5 PyTorch label\n", " x += non_beetle_width/2\n", " y += non_beetle_width/2\n", " non_beetle_coords.append((non_beetle_id, x, y, non_beetle_width, non_beetle_height, angle))\n", "\n", " \n", " for j in range(num_beetles):\n", " # get random beetle image\n", " beetle_id = np.random.randint(0, set_size)\n", " beetle_img = beetle_set[beetle_id]\n", " # get random beetle rotation\n", " angle = np.random.randint(0, 360)\n", " beetle_img = beetle_img.rotate(angle, resample=Image.BICUBIC, expand=1)\n", " \n", " #randomly resize beetle to be smaller as they are much bigger on image\n", " \n", " beetle_width, beetle_height = beetle_img.size\n", " beetle_max = np.max([beetle_width, beetle_height])\n", " factor = np.random.uniform((height/(5*9))/beetle_max, (height/(4*9))/beetle_max)\n", " beetle_img = beetle_img.resize((int(factor * beetle_width), int(factor * beetle_height)), resample=Image.BICUBIC)\n", " beetle_width, beetle_height = beetle_img.size\n", " \n", " # get random x,y coords to paste beetle\n", " x = np.random.randint(0, width - 
beetle_width)\n", " y = np.random.randint(0, height - beetle_height)\n", " \n", " if not overlap:\n", " is_overlapping = True\n", " while (is_overlapping):\n", " is_overlapping = False\n", " for k in range(j+num_non_beetles):\n", " if (k < num_non_beetles):\n", " _, x_, y_, width_, height_, _ = non_beetle_coords[-(k+1)]\n", " else:\n", " _, x_, y_, width_, height_, _ = beetle_coords[-(k+1-num_non_beetles)]\n", " if (x_ - width_/2) < x and (x_ + width_/2) > x and (y_ - height_/2) < y and (y_ + height_/2) > y:\n", " is_overlapping = True\n", " x = np.random.randint(0, width - non_beetle_width)\n", " y = np.random.randint(0, height - non_beetle_height)\n", " break\n", " \n", "\n", " bg_temp.paste(beetle_img, box=(x,y), mask=beetle_img)\n", " \n", " #centers x and y for YOLOv5 PyTorch label\n", " x += beetle_width/2\n", " y += beetle_height/2\n", " beetle_coords.append((beetle_id, x, y, beetle_width, beetle_height, angle))\n", " sim_arr.append(bg_temp)\n", " coords_arr.append(beetle_coords)\n", " \n", " return sim_arr, coords_arr" ] }, { "cell_type": "code", "execution_count": 202, "metadata": {}, "outputs": [], "source": [ "#???\n", "def find_coeffs(source_coords, target_coords):\n", " matrix = []\n", " for s, t in zip(source_coords, target_coords):\n", " matrix.append([t[0], t[1], 1, 0, 0, 0, -s[0]*t[0], -s[0]*t[1]])\n", " matrix.append([0, 0, 0, t[0], t[1], 1, -s[1]*t[0], -s[1]*t[1]])\n", " A = np.matrix(matrix, dtype=float)\n", " B = np.array(source_coords).reshape(8)\n", " res = np.dot(np.linalg.inv(A.T * A) * A.T, B)\n", " return np.array(res).reshape(8)" ] }, { "cell_type": "code", "execution_count": 203, "metadata": {}, "outputs": [], "source": [ "backgrounds = []\n", "for file in glob.glob(r\"/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/*\"):\n", " if \"bg.png\" in file: #clean trap\n", " bg = Image.open(file)\n", " # map corners of trap to corners of image\n", " coeffs = find_coeffs([(128,6), (1904,62), (2113,3137), (3,3228)], \n", " [(0,0), (bg.size[0], 0), (bg.size[0], bg.size[1]), (0, bg.size[1])])\n", " bg_flat = bg.transform(bg.size, Image.PERSPECTIVE, coeffs, Image.BICUBIC)\n", " backgrounds.append(bg)\n", " continue\n", " bg = Image.open(file)\n", " \n", " backgrounds.append(bg);\n", "\n", "beetles = []\n", "for file in glob.glob(r\"/raid/projects/akhot2/group-01-phys371-sp2023/crop/beetles/*\"):\n", " b0 = Image.open(file)\n", " beetles.append(b0);\n", "non_beetles = []\n", "for file in glob.glob(r\"/raid/projects/akhot2/group-01-phys371-sp2023/crop/non_beetles/*\"):\n", " n_b0 = Image.open(file)\n", " non_beetles.append(n_b0);" ] }, { "cell_type": "code", "execution_count": 204, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "21\n", "20\n", "48\n" ] } ], "source": [ "print(len(backgrounds))\n", "print(len(beetles))\n", "print(len(non_beetles))" ] }, { "cell_type": "code", "execution_count": 188, "metadata": {}, "outputs": [], "source": [ "beetle_counts = np.random.randint(0, 8, size=1250)\n", "non_beetle_counts = np.random.randint(0, 15, size=1250)\n", "overlap = True\n", "sim_img_arr, coords_arr = generate_data(backgrounds, beetles, beetle_counts, non_beetles, non_beetle_counts, overlap)" ] }, { "cell_type": "code", "execution_count": 189, "metadata": {}, "outputs": [], "source": [ "#exports images and coordinates in YOLOv5 PyTorch format\n", "def export(img_arr, coords_arr):\n", " s = \"\"\n", " image_set = \"train\"\n", " change = len(img_arr)*0.8\n", " for i in range(len(img_arr)):\n", " s = \"\"\n", " img = 
img_arr[i]\n", " for coord in coords_arr[i]:\n", " c, x, y, w, h, a = coord\n", " s += f\"{0} {x/img.size[0]} {y/img.size[1]} {w/img.size[0]} {h/img.size[1]}\\n\"\n", " print(i)\n", " if i > change:\n", " image_set = \"test\"\n", " with open(\"data/\" +image_set+ f\"/labels/sim{i}.txt\", \"w\") as f:\n", " f.write(s)\n", " img.save(\"data/\" +image_set+ f\"/images/sim{i}.png\")" ] }, { "cell_type": "code", "execution_count": 190, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "0\n", "1\n", "2\n", "3\n", "4\n", "5\n", "6\n", "7\n", "8\n", "9\n" ] } ], "source": [ "export(sim_img_arr, coords_arr)" ] }, { "cell_type": "code", "execution_count": 20, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "3.4648" ] }, "execution_count": 20, "metadata": {}, "output_type": "execute_result" } ], "source": [ "beetle_counts.mean()" ] }, { "cell_type": "code", "execution_count": 22, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "2.528" ] }, "execution_count": 22, "metadata": {}, "output_type": "execute_result" } ], "source": [ "non_beetle_counts.mean()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "format\n", "0 x y width height (each value normalized to size of image)\n", "\n", "\n" ] }, { "cell_type": "code", "execution_count": 49, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "RGBA\n", "(2, 4)\n" ] }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAMIAAAD8CAYAAAAlkXvsAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAAKeklEQVR4nO3dT4xdZR3G8e9jKUgEI0UhBIlU0g0aU2pTMRCiiX9qN9WFCS60C5ISA4kuXFRJtC4h0YUhkpRAqIlCSJTIBrUhGDZGWrGWllLaKtFC08YYA25U6s/FeSe9aafl/pk5c+/M95O8OXfeOffOmZM+vWdOO++TqkJa6d611AcgTQODIGEQJMAgSIBBkACDIAFLEIQkm5McSXIsyY6+v740n/T57whJVgGvAp8FTgB7ga9U1cu9HYQ0j77fETYBx6rqz1X1H+AJYGvPxyCd55Kev971wN8GPj4BfOLcnZJsB7a3Dz/ew3FpGaqqDLtv30GY78DOuzarql3ALoAk/h8QLbq+L41OADcMfPxB4I2ej0E6T99B2AusS7I2yaXAncDTPR+DdJ5eL42q6u0k9wK/BlYBj1bVoT6PQZpPr7dPx+HPCBrXKD8s+y/LEgZBAgyCBBgECTAIEmAQJMAgSIBBkACDIAEGQQIMggQYBAkwCBJgECTAIEiAQZAAgyABBkECDIIEGAQJMAgSYBAkwCBIgEGQAIMgARMGIclrSV5Ksj/Jvja3JsmeJEfb9qqB/b/dmnKOJPn8pAcvLZSFeEf4dFWtr6qN7eMdwLNVtQ54tn1MkpvpFv39CLAZ+HFr0JGW3GJcGm0FdrfHu4EvDsw/UVX/rqq/AMfoGnSkJTdpEAr4TZI/tJYbgGur6iRA217T5udry7l+vhdNsj3JvrnLLWmxTbos/G1V9UaSa4A9SV65yL5DteWAjTnq30TvCFX1RtueBp6iu9Q5leQ6gLY93Xa3LUdTa+wgJHlPkivnHgOfAw7SNeBsa7ttA37ZHj8N3JnksiRrgXXAC+N+fWkhTXJpdC3wVJK51/lZVf0qyV7gySR3AX8FvgxQVYeSPAm8DLwN3FNVZyY6emmB2JijZcvGHGlEBkHCIEiAQZAAgyABBkECDIIEGAQJMAgSYBAkwCBIgEGQAIMgAQZBAgyCBBgECTAIEmAQJMAgSIBBkACDIAEGQQIMggQYBAkYIghJHk1yOsnBgbmRy0CSfLyVihxL8qO0JfKkaTDMO8JjdMUeg8YpA3kI2E635um6eV5TWjLvGISqeh74xznTI5WBtFWx31tVv6tujcmfDDxHWnLj/owwahnI9e3xufPSVJi0KORcFyoDGbokBLrGHLrLKKkX474jjFoGcqI9Pnd+XlW1q6o2DhQUSotq3CCMVAbSLp/eSnJru1v0tYHnSEuvqi46gMeBk8B/6f5mvwu4mu5u0dG2XTOw/33AceAI8IWB+Y10jTrHgQdp3QxDfP1yOMYZw/z5mhsWhWjZsihEGpFBkDAIEmAQJMAgSIBBkACDIAEGQQIMggQYBAkwCBJgECTAIEiAQZAAgyABBkECDIIEGAQJMAgSYBAkwCBIgEGQAIMgAQZBAgyCBIzfmLMzyetJ9rexZeBzNuZo9gyx9ugdwAbg4MDcTuBb8+x7M/An4DJgLd06p6va514APkm3RPwzDKyL6tqnjsUYo6x9Om5jzoVsxcYczaBJfka4N8mBduk0Vya4II05SbYn2Zdk3wTHJw1t3CA8BNwErKdbMv4HbX5BGnMsClHfxgpCVZ2qqjNV9T/gYWBT+9SCNOZIfRsrCHO1Uc2X6ApAwMYczah3LBNM8jjwKeD9SU4A3wM+lWQ93eXNa8DdAFV1KMmTwMvA28A9VXWmvdTX6TqbL6e7a/TMAn4f0kRszNGyZWOONCKDIGEQJMAgSIBBkACDIAEGQQIMggQYBAkwCBJgECTAIEiAQZAAgyABBkECDIIEGAQJMAgSYBAkwCBIgEGQAIMgAQZBAgyCBBgECRiuMeeGJM8lOZzkUJJvtPk1SfYkOdq2Vw08x9YczZYhGmuuAza0x1cCr9I14zwA7GjzO4D
7F6M1hyloXnHM5ljoxpyTVfVie/wWcJiu5GMrsLvttpuzDThbsTVHM2aknxGS3AjcAvweuLYt907bXtN2m7g1x8Yc9e0dl4Wfk+QK4OfAN6vqzYtc3k/cmlNVu4Bd7evOu4+0kIZ6R0iymi4EP62qX7TpU3OFIW17us3bmqOZM8xdowCPAIer6ocDn3oa2NYeb+NsA46tOZo9Q9y1uZ3uEuYAsL+NLcDVwLPA0bZdM/Cc++juFh1h4M4QsJGuZuo48CCtqMS7Ro7FGKPcNbIxR8uWjTnSiAyChEGQAIMgAQZBAgyCBBgECTAIEmAQJMAgSIBBkACDIAEGQQIMggQYBAkwCBJgECTAIEiAQZAAgyABBkECDIIEGAQJMAgSMFlRyM4kryfZ38aWgedYFKLZMkFRyE7gW/Psb1GIYypGX0UhF7IVi0I0YyYpCgG4N8mBJI8OdKhZFKKZM3QQzi0KAR4CbgLWAyeBH8ztOs/T6yLz509W7aqqjVW1cdjjkyYxdlFIVZ2qqjNV9T/gYWBT292iEM2csYtC5tpymi/R9R6ARSGaQcN0qN0GfBV4Kcn+Nvcd4CtJ1tNd3rwG3A1QVYeSPAm8DLwN3FNVZ9rzvg48BlxOd9fomYX4JqRJWRSiZcuiEGlEBkHCIEiAQZAAgyABBkECDIIEGAQJMAgSYBAkwCBIgEGQAIMgAQZBAgyCBBgECTAIEmAQJMAgSIBBkACDIAEGQQIMggQYBAkwCBIw3Nqn707yQpI/tcac77f5NUn2JDnatlcNPMfGHM2WIRprAlzRHq+m60a4FXgA2NHmdwD325jjmKax0I05VVX/ah+ubqPomnF2t/ndnG2/2YqNOZoxw/YjrGorYZ8G9lTV74Fr21LvtO01bXcbczRzhgpCKwRZT1fusSnJRy+yu405mjkj3TWqqn8CvwU2A6fmykLa9nTbzcYczZxh7hp9IMn72uPLgc8Ar9A142xru23jbPuNjTmaPUPctfkY8EfgAF091Hfb/NXAs8DRtl0z8Jz76O4WHWHgzhCwsb3GceBBWlGJd40cizFGuWtkY46WLRtzpBEZBAmDIAEGQQIMggQYBAkwCBJgECTAIEiAQZAAuGSpD2AI/6L7P0uC9wN/X+qDmBLvdC4+NMqLzUIQjvh7CZ0k+zwXnYU+F14aSRgECZiNIOxa6gOYIp6Lsxb0XEz97yNIfZiFdwRp0RkEiSkOQpLNbcnIY0l2LPXxLIYkjyY5neTgwNyKXEozyQ1JnktyuC0t+o0238/5GOUXnPsawCq6X/D/MHAp3RKSNy/1cS3C93kHsAE4ODDXy1Ka0zaA64AN7fGVwKvte+7lfEzrO8Im4FhV/bmq/gM8QbeU5LJSVc8D/zhneisrcCnNqjpZVS+2x28Bh+lWQuzlfExrEC60bORKsGhLac6KJDcCt9AtON3L+ZjWIAy9POQKMvFSmrMgyRXAz4FvVtWbF9t1nrmxz8e0BuFCy0auBCt2Kc0kq+lC8NOq+kWb7uV8TGsQ9gLrkqxNcilwJ91SkivBilxKsx37I8DhqvrhwKf6OR9LfbfgIncRttDdOTgO3LfUx7NI3+PjwEngv3R/k91FT0tpTtsAbqe7hDkA7G9jS1/nw/9iITG9l0ZSrwyChEGQAIMgAQZBAgyCBBgECYD/A79FAQx8InyaAAAAAElFTkSuQmCC", "text/plain": [ "<Figure size 432x288 with 1 Axes>" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" }, { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAMIAAAD8CAYAAAAlkXvsAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAAKeklEQVR4nO3dT4xdZR3G8e9jKUgEI0UhBIlU0g0aU2pTMRCiiX9qN9WFCS60C5ISA4kuXFRJtC4h0YUhkpRAqIlCSJTIBrUhGDZGWrGWllLaKtFC08YYA25U6s/FeSe9aafl/pk5c+/M95O8OXfeOffOmZM+vWdOO++TqkJa6d611AcgTQODIGEQJMAgSIBBkACDIAFLEIQkm5McSXIsyY6+v740n/T57whJVgGvAp8FTgB7ga9U1cu9HYQ0j77fETYBx6rqz1X1H+AJYGvPxyCd55Kev971wN8GPj4BfOLcnZJsB7a3Dz/ew3FpGaqqDLtv30GY78DOuzarql3ALoAk/h8QLbq+L41OADcMfPxB4I2ej0E6T99B2AusS7I2yaXAncDTPR+DdJ5eL42q6u0k9wK/BlYBj1bVoT6PQZpPr7dPx+HPCBrXKD8s+y/LEgZBAgyCBBgECTAIEmAQJMAgSIBBkACDIAEGQQIMggQYBAkwCBJgECTAIEiAQZAAgyABBkECDIIEGAQJMAgSYBAkwCBIgEGQAIMgARMGIclrSV5Ksj/Jvja3JsmeJEfb9qqB/b/dmnKOJPn8pAcvLZSFeEf4dFWtr6qN7eMdwLNVtQ54tn1MkpvpFv39CLAZ+HFr0JGW3GJcGm0FdrfHu4EvDsw/UVX/rqq/AMfoGnSkJTdpEAr4TZI/tJYbgGur6iRA217T5udry7l+vhdNsj3JvrnLLWmxTbos/G1V9UaSa4A9SV65yL5DteWAjTnq30TvCFX1RtueBp6iu9Q5leQ6gLY93Xa3LUdTa+wgJHlPkivnHgOfAw7SNeBsa7ttA37ZHj8N3JnksiRrgXXAC+N+fWkhTXJpdC3wVJK51/lZVf0qyV7gySR3AX8FvgxQVYeSPAm8DLwN3FNVZyY6emmB2JijZcvGHGlEBkHCIEiAQZAAgyABBkECDIIEGAQJMAgSYBAkwCBIgEGQAIMgAQZBAgyCBBgECTAIEmAQJMAgSIBBkACDIAEGQQIMggQYBAkYIghJHk1yOsnBgbmRy0CSfLyVihxL8qO0JfKkaTDMO8JjdMUeg8YpA3kI2E635um6eV5TWjLvGISqeh74xznTI5WBtFWx31tVv6tujcmfDDxHWnLj/owwahnI9e3xufPSVJi0KORcFyoDGbokBLrGHLrLKKkX474jjFoGcqI9Pnd+XlW1q6o2DhQUSotq3CCMVAbSLp/eSnJru1v0tYHnSEuvqi46gMeBk8B/6f5mvwu4mu5u0dG2XTOw/33AceAI8IWB+Y10jTrHgQdp3QxDfP1yOMYZw/z5mhsWhWjZsihEGpFBkDAIEmAQJMAgSIBBkACDIAEGQQIMggQYBAkwCBJgECTAIEiAQZAAgyABBkECDIIEGAQJMAgSYBAkwCBIgEGQAIMgAQZBAgyCBIzfmLMzyetJ9rexZeBzNuZo9gyx9ugdwAbg4MDcTuBb8+x7M/An4DJgLd06p6va514APkm3RPwzDKyL6tqnjsUYo6x9Om5jzoVsxcYczaBJfka4N8mBduk0Vya4II05SbYn2Zdk3wTHJw1t3CA8BNwErKdbMv4HbX5BGnMsClHfxgpCVZ2qqjNV9T/gYWBT+9SCNOZIfRsrCHO1Uc2X6ApAwMYczah3LBNM8jjwKeD9SU4A3wM+lWQ93eXNa8DdAFV1KMmTwMvA28A9VXWmvdTX6TqbL6e7a/TMAn4f0kRszNGyZWOONCKDIGEQJMAgSIBBkACDIAEGQQIMggQYBAkwCBJgECTAIEiAQZAAgyABBkECDIIEGAQJMAgSYBAkwCBIgEGQAIMgAQZBAgyCBBgECRiuMeeGJM8lOZzkUJJvtPk1SfYkOdq2Vw08x9YczZYhGmuuAza0x1cCr9I14zwA7GjzO4D7F6M1hyloXnHM5ljoxpyTVfVie/wWcJiu5GMrsLvttpuzDThbsTVHM2aknxGS3AjcAvweuLYt907bXtN2m7g1x8Yc9e0dl4Wfk+QK4OfAN6vqzYtc3k/cmlNVu4Bd7evOu4+0kIZ6R0iymi4EP62qX7TpU3OFIW17us3bmqOZM8xdowCPAIer6ocDn3oa2NYeb+NsA46tOZo9Q9y1uZ3uEuYAsL+NLcDVwLPA0bZdM/Cc++juFh1h4M4QsJGuZuo48CCtqMS7Ro7FGKPcNbIxR8uWjTnSiAyChEGQAIMgAQZBAgyCBBgECTAIEmAQJMAgSIBBkACDIAEGQQIMggQYBAkwCBJgECTAIEiAQZAAgyABBkECDIIEGAQJMAgSMFlRyM4kryfZ38aWgedYFKLZMkFRyE7gW/Psb1GIYypGX0UhF7IVi0I0YyYpCgG4N8mBJI8OdKhZFKKZM3QQzi0KAR4CbgLWAyeBH8ztOs/T6yLz509W7aqqjVW1cdjjkyYxdlFIVZ2qqjNV9T/gYWBT292iEM2csYtC5tpymi/R9R6ARSGaQcN0qN0GfBV4Kcn+Nvcd4CtJ1tNd3rwG3A1QVYeSPAm8DLwN3FNVZ9rzvg48BlxOd9fomYX4JqRJWRSiZcuiEGlEBkHCIEiAQZAAgyABBkECDIIEGAQJMAgSYBAkwCBIgEGQAIMgAQZBAgyCBBgECTAIEmAQJMAgSIBBkACDIAEGQQIMggQYBAkwCBIw3Nqn707yQpI/tcac77f5NUn2JDnatlcNPMfGHM2WIRprAlzRHq+m60a4FXgA2NHmdwD325jjmKax0I05VVX/ah+ubqPomnF2t/ndnG2/2YqNOZoxw/YjrGorYZ8G9lTV74Fr21LvtO01bXcbczRzhgpCKwRZT1fusSnJRy+yu405mjkj3TWqqn8CvwU2A6fmykLa9nTbzcYczZxh7hp9IMn72uPLgc8Ar9A142xru23jbPuNjTmaPUPctfkY8EfgAF091Hfb/NXAs8DRtl0z8Jz76O4WHWHgzhCwsb3GceBBWlGJd40cizFGuWtkY46WLRtzpBEZBAmDIAEGQQIMggQYBAkwCBJgECTAIEiAQZAAuGSpD2AI/6L7P0uC9wN/X+qDmBLvdC4+NMqLzUIQjvh7CZ0k+zwXnYU+F14aSRgECZiNIOxa6gOYIp6Lsxb0XEz97yNIfZiFdwRp0RkEiSkOQpLNbcnIY0l2LPXxLIYkjyY5neTgwNyKXEozyQ1JnktyuC0t+o0238/5GOUXnPsawCq6X/D/MHAp3RKSNy/1cS3C93kHsAE4ODDXy1Ka0zaA64AN7fGVwKvte+7lfEzrO8Im4FhV/bmq/gM8QbeU5LJSVc8D/zhneisrcCnNqjpZVS+2x28Bh+lWQuzlfExrEC60bORKsGhLac6KJDcCt9AtON3L+ZjWIAy9POQKMvFSmrMgyRXAz4FvVtWbF9t1nr
mxz8e0BuFCy0auBCt2Kc0kq+lC8NOq+kWb7uV8TGsQ9gLrkqxNcilwJ91SkivBilxKsx37I8DhqvrhwKf6OR9LfbfgIncRttDdOTgO3LfUx7NI3+PjwEngv3R/k91FT0tpTtsAbqe7hDkA7G9jS1/nw/9iITG9l0ZSrwyChEGQAIMgAQZBAgyCBBgECYD/A79FAQx8InyaAAAAAElFTkSuQmCC", "text/plain": [ "<Figure size 432x288 with 1 Axes>" ] }, "metadata": { "needs_background": "light" }, "output_type": "display_data" } ], "source": [ "from torchvision.utils import draw_bounding_boxes\n", "import torch\n", "import torchvision\n", "transform = transforms.Compose([transforms.ToTensor()])\n", "for i in range(len(sim_img_arr)):\n", " print(sim_img_arr[i].mode)\n", " \n", " tensor = transform(sim_img_arr[i].convert(\"RGB\")).type(torch.uint8)\n", " coords_list = np.zeros((len(coords_arr[i]), 4))\n", " j=0\n", " for k in coords_arr[i]:\n", " c, x, y, w, h, a = k\n", " coords_list[j, 0] = x\n", " coords_list[j, 1] = y\n", " coords_list[j, 2] = int(x+w)\n", " coords_list[j, 3] = int(y+h)\n", " j+=1\n", " print(coords_list.shape)\n", " draw_bounding_boxes(tensor, torch.Tensor(coords_list))\n", " plt.imshow(tensor.permute(1,2,0)[:,:,0:3])\n", " plt.show()\n", " break\n", " \n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "First:\n", " - created 1250 images on a clean background variable 1 to 5 of the same beetle, model detected well on test set of 100 images (val/exp or exp2 or exp3) (proof of concept)\n", " \n", " fixed labels, making sure it's in the center\n", " \n", " - created 1250 images on a clean background variable 0 to 5 beetles of 6 different types of beetles, performing nearly 100%\n", " https://wandb.ai/akhot2/YOLOv5/runs/294etqct?workspace=user-fresleven\n", " https://wandb.ai/akhot2/YOLOv5/runs/i0ykd1a6?workspace=user-fresleven\n", " \n", "TODO:\n", " - (fixed autocropping during rotation and added option for not overlapping and multiple backgrounds and non beetles)\n", " - created 1250 images on a clean background variable 0 to 7 beetles of 6 different images and 0 to 5 non-beetles 7 different images, performing \n", " - created 1250 images on a clean and dirty background variables 0 to 15 beetles of 15 different images and 0 to 10 non-beetle of 10 different images, overlap True\n", " \n", " - created 1250 images on a clean and dirty background variables 0 to 10 beetles of 15 different images and 0 to 10 non-beetle of 10 different images, overlap False\n", " \n", " - perform auto cropping on arduino and include " ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "4" ] }, "execution_count": 6, "metadata": {}, "output_type": "execute_result" } ], "source": [ "len(backgrounds)" ] }, { "cell_type": "code", "execution_count": 196, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Using cache found in /home/akhot2/.cache/torch/hub/ultralytics_yolov5_master\n", "YOLOv5 🚀 2023-3-31 Python-3.9.12 torch-1.11.0 CUDA:0 (A100-SXM4-40GB, 40537MiB)\n", "\n", "Fusing layers... \n", "Model summary: 212 layers, 20852934 parameters, 0 gradients, 47.9 GFLOPs\n", "Adding AutoShape... 
\n" ] }, { "data": { "text/plain": [ "AutoShape(\n", " (model): DetectMultiBackend(\n", " (model): DetectionModel(\n", " (model): Sequential(\n", " (0): Conv(\n", " (conv): Conv2d(3, 48, kernel_size=(6, 6), stride=(2, 2), padding=(2, 2))\n", " (act): SiLU(inplace=True)\n", " )\n", " (1): Conv(\n", " (conv): Conv2d(48, 96, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (2): C3(\n", " (cv1): Conv(\n", " (conv): Conv2d(96, 48, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(96, 48, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv3): Conv(\n", " (conv): Conv2d(96, 96, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (m): Sequential(\n", " (0): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(48, 48, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(48, 48, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " (1): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(48, 48, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(48, 48, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " )\n", " )\n", " (3): Conv(\n", " (conv): Conv2d(96, 192, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (4): C3(\n", " (cv1): Conv(\n", " (conv): Conv2d(192, 96, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(192, 96, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv3): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (m): Sequential(\n", " (0): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(96, 96, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " (1): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(96, 96, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " (2): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(96, 96, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " (3): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(96, 96, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " )\n", " )\n", " (5): Conv(\n", " (conv): Conv2d(192, 384, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (6): C3(\n", " (cv1): Conv(\n", " (conv): Conv2d(384, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(384, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): 
SiLU(inplace=True)\n", " )\n", " (cv3): Conv(\n", " (conv): Conv2d(384, 384, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (m): Sequential(\n", " (0): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " (1): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " (2): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " (3): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " (4): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " (5): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " )\n", " )\n", " (7): Conv(\n", " (conv): Conv2d(384, 768, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (8): C3(\n", " (cv1): Conv(\n", " (conv): Conv2d(768, 384, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(768, 384, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv3): Conv(\n", " (conv): Conv2d(768, 768, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (m): Sequential(\n", " (0): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(384, 384, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " (1): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(384, 384, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " )\n", " )\n", " (9): SPPF(\n", " (cv1): Conv(\n", " (conv): Conv2d(768, 384, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(1536, 768, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (m): MaxPool2d(kernel_size=5, stride=1, padding=2, dilation=1, ceil_mode=False)\n", " )\n", " (10): Conv(\n", " (conv): Conv2d(768, 384, 
kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (11): Upsample(scale_factor=2.0, mode=nearest)\n", " (12): Concat()\n", " (13): C3(\n", " (cv1): Conv(\n", " (conv): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv3): Conv(\n", " (conv): Conv2d(384, 384, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (m): Sequential(\n", " (0): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " (1): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " )\n", " )\n", " (14): Conv(\n", " (conv): Conv2d(384, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (15): Upsample(scale_factor=2.0, mode=nearest)\n", " (16): Concat()\n", " (17): C3(\n", " (cv1): Conv(\n", " (conv): Conv2d(384, 96, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(384, 96, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv3): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (m): Sequential(\n", " (0): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(96, 96, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " (1): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(96, 96, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " )\n", " )\n", " (18): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (19): Concat()\n", " (20): C3(\n", " (cv1): Conv(\n", " (conv): Conv2d(384, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(384, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv3): Conv(\n", " (conv): Conv2d(384, 384, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (m): Sequential(\n", " (0): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " (1): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " 
)\n", " )\n", " )\n", " (21): Conv(\n", " (conv): Conv2d(384, 384, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (22): Concat()\n", " (23): C3(\n", " (cv1): Conv(\n", " (conv): Conv2d(768, 384, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(768, 384, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv3): Conv(\n", " (conv): Conv2d(768, 768, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (m): Sequential(\n", " (0): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(384, 384, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " (1): Bottleneck(\n", " (cv1): Conv(\n", " (conv): Conv2d(384, 384, kernel_size=(1, 1), stride=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " (cv2): Conv(\n", " (conv): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", " (act): SiLU(inplace=True)\n", " )\n", " )\n", " )\n", " )\n", " (24): Detect(\n", " (m): ModuleList(\n", " (0): Conv2d(192, 18, kernel_size=(1, 1), stride=(1, 1))\n", " (1): Conv2d(384, 18, kernel_size=(1, 1), stride=(1, 1))\n", " (2): Conv2d(768, 18, kernel_size=(1, 1), stride=(1, 1))\n", " )\n", " )\n", " )\n", " )\n", " )\n", ")" ] }, "execution_count": 196, "metadata": {}, "output_type": "execute_result" } ], "source": [ "import torch\n", "model = torch.hub.load('ultralytics/yolov5', 'custom', '/raid/projects/akhot2/group-01-phys371-sp2023/yolov5_model/runs/train/20beetle_40-non_20dirt_bkg_overlap/weights/best.pt')\n", "model.eval()" ] }, { "cell_type": "code", "execution_count": 199, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "YOLOv5 <class 'models.common.Detections'> instance\n", "image 1/1: 2746x1610 3 beetless\n", "Speed: 125.2ms pre-process, 13.4ms inference, 1.1ms NMS per image at shape (1, 3, 640, 384)" ] }, "execution_count": 199, "metadata": {}, "output_type": "execute_result" } ], "source": [ "model('/raid/projects/akhot2/group-01-phys371-sp2023/crop/data/train/images/sim1.png')" ] }, { "cell_type": "code", "execution_count": 153, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real18.jpg\n", "(1744, 2778)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real16.jpg\n", "(1894, 3155)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real7.jpg\n", "(1837, 2978)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real8.jpg\n", "(1680, 2791)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real10.jpg\n", "(1880, 3101)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real13.jpg\n", "(1572, 2582)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real15.jpg\n", "(1486, 2422)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real6.jpg\n", "(1635, 2699)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/F28_3AUGUST2022_cropped.jpg\n", "(1850, 2765)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/bg.png\n", "(2114, 3236)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real17.jpg\n", "(1769, 3095)\n", 
"/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real1.jpg\n", "(1751, 2631)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real20.jpg\n", "(1521, 2306)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real4.jpg\n", "(1640, 2289)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real3.jpg\n", "(1610, 2746)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real19.jpg\n", "(1662, 2767)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real9.jpg\n", "(1627, 2155)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real14.jpg\n", "(1548, 2187)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real5.jpg\n", "(1940, 2713)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real12.jpg\n", "(1494, 2695)\n", "/raid/projects/akhot2/group-01-phys371-sp2023/crop/cropped_imgs/real2.jpg\n", "(1940, 2713)\n" ] } ], "source": [ "width = []\n", "height = []\n", "for file in glob.glob('/raid/projects/akhot2/group-01-phys371-sp2023/crop/data/train/sim0.png'):\n", " print(file)\n", " n_b0 = Image.open(file)\n", " print(n_b0.size)\n", " w, h = n_b0.size\n", " width.append(w)\n", " height.append(h)\n", " #f = model(file)\n", " #print(f)" ] }, { "cell_type": "code", "execution_count": 76, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "78.57894736842105" ] }, "execution_count": 76, "metadata": {}, "output_type": "execute_result" } ], "source": [ "np.mean(width)" ] }, { "cell_type": "code", "execution_count": 77, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "92.47368421052632" ] }, "execution_count": 77, "metadata": {}, "output_type": "execute_result" } ], "source": [ "np.mean(height)" ] }, { "cell_type": "code", "execution_count": 154, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "1723.5238095238096" ] }, "execution_count": 154, "metadata": {}, "output_type": "execute_result" } ], "source": [ "np.mean(width)" ] }, { "cell_type": "code", "execution_count": 155, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "2704.9523809523807" ] }, "execution_count": 155, "metadata": {}, "output_type": "execute_result" } ], "source": [ "np.mean(height)" ] }, { "cell_type": "code", "execution_count": 93, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "94.5" ] }, "execution_count": 93, "metadata": {}, "output_type": "execute_result" } ], "source": [ "width[8]*.7" ] }, { "cell_type": "code", "execution_count": 94, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "54.599999999999994" ] }, "execution_count": 94, "metadata": {}, "output_type": "execute_result" } ], "source": [ "height[8]*.7" ] }, { "cell_type": "code", "execution_count": 156, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[1744,\n", " 1894,\n", " 1837,\n", " 1680,\n", " 1880,\n", " 1572,\n", " 1486,\n", " 1635,\n", " 1850,\n", " 2114,\n", " 1769,\n", " 1751,\n", " 1521,\n", " 1640,\n", " 1610,\n", " 1662,\n", " 1627,\n", " 1548,\n", " 1940,\n", " 1494,\n", " 1940]" ] }, "execution_count": 156, "metadata": {}, "output_type": "execute_result" } ], "source": [ "width" ] }, { "cell_type": "code", "execution_count": 166, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "51.44444444444444\n", "41.15555555555556\n", "58.425925925925924\n", "46.74074074074074\n", "55.148148148148145\n", "44.11851851851852\n", "51.68518518518518\n", "41.34814814814815\n", "57.425925925925924\n", "45.94074074074074\n", 
"47.81481481481482\n", "38.25185185185185\n", "44.851851851851855\n", "35.88148148148148\n", "49.98148148148148\n", "39.98518518518519\n", "51.2037037037037\n", "40.96296296296296\n", "59.925925925925924\n", "47.94074074074074\n", "57.31481481481482\n", "45.851851851851855\n", "48.72222222222222\n", "38.977777777777774\n", "42.7037037037037\n", "34.162962962962965\n", "42.388888888888886\n", "33.91111111111111\n", "50.851851851851855\n", "40.681481481481484\n", "51.24074074074074\n", "40.992592592592594\n", "39.907407407407405\n", "31.925925925925927\n", "40.5\n", "32.4\n", "50.24074074074074\n", "40.19259259259259\n", "49.907407407407405\n", "39.925925925925924\n", "50.24074074074074\n", "40.19259259259259\n" ] } ], "source": [ "for i in height:\n", " print((i/(6*9)))\n", " print((i/(7.5*9)))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.9" }, "vscode": { "interpreter": { "hash": "5501471bf458387d76cdb9f487659a1abd30d9114d99ffb68b67c5dd3feef292" } } }, "nbformat": 4, "nbformat_minor": 2 }