{"id":3018,"date":"2025-05-23T21:54:35","date_gmt":"2025-05-23T12:54:35","guid":{"rendered":"https:\/\/manvscloud.com\/?p=3018"},"modified":"2025-05-23T21:54:36","modified_gmt":"2025-05-23T12:54:36","slug":"ncloud-cpu-%ed%99%98%ea%b2%bd%ec%97%90%ec%84%9c%eb%8f%84-%ea%b0%80%eb%8a%a5%ed%95%9c-%eb%94%a5%eb%9f%ac%eb%8b%9d-ai-hub%ec%99%80-%eb%84%a4%ec%9d%b4%eb%b2%84-%ed%81%b4%eb%9d%bc%ec%9a%b0%eb%93%9c","status":"publish","type":"post","link":"https:\/\/manvscloud.com\/?p=3018","title":{"rendered":"[NCLOUD] CPU \ud658\uacbd\uc5d0\uc11c\ub3c4 \uac00\ub2a5\ud55c \ub525\ub7ec\ub2dd: AI-Hub\uc640 \ub124\uc774\ubc84 \ud074\ub77c\uc6b0\ub4dc\ub85c \uc774\ubbf8\uc9c0 \ubd84\ub958 \ubaa8\ub378 \uac1c\ubc1c \ub3c4\uc804\uae30"},"content":{"rendered":"\n<p>\uc548\ub155\ud558\uc138\uc694 MANVSCLOUD \uae40\uc218\ud604\uc785\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\uba38\uc2e0\ub7ec\ub2dd\uacfc \uc778\uacf5\uc9c0\ub2a5 \uae30\uc220\uc774 \ube60\ub974\uac8c \ubc1c\uc804\ud558\uace0 \uc774\uc81c\ub294 \uc774\ub7ec\ud55c \uae30\uc220\ub4e4\uc774 \uc77c\uc0c1 \uc18d\uc5d0\uc11c \ud568\uaed8\ud558\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\ube44\ub85d \uc81c\uac00 \ud074\ub77c\uc6b0\ub4dc \uc5d4\uc9c0\ub2c8\uc5b4\uc9c0\ub9cc AI\/ML \uae30\uc220\uc758 \ubc1c\uc804\uc5d0 \ub530\ub77c \uc774 \ubd84\uc57c\uc5d0\ub3c4 \uc790\uc5f0\uc2a4\ub7fd\uac8c \uad00\uc2ec\uc774 \uc0dd\uacbc\uc2b5\ub2c8\ub2e4. \ub354 \ub113\uc740 \ubc94\uc704\uc758 \ud074\ub77c\uc6b0\ub4dc \uc11c\ube44\uc2a4\ub97c \ub2e4\ub8e8\uace0 \uc2f6\uc740 \uc695\uad6c\uac00 \ucee4\uc84c\uace0 \uc774\ub97c \uc704\ud55c \ud559\uc2b5\uc774 \ud544\uc694\ud558\ub2e4\uace0 \ub290\uaf08\uc2b5\ub2c8\ub2e4. 
\ucc98\uc74c \uba38\uc2e0\ub7ec\ub2dd\uc744 \ubc30\uc6b0\ub294 \uc785\ubb38\uc790 \uc785\uc7a5\uc5d0\uc11c \uc774\ub860\uc801\uc778 \uac1c\ub150\ub4e4\uc740 \uc989\uac01\uc801\uc73c\ub85c \uc774\ud574\ud558\uae30 \uc5b4\ub824\uc6e0\uae30\uc5d0 \ube60\ub974\uac8c \uc2e4\uc81c \ud504\ub85c\uc81d\ud2b8\ub97c \ud1b5\ud55c \uacbd\ud5d8\uc73c\ub85c \ud765\ubbf8\ub97c \uc5bb\uace0\uc790 \ud588\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\uc608\uc804\uac19\uc558\uc73c\uba74 AI\/ML\uc5d0 \ub300\ud55c \uc9c0\uc2dd\uc774 \uac70\uc758 \uc5c6\uace0 \uac1c\ubc1c\uc790\uac00 \uc544\ub2c8\ub77c\ub294 \uc810\uc774 \uc2dc\uc791\ud558\ub294 \ub370 \ud070 \uc7a5\ubcbd\uc73c\ub85c \ub290\uaf08\uaca0\uc9c0\ub9cc \uc694\uc998\uc740 \ubc14\uc774\ube0c \ucf54\ub529\uc774 \uc77c\uc0c1\uc774\ub77c \uc5d4\uc9c0\ub2c8\uc5b4 \uc785\uc7a5\uc5d0\uc11c\ub3c4 \uc758\uc9c0\ub9cc \uc788\ub2e4\uba74 \ubb34\uc5c7\uc774\ub4e0 \uc2dc\uc791\ud560 \uc218 \uc788\ub2e4\uace0 \uc0dd\uac01\ud569\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\uc624\ub298 \ud3ec\uc2a4\ud305\uc5d0\uc11c\ub294 AI\/ML \uacbd\ud5d8\uc774 \uc5c6\ub358 \uc81c\uac00 \ub124\uc774\ubc84 \ud074\ub77c\uc6b0\ub4dc \ud658\uacbd\uc744 \ud65c\uc6a9\ud558\uc5ec \ub3fc\uc9c0\uac10\uc790\ub97c \uc2dd\ubcc4\ud558\ub294 \ub525\ub7ec\ub2dd \uae30\ubc18 \ucd94\ub860 \ubaa8\ub378\uc744 \uac1c\ubc1c\ud558\uba70 \uc2a4\ud130\ub514\ud588\ub358 \uacfc\uc815\uc744 \uc18c\uac1c\ud558\uace0\uc790 \ud569\ub2c8\ub2e4.<\/p>\n\n\n\n<hr class=\"wp-block-separator has-alpha-channel-opacity\"\/>\n\n\n\n<h3 class=\"wp-block-heading has-white-color has-vivid-green-cyan-background-color has-text-color has-background has-link-color wp-elements-5ae04260b6b8acddfc47224be6e38760\"> \uc2a4\ud130\ub514 \uac1c\uc694<\/h3>\n\n\n\n<ul>\n<li><strong>\uc774\ubbf8\uc9c0 \ubd84\ub958 \ubaa8\ub378 \uac1c\ubc1c<\/strong>: \ub3fc\uc9c0\uac10\uc790\uc640 \ub2e4\ub978 \uc2dd\ubb3c\uc744 \uad6c\ubd84\ud560 \uc218 \uc788\ub294 \ub525\ub7ec\ub2dd \ubaa8\ub378 
\uad6c\ucd95<\/li>\n\n\n\n<li><strong>\ud074\ub77c\uc6b0\ub4dc \uae30\ubc18 \ud559\uc2b5 \ud658\uacbd \uad6c\uc131<\/strong>: \ub124\uc774\ubc84 \ud074\ub77c\uc6b0\ub4dc \ud50c\ub7ab\ud3fc\uc744 \ud65c\uc6a9\ud55c \uba38\uc2e0\ub7ec\ub2dd \uc778\ud504\ub77c \uad6c\ucd95<\/li>\n\n\n\n<li><strong>\uc2e4\uc2dc\uac04 \ucd94\ub860 \uc11c\ube44\uc2a4 \uad6c\ud604<\/strong>: \uc0ac\uc6a9\uc790\uac00 \uc5c5\ub85c\ub4dc\ud55c \uc774\ubbf8\uc9c0\ub97c \ubd84\uc11d\ud558\uc5ec \ub3fc\uc9c0\uac10\uc790 \uc5ec\ubd80\ub97c \ud310\ubcc4\ud558\ub294 \uc2dc\uc2a4\ud15c \uac1c\ubc1c<\/li>\n<\/ul>\n\n\n\n<hr class=\"wp-block-separator has-alpha-channel-opacity\"\/>\n\n\n\n<h3 class=\"wp-block-heading has-white-color has-vivid-green-cyan-background-color has-text-color has-background has-link-color wp-elements-db887b022bf95b272308ba0eaad0e228\"> \ubcf8\ub860 #1 \ud559\uc2b5<\/h3>\n\n\n\n<p>\ub3fc\uc9c0\uac10\uc790 \ub370\uc774\ud130\uc14b\uc740 <strong>AI-Hub\uc5d0\uc11c \uac00\uc838\uc654\uae30\ub54c\ubb38\uc5d0 \uad6d\ub0b4 IP\uc5d0\uc11c\ub9cc \ub2e4\uc6b4\ub85c\ub4dc\ud558\ubbc0\ub85c \uba38\uc2e0\ub7ec\ub2dd \ud559\uc2b5\uacfc \ucd94\ub860\uc744 \uc704\ud55c \ud658\uacbd\uc740 \ub124\uc774\ubc84 \ud074\ub77c\uc6b0\ub4dc \ud50c\ub7ab\ud3fc\uc744 \uc120\ud0dd<\/strong>\ud588\uc2b5\ub2c8\ub2e4. \ub610\ud55c \uac1c\uc778 \ud14c\uc2a4\ud2b8\uc774\ubbc0\ub85c \ube44\uc6a9\uc801\uc778 \uce21\uba74\uc744 \uace0\ub824\ud558\uc5ec GPU \ub300\uc2e0 CPU \ud658\uacbd\uc5d0\uc11c \uc791\uc5c5\uc744 \uc9c4\ud589\ud588\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\ucc38\uace0\ub85c AWS EC2\uc5d0\uc11c\ub3c4 \ub370\uc774\ud130\ub97c \ub2e4\uc6b4\ub85c\ub4dc \ud574\ubd24\ub294\ub370 AWS EC2\uac00 \uc11c\uc6b8 \ub9ac\uc804\uc5d0 \uc788\uc5b4\ub3c4 \ud574\uc678 IP\ub85c \uc778\uc2dd\ub418\uc5b4 \uc811\uadfc\uc774 \ucc28\ub2e8\ub418\ub294 \ubb38\uc81c\uac00 \ubc1c\uc0dd\ud569\ub2c8\ub2e4. 
\uc774\ub7f0 \uc9c0\uc5ed \uc81c\ud55c \ubb38\uc81c\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud574 \ub124\uc774\ubc84 \ud074\ub77c\uc6b0\ub4dc \ud50c\ub7ab\ud3fc\uc744 \uc120\ud0dd\ud558\ub294 \uac83\uc740 \ud558\ub098\uc758 \uc88b\uc740 \uc120\ud0dd\uc9c0\uac00 \ub420 \uc218 \uc788\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<ul>\n<li><strong>\uc11c\ubc84 \uc2a4\ud399:<\/strong>\n<ul>\n<li>\uc6b4\uc601\uccb4\uc81c: Ubuntu 22.04<\/li>\n\n\n\n<li>vCPU: 4<\/li>\n\n\n\n<li>Memory: 8GB<\/li>\n<\/ul>\n<\/li>\n<\/ul>\n\n\n\n<p>\ube44\uc6a9 \ud6a8\uc728\uc131\uc744 \uc704\ud574 \ucd5c\uc18c\ud55c\uc758 \uc11c\ubc84 \uc2a4\ud399\uc73c\ub85c \uc2dc\uc791\ud588\uc9c0\ub9cc \uc774\ub85c \uc778\ud55c \ud559\uc2b5 \uc2dc\uac04 \uc99d\uac00\uc640 \uba54\ubaa8\ub9ac \ubd80\uc871 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\ub294 \uacfc\uc815\uc774 \uc624\ud788\ub824 \uc88b\uc740 \ud559\uc2b5 \uacbd\ud5d8\uc774 \ub418\uc5c8\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">sudo apt update &amp;&amp; sudo apt upgrade -y\nsudo apt install -y python3 python3-pip python3-venv unzip wget<\/pre>\n\n\n\n<p>Ubuntu \uc11c\ubc84\uc5d0 \ud544\uc694\ud55c \uae30\ubcf8 \ud328\ud0a4\uc9c0\ub4e4\uc744 \uc124\uce58\ud588\uace0 \ud504\ub85c\uc81d\ud2b8\uc758 \uc758\uc874\uc131\uc744 \ub3c5\ub9bd\uc801\uc73c\ub85c \uad00\ub9ac\ud558\uae30 \uc704\ud574 Python \uac00\uc0c1\ud658\uacbd\uc744 \uc124\uc815\ud588\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">python3 -m venv poison-env\nsource poison-env\/bin\/activate<\/pre>\n\n\n\n<p>\uc774\ud6c4 \ub525\ub7ec\ub2dd \ubaa8\ub378 
\uac1c\ubc1c\uc5d0 \ud544\uc694\ud55c \uc8fc\uc694 \ud328\ud0a4\uc9c0\ub4e4\uc744 \uc124\uce58\ud588\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">pip install torch torchvision matplotlib pandas scikit-learn opencv-python<\/pre>\n\n\n\n<p>PyTorch\ub97c \uba54\uc778 \ud504\ub808\uc784\uc6cc\ud06c\ub85c \uc120\ud0dd\ud55c \uc774\uc720\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<ul>\n<li>\uc9c1\uad00\uc801\uc778 API\uc640 \ud48d\ubd80\ud55c \ubb38\uc11c\ud654<\/li>\n\n\n\n<li>\ub3d9\uc801 \uacc4\uc0b0 \uadf8\ub798\ud504\ub97c \ud1b5\ud55c \uc720\uc5f0\ud55c \ubaa8\ub378\ub9c1<\/li>\n\n\n\n<li>\uc804\uc774\ud559\uc2b5\uc744 \uc704\ud55c \ub2e4\uc591\ud55c \uc0ac\uc804 \ud559\uc2b5 \ubaa8\ub378 \uc81c\uacf5<\/li>\n\n\n\n<li>\ud65c\ubc1c\ud55c \ucee4\ubba4\ub2c8\ud2f0 \uc9c0\uc6d0<\/li>\n<\/ul>\n\n\n\n<p>\ub370\uc774\ud130\uc14b\uc740 \uad6d\ub0b4 AI-Hub\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 &#8216;\ub3d9\uc758\ubcf4\uac10 \uc57d\ucd08 \uc774\ubbf8\uc9c0 AI \ub370\uc774\ud130&#8217;\ub97c \ud65c\uc6a9\ud588\uc2b5\ub2c8\ub2e4. <br>AI Hub\uc5d0\ub294 \ubb34\ub8cc \ub370\uc774\ud130\uc14b\uc744 \uc81c\uacf5\ud558\uace0 \uc788\uc5b4 \uacf5\ubd80\ud558\uae30\uc5d0 \uc544\uc8fc \uc88b\uc2b5\ub2c8\ub2e4! 
AI Hub \uc9f1\uc9f1!<\/p>\n\n\n\n<p>AI-Hub Shell\uc744 \uc774\uc6a9\ud574 \ub370\uc774\ud130\ub97c \ub2e4\uc6b4\ub85c\ub4dc\ud558\ub294 \uacfc\uc815\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4.<br>(datasetkey\ub294 \ub370\uc774\ud130\ub9c8\ub2e4 \ub118\ubc84\uac00 \ub2e4\ub974\ubbc0\ub85c \uc6d0\ud558\ub294 \ub370\uc774\ud130\uc14b \ub2e4\uc6b4\ub85c\ub4dc \uc2dc \ubcc0\uacbd\ud558\uba74 \ub418\uaca0\uc2b5\ub2c8\ub2e4.)<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\"># \ub2e4\uc6b4\ub85c\ub4dc \nwget https:\/\/api.aihub.or.kr\/api\/aihubshell.do\n# \uc2e4\ud589 \uad8c\ud55c \ucd94\uac00\nchmod +x aihubshell.do\n# \uc804\uc5ed \uc2e4\ud589\uc744 \uc704\ud55c bin \ub4f1\ub85d\ncp -avp aihubshell.do \/usr\/bin\/aihubshell\n# \ub370\uc774\ud130\uc14b \ub2e4\uc6b4\ub85c\ub4dc\naihubshell -mode d -datasetkey 612 -aihubapikey '{API_KEY}'<\/pre>\n\n\n\n<p>\uc800\ub294 \ub370\uc774\ud130\uc14b\uc5d0\uc11c \ub3fc\uc9c0\uac10\uc790 \uc774\ubbf8\uc9c0\ub97c \uc120\ud0dd\uc801\uc73c\ub85c \ub2e4\uc6b4\ub85c\ub4dc\ud558\uc5ec \uc791\uc5c5 \ub514\ub809\ud1a0\ub9ac\uc5d0 \uc815\ub9ac\ud588\uace0 \uc774\ud6c4 \ud559\uc2b5\uacfc \uac80\uc99d\uc744 \uc704\ud55c \ub514\ub809\ud1a0\ub9ac \uad6c\uc870\ub97c \ub2e4\uc74c\uacfc \uac19\uc774 \uc124\uc815\ud588\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\"># \uc791\uc5c5 \ub514\ub809\ud1a0\ub9ac \uc0dd\uc131\nmkdir -p \/poison_data\/dataset\/train\/helianthus_tuberosus\nmkdir -p \/poison_data\/dataset\/val\/helianthus_tuberosus\n\n# \ubd80\uc815 \uc0d8\ud50c\uc744 \uc704\ud55c \ub514\ub809\ud1a0\ub9ac 
\uc0dd\uc131\nmkdir -p \/poison_data\/dataset\/train\/not_helianthus_tuberosus\nmkdir -p \/poison_data\/dataset\/val\/not_helianthus_tuberosus<\/pre>\n\n\n\n<p>\ub514\ub809\ud1a0\ub9ac\ub97c \uc0dd\uc131\ud55c \ud6c4 \uc6d0\ubcf8 \ub370\uc774\ud130\uc14b\uc744 \uc555\ucd95 \ud574\uc81c\ud558\uace0 \ud559\uc2b5\/\uac80\uc99d \uc138\ud2b8\ub85c \ubd84\ud560\ud588\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\"># \ud6c8\ub828 \ub370\uc774\ud130 \uc555\ucd95 \ud574\uc81c\ncd \/poison_data\/180.\ub3d9\uc758\ubcf4\uac10_\uc57d\ucd08_\uc774\ubbf8\uc9c0_AI\ub370\uc774\ud130\/01.\ub370\uc774\ud130\/1.Training\/\uc6d0\ucc9c\ub370\uc774\ud130\nunzip -O cp949 \"\/poison_data\/180.\ub3d9\uc758\ubcf4\uac10_\uc57d\ucd08_\uc774\ubbf8\uc9c0_AI\ub370\uc774\ud130\/01.\ub370\uc774\ud130\/1.Training\/\uc6d0\ucc9c\ub370\uc774\ud130\/TS_\ub3fc\uc9c0\uac10\uc790.zip\" -d \/poison_data\/dataset\/train\/helianthus_tuberosus\n\n# \uac80\uc99d \ub370\uc774\ud130 \uc555\ucd95 \ud574\uc81c\ncd \/poison_data\/180.\ub3d9\uc758\ubcf4\uac10_\uc57d\ucd08_\uc774\ubbf8\uc9c0_AI\ub370\uc774\ud130\/01.\ub370\uc774\ud130\/2.Validation\/\uc6d0\ucc9c\ub370\uc774\ud130\nunzip -O cp949 \"\/poison_data\/180.\ub3d9\uc758\ubcf4\uac10_\uc57d\ucd08_\uc774\ubbf8\uc9c0_AI\ub370\uc774\ud130\/01.\ub370\uc774\ud130\/2.Validation\/\uc6d0\ucc9c\ub370\uc774\ud130\/VS_\ub3fc\uc9c0\uac10\uc790.zip\" -d \/poison_data\/dataset\/val\/helianthus_tuberosus<\/pre>\n\n\n\n<p>\ub610\ud55c \ubd80\uc815 \uc0d8\ud50c(negative samples)\ub85c\ub294 \uc218\ubc15 \uc774\ubbf8\uc9c0\ub97c \uc0ac\uc6a9\ud588\uc2b5\ub2c8\ub2e4.<br>\uc544\ubb34\ub798\ub3c4 \ud574\ub2f9 \ub370\uc774\ud130 \uc790\uccb4\uac00 \ub3fc\uc9c0\uac10\uc790\ub97c \uad6c\ubd84\ud558\ub294 \ub370\uc774\ud130\uac00 \uc544\ub2c8\ub77c 
\ub3d9\uc758\ubcf4\uac10 \ub370\uc774\ud130\ub77c \ub3fc\uc9c0\uac10\uc790\uc774\ub2e4, \uc544\ub2c8\ub2e4\ub85c \uad6c\ubd84\ud558\uae30\uc5d0\ub294 \uc5b4\ub824\uc6c0\uc774 \uc788\uc5b4\uc11c \ud14c\uc2a4\ud2b8\ub85c \uc218\ubc15 \uc774\ubbf8\uc9c0\ub97c \uc0ac\uc6a9\ud574\ubcf4\uc558\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\"># \ud6c8\ub828 \ub370\uc774\ud130 \uc555\ucd95 \ud574\uc81c (\uc218\ubc15 - \ubd80\uc815 \uc0d8\ud50c)\ncd \/poison_data\/180.\ub3d9\uc758\ubcf4\uac10_\uc57d\ucd08_\uc774\ubbf8\uc9c0_AI\ub370\uc774\ud130\/01.\ub370\uc774\ud130\/1.Training\/\uc6d0\ucc9c\ub370\uc774\ud130\nunzip -O cp949 \"\/poison_data\/180.\ub3d9\uc758\ubcf4\uac10_\uc57d\ucd08_\uc774\ubbf8\uc9c0_AI\ub370\uc774\ud130\/01.\ub370\uc774\ud130\/1.Training\/\uc6d0\ucc9c\ub370\uc774\ud130\/TS_\uc218\ubc15.zip\" -d \/poison_data\/dataset\/train\/not_helianthus_tuberosus\n\n# \uac80\uc99d \ub370\uc774\ud130 \uc555\ucd95 \ud574\uc81c (\uc218\ubc15 - \ubd80\uc815 \uc0d8\ud50c)\ncd \/poison_data\/180.\ub3d9\uc758\ubcf4\uac10_\uc57d\ucd08_\uc774\ubbf8\uc9c0_AI\ub370\uc774\ud130\/01.\ub370\uc774\ud130\/2.Validation\/\uc6d0\ucc9c\ub370\uc774\ud130\nunzip -O cp949 \"\/poison_data\/180.\ub3d9\uc758\ubcf4\uac10_\uc57d\ucd08_\uc774\ubbf8\uc9c0_AI\ub370\uc774\ud130\/01.\ub370\uc774\ud130\/2.Validation\/\uc6d0\ucc9c\ub370\uc774\ud130\/VS_\uc218\ubc15.zip\" -d \/poison_data\/dataset\/val\/not_helianthus_tuberosus<\/pre>\n\n\n\n<p>\ub370\uc774\ud130 \uc900\ube44\uac00 \ub05d\ub09c \ud6c4 \ubaa8\ub378\uc758 \uc77c\ubc18\ud654 \ub2a5\ub825\uc744 \ud5a5\uc0c1\uc2dc\ud0a4\uae30 \uc704\ud574 \ub2e4\uc591\ud55c \ub370\uc774\ud130 \uc99d\uac15 \uae30\ubc95\uc740 \uc544\ub798\uc640 \uac19\uc774 
\uc801\uc6a9\ud588\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<ul>\n<li><strong>RandomHorizontalFlip\/VerticalFlip<\/strong>: \uc774\ubbf8\uc9c0\uc758 \uc88c\uc6b0\/\uc0c1\ud558 \ubc18\uc804\uc744 \ud1b5\ud574 \ubc29\ud5a5\uc5d0 \ubd88\ubcc0\ud55c \ud2b9\uc131 \ud559\uc2b5 <\/li>\n\n\n\n<li><strong>RandomRotation<\/strong>: \ud68c\uc804\ub41c \uc774\ubbf8\uc9c0\uc5d0 \ub300\ud55c \uc778\uc2dd \ub2a5\ub825 \ud5a5\uc0c1 <\/li>\n\n\n\n<li><strong>RandomResizedCrop<\/strong>: \ub2e4\uc591\ud55c \ud06c\uae30\uc640 \ube44\uc728\uc758 \uc774\ubbf8\uc9c0\uc5d0 \ub300\ud55c \uc801\uc751\ub825 \uac15\ud654 <\/li>\n\n\n\n<li><strong>ColorJitter<\/strong>: \uc0c9\uc0c1, \ubc1d\uae30, \ub300\ube44 \ubcc0\ud654\uc5d0 \uac15\uac74\ud55c \ubaa8\ub378 \uad6c\ucd95<\/li>\n\n\n\n<li><strong>RandomGrayscale<\/strong>: \ud751\ubc31 \uc774\ubbf8\uc9c0\uc5d0 \ub300\ud55c \ub300\uc751 \ub2a5\ub825 \uac1c\uc120<\/li>\n<\/ul>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">train_transform = transforms.Compose([\n    transforms.Resize((224, 224)),\n    transforms.RandomHorizontalFlip(p=0.5),\n    transforms.RandomVerticalFlip(p=0.3),\n    transforms.RandomRotation(30),\n    transforms.RandomResizedCrop(224, scale=(0.8, 1.0)),\n    transforms.ColorJitter(brightness=0.2, contrast=0.2, saturation=0.2, hue=0.1),\n    transforms.RandomGrayscale(p=0.1),\n    transforms.ToTensor(),\n    transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n])<\/pre>\n\n\n\n<p>\ub610\ud55c \uc774\ubbf8\uc9c0 \ubd84\ub958\ub97c \uc704\ud574 ResNet18 \ubaa8\ub378\uc744 \uc544\ub798\uc640 \uac19\uc740 \uc774\uc720\ub85c \uc120\ud0dd\ud588\uace0 \uc774\uc9c4 \ubd84\ub958 \ubb38\uc81c\uc5d0 \ub9de\uac8c \uc218\uc815\ud588\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<ul>\n<li><strong>\uc794\ucc28 \uc5f0\uacb0(Residual 
Connections)<\/strong>: \uae4a\uc740 \uc2e0\uacbd\ub9dd\uc5d0\uc11c \ubc1c\uc0dd\ud558\ub294 \uae30\uc6b8\uae30 \uc18c\uc2e4 \ubb38\uc81c\ub97c \ud6a8\uacfc\uc801\uc73c\ub85c \ud574\uacb0 <\/li>\n\n\n\n<li><strong>\uc0ac\uc804 \ud559\uc2b5\ub41c \uac00\uc911\uce58 \ud65c\uc6a9<\/strong>: ImageNet \ub370\uc774\ud130\uc14b\uc73c\ub85c \uc0ac\uc804 \ud559\uc2b5\ub41c \uac00\uc911\uce58\ub97c \ud65c\uc6a9\ud55c \uc804\uc774\ud559\uc2b5 \uc6a9\uc774 <\/li>\n\n\n\n<li><strong>\uc801\uc808\ud55c \ubaa8\ub378 \ud06c\uae30<\/strong>: \uc0c1\ub300\uc801\uc73c\ub85c \uac00\ubcbc\uc6b4 \ubaa8\ub378 \ud06c\uae30\ub85c <strong>CPU \ud658\uacbd\uc5d0\uc11c\ub3c4 \ud559\uc2b5 \uac00\ub2a5 <\/strong>(\uc774 \uc774\uc720\uac00 \uac00\uc7a5 \ud07c)<\/li>\n\n\n\n<li><strong>\ub192\uc740 \uc131\ub2a5<\/strong>: \uc0c1\ub300\uc801\uc73c\ub85c \ub2e8\uc21c\ud55c \uad6c\uc870\uc784\uc5d0\ub3c4 \ub192\uc740 \uc774\ubbf8\uc9c0 \ubd84\ub958 \uc131\ub2a5 \uc81c\uacf5<\/li>\n<\/ul>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">def create_model(num_classes=2):\n    model = models.resnet18(pretrained=True)\n    \n    # \ud2b9\uc131 \ucd94\ucd9c \ub808\uc774\uc5b4\uc758 \uac00\uc911\uce58\ub97c \uace0\uc815\ud558\uc9c0 \uc54a\uace0 \ud559\uc2b5 \uac00\ub2a5\ud558\ub3c4\ub85d \uc124\uc815\n    for param in model.parameters():\n        param.requires_grad = True\n    \n    # \uc644\uc804 \uc5f0\uacb0 \ub808\uc774\uc5b4\ub97c \uc774\uc9c4 \ubd84\ub958\uc5d0 \ub9de\uac8c \uad50\uccb4\n    num_features = model.fc.in_features\n    model.fc = nn.Sequential(\n        nn.Dropout(0.6),  # \uacfc\uc801\ud569 \ubc29\uc9c0\ub97c \uc704\ud55c Dropout \ub808\uc774\uc5b4\n        nn.Linear(num_features, num_classes)\n    )\n    \n    return model<\/pre>\n\n\n\n<p>\ud559\uc2b5 \ucf54\ub4dc\ub294 \uc544\ub798\uc640 
\uac19\uc774 \uc791\uc131\ud588\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">\nimport os\nimport random\nimport torch\nimport torch.nn as nn\nimport torch.optim as optim\nfrom torch.utils.data import Dataset, DataLoader\nfrom torchvision import transforms, models\nimport torchvision.transforms.functional as TF\nfrom PIL import Image\nimport matplotlib.pyplot as plt\nimport numpy as np\nfrom PIL import Image, ImageDraw, ImageFilter\nimport time\nfrom datetime import datetime\nimport gc\n\n# \ub79c\ub364 \uc2dc\ub4dc \uc124\uc815\ndef set_seed(seed=42):\n    random.seed(seed)\n    np.random.seed(seed)\n    torch.manual_seed(seed)\n    torch.cuda.manual_seed(seed)\n    torch.backends.cudnn.deterministic = True\n    torch.backends.cudnn.benchmark = False\n\nset_seed()\n\n# \uac1c\uc120\ub41c \ub370\uc774\ud130\uc14b \ud074\ub798\uc2a4 \uc815\uc758\nclass ImprovedDataset(Dataset):\n    def __init__(self, root_dir, transform=None):\n        self.root_dir = root_dir\n        self.transform = transform\n        self.image_files = []\n\n        # \ub3fc\uc9c0\uac10\uc790 \uc774\ubbf8\uc9c0 (positive samples)\n        ja_dir = os.path.join(root_dir, \"helianthus_tuberosus\")\n        if os.path.exists(ja_dir):\n            for dirpath, dirnames, filenames in os.walk(ja_dir):\n                for filename in filenames:\n                    if filename.lower().endswith(('.jpg', '.jpeg', '.png')):\n                        self.image_files.append((os.path.join(dirpath, filename), 1))  # 1\uc740 \ub3fc\uc9c0\uac10\uc790 \ud074\ub798\uc2a4\n\n        # \ub3fc\uc9c0\uac10\uc790\uac00 \uc544\ub2cc \uc774\ubbf8\uc9c0 (negative samples) - \uc2e4\uc81c \uc774\ubbf8\uc9c0\n        neg_dir = os.path.join(root_dir, \"not_helianthus_tuberosus\")\n        if 
os.path.exists(neg_dir):\n            for dirpath, dirnames, filenames in os.walk(neg_dir):\n                for filename in filenames:\n                    if filename.lower().endswith(('.jpg', '.jpeg', '.png')):\n                        self.image_files.append((os.path.join(dirpath, filename), 0))  # 0\uc740 \ub3fc\uc9c0\uac10\uc790 \uc544\ub2d8 \ud074\ub798\uc2a4\n\n        # \ub370\uc774\ud130 \uc774\ubbf8\uc9c0 \ud30c\uc77c \ud655\uc778 \ucd9c\ub825\n        positive_count = sum(1 for _, label in self.image_files if label == 1)\n        negative_count = sum(1 for _, label in self.image_files if label == 0)\n        print(f\"\ub370\uc774\ud130\uc14b \ud06c\uae30: {len(self.image_files)} \uc774\ubbf8\uc9c0 (\ub3fc\uc9c0\uac10\uc790: {positive_count}, \ub3fc\uc9c0\uac10\uc790 \uc544\ub2d8: {negative_count})\")\n        if self.image_files:\n            print(f\"\uccab \ubc88\uc9f8 \uc774\ubbf8\uc9c0 \uacbd\ub85c \uc608\uc2dc: {self.image_files[0][0]}\")\n\n    def __len__(self):\n        return len(self.image_files)\n\n    def __getitem__(self, idx):\n        img_path, label = self.image_files[idx]\n        try:\n            image = Image.open(img_path).convert('RGB')\n\n            if self.transform:\n                image = self.transform(image)\n\n            return image, label\n        except Exception as e:\n            print(f\"\uc774\ubbf8\uc9c0 \ub85c\ub4dc \uc2e4\ud328: {img_path} - {e}\")\n            # \uc624\ub958 \ubc1c\uc0dd \uc2dc \ube48 \uc774\ubbf8\uc9c0 \ubc18\ud658 (\ud559\uc2b5 \uc9c4\ud589\uc744 \uc704\ud574)\n            dummy_image = Image.new('RGB', (224, 224), color='gray')\n            if self.transform:\n                dummy_image = self.transform(dummy_image)\n            return dummy_image, label\n\n# \uc6d0\ub798\uc758 JerusalemArtichokeDataset \ud074\ub798\uc2a4\ub294 \uc720\uc9c0 (\uae30\uc874 \ucf54\ub4dc\uc640\uc758 \ud638\ud658\uc131\uc744 \uc704\ud574)\nclass JerusalemArtichokeDataset(Dataset):\n    def __init__(self, 
root_dir, transform=None):\n        self.root_dir = root_dir\n        self.transform = transform\n        self.image_files = []\n\n        # \ub3fc\uc9c0\uac10\uc790 \uc774\ubbf8\uc9c0 \ud30c\uc77c \uc218\uc9d1 (\ud558\uc704 \ub514\ub809\ud1a0\ub9ac \ud3ec\ud568)\n        ja_dir = os.path.join(root_dir, \"helianthus_tuberosus\")\n        if os.path.exists(ja_dir):\n            for dirpath, dirnames, filenames in os.walk(ja_dir):\n                for filename in filenames:\n                    if filename.lower().endswith(('.jpg', '.jpeg', '.png')):\n                        self.image_files.append((os.path.join(dirpath, filename), 1))  # 1\uc740 \ub3fc\uc9c0\uac10\uc790 \ud074\ub798\uc2a4\n\n        # \ub370\uc774\ud130 \uc774\ubbf8\uc9c0 \ud30c\uc77c\uc774 \ub9ce\uc9c0 \uc54a\uc73c\ubbc0\ub85c \ud655\uc778 \ucd9c\ub825\n        print(f\"\ub370\uc774\ud130\uc14b \ud06c\uae30: {len(self.image_files)} \uc774\ubbf8\uc9c0\")\n        print(f\"\uccab \ubc88\uc9f8 \uc774\ubbf8\uc9c0 \uacbd\ub85c \uc608\uc2dc: {self.image_files[0][0] if self.image_files else 'None'}\")\n\n    def __len__(self):\n        return len(self.image_files)\n\n    def __getitem__(self, idx):\n        img_path, label = self.image_files[idx]\n        try:\n            image = Image.open(img_path).convert('RGB')\n\n            if self.transform:\n                image = self.transform(image)\n\n            return image, label\n        except Exception as e:\n            print(f\"\uc774\ubbf8\uc9c0 \ub85c\ub4dc \uc2e4\ud328: {img_path} - {e}\")\n            # \uc624\ub958 \ubc1c\uc0dd \uc2dc \ube48 \uc774\ubbf8\uc9c0 \ubc18\ud658 (\ud559\uc2b5 \uc9c4\ud589\uc744 \uc704\ud574)\n            dummy_image = Image.new('RGB', (224, 224), color='gray')\n            if self.transform:\n                dummy_image = self.transform(dummy_image)\n            return dummy_image, label\n\n# \uac00\uc9dc \ubd80\uc815 \uc0d8\ud50c \uc0dd\uc131 \ud568\uc218 (\uc2e4\uc81c \ubd80\uc815 \uc774\ubbf8\uc9c0\uac00 
\ubd80\uc871\ud55c \uacbd\uc6b0 \ubcf4\uc644\uc6a9)\ndef generate_dummy_negatives_in_batches(dataset, num_samples, transform, batch_size=100):\n    \"\"\"\ud6c8\ub828\uc6a9 \uac00\uc9dc \ubd80\uc815 \uc0d8\ud50c\uc744 \ubc30\uce58 \ub2e8\uc704\ub85c \uc0dd\uc131\ud569\ub2c8\ub2e4.\"\"\"\n    width, height = 224, 224\n\n    # \uc804\uccb4 \uc0d8\ud50c \uc218 \uc81c\ud55c\n    num_samples = min(num_samples, 1000)  # \ucd5c\ub300 1000\uac1c\ub85c \uc81c\ud55c\n    print(f\"\uc0dd\uc131\ud560 \uac00\uc9dc \ubd80\uc815 \uc0d8\ud50c \uc218: {num_samples}\")\n\n    # \ubc30\uce58 \ub2e8\uc704\ub85c \ucc98\ub9ac\n    all_negatives = []\n\n    for batch_start in range(0, num_samples, batch_size):\n        batch_end = min(batch_start + batch_size, num_samples)\n        print(f\"\ubc30\uce58 \uc0dd\uc131 \uc911: {batch_start+1}-{batch_end}\/{num_samples}\")\n\n        batch_negatives = []\n        for i in range(batch_start, batch_end):\n            if i % 3 == 0:\n                # \uac04\ub2e8\ud55c \ub3c4\ud615 \uc774\ubbf8\uc9c0 (\ubcf5\uc7a1\ub3c4 \ub0ae\ucda4)\n                img = Image.new('RGB', (width, height), color=(\n                    random.randint(100, 200),\n                    random.randint(100, 200),\n                    random.randint(100, 200)\n                ))\n\n                # \uac04\ub2e8\ud55c \ub3c4\ud615 \ud55c \uac1c\ub9cc \ucd94\uac00\n                draw = ImageDraw.Draw(img)\n                shape_type = random.choice(['rectangle', 'ellipse'])\n                color = (\n                    random.randint(0, 255),\n                    random.randint(0, 255),\n                    random.randint(0, 255)\n                )\n                x1 = random.randint(0, width-1)\n                y1 = random.randint(0, height-1)\n                x2 = random.randint(x1, width)\n                y2 = random.randint(y1, height)\n\n                if shape_type == 'rectangle':\n                    draw.rectangle([x1, y1, x2, y2], fill=color)\n           
     else:\n                    draw.ellipse([x1, y1, x2, y2], fill=color)\n            else:\n                # \ub2e8\uc21c \uce7c\ub77c \uc774\ubbf8\uc9c0\n                img = Image.new('RGB', (width, height), color=(\n                    random.randint(100, 200),\n                    random.randint(100, 200),\n                    random.randint(100, 200)\n                ))\n\n            # \ubcc0\ud658 \uc801\uc6a9\n            img_tensor = transform(img)\n            batch_negatives.append((img_tensor, 0))  # 0\uc740 '\ub3fc\uc9c0\uac10\uc790 \uc544\ub2d8' \ud074\ub798\uc2a4\n\n        # \ubc30\uce58 \ucd94\uac00 \ubc0f \uba54\ubaa8\ub9ac \uad00\ub9ac\n        all_negatives.extend(batch_negatives)\n\n        # \uba85\uc2dc\uc801 \uba54\ubaa8\ub9ac \uc815\ub9ac\n        del batch_negatives\n        gc.collect()\n\n    return all_negatives\n\nclass CombinedDataset(Dataset):\n    \"\"\"\ub3fc\uc9c0\uac10\uc790 \ub370\uc774\ud130\uc14b\uacfc \uc0dd\uc131\ub41c \ubd80\uc815 \uc0d8\ud50c\uc744 \uacb0\ud569\ud558\ub294 \ub370\uc774\ud130\uc14b\"\"\"\n    def __init__(self, positive_dataset, negative_samples):\n        self.positive_dataset = positive_dataset\n        self.negative_samples = negative_samples\n\n    def __len__(self):\n        return len(self.positive_dataset) + len(self.negative_samples)\n\n    def __getitem__(self, idx):\n        if idx &lt; len(self.positive_dataset):\n            return self.positive_dataset[idx]\n        else:\n            return self.negative_samples[idx - len(self.positive_dataset)]\n\n# \uc774\ubbf8\uc9c0 \ubcc0\ud658 \uc815\uc758 - \ub370\uc774\ud130 \uc99d\uac15 \uac15\ud654\ndef get_transforms():\n    train_transform = transforms.Compose([\n        transforms.Resize((224, 224)),\n        transforms.RandomHorizontalFlip(p=0.5),\n        transforms.RandomVerticalFlip(p=0.3),\n        transforms.RandomRotation(30),\n        transforms.RandomResizedCrop(224, scale=(0.8, 1.0)),\n        transforms.ColorJitter(brightness=0.2, 
contrast=0.2, saturation=0.2, hue=0.1),\n        transforms.RandomGrayscale(p=0.1),\n        transforms.ToTensor(),\n        transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n    ])\n\n    val_transform = transforms.Compose([\n        transforms.Resize((224, 224)),\n        transforms.ToTensor(),\n        transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n    ])\n\n    return train_transform, val_transform\n\n# \ubaa8\ub378 \uc815\uc758 - \uc815\uaddc\ud654 \uac15\ud654\ndef create_model(num_classes=2):\n    model = models.resnet18(pretrained=True)\n    for param in model.parameters():\n        param.requires_grad = True\n\n    num_features = model.fc.in_features\n    model.fc = nn.Sequential(\n        nn.Dropout(0.6),  # \ub4dc\ub86d\uc544\uc6c3 \ube44\uc728 \uc99d\uac00\n        nn.Linear(num_features, num_classes)\n    )\n\n    return model\n\n# \ubaa8\ub378 \ud559\uc2b5 \ud568\uc218\ndef train_model(model, train_loader, val_loader, criterion, optimizer, scheduler, num_epochs=10, save_dir='models'):\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n    print(f\"\ud559\uc2b5\uc5d0 \uc0ac\uc6a9\ub418\ub294 \uc7a5\uce58: {device}\")\n\n    model = model.to(device)\n\n    best_model_wts = None\n    best_acc = 0.0\n\n    train_losses = []\n    val_losses = []\n    train_accs = []\n    val_accs = []\n\n    for epoch in range(num_epochs):\n        print(f'Epoch {epoch+1}\/{num_epochs}')\n        print('-' * 10)\n\n        # \ud559\uc2b5 \ub2e8\uacc4\n        model.train()\n        running_loss = 0.0\n        running_corrects = 0\n\n        for inputs, labels in train_loader:\n            inputs = inputs.to(device)\n            labels = labels.to(device)\n\n            optimizer.zero_grad()\n\n            with torch.set_grad_enabled(True):\n                outputs = model(inputs)\n                _, preds = torch.max(outputs, 1)\n                loss = criterion(outputs, labels)\n\n       
         loss.backward()\n                optimizer.step()\n\n            running_loss += loss.item() * inputs.size(0)\n            running_corrects += torch.sum(preds == labels.data)\n\n        epoch_loss = running_loss \/ len(train_loader.dataset)\n        epoch_acc = running_corrects.double() \/ len(train_loader.dataset)\n\n        train_losses.append(epoch_loss)\n        train_accs.append(epoch_acc.item())\n\n        print(f'Train Loss: {epoch_loss:.4f} Acc: {epoch_acc:.4f}')\n\n        # \uac80\uc99d \ub2e8\uacc4\n        model.eval()\n        running_loss = 0.0\n        running_corrects = 0\n\n        for inputs, labels in val_loader:\n            inputs = inputs.to(device)\n            labels = labels.to(device)\n\n            with torch.no_grad():\n                outputs = model(inputs)\n                _, preds = torch.max(outputs, 1)\n                loss = criterion(outputs, labels)\n\n            running_loss += loss.item() * inputs.size(0)\n            running_corrects += torch.sum(preds == labels.data)\n\n        epoch_loss = running_loss \/ len(val_loader.dataset)\n        epoch_acc = running_corrects.double() \/ len(val_loader.dataset)\n\n        val_losses.append(epoch_loss)\n        val_accs.append(epoch_acc.item())\n\n        print(f'Val Loss: {epoch_loss:.4f} Acc: {epoch_acc:.4f}')\n\n        # ReduceLROnPlateau \uc2a4\ucf00\uc904\ub7ec\ub294 \uac80\uc99d \uc190\uc2e4\uc744 \uae30\ubc18\uc73c\ub85c \ud559\uc2b5\ub960 \uc870\uc815\n        if scheduler is not None:\n            if isinstance(scheduler, torch.optim.lr_scheduler.ReduceLROnPlateau):\n                scheduler.step(epoch_loss)  # \uac80\uc99d \uc190\uc2e4 \uc804\ub2ec\n            else:\n                scheduler.step()\n\n        # \ucd5c\uace0 \uc131\ub2a5 \ubaa8\ub378 \uc800\uc7a5\n        if epoch_acc > best_acc:\n            best_acc = epoch_acc\n            best_model_wts = model.state_dict().copy()\n\n            # \ud604\uc7ac \ucd5c\uace0 \uc131\ub2a5 \ubaa8\ub378 
\uc800\uc7a5\n            os.makedirs(save_dir, exist_ok=True)\n            torch.save(model.state_dict(), os.path.join(save_dir, 'best_model.pth'))\n            print(f\"\ucd5c\uace0 \uc131\ub2a5 \ubaa8\ub378 \uc800\uc7a5: {os.path.join(save_dir, 'best_model.pth')}\")\n\n        print()\n\n        # \uba54\ubaa8\ub9ac \uad00\ub9ac\ub97c \uc704\ud55c \uac00\ube44\uc9c0 \uceec\ub809\uc158\n        gc.collect()\n        torch.cuda.empty_cache() if torch.cuda.is_available() else None\n\n    print(f'Best val Acc: {best_acc:.4f}')\n\n    # \ucd5c\uace0 \uc131\ub2a5 \ubaa8\ub378 \uac00\uc911\uce58 \ubd88\ub7ec\uc624\uae30\n    model.load_state_dict(best_model_wts)\n\n    # \ud559\uc2b5 \uacb0\uacfc \uc2dc\uac01\ud654\n    plt.figure(figsize=(12, 4))\n\n    plt.subplot(1, 2, 1)\n    plt.plot(range(1, num_epochs+1), train_losses, label='Train Loss')\n    plt.plot(range(1, num_epochs+1), val_losses, label='Validation Loss')\n    plt.xlabel('Epochs')\n    plt.ylabel('Loss')\n    plt.legend()\n    plt.title('Training and Validation Loss')\n\n    plt.subplot(1, 2, 2)\n    plt.plot(range(1, num_epochs+1), train_accs, label='Train Accuracy')\n    plt.plot(range(1, num_epochs+1), val_accs, label='Validation Accuracy')\n    plt.xlabel('Epochs')\n    plt.ylabel('Accuracy')\n    plt.legend()\n    plt.title('Training and Validation Accuracy')\n\n    plt.tight_layout()\n    plt.savefig(os.path.join(save_dir, 'training_results.png'))\n\n    return model\n\ndef main():\n    # 1. 
\ub370\uc774\ud130 \ub85c\ub529\n    train_dir = \"\/poison_data\/dataset\/train\"\n    val_dir = \"\/poison_data\/dataset\/val\"\n    model_dir = \"models\"\n\n    os.makedirs(model_dir, exist_ok=True)\n\n    train_transform, val_transform = get_transforms()\n\n    # \uac1c\uc120\ub41c \ub370\uc774\ud130\uc14b \uc0ac\uc6a9 (\uc2e4\uc81c \ubd80\uc815 \uc0d8\ud50c \ud3ec\ud568)\n    print(\"\ub370\uc774\ud130\uc14b \ub85c\ub529 \uc911...\")\n    train_dataset = ImprovedDataset(train_dir, transform=train_transform)\n    val_dataset = ImprovedDataset(val_dir, transform=val_transform)\n\n    # \uc2e4\uc81c \ubd80\uc815 \uc0d8\ud50c\uc774 \ubd80\uc871\ud55c \uacbd\uc6b0, \uac00\uc9dc \ubd80\uc815 \uc0d8\ud50c \ucd94\uac00\n    if sum(1 for _, label in train_dataset.image_files if label == 0) &lt; 100:\n        print(\"\uc2e4\uc81c \ubd80\uc815 \uc0d8\ud50c\uc774 \ubd80\uc871\ud569\ub2c8\ub2e4. \uac00\uc9dc \ubd80\uc815 \uc0d8\ud50c\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.\")\n        # \ub3fc\uc9c0\uac10\uc790 \uc774\ubbf8\uc9c0\ub9cc \ucd94\ucd9c\n        train_positive_only = [(img, label) for img, label in train_dataset.image_files if label == 1]\n        # \uac00\uc9dc \ubd80\uc815 \uc0d8\ud50c \uc0dd\uc131\n        fake_negatives = generate_dummy_negatives_in_batches(train_dataset, len(train_positive_only) \/\/ 2, train_transform, batch_size=100)\n        # \ubaa8\ub4e0 \uc0d8\ud50c \ud569\uce58\uae30\n        combined_samples = train_dataset.image_files + [(tensor, label) for tensor, label in fake_negatives]\n        # \ub370\uc774\ud130\uc14b \uc7ac\uad6c\uc131\n        train_dataset.image_files = combined_samples\n        print(f\"\ud569\uccd0\uc9c4 \ub370\uc774\ud130\uc14b \ud06c\uae30: {len(train_dataset.image_files)}\")\n\n    print(f\"\ucd5c\uc885 \ud559\uc2b5 \ub370\uc774\ud130\uc14b \ud06c\uae30: {len(train_dataset)} \uc774\ubbf8\uc9c0\")\n    print(f\"\ucd5c\uc885 \uac80\uc99d \ub370\uc774\ud130\uc14b \ud06c\uae30: {len(val_dataset)} 
\uc774\ubbf8\uc9c0\")\n\n    # \ubc30\uce58 \ud06c\uae30\uc640 \uc6cc\ucee4 \uc218 \uc870\uc815\n    train_loader = DataLoader(train_dataset, batch_size=8, shuffle=True, num_workers=1)\n    val_loader = DataLoader(val_dataset, batch_size=8, shuffle=False, num_workers=1)\n\n    # 2. \ubaa8\ub378 \uc0dd\uc131\n    model = create_model(num_classes=2)  # \ub3fc\uc9c0\uac10\uc790vs\uae30\ud0c0 \uc774\uc9c4 \ubd84\ub958\n\n    # 3. \uc190\uc2e4 \ud568\uc218, \uc635\ud2f0\ub9c8\uc774\uc800, \uc2a4\ucf00\uc904\ub7ec \uc124\uc815\n    criterion = nn.CrossEntropyLoss()\n    optimizer = optim.Adam(model.parameters(), lr=0.001, weight_decay=1e-4)  # \uac00\uc911\uce58 \uac10\uc1e0 \ucd94\uac00\n    scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.1, patience=3, verbose=True)\n\n    # 4. \ubaa8\ub378 \ud559\uc2b5\n    model = train_model(\n        model, train_loader, val_loader,\n        criterion, optimizer, scheduler,\n        num_epochs=20,  # \uc5d0\ud3ec\ud06c \uc218 \uc99d\uac00\n        save_dir=model_dir\n    )\n\n    # 5. 
\ucd5c\uc885 \ubaa8\ub378 \uc800\uc7a5\n    torch.save(model.state_dict(), os.path.join(model_dir, 'jerusalem_artichoke_model.pth'))\n    print(f\"\ucd5c\uc885 \ubaa8\ub378 \uc800\uc7a5: {os.path.join(model_dir, 'jerusalem_artichoke_model.pth')}\")\n\nif __name__ == \"__main__\":\n    main()\n<\/pre>\n\n\n\n<p>\uba3c\uc800 CPU \ud658\uacbd\uc5d0\uc11c \ub300\uc6a9\ub7c9 \uc774\ubbf8\uc9c0 \ub370\uc774\ud130\uc14b\uc744 \ud559\uc2b5\uc2dc\ud0a4\uae30 \uc704\ud574 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ubc95\ub4e4\uc744 \uc801\uc6a9\ud588\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<p>1) <strong>\uba54\ubaa8\ub9ac \uad00\ub9ac<\/strong>: \uc8fc\uae30\uc801\uc778 \uac00\ube44\uc9c0 \uceec\ub809\uc158 \ud638\ucd9c\ub85c \uba54\ubaa8\ub9ac \uc0ac\uc6a9\ub7c9 \ucd5c\uc801\ud654<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">import gc\ngc.collect()  # \uba54\ubaa8\ub9ac \uc815\ub9ac<\/pre>\n\n\n\n<p>2) <strong>\ubc30\uce58 \ud06c\uae30 \ucd5c\uc801\ud654<\/strong>: \uba54\ubaa8\ub9ac \uc81c\ud55c\uc744 \uace0\ub824\ud55c \ubc30\uce58 \ud06c\uae30 \uc124\uc815<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">train_loader = DataLoader(train_dataset, batch_size=16, shuffle=True)<\/pre>\n\n\n\n<p>3) <strong>\uccb4\ud06c\ud3ec\uc778\ud2b8 \uc800\uc7a5<\/strong>: \uc8fc\uae30\uc801\uc778 \ubaa8\ub378 \uc800\uc7a5\uc73c\ub85c \ud559\uc2b5 \uc911\ub2e8 \uc2dc \ubcf5\uad6c \uac00\ub2a5\uc131 \ud655\ubcf4<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" 
data-enlighter-title=\"\" data-enlighter-group=\"\">torch.save(model.state_dict(), f'models\/model_epoch_{epoch}.pth')<\/pre>\n\n\n\n<p>4) <strong>\uc870\uae30 \uc885\ub8cc(Early Stopping)<\/strong>: \uacfc\uc801\ud569 \ubc29\uc9c0 \ubc0f \ucd5c\uc801 \ubaa8\ub378 \uc120\uc815\uc744 \uc704\ud55c \uc870\uae30 \uc885\ub8cc \uad6c\ud604<\/p>\n\n\n\n<p>\uc704 \uae30\ubc95\ub4e4\uc744 \ucd94\uac00\ud558\uac8c \ub41c \uac83 \uc911 \ud2b9\ud788 \uba54\ubaa8\ub9ac \uad00\ub9ac \ubd80\ubd84\uc744 \uac00\uc7a5 \ub9ce\uc774 \uace0\ubbfc\ud588\uc5c8\ub294\ub370&#8230; \uadf8 \uc774\uc720\uac00 \ud559\uc2b5 \uc911\uc5d0 \uacc4\uc18d \uba54\ubaa8\ub9ac \ubd80\uc871\uc73c\ub85c Process Kill\uc774 \ub418\uc5b4\ubc84\ub824\uc11c&#8230; \uadf8\ub807\ub2e4\uace0 \uac1c\uc778 \ud14c\uc2a4\ud2b8\uc774\ubbc0\ub85c \uc778\uc2a4\ud134\uc2a4 \ud06c\uae30\ub97c \ub192\uc77c \uc21c \uc5c6\uc5c8\uae30\uc5d0 \ucd5c\ub300\ud55c CPU\ub9cc\uc73c\ub85c \ud559\uc2b5\ud558\uace0\uc790 \ud588\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\ud559\uc2b5 \ub370\uc774\ud130\uc758 \uad6c\uc131\uc740 \uc544\ub798\uc640 \uac19\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<ul>\n<li><strong>\ud559\uc2b5 \ub370\uc774\ud130<\/strong>:\n<ul>\n<li>\ub3fc\uc9c0\uac10\uc790 \uc774\ubbf8\uc9c0(positive): \uc57d 36.3GB, 7,566\uac1c \uc774\ubbf8\uc9c0<\/li>\n\n\n\n<li>\uc218\ubc15 \uc774\ubbf8\uc9c0(negative): \uc57d 11.3GB, 4,032\uac1c \uc774\ubbf8\uc9c0<\/li>\n\n\n\n<li>\ucd1d \ub370\uc774\ud130\uc14b \ud06c\uae30: \uc57d 47.6GB, 11,598\uac1c \uc774\ubbf8\uc9c0<\/li>\n<\/ul>\n<\/li>\n<\/ul>\n\n\n\n<ul>\n<li><strong>\uac80\uc99d \ub370\uc774\ud130<\/strong>:\n<ul>\n<li>\ub3fc\uc9c0\uac10\uc790 \uc774\ubbf8\uc9c0: 943\uac1c<\/li>\n\n\n\n<li>\uc218\ubc15 \uc774\ubbf8\uc9c0: 501\uac1c<\/li>\n\n\n\n<li>\ucd1d \uac80\uc99d \ub370\uc774\ud130: 1,444\uac1c \uc774\ubbf8\uc9c0<\/li>\n<\/ul>\n<\/li>\n<\/ul>\n\n\n\n<hr class=\"wp-block-separator has-alpha-channel-opacity\"\/>\n\n\n\n<h3 class=\"wp-block-heading has-white-color 
has-vivid-green-cyan-background-color has-text-color has-background has-link-color wp-elements-2d12950a4eae297787d266a1a60d2c41\"> \ubcf8\ub860 #2 \ucd94\ub860<\/h3>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">\nimport os\nimport sys\nimport torch\nimport torchvision.transforms as transforms\nfrom PIL import Image\nimport io\nimport boto3\nfrom botocore.client import Config\nimport matplotlib.pyplot as plt\nfrom datetime import datetime\nimport time\nimport json\nfrom torch.utils.data import Dataset, DataLoader\nfrom torchvision import models\nimport torch.nn as nn\nimport argparse\nimport matplotlib.pyplot as plt\nimport matplotlib.font_manager as fm\n\n# \ud3f0\ud2b8 \uacbd\ub85c \uc9c0\uc815 \ubc0f \ub4f1\ub85d\nfont_path = '\/usr\/share\/fonts\/truetype\/nanum\/NanumGothic.ttf'  # \uc608\uc2dc\nfontprop = fm.FontProperties(fname=font_path)\nplt.rcParams['font.family'] = fontprop.get_name()\n\n# S3 \uc124\uc815\nS3_ENDPOINT = \"https:\/\/kr.object.ncloudstorage.com\"\nS3_ACCESS_KEY = \"{OBJECTSTORAGE_\uad8c\ud55c_ACCESSKEY}\"\nS3_SECRET_KEY = \"{OBJECTSTORAGE_\uad8c\ud55c_SECRETKEY}\"\nS3_BUCKET = \"{NCLOUD_OBJECTSTORAGE_BUCKET_NAME}\"\n\n# \ubaa8\ub378 \ud30c\uc77c \uacbd\ub85c (\uc704 \ud559\uc2b5 \uc2e4\ud589 \ud6c4 \uc0dd\uc131\ub41c \ubaa8\ub378\ub85c \uc9c0\uc815)\nMODEL_PATH = 'models\/best_model.pth'\n\n# \ud310\ubcc4 \uc784\uacc4\uac12 \uc124\uc815\nCONFIDENCE_THRESHOLD = 0.8  # 80% \uc774\uc0c1\uc758 \ud655\uc2e0\uc774 \uc788\uc744 \ub54c\ub9cc \ub3fc\uc9c0\uac10\uc790\ub85c \ud310\ubcc4\n\n# \uc774\ubbf8\uc9c0 \uc804\ucc98\ub9ac \ubcc0\ud658\ntransform = transforms.Compose([\n    transforms.Resize((224, 224)),\n    transforms.ToTensor(),\n    transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n])\n\n# \ubaa8\ub378 \uad6c\uc870 
\uc815\uc758\ndef create_model(num_classes=2):\n    model = models.resnet18(pretrained=False)\n    num_features = model.fc.in_features\n    model.fc = nn.Sequential(\n        nn.Dropout(0.6),\n        nn.Linear(num_features, num_classes)\n    )\n    return model\n\n# S3 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc124\uc815\ndef setup_s3_client(endpoint_url=S3_ENDPOINT, access_key=S3_ACCESS_KEY, secret_key=S3_SECRET_KEY):\n    s3_client = boto3.client(\n        's3',\n        endpoint_url=endpoint_url,\n        aws_access_key_id=access_key,\n        aws_secret_access_key=secret_key,\n        config=Config(signature_version='s3v4')\n    )\n    return s3_client\n\n# S3 \ubc84\ud0b7 \ub0b4 \uc774\ubbf8\uc9c0 \ubaa9\ub85d \uac00\uc838\uc624\uae30\ndef list_images_in_bucket(s3_client, bucket_name, prefix=\"\"):\n    try:\n        response = s3_client.list_objects_v2(Bucket=bucket_name, Prefix=prefix)\n        if 'Contents' in response:\n            return [obj['Key'] for obj in response['Contents'] if obj['Key'].lower().endswith(('.jpg', '.jpeg', '.png'))]\n        return []\n    except Exception as e:\n        print(f\"S3 \ubc84\ud0b7\uc5d0\uc11c \uc774\ubbf8\uc9c0 \ubaa9\ub85d\uc744 \uac00\uc838\uc624\ub294 \uc911 \uc624\ub958 \ubc1c\uc0dd: {e}\")\n        return []\n\n# S3\uc5d0\uc11c \uc774\ubbf8\uc9c0 \uac00\uc838\uc624\uae30\ndef get_image_from_s3(s3_client, bucket_name, image_key):\n    try:\n        response = s3_client.get_object(Bucket=bucket_name, Key=image_key)\n        image_content = response['Body'].read()\n        image = Image.open(io.BytesIO(image_content)).convert('RGB')\n        return image\n    except Exception as e:\n        print(f\"S3\uc5d0\uc11c \uc774\ubbf8\uc9c0\ub97c \uac00\uc838\uc624\ub294 \uc911 \uc624\ub958 \ubc1c\uc0dd: {e}\")\n        return None\n\n# \uc784\uacc4\uac12\uc744 \uc801\uc6a9\ud55c \uc774\ubbf8\uc9c0 \ucd94\ub860 \ud568\uc218\ndef predict_image_with_threshold(model, image, transform, threshold=CONFIDENCE_THRESHOLD):\n    device = 
torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n    model = model.to(device)\n    model.eval()\n\n    image_tensor = transform(image).unsqueeze(0).to(device)\n\n    with torch.no_grad():\n        outputs = model(image_tensor)\n        probs = torch.nn.functional.softmax(outputs, dim=1)\n        prob_positive = probs[0][1].item()  # \ub3fc\uc9c0\uac10\uc790\uc77c \ud655\ub960\n\n        # \uc784\uacc4\uac12 \uc801\uc6a9\n        if prob_positive >= threshold:\n            prediction = 1  # \ub3fc\uc9c0\uac10\uc790\n        else:\n            prediction = 0  # \ub3fc\uc9c0\uac10\uc790 \uc544\ub2d8\n\n    return prediction, prob_positive\n\n# \uae30\uc874 \ucd94\ub860 \ud568\uc218 (\uc784\uacc4\uac12 \uc5c6\uc74c, \uc774\uc804 \ucf54\ub4dc\uc640\uc758 \ud638\ud658\uc131\uc744 \uc704\ud574 \uc720\uc9c0)\ndef predict_image(model, image, transform):\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n    model = model.to(device)\n    model.eval()\n\n    image_tensor = transform(image).unsqueeze(0).to(device)\n\n    with torch.no_grad():\n        outputs = model(image_tensor)\n        probs = torch.nn.functional.softmax(outputs, dim=1)\n        prob, predicted = torch.max(probs, 1)\n\n    return predicted.item(), prob.item()\n\n# \ucd94\ub860 \uacb0\uacfc \uc2dc\uac01\ud654 \ubc0f \uc800\uc7a5\ndef visualize_prediction(image, prediction, probability, output_dir, filename):\n    plt.figure(figsize=(8, 8))\n    plt.imshow(image)\n\n    # \uc601\uc5b4\ub85c \uacb0\uacfc \ud45c\uc2dc (\ud55c\uae00 \ud3f0\ud2b8 \ubb38\uc81c \ud574\uacb0)\n    result_en = \"Jerusalem Artichoke\" if prediction == 1 else \"Not Jerusalem Artichoke\"\n    plt.title(f'Prediction: {result_en}, Probability: {probability:.2f}')\n    plt.axis('off')\n\n    os.makedirs(output_dir, exist_ok=True)\n    output_path = os.path.join(output_dir, f'prediction_{filename}')\n    plt.savefig(output_path)\n    plt.close()\n\n    result_info = {\n        \"image\": 
filename,\n        \"prediction\": \"jerusalem_artichoke\" if prediction == 1 else \"not_jerusalem_artichoke\",\n        \"probability\": float(probability),\n        \"result_image\": output_path\n    }\n    return result_info, output_path\n\n# \ubaa8\ub378 \ub85c\ub4dc\ndef load_model(model_path):\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n    model = create_model(num_classes=2)\n    try:\n        model.load_state_dict(torch.load(model_path, map_location=device))\n        model.to(device)\n        model.eval()\n        print(f\"\ubaa8\ub378\uc744 \uc131\uacf5\uc801\uc73c\ub85c \ub85c\ub4dc\ud588\uc2b5\ub2c8\ub2e4: {model_path}\")\n        return model\n    except Exception as e:\n        print(f\"\ubaa8\ub378 \ub85c\ub4dc \uc911 \uc624\ub958 \ubc1c\uc0dd: {e}\")\n        return None\n\n# \uac10\uc2dc \ubc0f \ucc98\ub9ac \ub85c\uc9c1\ndef monitor_and_process(s3_endpoint, s3_access_key, s3_secret_key, bucket_name, interval=30, output_dir=\"results\", threshold=CONFIDENCE_THRESHOLD):\n    \"\"\"\n    S3 \ubc84\ud0b7\uc744 \uc8fc\uae30\uc801\uc73c\ub85c \uac10\uc2dc\ud558\uace0 \uc0c8 \uc774\ubbf8\uc9c0\ub97c \ucc98\ub9ac\ud569\ub2c8\ub2e4.\n    \"\"\"\n    # \ubaa8\ub378 \ub85c\ub4dc\n    model = load_model(MODEL_PATH)\n    if model is None:\n        return\n\n    # S3 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc124\uc815\n    s3_client = setup_s3_client(s3_endpoint, s3_access_key, s3_secret_key)\n\n    # \uc774\ubbf8 \ucc98\ub9ac\ub41c \uc774\ubbf8\uc9c0 \ucd94\uc801\n    processed_images = set()\n\n    print(f\"S3 \ubc84\ud0b7 '{bucket_name}' \uac10\uc2dc\ub97c \uc2dc\uc791\ud569\ub2c8\ub2e4...\")\n    print(f\"\uc784\uacc4\uac12: {threshold} (\uc774 \uac12 \uc774\uc0c1\uc758 \ud655\ub960\uc5d0\uc11c\ub9cc \ub3fc\uc9c0\uac10\uc790\ub85c \ud310\ubcc4)\")\n\n    try:\n        while True:\n            # \ubc84\ud0b7 \ub0b4 \uc774\ubbf8\uc9c0 \ubaa9\ub85d \uac00\uc838\uc624\uae30\n            image_keys = list_images_in_bucket(s3_client, 
bucket_name)\n\n            # \uc0c8 \uc774\ubbf8\uc9c0 \ucc98\ub9ac\n            for key in image_keys:\n                if key not in processed_images:\n                    print(f\"\uc0c8 \uc774\ubbf8\uc9c0 \ubc1c\uacac: {key}\")\n\n                    # \uc774\ubbf8\uc9c0 \ub2e4\uc6b4\ub85c\ub4dc\n                    image = get_image_from_s3(s3_client, bucket_name, key)\n                    if image is None:\n                        continue\n\n                    # \uc774\ubbf8\uc9c0 \ucd94\ub860 (\uc784\uacc4\uac12 \uc801\uc6a9)\n                    prediction, probability = predict_image_with_threshold(model, image, transform, threshold)\n\n                    # \uacb0\uacfc \uc800\uc7a5\n                    filename = os.path.basename(key)\n                    result_info, output_path = visualize_prediction(image, prediction, probability, output_dir, filename)\n\n                    # JSON \uacb0\uacfc \uc800\uc7a5\n                    result_json_path = os.path.join(output_dir, f\"result_{filename.split('.')[0]}.json\")\n                    with open(result_json_path, 'w', encoding='utf-8') as f:\n                        json.dump(result_info, f, ensure_ascii=False, indent=2)\n\n                    # \ucc98\ub9ac \ub85c\uadf8\n                    result_text = \"\ub3fc\uc9c0\uac10\uc790\" if prediction == 1 else \"\ub3fc\uc9c0\uac10\uc790 \uc544\ub2d8\"\n                    print(f\"\uc774\ubbf8\uc9c0 '{key}' \ucc98\ub9ac \uacb0\uacfc: {result_text} (\ud655\ub960: {probability:.2f})\")\n                    print(f\"\uacb0\uacfc \uc774\ubbf8\uc9c0: {output_path}\")\n                    print(f\"\uacb0\uacfc JSON: {result_json_path}\")\n\n                    # \ucc98\ub9ac \uc644\ub8cc \ud45c\uc2dc\n                    processed_images.add(key)\n\n            # \ub300\uae30\n            print(f\"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')} - \ub2e4\uc74c \ud655\uc778 \uac04\uaca9 \ub300\uae30 \uc911... 
({interval}\ucd08)\")\n            time.sleep(interval)\n\n    except KeyboardInterrupt:\n        print(\"\uac10\uc2dc\ub97c \uc911\ub2e8\ud569\ub2c8\ub2e4.\")\n    except Exception as e:\n        print(f\"\uc624\ub958 \ubc1c\uc0dd: {e}\")\n\n\n# \ub2e8\uc77c \uc774\ubbf8\uc9c0 \ucc98\ub9ac (S3)\ndef process_s3_image(s3_endpoint, s3_access_key, s3_secret_key, bucket_name, image_key, output_dir=\"results\", threshold=CONFIDENCE_THRESHOLD):\n    # \ubaa8\ub378 \ub85c\ub4dc\n    model = load_model(MODEL_PATH)\n    if model is None:\n        return\n\n    # S3 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc124\uc815\n    s3_client = setup_s3_client(s3_endpoint, s3_access_key, s3_secret_key)\n\n    # \uc774\ubbf8\uc9c0 \ub2e4\uc6b4\ub85c\ub4dc\n    image = get_image_from_s3(s3_client, bucket_name, image_key)\n    if image is None:\n        print(f\"\uc774\ubbf8\uc9c0\ub97c \uac00\uc838\uc62c \uc218 \uc5c6\uc2b5\ub2c8\ub2e4: {image_key}\")\n        return\n\n    # \uc774\ubbf8\uc9c0 \ucd94\ub860 (\uc784\uacc4\uac12 \uc801\uc6a9)\n    prediction, probability = predict_image_with_threshold(model, image, transform, threshold)\n\n    # \uacb0\uacfc \uc800\uc7a5\n    filename = os.path.basename(image_key)\n    result_info, output_path = visualize_prediction(image, prediction, probability, output_dir, filename)\n\n    # JSON \uacb0\uacfc \uc800\uc7a5\n    result_json_path = os.path.join(output_dir, f\"result_{filename.split('.')[0]}.json\")\n    with open(result_json_path, 'w', encoding='utf-8') as f:\n        json.dump(result_info, f, ensure_ascii=False, indent=2)\n\n    # \ucc98\ub9ac \ub85c\uadf8\n    result_text = \"\ub3fc\uc9c0\uac10\uc790\" if prediction == 1 else \"\ub3fc\uc9c0\uac10\uc790 \uc544\ub2d8\"\n    print(f\"\uc774\ubbf8\uc9c0 '{image_key}' \ucc98\ub9ac \uacb0\uacfc: {result_text} (\ud655\ub960: {probability:.2f})\")\n    print(f\"\uc784\uacc4\uac12: {threshold} (\ud655\ub960\uc774 \uc774 \uac12 \uc774\uc0c1\uc774\uba74 \ub3fc\uc9c0\uac10\uc790\ub85c \ud310\ubcc4)\")\n    
print(f\"\uacb0\uacfc \uc774\ubbf8\uc9c0: {output_path}\")\n    print(f\"\uacb0\uacfc JSON: {result_json_path}\")\n\n# \ub85c\uceec \uc774\ubbf8\uc9c0 \ucc98\ub9ac\ndef process_local_image(image_path, output_dir=\"results\", threshold=CONFIDENCE_THRESHOLD):\n    # \ubaa8\ub378 \ub85c\ub4dc\n    model = load_model(MODEL_PATH)\n    if model is None:\n        return\n\n    # \uc774\ubbf8\uc9c0 \ub85c\ub4dc\n    try:\n        image = Image.open(image_path).convert('RGB')\n    except Exception as e:\n        print(f\"\uc774\ubbf8\uc9c0\ub97c \uc5f4 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4: {e}\")\n        return\n\n    # \uc774\ubbf8\uc9c0 \ucd94\ub860 (\uc784\uacc4\uac12 \uc801\uc6a9)\n    prediction, probability = predict_image_with_threshold(model, image, transform, threshold)\n\n    # \uacb0\uacfc \uc800\uc7a5\n    filename = os.path.basename(image_path)\n    result_info, output_path = visualize_prediction(image, prediction, probability, output_dir, filename)\n\n    # JSON \uacb0\uacfc \uc800\uc7a5\n    result_json_path = os.path.join(output_dir, f\"result_{filename.split('.')[0]}.json\")\n    with open(result_json_path, 'w', encoding='utf-8') as f:\n        json.dump(result_info, f, ensure_ascii=False, indent=2)\n\n    # \ucc98\ub9ac \ub85c\uadf8\n    result_text = \"\ub3fc\uc9c0\uac10\uc790\" if prediction == 1 else \"\ub3fc\uc9c0\uac10\uc790 \uc544\ub2d8\"\n    print(f\"\uc774\ubbf8\uc9c0 '{image_path}' \ucc98\ub9ac \uacb0\uacfc: {result_text} (\ud655\ub960: {probability:.2f})\")\n    print(f\"\uc784\uacc4\uac12: {threshold} (\ud655\ub960\uc774 \uc774 \uac12 \uc774\uc0c1\uc774\uba74 \ub3fc\uc9c0\uac10\uc790\ub85c \ud310\ubcc4)\")\n    print(f\"\uacb0\uacfc \uc774\ubbf8\uc9c0: {output_path}\")\n    print(f\"\uacb0\uacfc JSON: {result_json_path}\")\n\ndef main():\n    parser = argparse.ArgumentParser(description=\"\ub3fc\uc9c0\uac10\uc790 \uc774\ubbf8\uc9c0 \ucd94\ub860 \uc11c\ube44\uc2a4\")\n\n    parser.add_argument(\"--mode\", choices=[\"local\", \"s3\", \"monitor\"], 
required=True,\n                        help=\"\uc2e4\ud589 \ubaa8\ub4dc: local(\ub85c\uceec \uc774\ubbf8\uc9c0), s3(S3 \uc774\ubbf8\uc9c0), monitor(S3 \uac10\uc2dc)\")\n\n    parser.add_argument(\"--image\", help=\"\ucc98\ub9ac\ud560 \uc774\ubbf8\uc9c0 \uacbd\ub85c(local \ubaa8\ub4dc) \ub610\ub294 \ud0a4(s3 \ubaa8\ub4dc)\")\n\n    parser.add_argument(\"--s3-endpoint\", default=S3_ENDPOINT,\n                        help=\"S3 \uc5d4\ub4dc\ud3ec\uc778\ud2b8 URL\")\n    parser.add_argument(\"--s3-access-key\", default=S3_ACCESS_KEY,\n                        help=\"S3 \uc561\uc138\uc2a4 \ud0a4\")\n    parser.add_argument(\"--s3-secret-key\", default=S3_SECRET_KEY,\n                        help=\"S3 \uc2dc\ud06c\ub9bf \ud0a4\")\n    parser.add_argument(\"--s3-bucket\", default=S3_BUCKET,\n                        help=\"S3 \ubc84\ud0b7 \uc774\ub984\")\n\n    parser.add_argument(\"--threshold\", type=float, default=CONFIDENCE_THRESHOLD,\n                        help=f\"\ud310\ubcc4 \uc784\uacc4\uac12 (\uae30\ubcf8\uac12: {CONFIDENCE_THRESHOLD})\")\n    parser.add_argument(\"--interval\", type=int, default=30,\n                        help=\"S3 \uac10\uc2dc \uac04\uaca9(\ucd08)\")\n    parser.add_argument(\"--output-dir\", default=\"results\",\n                        help=\"\uacb0\uacfc \uc800\uc7a5 \ub514\ub809\ud1a0\ub9ac\")\n\n    args = parser.parse_args()\n\n    # \uacb0\uacfc \ub514\ub809\ud1a0\ub9ac \uc0dd\uc131\n    os.makedirs(args.output_dir, exist_ok=True)\n\n    if args.mode == \"local\":\n        if not args.image:\n            parser.error(\"local \ubaa8\ub4dc\uc5d0\ub294 --image \uc778\uc790\uac00 \ud544\uc694\ud569\ub2c8\ub2e4.\")\n        process_local_image(args.image, args.output_dir, args.threshold)\n\n    elif args.mode == \"s3\":\n        if not args.image:\n            parser.error(\"s3 \ubaa8\ub4dc\uc5d0\ub294 --image \uc778\uc790\uac00 \ud544\uc694\ud569\ub2c8\ub2e4.\")\n        process_s3_image(\n            args.s3_endpoint, args.s3_access_key, 
args.s3_secret_key,\n            args.s3_bucket, args.image, args.output_dir, args.threshold\n        )\n\n    elif args.mode == \"monitor\":\n        monitor_and_process(\n            args.s3_endpoint, args.s3_access_key, args.s3_secret_key,\n            args.s3_bucket, args.interval, args.output_dir, args.threshold\n        )\n\nif __name__ == \"__main__\":\n    main()\n<\/pre>\n\n\n\n<p>\ub124\uc774\ubc84 \ud074\ub77c\uc6b0\ub4dc\uc758 Object Storage\ub97c \ud65c\uc6a9\ud558\uc5ec \uc774\ubbf8\uc9c0\ub97c \uc800\uc7a5\ud558\uace0 \ucd94\ub860\ud558\ub294 \uc2dc\uc2a4\ud15c\uc744 \uad6c\ucd95\ud588\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">def setup_s3_client(endpoint, access_key, secret_key):\n    session = boto3.session.Session()\n    s3_client = session.client(\n        's3',\n        endpoint_url=endpoint,\n        aws_access_key_id=access_key,\n        aws_secret_access_key=secret_key\n    )\n    return s3_client\n\ndef get_image_from_s3(s3_client, bucket_name, image_key):\n    response = s3_client.get_object(Bucket=bucket_name, Key=image_key)\n    image_content = response['Body'].read()\n    image = Image.open(BytesIO(image_content))\n    return image<\/pre>\n\n\n\n<p>\uc0ac\uc6a9\uc790\uac00 \uc5c5\ub85c\ub4dc\ud55c \uc774\ubbf8\uc9c0\ub97c \uc2e4\uc2dc\uac04\uc73c\ub85c \ucc98\ub9ac\ud558\ub294 \ubaa8\ub2c8\ud130\ub9c1 \ubaa8\ub4dc\ub97c \uad6c\ud604\ud588\uae30 \ub54c\ubb38\uc5d0 Object Storage\ub97c \uc2e4\uc2dc\uac04\uc73c\ub85c \ubaa8\ub2c8\ud130\ub9c1\ud558\uace0, \uc0c8\ub85c\uc6b4 \uc774\ubbf8\uc9c0\uac00 \uc5c5\ub85c\ub4dc \ub418\uba74 \ud574\ub2f9 \uc774\ubbf8\uc9c0\uac00 \uba87\ud37c\uc13c\ud2b8 \ud655\ub960\ub85c \ub3fc\uc9c0\uac10\uc790\uc778\uc9c0 \uc54c\ub824\uc8fc\ub3c4\ub85d \ud588\uc2b5\ub2c8\ub2e4. 
\uc989, \ucd94\ub860 \uacb0\uacfc\ub97c \uc2dc\uac01\uc801\uc73c\ub85c \ud655\uc778\ud560 \uc218 \uc788\ub3c4\ub85d \uc6d0\ubcf8 \uc774\ubbf8\uc9c0\uc5d0 \uc608\uce21 \uacb0\uacfc\ub97c \ud45c\uc2dc\ud558\uace0 \uc800\uc7a5\ud558\ub294 \uae30\ub2a5\ub3c4 \ud568\uaed8 \uad6c\ud604\ud588\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">def monitor_s3_bucket(s3_client, bucket_name, interval, output_dir, model, transform, threshold):\n    processed_images = set()\n    \n    while True:\n        try:\n            response = s3_client.list_objects_v2(Bucket=bucket_name)\n            \n            if 'Contents' in response:\n                for obj in response['Contents']:\n                    key = obj['Key']\n                    \n                    # \uc774\ubbf8\uc9c0 \ud30c\uc77c\uc774\uace0 \uc544\uc9c1 \ucc98\ub9ac\ub418\uc9c0 \uc54a\uc740 \uacbd\uc6b0\n                    if key.lower().endswith(('.png', '.jpg', '.jpeg', '.gif')) and key not in processed_images:\n                        print(f\"\uc0c8 \uc774\ubbf8\uc9c0 \ubc1c\uacac: {key}\")\n                        \n                        # \uc774\ubbf8\uc9c0 \ucc98\ub9ac\n                        process_s3_image(s3_client, bucket_name, key, output_dir, model, transform, threshold)\n                        processed_images.add(key)\n            \n            print(f\"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')} - \ub2e4\uc74c \ud655\uc778 \uac04\uaca9 \ub300\uae30 \uc911... 
({interval}\ucd08)\")\n            time.sleep(interval)\n            \n        except Exception as e:\n            print(f\"\uc624\ub958 \ubc1c\uc0dd: {str(e)}\")\n            time.sleep(interval)<\/pre>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">def add_prediction_to_image(image, prediction, probability):\n    draw = ImageDraw.Draw(image)\n    \n    # \uacb0\uacfc \ud14d\uc2a4\ud2b8 \uc0dd\uc131\n    result_text = \"\ub3fc\uc9c0\uac10\uc790\" if prediction == 1 else \"\ub3fc\uc9c0\uac10\uc790 \uc544\ub2d8\"\n    probability_text = f\"\ud655\ub960: {probability:.2f}\"\n    \n    # \uc774\ubbf8\uc9c0 \ud558\ub2e8\uc5d0 \uacb0\uacfc \ud45c\uc2dc\n    font_size = max(15, int(image.width \/ 30))\n    # ... \ud14d\uc2a4\ud2b8 \uadf8\ub9ac\uae30 \ucf54\ub4dc ...\n    \n    return image<\/pre>\n\n\n\n<p>\ud559\uc2b5\uacfc \ucd94\ub860\uc744 \ud1b5\ud574 \uacb0\uacfc\ub97c \ubd84\uc11d\ud574\ubcf8 \uacb0\uacfc \uc544\ub798\uc640 \uac19\uc774 \uc5ec\ub7ec \uc0ac\uc2e4\uc744 \uc54c\uac8c \ub418\uc5c8\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<ol>\n<li>\ub3fc\uc9c0\uac10\uc790\ub9cc \ud559\uc2b5\ud560 \uacbd\uc6b0 \ub3fc\uc9c0\uac10\uc790, \uc77c\ubc18 \uac10\uc790, \uc0dd\uac15 3\uac1c\uc758 \uc774\ubbf8\uc9c0\ub97c \ub123\uace0 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc774\uc6a9\ud558\uc5ec \ucd94\ub860\ud588\ub294\ub370 \ubaa8\ub450 \ub3fc\uc9c0 \uac10\uc790\ub77c\uace0 \ud310\ubcc4\ud55c\ub2e4.<\/li>\n\n\n\n<li>\ub3fc\uc9c0\uac10\uc790 \uc678 \ub2e4\ub978 \uac83(\uc218\ubc15)\uc744 not_helianthus_tuberosus \ub370\uc774\ud130\ub85c \ud568\uaed8 \ud559\uc2b5\ud55c \uacb0\uacfc \ub3fc\uc9c0\uac10\uc790\uc640 \ub2e4\ub978 \uac83(\uc218\ubc15)\uc744 \uad6c\ubd84\ud560 \uc218 \uc788\ub2e4. 
\uc989, \uc218\ubc15\uc740 \ub3fc\uc9c0\uac10\uc790\uac00 \uc544\ub2c8\ub77c\uace0 \uba85\ud655\ud558\uac8c \uad6c\ubd84\ud55c\ub2e4.<\/li>\n\n\n\n<li>\uc774 \ud14c\uc2a4\ud2b8\uc5d0\uc11c \ud559\uc2b5 \ubc0f \uac80\uc99d \uc190\uc2e4\uc774 \ucd08\uae30\uc5d0 \uae09\uaca9\ud788 \uac10\uc18c\ud558\uace0 \uc810\ucc28 \uc548\uc815\ud654\ub418\uc5c8\ub2e4.<\/li>\n\n\n\n<li>\ub610\ud55c \uc5d0\ud3ec\ud06c 20 \uae30\uc900 \uc190\uc2e4\uac12 0.05 \uc774\ud558\ub85c \ub0ae\uc544\uc84c\ub2e4.<\/li>\n\n\n\n<li>\ud574\ub2f9 \ub370\uc774\ud130\uc14b\uc73c\ub85c \ud559\uc2b5\ud588\uc744 \ub54c \ud559\uc2b5 \uc190\uc2e4\uacfc \uac80\uc99d \uc190\uc2e4\uc758 \ucc28\uc774\uac00 \uc791\uc544 \uacfc\uc801\ud569\uc774 \ud06c\uac8c \ubc1c\uc0dd\ud558\uc9c0 \uc54a\uc558\ub2e4.<\/li>\n\n\n\n<li>\ud559\uc2b5 \ubc0f \uac80\uc99d \uc815\ud655\ub3c4\uac00 \uc9c0\uc18d\uc801\uc73c\ub85c \uc0c1\uc2b9\ud558\uc5ec \ucd5c\uc885 97% \uc774\uc0c1 \ub2ec\uc131\ud558\uae30\ub3c4 \ud588\ub2e4.<\/li>\n\n\n\n<li>\uac80\uc99d \uc815\ud655\ub3c4\uc5d0 \uc57d\uac04\uc758 \ubcc0\ub3d9\uc774 \uc788\uc73c\ub098 \uc804\ubc18\uc801\uc73c\ub85c \uc0c1\uc2b9\uc138\ub97c \uc720\uc9c0\ud588\ub2e4.<\/li>\n<\/ol>\n\n\n\n<figure class=\"wp-block-gallery has-nested-images columns-default is-cropped wp-block-gallery-1 is-layout-flex wp-block-gallery-is-layout-flex\">\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"800\" height=\"800\" data-id=\"3022\" src=\"https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213119\/prediction_gam.jpeg\" alt=\"\" class=\"wp-image-3022\" srcset=\"https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213119\/prediction_gam.jpeg 800w, https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213119\/prediction_gam-300x300.jpeg 300w, https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213119\/prediction_gam-150x150.jpeg 150w, 
https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213119\/prediction_gam-768x768.jpeg 768w\" sizes=\"(max-width: 800px) 100vw, 800px\" \/><\/figure>\n\n\n\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"800\" height=\"800\" data-id=\"3020\" src=\"https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213119\/prediction_piggam.jpg\" alt=\"\" class=\"wp-image-3020\" srcset=\"https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213119\/prediction_piggam.jpg 800w, https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213119\/prediction_piggam-300x300.jpg 300w, https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213119\/prediction_piggam-150x150.jpg 150w, https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213119\/prediction_piggam-768x768.jpg 768w\" sizes=\"(max-width: 800px) 100vw, 800px\" \/><\/figure>\n\n\n\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"800\" height=\"800\" data-id=\"3021\" src=\"https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213119\/prediction_watermelon.jpg\" alt=\"\" class=\"wp-image-3021\" srcset=\"https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213119\/prediction_watermelon.jpg 800w, https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213119\/prediction_watermelon-300x300.jpg 300w, https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213119\/prediction_watermelon-150x150.jpg 150w, https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213119\/prediction_watermelon-768x768.jpg 768w\" sizes=\"(max-width: 800px) 100vw, 800px\" \/><\/figure>\n<\/figure>\n\n\n\n<figure class=\"wp-block-table\"><table><thead><tr><th>\uc774\ubbf8\uc9c0 \uc885\ub958<\/th><th>\ub3fc\uc9c0\uac10\uc790\ub85c \ud310\ubcc4\ud55c \ud655\ub960<\/th><th>\uacb0\uacfc 
\ud574\uc11d<\/th><\/tr><\/thead><tbody><tr><td>\ub3fc\uc9c0\uac10\uc790<\/td><td>100%<\/td><td>\uc815\ud655\ud55c \ud310\ubcc4<\/td><\/tr><tr><td>\ud1a0\ub9c8\ud1a0<\/td><td>100%<\/td><td>\uc624\uc778\uc2dd(\uae0d\uc815 \uc624\ub958)<\/td><\/tr><tr><td>\uac10\uc790<\/td><td>89%<\/td><td>\uc624\uc778\uc2dd(\uae0d\uc815 \uc624\ub958)<\/td><\/tr><tr><td>\uc218\ubc15<\/td><td>2%<\/td><td>\uc815\ud655\ud55c \ud310\ubcc4<\/td><\/tr><\/tbody><\/table><\/figure>\n\n\n\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"341\" src=\"https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213621\/training_results-1024x341.png\" alt=\"\" class=\"wp-image-3023\" srcset=\"https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213621\/training_results-1024x341.png 1024w, https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213621\/training_results-300x100.png 300w, https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213621\/training_results-768x256.png 768w, https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213621\/training_results-1080x360.png 1080w, https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213621\/training_results.png 1200w\" sizes=\"(max-width: 1024px) 100vw, 1024px\" \/><\/figure>\n\n\n\n<p>\u2192 \ub3fc\uc9c0\uac10\uc790(\uc57d 36.3GB)\ub9cc \ud559\uc2b5\uc2dc\ucf30\uc744 \ub54c 8\uc2dc\uac04 30\ubd84 \uc18c\uc694\ub428. 
(epoch 15\ub85c \uc124\uc815\ud568)<\/p>\n\n\n\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"339\" src=\"https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213630\/image-1024x339.png\" alt=\"\" class=\"wp-image-3024\" srcset=\"https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213630\/image-1024x339.png 1024w, https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213630\/image-300x99.png 300w, https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213630\/image-768x254.png 768w, https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213630\/image-1080x358.png 1080w, https:\/\/cdn.manvscloud.com\/wp-content\/uploads\/2025\/05\/23213630\/image.png 1132w\" sizes=\"(max-width: 1024px) 100vw, 1024px\" \/><\/figure>\n\n\n\n<p>\u2192 \ub3fc\uc9c0\uac10\uc790(\uc57d 36.3GB)\uc640 \uc218\ubc15(\uc57d 11.3GB) 2\uac1c\ub97c \ud559\uc2b5 \uc2dc\ucf30\uc744 \ub54c 48\uc2dc\uac04 45\ubd84\uc774 \uc18c\uc694\ub428. 
(epoch 20\uc73c\ub85c \uc124\uc815\ud568)<\/p>\n\n\n\n<ul>\n<li><strong>\ud55c\uacc4\uc810<\/strong><\/li>\n<\/ul>\n\n\n\n<p>1) <strong>\uc81c\ud55c\ub41c \ubd80\uc815 \uc0d8\ud50c<\/strong><br> : \uc218\ubc15 \uc774\ubbf8\uc9c0\ub9cc\uc744 \ubd80\uc815 \uc0d8\ud50c\ub85c \uc0ac\uc6a9\ud558\uc5ec \ubaa8\ub378\uc774 &#8220;\ub3fc\uc9c0\uac10\uc790 vs \uc218\ubc15&#8221;\uc758 \uc774\ubd84\ubc95\uc801 \ud559\uc2b5\uc744 \ud568<br> : \uc774\ub85c \uc778\ud574 \ud1a0\ub9c8\ud1a0, \uac10\uc790 \ub4f1 \ud559\uc2b5\ub418\uc9c0 \uc54a\uc740 \uc791\ubb3c\uc740 \uc5ec\uc804\ud788 \ub3fc\uc9c0\uac10\uc790\ub85c \uc798\ubabb \ud310\ubcc4\ub428<br> : \ucd94\uac00\uc801\uc778 \ub370\uc774\ud130 \ud559\uc2b5\uacfc \uc5f0\uad6c\ub97c \ud558\uace0 \uc2f6\uc5c8\uc73c\ub098 \uc2a4\ud1a0\ub9ac\uc9c0 \ubd80\uc871 \ubc0f \uc544\ub798 \uc774\uc720\ub85c \ub9ce\uc740 \uc2dc\uac04\uc744 \uc18c\uc694\ud558\uac8c \ub428&#8230;<\/p>\n\n\n\n<p>2) <strong>CPU \ud559\uc2b5\uc758 \ud55c\uacc4<\/strong><br> : \ub300\uc6a9\ub7c9 \ub370\uc774\ud130\uc14b \ud559\uc2b5\uc5d0 \uae34 \uc2dc\uac04 \uc18c\uc694 (\uc57d 48\uc2dc\uac04)<br> : \uba54\ubaa8\ub9ac \uc81c\ud55c\uc73c\ub85c \uc778\ud55c \ubc30\uce58 \ud06c\uae30 \uc81c\uc57d\uacfc \ube48\ubc88\ud55c \uba54\ubaa8\ub9ac \uad00\ub9ac \ud544\uc694<\/p>\n\n\n\n<hr class=\"wp-block-separator has-alpha-channel-opacity\"\/>\n\n\n\n<h3 class=\"wp-block-heading has-white-color has-cyan-bluish-gray-background-color has-text-color has-background has-link-color wp-elements-790b4ede7486cf11fc8601a67c8dcb47\"> Personal Comments<\/h3>\n\n\n\n<p>\uc774\ubc88 \uc2a4\ud130\ub514\ub294 \ud074\ub77c\uc6b0\ub4dc \uc5d4\uc9c0\ub2c8\uc5b4\uc5d0\uc11c ML \uc5d4\uc9c0\ub2c8\uc5b4\ub85c \uc601\uc5ed\uc744 \ud655\uc7a5\ud574\uac00\ub294 \uccab \ub2e8\uacc4\uc600\uc2b5\ub2c8\ub2e4. 
\ucc98\uc74c\uc5d0\ub294 \uc0dd\uc18c\ud588\ub358 \uac1c\ub150\ub4e4\uc774 \uc2e4\uc81c \ud574\ubcf4\ub294 \uacbd\ud5d8\uc744 \ud1b5\ud574 \uc810\ucc28 \uba85\ud655\ud574\uc9c0\ub294 \uacfc\uc815\uc774 \ub9e4\uc6b0 \ubcf4\ub78c\ucc3c\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\uc800\uc640 \uac19\uc774 ML\uc744 \ucc98\uc74c \uc811\ud558\ub294 \ud074\ub77c\uc6b0\ub4dc \uc5d4\uc9c0\ub2c8\uc5b4\ub098 IT \uc885\uc0ac\uc790\ub4e4\uc5d0\uac8c \ub3c4\uc6c0\uc774 \ub418\uae38 \ubc14\ub78d\ub2c8\ub2e4. \uac00\uc7a5 \uc911\uc694\ud55c \uac83\uc740 \ub450\ub824\uc6c0 \uc5c6\uc774 \uccab \ubc1c\uc744 \ub0b4\ub51b\ub294 \uac83\uc785\ub2c8\ub2e4. \ube44\ub85d \uc644\ubcbd\ud558\uc9c0 \uc54a\ub354\ub77c\ub3c4 \uc2e4\uc81c \ud504\ub85c\uc81d\ud2b8\ub97c \ud1b5\ud55c \uacbd\ud5d8\uc774 \uac00\uc7a5 \ube60\ub978 \ud559\uc2b5 \ubc29\ubc95\uc784\uc744 \uc774\ubc88 \uc2a4\ud130\ub514\ub97c \ud1b5\ud574 \ub2e4\uc2dc \ud55c\ubc88 \ud655\uc778\ud560 \uc218 \uc788\uc5c8\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\uae34 \uae00 \uc77d\uc5b4\uc8fc\uc154\uc11c \uac10\uc0ac\ud569\ub2c8\ub2e4.<\/p>\n","protected":false},"excerpt":{"rendered":"<p>\uc548\ub155\ud558\uc138\uc694 MANVSCLOUD \uae40\uc218\ud604\uc785\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd\uacfc \uc778\uacf5\uc9c0\ub2a5 \uae30\uc220\uc774 \ube60\ub974\uac8c \ubc1c\uc804\ud558\uace0 \uc774\uc81c\ub294 \uc774\ub7ec\ud55c \uae30\uc220\ub4e4\uc774 \uc77c\uc0c1 \uc18d\uc5d0\uc11c \ud568\uaed8\ud558\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4. \ube44\ub85d \uc81c\uac00 \ud074\ub77c\uc6b0\ub4dc \uc5d4\uc9c0\ub2c8\uc5b4\uc9c0\ub9cc AI\/ML \uae30\uc220\uc758 \ubc1c\uc804\uc5d0 \ub530\ub77c \uc774 \ubd84\uc57c\uc5d0\ub3c4 \uc790\uc5f0\uc2a4\ub7fd\uac8c \uad00\uc2ec\uc774 \uc0dd\uacbc\uc2b5\ub2c8\ub2e4. 
\ub354 \ub113\uc740 \ubc94\uc704\uc758 \ud074\ub77c\uc6b0\ub4dc \uc11c\ube44\uc2a4\ub97c \ub2e4\ub8e8\uace0 \uc2f6\uc740 \uc695\uad6c\uac00 \ucee4\uc84c\uace0 \uc774\ub97c \uc704\ud55c \ud559\uc2b5\uc774 \ud544\uc694\ud558\ub2e4\uace0 \ub290\uaf08\uc2b5\ub2c8\ub2e4. \ucc98\uc74c \uba38\uc2e0\ub7ec\ub2dd\uc744 \ubc30\uc6b0\ub294 \uc785\ubb38\uc790 \uc785\uc7a5\uc5d0\uc11c \uc774\ub860\uc801\uc778 \uac1c\ub150\ub4e4\uc740 \uc989\uac01\uc801\uc73c\ub85c \uc774\ud574\ud558\uae30 \uc5b4\ub824\uc6e0\uae30\uc5d0 \ube60\ub974\uac8c \uc2e4\uc81c \ud504\ub85c\uc81d\ud2b8\ub97c \ud1b5\ud55c \uacbd\ud5d8\uc73c\ub85c \ud765\ubbf8\ub97c \uc5bb\uace0\uc790 \ud588\uc2b5\ub2c8\ub2e4. \uc608\uc804\uac19\uc558\uc73c\uba74 AI\/ML\uc5d0 \ub300\ud55c \uc9c0\uc2dd\uc774 \uac70\uc758 \uc5c6\uace0 \uac1c\ubc1c\uc790\uac00 \uc544\ub2c8\ub77c\ub294 \uc810\uc774 \uc2dc\uc791\ud558\ub294 \ub370 \ud070 \uc7a5\ubcbd\uc73c\ub85c \ub290\uaf08\uaca0\uc9c0\ub9cc \uc694\uc998\uc740 \ubc14\uc774\ube0c \ucf54\ub529\uc774 \uc77c\uc0c1\uc774\ub77c \uc5d4\uc9c0\ub2c8\uc5b4 \uc785\uc7a5\uc5d0\uc11c\ub3c4 \uc758\uc9c0\ub9cc \uc788\ub2e4\uba74 \ubb34\uc5c7\uc774\ub4e0 \uc2dc\uc791\ud560 \uc218 \uc788\ub2e4\uace0 \uc0dd\uac01\ud569\ub2c8\ub2e4. \uc624\ub298 \ud3ec\uc2a4\ud305\uc5d0\uc11c\ub294 AI\/ML \uacbd\ud5d8\uc774 \uc5c6\ub358 \uc81c\uac00 \ub124\uc774\ubc84 \ud074\ub77c\uc6b0\ub4dc \ud658\uacbd\uc744 \ud65c\uc6a9\ud558\uc5ec \ub3fc\uc9c0\uac10\uc790\ub97c \uc2dd\ubcc4\ud558\ub294 \ub525\ub7ec\ub2dd \uae30\ubc18 \ucd94\ub860 \ubaa8\ub378\uc744 \uac1c\ubc1c\ud558\uba70 \uc2a4\ud130\ub514\ud588\ub358 \uacfc\uc815\uc744 \uc18c\uac1c\ud558\uace0\uc790 \ud569\ub2c8\ub2e4. 
\uc2a4\ud130\ub514 \uac1c\uc694 \ubcf8\ub860 #1 \ud559\uc2b5 \ub3fc\uc9c0\uac10\uc790 \ub370\uc774\ud130\uc14b\uc740 AI-Hub\uc5d0\uc11c \uac00\uc838\uc654\uae30\ub54c\ubb38\uc5d0 \uad6d\ub0b4 IP\uc5d0\uc11c\ub9cc \ub2e4\uc6b4\ub85c\ub4dc\ud558\ubbc0\ub85c \uba38\uc2e0\ub7ec\ub2dd \ud559\uc2b5\uacfc \ucd94\ub860\uc744 \uc704\ud55c \ud658\uacbd\uc740 \ub124\uc774\ubc84 \ud074\ub77c\uc6b0\ub4dc \ud50c\ub7ab\ud3fc\uc744 \uc120\ud0dd\ud588\uc2b5\ub2c8\ub2e4. \ub610\ud55c \uac1c\uc778 \ud14c\uc2a4\ud2b8\uc774\ubbc0\ub85c \ube44\uc6a9\uc801\uc778 \uce21\uba74\uc744 \uace0\ub824\ud558\uc5ec GPU \ub300\uc2e0 CPU \ud658\uacbd\uc5d0\uc11c \uc791\uc5c5\uc744 \uc9c4\ud589\ud588\uc2b5\ub2c8\ub2e4. \ucc38\uace0\ub85c AWS EC2\uc5d0\uc11c\ub3c4 \ub370\uc774\ud130\ub97c \ub2e4\uc6b4\ub85c\ub4dc \ud574\ubd24\ub294\ub370 AWS EC2\uac00 \uc11c\uc6b8 \ub9ac\uc804\uc5d0 \uc788\uc5b4\ub3c4 \ud574\uc678 IP\ub85c \uc778\uc2dd\ub418\uc5b4 \uc811\uadfc\uc774 \ucc28\ub2e8\ub418\ub294 \ubb38\uc81c\uac00 \ubc1c\uc0dd\ud569\ub2c8\ub2e4. \uc774\ub7f0 \uc9c0\uc5ed \uc81c\ud55c \ubb38\uc81c\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud574 \ub124\uc774\ubc84 \ud074\ub77c\uc6b0\ub4dc \ud50c\ub7ab\ud3fc\uc744 \uc120\ud0dd\ud558\ub294 \uac83\uc740 \ud558\ub098\uc758 \uc88b\uc740 \uc120\ud0dd\uc9c0\uac00 \ub420 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ube44\uc6a9 \ud6a8\uc728\uc131\uc744 \uc704\ud574 \ucd5c\uc18c\ud55c\uc758 \uc11c\ubc84 \uc2a4\ud399\uc73c\ub85c \uc2dc\uc791\ud588\uc9c0\ub9cc \uc774\ub85c \uc778\ud55c \ud559\uc2b5 \uc2dc\uac04 \uc99d\uac00\uc640 \uba54\ubaa8\ub9ac \ubd80\uc871 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\ub294 \uacfc\uc815\uc774 \uc624\ud788\ub824 \uc88b\uc740 \ud559\uc2b5 \uacbd\ud5d8\uc774 \ub418\uc5c8\uc2b5\ub2c8\ub2e4. 
Ubuntu [&hellip;]<\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":{"advanced_seo_description":"","jetpack_seo_html_title":"","jetpack_seo_noindex":false,"_jetpack_memberships_contains_paid_content":false,"footnotes":""},"categories":[3],"tags":[1016,1017,32,87,91,17,90,16,89,202,398,1018,790,98],"jetpack_sharing_enabled":true,"jetpack_featured_media_url":"","_links":{"self":[{"href":"https:\/\/manvscloud.com\/index.php?rest_route=\/wp\/v2\/posts\/3018"}],"collection":[{"href":"https:\/\/manvscloud.com\/index.php?rest_route=\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/manvscloud.com\/index.php?rest_route=\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/manvscloud.com\/index.php?rest_route=\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/manvscloud.com\/index.php?rest_route=%2Fwp%2Fv2%2Fcomments&post=3018"}],"version-history":[{"count":3,"href":"https:\/\/manvscloud.com\/index.php?rest_route=\/wp\/v2\/posts\/3018\/revisions"}],"predecessor-version":[{"id":3026,"href":"https:\/\/manvscloud.com\/index.php?rest_route=\/wp\/v2\/posts\/3018\/revisions\/3026"}],"wp:attachment":[{"href":"https:\/\/manvscloud.com\/index.php?rest_route=%2Fwp%2Fv2%2Fmedia&parent=3018"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/manvscloud.com\/index.php?rest_route=%2Fwp%2Fv2%2Fcategories&post=3018"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/manvscloud.com\/index.php?rest_route=%2Fwp%2Fv2%2Ftags&post=3018"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}