{"id":3208,"date":"2025-07-10T16:24:13","date_gmt":"2025-07-10T08:24:13","guid":{"rendered":"https:\/\/www.gnn.club\/?p=3208"},"modified":"2025-07-18T14:14:33","modified_gmt":"2025-07-18T06:14:33","slug":"3208","status":"publish","type":"post","link":"http:\/\/gnn.club\/?p=3208","title":{"rendered":"ECA-Net"},"content":{"rendered":"<h1>\u57fa\u672c\u4fe1\u606f<\/h1>\n<h5>\ud83d\udcf0\u6807\u9898: ECA-Net: Efficient Channel Attention for Deep Convolutional Neural Networks<\/h5>\n<h5>\ud83d\udd8b\ufe0f\u4f5c\u8005: Qilong Wang<\/h5>\n<h5>\ud83c\udfdb\ufe0f\u673a\u6784: Tianjin University\uff08\u5929\u6d25\u5927\u5b66\uff09<\/h5>\n<h5>\ud83d\udd25\u5173\u952e\u8bcd: Channel Attention, CNN, Lightweight, Efficiency<\/h5>\n<h2>\u6458\u8981\u6982\u8ff0<\/h2>\n<table border=\"1\" cellspacing=\"0\" cellpadding=\"5\" style=\"border-collapse: collapse;\">\n<thead>\n<tr>\n<th style=\"border: 1px solid black; padding: 8px;\">\u9879\u76ee<\/th>\n<th style=\"border: 1px solid black; padding: 8px;\">\u5185\u5bb9<\/th>\n<\/tr>\n<\/thead>\n<tbody>\n<tr>\n<td style=\"border: 1px solid black; padding: 8px;\">\ud83d\udcd6\u7814\u7a76\u80cc\u666f<\/td>\n<td style=\"border: 1px solid black; padding: 8px;\">\u73b0\u6709\u901a\u9053\u6ce8\u610f\u529b\u673a\u5236\uff08\u5982SENet\uff09\u8ba1\u7b97\u590d\u6742\u5ea6\u9ad8\uff0c\u96be\u4ee5\u8f7b\u91cf\u5316\u90e8\u7f72<\/td>\n<\/tr>\n<tr>\n<td style=\"border: 1px solid black; padding: 8px;\">\ud83c\udfaf\u7814\u7a76\u76ee\u7684<\/td>\n<td style=\"border: 1px solid black; padding: 8px;\">\u63d0\u51fa\u4e00\u79cd\u9ad8\u6548\u901a\u9053\u6ce8\u610f\u529b\u6a21\u5757\uff08ECA\uff09\uff0c\u5e73\u8861\u6027\u80fd\u4e0e\u8ba1\u7b97\u6548\u7387<\/td>\n<\/tr>\n<tr>\n<td style=\"border: 1px solid black; padding: 8px;\">\u270d\ufe0f\u7814\u7a76\u65b9\u6cd5<\/td>\n<td style=\"border: 1px solid black; padding: 8px;\">\u901a\u8fc7\u4e00\u7ef4\u5377\u79ef\uff081D 
Conv\uff09\u6355\u83b7\u8de8\u901a\u9053\u4ea4\u4e92\uff0c\u907f\u514d\u964d\u7ef4\u64cd\u4f5c<\/td>\n<\/tr>\n<tr>\n<td style=\"border: 1px solid black; padding: 8px;\">\ud83d\udd4a\ufe0f\u7814\u7a76\u5bf9\u8c61<\/td>\n<td style=\"border: 1px solid black; padding: 8px;\">\u6df1\u5ea6\u5377\u79ef\u795e\u7ecf\u7f51\u7edc\uff08\u5982ResNet\u3001MobileNet\u7b49\uff09<\/td>\n<\/tr>\n<tr>\n<td style=\"border: 1px solid black; padding: 8px;\">\ud83d\udd0d\u7814\u7a76\u7ed3\u8bba<\/td>\n<td style=\"border: 1px solid black; padding: 8px;\">ECA\u6a21\u5757\u5728ImageNet\u7b49\u57fa\u51c6\u4e0a\u8d85\u8d8aSENet\uff0c\u53c2\u6570\u91cf\u4ec5\u4e3a\u51761\/3\uff0c\u63a8\u7406\u901f\u5ea6\u63d0\u534715%<\/td>\n<\/tr>\n<tr>\n<td style=\"border: 1px solid black; padding: 8px;\">\u2b50\u521b\u65b0\u70b9<\/td>\n<td style=\"border: 1px solid black; padding: 8px;\">\u2460 \u65e0\u9700\u964d\u7ef4\u7684\u8de8\u901a\u9053\u4ea4\u4e92\u5efa\u6a21 \u2461 \u81ea\u9002\u5e94\u6838\u5927\u5c0f\u7684\u4e00\u7ef4\u5377\u79ef\u8bbe\u8ba1<\/td>\n<\/tr>\n<\/tbody>\n<\/table>\n<h1>\u80cc\u666f<\/h1>\n<ul>\n<li>\n<p><strong>\u7814\u7a76\u80cc\u666f<\/strong>\uff1a\u6df1\u5ea6\u5377\u79ef\u795e\u7ecf\u7f51\u7edc\uff08CNNs\uff09\u5728\u8ba1\u7b97\u673a\u89c6\u89c9\u4efb\u52a1\u4e2d\u8868\u73b0\u4f18\u5f02\uff0c\u4f46\u73b0\u6709\u901a\u9053\u6ce8\u610f\u529b\u673a\u5236\uff08\u5982SENet\uff09\u5b58\u5728\u8ba1\u7b97\u590d\u6742\u5ea6\u9ad8\u3001\u6a21\u578b\u5197\u4f59\u7684\u95ee\u9898\uff0c\u5236\u7ea6\u8f7b\u91cf\u5316\u90e8\u7f72\u3002<\/p>\n<\/li>\n<li>\n<p><strong>\u8fc7\u53bb\u65b9\u6848<\/strong>\uff1a<\/p>\n<ul>\n<li>\n<p><strong>\u4e3b\u6d41\u65b9\u6cd5<\/strong>\uff1a\u4ee5SENet\u4e3a\u4ee3\u8868\uff0c\u91c7\u7528&quot;\u538b\u7f29-\u6fc0\u52b1&quot;\u7ed3\u6784\uff08GAP+FC\u5c42\uff09\uff0c\u901a\u8fc7\u964d\u7ef4\u5efa\u6a21\u8de8\u901a\u9053\u4ea4\u4e92\uff0c\u4f46\u5f15\u5165\u989d\u5916\u53c2\u6570\u91cf\u3002<\/p>\n<\/li>\n<li>\n<p><strong>\u6539\u8fdb\u65b9\u5411<\/strong>\uff1a\u540
e\u7eed\u7814\u7a76\u901a\u8fc7\u590d\u6742\u4f9d\u8d56\u5efa\u6a21\uff08\u5982CBAM\u3001A2-Nets\uff09\u6216\u7ed3\u5408\u7a7a\u95f4\u6ce8\u610f\u529b\u63d0\u5347\u6027\u80fd\uff0c\u4f46\u8fdb\u4e00\u6b65\u589e\u52a0\u8ba1\u7b97\u8d1f\u62c5\u3002<\/p>\n<\/li>\n<li>\n<p><strong>\u6838\u5fc3\u95ee\u9898<\/strong>\uff1a\u964d\u7ef4\u64cd\u4f5c\u5bfc\u81f4\u901a\u9053\u4f9d\u8d56\u4fe1\u606f\u635f\u5931\uff0c\u4e14\u5168\u901a\u9053\u4ea4\u4e92\u8ba1\u7b97\u6548\u7387\u4f4e\u4e0b\u3002<\/p>\n<\/li>\n<\/ul>\n<\/li>\n<li>\n<p><strong>\u7814\u7a76\u52a8\u673a<\/strong>\uff1a<br \/>\n\u63a2\u7d22\u4e00\u79cd\u65e0\u9700\u964d\u7ef4\u3001\u8f7b\u91cf\u5316\u7684\u8de8\u901a\u9053\u4ea4\u4e92\u673a\u5236\uff0c\u5728\u4fdd\u8bc1\u6027\u80fd\u7684\u540c\u65f6\u663e\u8457\u964d\u4f4e\u8ba1\u7b97\u6210\u672c\uff0c\u89e3\u51b3\u73b0\u6709\u65b9\u6cd5\u6548\u7387\u4e0e\u6548\u679c\u96be\u4ee5\u5e73\u8861\u7684\u74f6\u9888\u3002<\/p>\n<\/li>\n<\/ul>\n<h1>\u65b9\u6cd5<\/h1>\n<ul>\n<li>\n<p><strong>\u7406\u8bba\u80cc\u666f<\/strong>\uff1a<br \/>\n\u672c\u7814\u7a76\u57fa\u4e8e\u901a\u9053\u6ce8\u610f\u529b\u673a\u5236\uff08Channel Attention\uff09\u7684\u4f18\u5316\u9700\u6c42\u5c55\u5f00\u3002\u4f20\u7edfSE Block\u901a\u8fc7\u5168\u8fde\u63a5\u5c42\uff08FC\uff09\u8fdb\u884c\u901a\u9053\u4ea4\u4e92\u5efa\u6a21\u65f6\u5b58\u5728\u4e24\u4e2a\u7f3a\u9677\uff1a<br \/>\n1\uff09\u964d\u7ef4\u64cd\u4f5c\uff08dimensionality reduction\uff09\u7834\u574f\u901a\u9053\u4e0e\u6743\u91cd\u7684\u76f4\u63a5\u5bf9\u5e94\u5173\u7cfb\uff1b<br 
\/>\n2\uff09\u5168\u5c40\u8de8\u901a\u9053\u4ea4\u4e92\uff08\u5982\u5168\u77e9\u9635\u8ba1\u7b97\uff09\u5bfc\u81f4\u9ad8\u8ba1\u7b97\u590d\u6742\u5ea6\u3002\u7406\u8bba\u5206\u6790\u8868\u660e\uff0c\u4fdd\u6301\u901a\u9053\u7ef4\u5ea6\u5b8c\u6574\u6027\u4e0e\u9ad8\u6548\u5c40\u90e8\u4ea4\u4e92\u662f\u63d0\u5347\u6ce8\u610f\u529b\u673a\u5236\u6548\u7387\u7684\u5173\u952e\u3002<\/p>\n<\/li>\n<li>\n<p><strong>\u6280\u672f\u8def\u7ebf<\/strong>\uff1a<\/p>\n<ol>\n<li>\n<p><strong>\u95ee\u9898\u8bca\u65ad<\/strong>\uff1a\u901a\u8fc7\u5bf9\u6bd4\u5b9e\u9a8c\uff08SE-Var1\/2\/3\uff09\u9a8c\u8bc1\u964d\u7ef4\u64cd\u4f5c\u5bf9\u6027\u80fd\u7684\u8d1f\u9762\u5f71\u54cd\uff0c\u53d1\u73b0\u76f4\u63a5\u901a\u9053-\u6743\u91cd\u6620\u5c04\u4f18\u4e8e\u964d\u7ef4+\u975e\u7ebf\u6027\u7ec4\u5408\u3002<\/p>\n<\/li>\n<li>\n<p><strong>\u7ed3\u6784\u8bbe\u8ba1<\/strong>\uff1a<\/p>\n<ul>\n<li>\n<p><strong>\u65e0\u964d\u7ef4\u5efa\u6a21<\/strong>\uff1a\u76f4\u63a5\u5229\u7528GAP\u540e\u7684\u901a\u9053\u7279\u5f81\u5411\u91cf\uff0c\u907f\u514d\u7ef4\u5ea6\u538b\u7f29\uff08C\u2192C\/r\u2192C\uff09\u3002<\/p>\n<\/li>\n<li>\n<p><strong>\u5c40\u90e8\u8de8\u901a\u9053\u4ea4\u4e92<\/strong>\uff1a\u91c7\u7528\u4e00\u7ef4\u5377\u79ef\uff081D Conv\uff09\u66ff\u4ee3\u5168\u8fde\u63a5\u5c42\uff0c\u901a\u8fc7\u81ea\u9002\u5e94\u6838\u5927\u5c0fk\u6355\u83b7\u5c40\u90e8\u901a\u9053\u4f9d\u8d56\uff0c\u53c2\u6570\u91cf\u4ec5k\u00d7C\uff08k\u226aC\uff09\u3002<\/p>\n<\/li>\n<\/ul>\n<\/li>\n<li>\n<p><strong>\u81ea\u9002\u5e94\u4f18\u5316<\/strong>\uff1a\u6839\u636e\u901a\u9053\u7ef4\u5ea6C\u52a8\u6001\u786e\u5b9a\u5377\u79ef\u6838\u5927\u5c0fk\uff0c\u5b9e\u73b0\u8ba1\u7b97\u6548\u7387\u4e0e\u4ea4\u4e92\u8303\u56f4\u7684\u5e73\u8861\u3002\u6700\u7ec8\u6a21\u5757\uff08ECA\uff09\u4ec5\u97003\u00d7C\u53c2\u6570\uff0c\u5728ImageNet\u4e0a\u8d85\u8d8aSE Block\uff08Top-1 +0.72%\uff09\u3002<\/p>\n<\/li>\n<\/ol>\n<\/li>\n<li>\n<p><strong>\u6280\u672f\u8be6\u89e3<\/strong>\uff1a<br 
\/>\n\u4e0b\u56fe\u5c55\u793a\u4e86ECA-Net\uff08Efficient Channel Attention Module\uff09\u7684\u6838\u5fc3\u673a\u5236\uff0c\u5176\u8bbe\u8ba1\u76ee\u6807\u662f\u5728\u6781\u4f4e\u8ba1\u7b97\u6210\u672c\u4e0b\u5b9e\u73b0\u9ad8\u6548\u7684\u901a\u9053\u6ce8\u610f\u529b\u5efa\u6a21\u3002\u4ee5\u4e0b\u662f\u5176\u5173\u952e\u7ec4\u6210\u90e8\u5206\u548c\u5de5\u4f5c\u539f\u7406\u7684\u8be6\u7ec6\u89e3\u91ca\uff1a<\/p>\n<\/li>\n<\/ul>\n<p><img decoding=\"async\" src=\"https:\/\/gnnclub-1311496010.cos.ap-beijing.myqcloud.com\/wp-content\/uploads\/2025\/07\/20250710161431907.png\" width=\"600\" style=\"display: block; margin: 0 auto;\" \/><\/p>\n<p><strong>1\uff0e\u8f93\u5165\u7279\u5f81\u5904\u7406<\/strong><br \/>\n\u8f93\u5165\u7279\u5f81\u56fe\uff1a\u7ed9\u5b9a\u5377\u79ef\u5c42\u7684\u8f93\u51fa\u7279\u5f81\u56fe $X \\in \\mathbb{R}^{W \\times H \\times C}$ \uff0c\u5176\u4e2d $W \\times H$ \u4e3a\u7a7a\u95f4\u7ef4\u5ea6\uff0c$C$ \u4e3a\u901a\u9053\u6570\u3002<br \/>\n\u5168\u5c40\u5e73\u5747\u6c60\u5316\uff08GAP\uff09\uff1a\u5bf9\u6bcf\u4e2a\u901a\u9053\u8fdb\u884c\u7a7a\u95f4\u7ef4\u5ea6\u7684\u538b\u7f29\uff0c\u751f\u6210\u901a\u9053\u63cf\u8ff0\u5411\u91cf $y \\in \\mathbb{R}^C$ \uff0c\u5176\u4e2d $y_i=\\frac{1}{W H} \\sum_{j=1}^W \\sum_{k=1}^H X_{i, j, k}$ \u3002\u8fd9\u4e00\u6b65\u6355\u83b7\u4e86\u6bcf\u4e2a\u901a\u9053\u7684\u5168\u5c40\u4fe1\u606f\uff0c\u79f0\u4e3a\uff02Squeeze\uff02\u64cd\u4f5c\u3002<\/p>\n<p><strong>2\uff0e\u8de8\u901a\u9053\u4ea4\u4e92\u5efa\u6a21<\/strong><br \/>\n1D\u5377\u79ef\uff08C1D\uff09\uff1a\u4e0e\u4f20\u7edfSENet\u4f7f\u7528\u5168\u8fde\u63a5\u5c42\uff08FC\uff09\u4e0d\u540c\uff0cECA\u901a\u8fc7\u4e00\u7ef4\u5377\u79ef\uff08kernel size\uff1d$k$ \uff09\u5b9e\u73b0\u5c40\u90e8\u8de8\u901a\u9053\u4ea4\u4e92\u3002\u6bcf\u4e2a\u901a\u9053\u7684\u6ce8\u610f\u529b\u6743\u91cd\u4ec5\u7531\u5176 $k$ \u4e2a\u76f8\u90bb\u901a\u9053\u51b3\u5b9a\uff0c\u516c\u5f0f\u4e3a\uff1a$\\omega_i=\\sigma\\left(\\sum_{j=1}^k w_j y_{i+j-\\lfloor k \/ 
2\\rfloor}\\right)$ \u5176\u4e2d $\\sigma$ \u4e3a Sigmoid\u51fd\u6570\uff0c$w_j$ \u4e3a\u5171\u4eab\u7684\u5377\u79ef\u6838\u53c2\u6570\u3002\u8fd9\u79cd\u8bbe\u8ba1\u907f\u514d\u4e86SENet\u4e2d\u964d\u7ef4\u64cd\u4f5c\u7684\u4fe1\u606f\u635f\u5931\uff0c\u540c\u65f6\u5c06\u53c2\u6570\u91cf\u4ece $O\\left(C^2\\right)$\u964d\u81f3 $O(k C)$\uff08\u901a\u5e38 $k \\ll C$ \uff09\u3002<\/p>\n<p><strong>3\uff0e\u81ea\u9002\u5e94\u6838\u5927\u5c0f\uff08Adaptive Kernel Size\uff09<\/strong><br \/>\n\u975e\u7ebf\u6027\u6620\u5c04\uff1aECA\u901a\u8fc7\u901a\u9053\u7ef4\u5ea6 $C$ \u81ea\u9002\u5e94\u786e\u5b9a\u5377\u79ef\u6838\u5927\u5c0f $k$ \uff0c\u516c\u5f0f\u4e3a\uff1a$k=\\psi(C)=\\left|\\frac{\\log _2(C)}{\\gamma}+\\frac{b}{\\gamma}\\right|_{\\text {odd }}$ \u5176\u4e2d $\\gamma=2$\u3001$b=1$\uff0c$|\\cdot|_{\\text{odd}}$ \u8868\u793a\u53d6\u6700\u63a5\u8fd1\u7684\u5947\u6570\u3002\u4f8b\u5982\uff0cResNet\uff0d50\u4e2d $C=256$ \u65f6 $k=5$ \u3002\u8fd9\u4e00\u673a\u5236\u786e\u4fdd\u9ad8\u7ef4\u901a\u9053\u6709\u66f4\u5927\u7684\u4ea4\u4e92\u8303\u56f4\uff0c\u800c\u4f4e\u7ef4\u901a\u9053\u5219\u5c40\u90e8\u4ea4\u4e92\uff0c\u5e73\u8861\u6548\u7387\u4e0e\u6548\u679c $[1]$ \u3002<\/p>\n<p><strong>4\uff0e\u6ce8\u610f\u529b\u6743\u91cd\u5e94\u7528<\/strong><br \/>\n\u7279\u5f81\u91cd\u6807\u5b9a\uff1a\u6700\u7ec8\u6ce8\u610f\u529b\u6743\u91cd $\\omega \\in \\mathbb{R}^C$ \u4e0e\u539f\u59cb\u7279\u5f81\u56fe\u9010\u901a\u9053\u76f8\u4e58\uff08Element\uff0dwise Product\uff09\uff0c\u5b9e\u73b0\u901a\u9053\u7ea7\u7279\u5f81\u589e\u5f3a\uff1a$\\tilde{X}_i=\\omega_i \\cdot X_i$ \u8fd9\u4e00\u6b65\u79f0\u4e3a\uff02Excitation\uff02\uff0c\u901a\u8fc7\u7a81\u51fa\u91cd\u8981\u901a\u9053\u3001\u6291\u5236\u5197\u4f59\u901a\u9053\u63d0\u5347\u6a21\u578b\u8868\u5f81\u80fd\u529b\u3002<\/p>\n<p><strong>5\uff0e\u4f18\u52bf\u5bf9\u6bd4<\/strong><br \/>\n\u4e0eSENet\u5bf9\u6bd4\uff1aECA\u7701\u53bb\u4e86SENet\u4e2d\u7684\u964d\u7ef4FC\u5c42\uff08\u53c2\u6570\u91cf\u4ece $2 C^2 \/ r$ \u964d\u81f3 $k C$ 
\uff09\uff0c\u5728ResNet\uff0d50\u4e0a\u4ec5\u589e\u52a080\u53c2\u6570\uff0c\u4f46Top\uff0d1\u51c6\u786e\u7387\u63d0\u53472\uff0e28\\\uff05\uff3b1\uff3d\u3002<br \/>\n\u8f7b\u91cf\u5316\u5b9e\u73b0\uff1aECA\u4ec5\u97003\u884c\u6838\u5fc3\u4ee3\u7801\uff08GAP $\\rightarrow 1 \\mathrm{D}$ Conv $\\rightarrow$ Sigmoid\uff09\uff0c\u8ba1\u7b97\u5f00\u9500\u53ef\u5ffd\u7565\uff08\u5982 ResNet\uff0d50\u4e0a\u4ec5\u589e\u52a04\uff0e7e\uff0d4 GFLOPs\uff09\u3002<\/p>\n<h1>\u7ed3\u8bba<\/h1>\n<ul>\n<li>\n<p>\u7814\u7a76\u610f\u4e49\uff1a\u63d0\u51fa\u4e00\u79cd\u8f7b\u91cf\u9ad8\u6548\u7684\u901a\u9053\u6ce8\u610f\u529b\u6a21\u5757\uff08ECA\uff09\uff0c\u901a\u8fc71D\u5377\u79ef\u5b9e\u73b0\u4f4e\u590d\u6742\u5ea6\u7684\u8de8\u901a\u9053\u4ea4\u4e92\u5efa\u6a21\uff0c\u4e3a\u6df1\u5ea6CNN\u7684\u6ce8\u610f\u529b\u673a\u5236\u8bbe\u8ba1\u63d0\u4f9b\u65b0\u601d\u8def\u3002<\/p>\n<\/li>\n<li>\n<p>\u4f18\u7f3a\u70b9\uff1a<\/p>\n<ul>\n<li>\u4f18\u70b9\uff1a<br \/>\n\u2460 \u6781\u7b80\u7ed3\u6784\uff08\u53c2\u6570\u91cf\u4ec53\u00d7C\uff09<br \/>\n\u2461 \u81ea\u9002\u5e94\u6838\u5c3a\u5bf8\u63d0\u5347\u6cdb\u5316\u6027<br \/>\n\u2462 \u5373\u63d2\u5373\u7528\u65e0\u9700\u7ed3\u6784\u8c03\u6574\uff1b<\/li>\n<li>\u7f3a\u70b9\uff1a\u672a\u4e0e\u7a7a\u95f4\u6ce8\u610f\u529b\u673a\u5236\u534f\u540c\u4f18\u5316\uff08\u4f5c\u8005\u6307\u51fa\u672a\u6765\u7814\u7a76\u65b9\u5411\uff09\u3002<\/li>\n<\/ul>\n<\/li>\n<li>\n<p>\u4e3b\u8981\u7ed3\u8bba\uff1a<br \/>\n(1) ECA\u6a21\u5757\u901a\u8fc71D\u5377\u79ef\u5b9e\u73b0\u65e0\u964d\u7ef4\u901a\u9053\u6ce8\u610f\u529b\uff0c\u8ba1\u7b97\u6548\u7387\u663e\u8457\u4f18\u4e8eSENet\uff1b<br \/>\n(2) \u5728ResNet\u3001MobileNetV2\u7b49\u67b6\u6784\u4e0a\u9a8c\u8bc1\u5176\u901a\u7528\u6027\uff0c\u76ee\u6807\u68c0\u6d4b\/\u5b9e\u4f8b\u5206\u5272\u4efb\u52a1\u4e2d\u5c55\u73b0\u5f3a\u6cdb\u5316\u80fd\u529b\uff1b<br \/>\n(3) 
\u6838\u5c3a\u5bf8\u81ea\u9002\u5e94\u673a\u5236\u53ef\u52a8\u6001\u5e73\u8861\u5c40\u90e8\u4ea4\u4e92\u8303\u56f4\u4e0e\u8ba1\u7b97\u6210\u672c\u3002<\/p>\n<\/li>\n<\/ul>\n<h1>Pytorch code<\/h1>\n<pre><code class=\"language-python\">import math\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom torch import Tensor\n\nclass ECABlock(nn.Module):\n    def __init__(self, channels: int, gamma: int = 2, b: int = 1):\n        &quot;&quot;&quot;\n        ECA Block \u5b9e\u73b0 (CVPR 2020)\n        Args:\n            channels: \u8f93\u5165\u7279\u5f81\u56fe\u7684\u901a\u9053\u6570\n            gamma: \u7528\u4e8e\u8ba1\u7b97\u5377\u79ef\u6838\u5927\u5c0f\u7684\u8d85\u53c2\u6570\uff08\u9ed8\u8ba4=2\uff09\n            b: \u7528\u4e8e\u8ba1\u7b97\u5377\u79ef\u6838\u5927\u5c0f\u7684\u8d85\u53c2\u6570\uff08\u9ed8\u8ba4=1\uff09\n        &quot;&quot;&quot;\n        super(ECABlock, self).__init__()\n\n        # \u81ea\u9002\u5e94\u8ba1\u7b97\u5377\u79ef\u6838\u5927\u5c0f\uff08\u786e\u4fdd\u4e3a\u5947\u6570\uff09\n        t = int(abs((math.log(channels, 2) + b) \/ gamma))\n        kernel_size = t if t % 2 else t + 1\n\n        # 1D\u5377\u79ef\u5b9e\u73b0\u8de8\u901a\u9053\u4ea4\u4e92\uff08\u65e0\u9700\u964d\u7ef4\uff09\n        self.conv = nn.Conv1d(\n            in_channels=1,\n            out_channels=1,\n            kernel_size=kernel_size,\n            padding=(kernel_size - 1) \/\/ 2,\n            bias=False\n        )\n\n        # \u5168\u5c40\u5e73\u5747\u6c60\u5316\n        self.avg_pool = nn.AdaptiveAvgPool2d(1)\n\n    def forward(self, x: Tensor) -&gt; Tensor:\n        b, c, h, w = x.shape\n\n        # Squeeze: \u5168\u5c40\u5e73\u5747\u6c60\u5316 [b,c,h,w] -&gt; [b,c,1,1]\n        y = self.avg_pool(x)\n\n        # ECA\u64cd\u4f5c\uff08\u8de8\u901a\u9053\u4ea4\u4e92\uff09\n        # [b,c,1,1] -&gt; [b,1,c] -&gt; 1D\u5377\u79ef -&gt; [b,1,c] -&gt; [b,c,1,1]\n        y = y.view(b, 1, c)           # \u8f6c\u6362\u4e3a1D\u5377\u79ef\u8f93\u5165\u683c\u5f0f\n      
  y = self.conv(y)              # \u8de8\u901a\u9053\u4ea4\u4e92\n        y = y.view(b, c, 1, 1)        # \u6062\u590d\u5f62\u72b6\n\n        # Sigmoid\u6fc0\u6d3b\u751f\u6210\u6743\u91cd\n        y = torch.sigmoid(y)\n\n        # \u7279\u5f81\u56fe\u91cd\u6807\u5b9a\n        return x * y.expand_as(x)\n\n# ------------------- \u7528\u6cd5\u793a\u4f8b -------------------\nif __name__ == &quot;__main__&quot;:\n    import math\n\n    # 1. \u521d\u59cb\u5316ECA Block\uff08\u8f93\u5165\u901a\u9053\u6570\u4e3a256\uff09\n    eca_block = ECABlock(channels=256)\n\n    # 2. \u6a21\u62df\u8f93\u5165\u6570\u636e\uff08batch_size=4, \u901a\u9053=256, \u7279\u5f81\u56fe\u5c3a\u5bf8=56x56\uff09\n    dummy_input = torch.randn(4, 256, 56, 56)\n\n    # 3. \u524d\u5411\u4f20\u64ad\n    output = eca_block(dummy_input)\n\n    print(f&quot;\u8f93\u5165\u5f62\u72b6: {dummy_input.shape}&quot;)\n    print(f&quot;\u8f93\u51fa\u5f62\u72b6: {output.shape}&quot;)  # \u5e94\u4e0e\u8f93\u5165\u5f62\u72b6\u4e00\u81f4<\/code><\/pre>\n","protected":false},"excerpt":{"rendered":"<p>\u57fa\u672c\u4fe1\u606f \ud83d\udcf0\u6807\u9898: ECA-Net: Efficient Channel Attention for Deep 
[&hellip;]<\/p>\n","protected":false},"author":1,"featured_media":3209,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":{"footnotes":""},"categories":[30,18],"tags":[],"class_list":["post-3208","post","type-post","status-publish","format-standard","has-post-thumbnail","hentry","category-30","category-18"],"_links":{"self":[{"href":"http:\/\/gnn.club\/index.php?rest_route=\/wp\/v2\/posts\/3208","targetHints":{"allow":["GET"]}}],"collection":[{"href":"http:\/\/gnn.club\/index.php?rest_route=\/wp\/v2\/posts"}],"about":[{"href":"http:\/\/gnn.club\/index.php?rest_route=\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"http:\/\/gnn.club\/index.php?rest_route=\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"http:\/\/gnn.club\/index.php?rest_route=%2Fwp%2Fv2%2Fcomments&post=3208"}],"version-history":[{"count":7,"href":"http:\/\/gnn.club\/index.php?rest_route=\/wp\/v2\/posts\/3208\/revisions"}],"predecessor-version":[{"id":3259,"href":"http:\/\/gnn.club\/index.php?rest_route=\/wp\/v2\/posts\/3208\/revisions\/3259"}],"wp:featuredmedia":[{"embeddable":true,"href":"http:\/\/gnn.club\/index.php?rest_route=\/wp\/v2\/media\/3209"}],"wp:attachment":[{"href":"http:\/\/gnn.club\/index.php?rest_route=%2Fwp%2Fv2%2Fmedia&parent=3208"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"http:\/\/gnn.club\/index.php?rest_route=%2Fwp%2Fv2%2Fcategories&post=3208"},{"taxonomy":"post_tag","embeddable":true,"href":"http:\/\/gnn.club\/index.php?rest_route=%2Fwp%2Fv2%2Ftags&post=3208"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}