{"id":1021969,"date":"2024-12-27T13:29:10","date_gmt":"2024-12-27T05:29:10","guid":{"rendered":""},"modified":"2024-12-27T13:29:13","modified_gmt":"2024-12-27T05:29:13","slug":"%e5%a6%82%e4%bd%95%e7%94%a8python%e5%88%86%e6%9e%90%e5%9b%be%e7%89%87","status":"publish","type":"post","link":"https:\/\/docs.pingcode.com\/ask\/1021969.html","title":{"rendered":"\u5982\u4f55\u7528python\u5206\u6790\u56fe\u7247"},"content":{"rendered":"<p style=\"text-align:center;\" ><img decoding=\"async\" src=\"https:\/\/cdn-kb.worktile.com\/kb\/wp-content\/uploads\/2024\/04\/25164326\/cee2fd87-5c8a-4488-852a-5fddc1ab85bc.webp\" alt=\"\u5982\u4f55\u7528python\u5206\u6790\u56fe\u7247\" \/><\/p>\n<p><p> \u8981\u7528Python\u5206\u6790\u56fe\u7247\uff0c\u53ef\u4ee5\u4f7f\u7528<strong>OpenCV\u3001PIL\u3001scikit-image\u3001TensorFlow\u3001Keras\u3001PyTorch<\/strong>\u7b49\u5e93\u3002\u8fd9\u4e9b\u5e93\u63d0\u4f9b\u4e86\u4e0d\u540c\u7684\u5de5\u5177\u548c\u529f\u80fd\uff0c\u4f7f\u5f97\u5728Python\u4e2d\u5904\u7406\u548c\u5206\u6790\u56fe\u50cf\u53d8\u5f97\u76f8\u5bf9\u7b80\u5355\u3002\u4f8b\u5982\uff0c<strong>OpenCV<\/strong>\u662f\u4e00\u4e2a\u5f3a\u5927\u7684\u8ba1\u7b97\u673a\u89c6\u89c9\u5e93\uff0c\u9002\u7528\u4e8e\u5404\u79cd\u56fe\u50cf\u5904\u7406\u4efb\u52a1\uff1b<strong>PIL\uff08Python Imaging 
Library\uff09<\/strong>\u9002\u5408\u57fa\u672c\u7684\u56fe\u50cf\u64cd\u4f5c\uff1b<strong>scikit-image<\/strong>\u63d0\u4f9b\u4e86\u4e00\u4e9b\u9ad8\u7ea7\u7684\u56fe\u50cf\u5904\u7406\u529f\u80fd\uff1b<strong>TensorFlow\u548cKeras<\/strong>\u53ef\u4ee5\u7528\u4e8e\u6df1\u5ea6\u5b66\u4e60\u56fe\u50cf\u8bc6\u522b\u4efb\u52a1\uff1b<strong>PyTorch<\/strong>\u4e5f\u662f\u4e00\u4e2a\u5e7f\u6cdb\u4f7f\u7528\u7684\u6df1\u5ea6\u5b66\u4e60\u6846\u67b6\u3002\u5177\u4f53\u4f7f\u7528\u54ea\u79cd\u5e93\u53d6\u51b3\u4e8e\u4f60\u7684\u9700\u6c42\u548c\u9879\u76ee\u89c4\u6a21\u3002<\/p>\n<\/p>\n<p><p>\u4e0b\u9762\u5c06\u8be6\u7ec6\u4ecb\u7ecd\u5982\u4f55\u7528\u8fd9\u4e9b\u5e93\u8fdb\u884c\u56fe\u50cf\u5206\u6790\u3002<\/p>\n<\/p>\n<p><p>\u4e00\u3001\u4f7f\u7528OpenCV\u5206\u6790\u56fe\u50cf<\/p>\n<\/p>\n<p><p>OpenCV\u662f\u4e00\u4e2a\u5f00\u6e90\u7684\u8ba1\u7b97\u673a\u89c6\u89c9\u548c<a href=\"https:\/\/docs.pingcode.com\/ask\/59192.html\" target=\"_blank\">\u673a\u5668\u5b66\u4e60<\/a>\u8f6f\u4ef6\u5e93\uff0c\u5e7f\u6cdb\u7528\u4e8e\u5b9e\u65f6\u56fe\u50cf\u5904\u7406\u548c\u5206\u6790\u3002\u5b83\u652f\u6301\u591a\u79cd\u7f16\u7a0b\u8bed\u8a00\uff0c\u5305\u62ecPython\u3002<\/p>\n<\/p>\n<ol>\n<li>\n<p><strong>\u5b89\u88c5\u548c\u5bfc\u5165OpenCV<\/strong><\/p>\n<\/p>\n<p><p>\u9996\u5148\u9700\u8981\u5b89\u88c5OpenCV\u5e93\uff0c\u53ef\u4ee5\u4f7f\u7528\u4ee5\u4e0b\u547d\u4ee4\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-bash\">pip install opencv-python<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><p>\u7136\u540e\u5728Python\u811a\u672c\u4e2d\u5bfc\u5165OpenCV\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import 
cv2<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u8bfb\u53d6\u548c\u663e\u793a\u56fe\u50cf<\/strong><\/p>\n<\/p>\n<p><p>\u4f7f\u7528OpenCV\u8bfb\u53d6\u548c\u663e\u793a\u56fe\u50cf\u975e\u5e38\u7b80\u5355\u3002<code>cv2.imread()<\/code>\u7528\u4e8e\u8bfb\u53d6\u56fe\u50cf\uff0c<code>cv2.imshow()<\/code>\u7528\u4e8e\u663e\u793a\u56fe\u50cf\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">image = cv2.imread(&#39;path_to_image.jpg&#39;)<\/p>\n<p>cv2.imshow(&#39;Image&#39;, image)<\/p>\n<p>cv2.waitKey(0)<\/p>\n<p>cv2.destroyAllWindows()<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u56fe\u50cf\u5904\u7406<\/strong><\/p>\n<\/p>\n<p><p>OpenCV\u63d0\u4f9b\u4e86\u591a\u79cd\u56fe\u50cf\u5904\u7406\u529f\u80fd\uff0c\u4f8b\u5982\u7070\u5ea6\u8f6c\u6362\u3001\u8fb9\u7f18\u68c0\u6d4b\u3001\u56fe\u50cf\u5e73\u6ed1\u7b49\u3002<\/p>\n<\/p>\n<ul>\n<li>\n<p><strong>\u7070\u5ea6\u8f6c\u6362<\/strong>\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">gray_image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u8fb9\u7f18\u68c0\u6d4b<\/strong>\uff1a<\/p>\n<p>\u4f7f\u7528Canny\u8fb9\u7f18\u68c0\u6d4b\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">edges = cv2.Canny(gray_image, 100, 200)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u56fe\u50cf\u5e73\u6ed1<\/strong>\uff1a<\/p>\n<p>\u4f8b\u5982\u9ad8\u65af\u6a21\u7cca\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">blurred_image = cv2.GaussianBlur(image, (5, 5), 0)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<\/ul>\n<\/li>\n<li>\n<p><strong>\u7279\u5f81\u68c0\u6d4b<\/strong><\/p>\n<\/p>\n<p><p>OpenCV\u4e5f\u652f\u6301\u7279\u5f81\u68c0\u6d4b\u548c\u63cf\u8ff0\uff0c\u4f8b\u5982SIFT\u3001SURF\u548cORB\u3002<\/p>\n<\/p>\n<ul>\n<li><strong>ORB\u7279\u5f81\u68c0\u6d4b<\/strong>\uff1a\n<pre><code class=\"language-python\">orb = 
cv2.ORB_create()<\/p>\n<p>keypoints, descriptors = orb.detectAndCompute(image, None)<\/p>\n<p>image_with_keypoints = cv2.drawKeypoints(image, keypoints, None)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<\/ul>\n<p><p>\u7279\u5f81\u68c0\u6d4b\u5728\u56fe\u50cf\u5339\u914d\u3001\u7269\u4f53\u8bc6\u522b\u7b49\u4efb\u52a1\u4e2d\u975e\u5e38\u6709\u7528\u3002<\/p>\n<\/p>\n<\/li>\n<\/ol>\n<p><p>\u4e8c\u3001\u4f7f\u7528PIL\u5904\u7406\u56fe\u50cf<\/p>\n<\/p>\n<p><p>PIL\uff08Python Imaging Library\uff09\u662f\u4e00\u4e2aPython\u56fe\u50cf\u5904\u7406\u5e93\uff0c\u9002\u5408\u57fa\u672c\u7684\u56fe\u50cf\u64cd\u4f5c\u3002\u867d\u7136PIL\u5df2\u7ecf\u4e0d\u518d\u7ef4\u62a4\uff0c\u4f46\u5176\u6d3e\u751f\u5e93Pillow\u4ecd\u7136\u5e7f\u6cdb\u4f7f\u7528\u3002<\/p>\n<\/p>\n<ol>\n<li>\n<p><strong>\u5b89\u88c5\u548c\u5bfc\u5165Pillow<\/strong><\/p>\n<\/p>\n<p><p>\u53ef\u4ee5\u4f7f\u7528\u4ee5\u4e0b\u547d\u4ee4\u5b89\u88c5Pillow\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-bash\">pip install pillow<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><p>\u7136\u540e\u5728Python\u811a\u672c\u4e2d\u5bfc\u5165Pillow\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from PIL import Image<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u6253\u5f00\u548c\u663e\u793a\u56fe\u50cf<\/strong><\/p>\n<\/p>\n<p><p>\u4f7f\u7528Pillow\u6253\u5f00\u548c\u663e\u793a\u56fe\u50cf\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">image = Image.open(&#39;path_to_image.jpg&#39;)<\/p>\n<p>image.show()<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u57fa\u672c\u56fe\u50cf\u64cd\u4f5c<\/strong><\/p>\n<\/p>\n<p><p>Pillow\u652f\u6301\u591a\u79cd\u57fa\u672c\u56fe\u50cf\u64cd\u4f5c\uff0c\u4f8b\u5982\u88c1\u526a\u3001\u65cb\u8f6c\u3001\u8c03\u6574\u5927\u5c0f\u3002<\/p>\n<\/p>\n<ul>\n<li>\n<p><strong>\u88c1\u526a<\/strong>\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">cropped_image = image.crop((left, top, right, 
bottom))<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u65cb\u8f6c<\/strong>\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">rotated_image = image.rotate(45)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u8c03\u6574\u5927\u5c0f<\/strong>\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">resized_image = image.resize((width, height))<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<\/ul>\n<\/li>\n<li>\n<p><strong>\u56fe\u50cf\u6ee4\u955c<\/strong><\/p>\n<\/p>\n<p><p>Pillow\u63d0\u4f9b\u4e86\u4e00\u4e9b\u7b80\u5355\u7684\u56fe\u50cf\u6ee4\u955c\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from PIL import ImageFilter<\/p>\n<p>blurred_image = image.filter(ImageFilter.BLUR)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<\/ol>\n<p><p>\u4e09\u3001\u4f7f\u7528scikit-image\u5206\u6790\u56fe\u50cf<\/p>\n<\/p>\n<p><p>scikit-image\u662f\u4e00\u4e2a\u57fa\u4e8eSciPy\u7684Python\u5e93\uff0c\u4e13\u95e8\u7528\u4e8e\u56fe\u50cf\u5904\u7406\u3002\u5b83\u63d0\u4f9b\u4e86\u4e00\u4e9b\u9ad8\u7ea7\u7684\u56fe\u50cf\u5904\u7406\u529f\u80fd\u3002<\/p>\n<\/p>\n<ol>\n<li>\n<p><strong>\u5b89\u88c5\u548c\u5bfc\u5165scikit-image<\/strong><\/p>\n<\/p>\n<p><p>\u53ef\u4ee5\u4f7f\u7528\u4ee5\u4e0b\u547d\u4ee4\u5b89\u88c5scikit-image\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-bash\">pip install scikit-image<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><p>\u7136\u540e\u5728Python\u811a\u672c\u4e2d\u5bfc\u5165scikit-image\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from skimage import io<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u8bfb\u53d6\u548c\u663e\u793a\u56fe\u50cf<\/strong><\/p>\n<\/p>\n<p><p>\u4f7f\u7528scikit-image\u8bfb\u53d6\u548c\u663e\u793a\u56fe\u50cf\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">image = 
io.imread(&#39;path_to_image.jpg&#39;)<\/p>\n<p>io.imshow(image)<\/p>\n<p>io.show()<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u56fe\u50cf\u53d8\u6362<\/strong><\/p>\n<\/p>\n<p><p>scikit-image\u652f\u6301\u591a\u79cd\u56fe\u50cf\u53d8\u6362\uff0c\u4f8b\u5982\u989c\u8272\u53d8\u6362\u3001\u51e0\u4f55\u53d8\u6362\u3002<\/p>\n<\/p>\n<ul>\n<li>\n<p><strong>\u989c\u8272\u53d8\u6362<\/strong>\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from skimage.color import rgb2gray<\/p>\n<p>gray_image = rgb2gray(image)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u51e0\u4f55\u53d8\u6362<\/strong>\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from skimage.transform import rotate<\/p>\n<p>rotated_image = rotate(image, angle=45)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<\/ul>\n<\/li>\n<li>\n<p><strong>\u56fe\u50cf\u5206\u5272<\/strong><\/p>\n<\/p>\n<p><p>\u56fe\u50cf\u5206\u5272\u662f\u56fe\u50cf\u5206\u6790\u4e2d\u7684\u4e00\u4e2a\u91cd\u8981\u6b65\u9aa4\u3002scikit-image\u63d0\u4f9b\u4e86\u591a\u79cd\u5206\u5272\u7b97\u6cd5\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from skimage.filters import threshold_otsu<\/p>\n<p>thresh = threshold_otsu(gray_image)<\/p>\n<p>binary_image = gray_image &gt; thresh<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<\/ol>\n<p><p>\u56db\u3001\u4f7f\u7528TensorFlow\u548cKeras\u8fdb\u884c\u6df1\u5ea6\u5b66\u4e60\u56fe\u50cf\u5206\u6790<\/p>\n<\/p>\n<p><p>TensorFlow\u548cKeras\u662f\u7528\u4e8e\u6df1\u5ea6\u5b66\u4e60\u7684\u6d41\u884c\u6846\u67b6\uff0c\u5e38\u7528\u4e8e\u56fe\u50cf\u5206\u7c7b\u548c\u8bc6\u522b\u4efb\u52a1\u3002<\/p>\n<\/p>\n<ol>\n<li>\n<p><strong>\u5b89\u88c5\u548c\u5bfc\u5165TensorFlow\u548cKeras<\/strong><\/p>\n<\/p>\n<p><p>\u53ef\u4ee5\u4f7f\u7528\u4ee5\u4e0b\u547d\u4ee4\u5b89\u88c5TensorFlow\u548cKeras\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-bash\">pip install 
tensorflow<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><p>\u7136\u540e\u5728Python\u811a\u672c\u4e2d\u5bfc\u5165TensorFlow\u548cKeras\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import tensorflow as tf<\/p>\n<p>from tensorflow import keras<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u52a0\u8f7d\u548c\u9884\u5904\u7406\u6570\u636e<\/strong><\/p>\n<\/p>\n<p><p>\u4f7f\u7528Keras\u7684\u6570\u636e\u96c6\u6a21\u5757\u53ef\u4ee5\u8f7b\u677e\u52a0\u8f7d\u548c\u9884\u5904\u7406\u56fe\u50cf\u6570\u636e\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">(train_images, train_labels), (test_images, test_labels) = keras.datasets.cifar10.load_data()<\/p>\n<p>train_images = train_images \/ 255.0<\/p>\n<p>test_images = test_images \/ 255.0<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u6784\u5efa\u6a21\u578b<\/strong><\/p>\n<\/p>\n<p><p>\u4f7f\u7528Keras\u6784\u5efa\u6df1\u5ea6\u5b66\u4e60\u6a21\u578b\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">model = keras.Sequential([<\/p>\n<p>    keras.layers.Flatten(input_shape=(32, 32, 3)),<\/p>\n<p>    keras.layers.Dense(128, activation=&#39;relu&#39;),<\/p>\n<p>    keras.layers.Dense(10, activation=&#39;softmax&#39;)<\/p>\n<p>])<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u7f16\u8bd1\u548c\u8bad\u7ec3\u6a21\u578b<\/strong><\/p>\n<\/p>\n<p><p>\u7f16\u8bd1\u548c\u8bad\u7ec3\u6a21\u578b\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">model.compile(optimizer=&#39;adam&#39;,<\/p>\n<p>              loss=&#39;sparse_categorical_crossentropy&#39;,<\/p>\n<p>              metrics=[&#39;accuracy&#39;])<\/p>\n<p>model.fit(train_images, train_labels, epochs=10)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u8bc4\u4f30\u6a21\u578b<\/strong><\/p>\n<\/p>\n<p><p>\u4f7f\u7528\u6d4b\u8bd5\u6570\u636e\u8bc4\u4f30\u6a21\u578b\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">test_loss, test_acc = model.evaluate(test_images, 
test_labels)<\/p>\n<p>print(&#39;Test accuracy:&#39;, test_acc)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<\/ol>\n<p><p>\u4e94\u3001\u4f7f\u7528PyTorch\u8fdb\u884c\u6df1\u5ea6\u5b66\u4e60\u56fe\u50cf\u5206\u6790<\/p>\n<\/p>\n<p><p>PyTorch\u662f\u53e6\u4e00\u4e2a\u5e7f\u6cdb\u4f7f\u7528\u7684\u6df1\u5ea6\u5b66\u4e60\u6846\u67b6\uff0c\u5177\u6709\u7075\u6d3b\u7684\u8bbe\u8ba1\u548c\u52a8\u6001\u8ba1\u7b97\u56fe\u3002<\/p>\n<\/p>\n<ol>\n<li>\n<p><strong>\u5b89\u88c5\u548c\u5bfc\u5165PyTorch<\/strong><\/p>\n<\/p>\n<p><p>\u53ef\u4ee5\u4f7f\u7528\u4ee5\u4e0b\u547d\u4ee4\u5b89\u88c5PyTorch\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-bash\">pip install torch torchvision<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><p>\u7136\u540e\u5728Python\u811a\u672c\u4e2d\u5bfc\u5165PyTorch\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import torch<\/p>\n<p>import torchvision<\/p>\n<p>import torchvision.transforms as transforms<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u52a0\u8f7d\u548c\u9884\u5904\u7406\u6570\u636e<\/strong><\/p>\n<\/p>\n<p><p>\u4f7f\u7528torchvision\u52a0\u8f7d\u548c\u9884\u5904\u7406CIFAR-10\u6570\u636e\u96c6\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">transform = transforms.Compose(<\/p>\n<p>    [transforms.ToTensor(),<\/p>\n<p>     transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])<\/p>\n<p>trainset = torchvision.datasets.CIFAR10(root=&#39;.\/data&#39;, train=True,<\/p>\n<p>                                        download=True, transform=transform)<\/p>\n<p>trainloader = torch.utils.data.DataLoader(trainset, batch_size=4,<\/p>\n<p>                                          shuffle=True, num_workers=2)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u5b9a\u4e49\u6a21\u578b<\/strong><\/p>\n<\/p>\n<p><p>\u4f7f\u7528torch.nn\u5b9a\u4e49\u5377\u79ef\u795e\u7ecf\u7f51\u7edc\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import torch.nn as nn<\/p>\n<p>import torch.nn.functional as F<\/p>\n<p>class 
Net(nn.Module):<\/p>\n<p>    def __init__(self):<\/p>\n<p>        super(Net, self).__init__()<\/p>\n<p>        self.conv1 = nn.Conv2d(3, 6, 5)<\/p>\n<p>        self.pool = nn.MaxPool2d(2, 2)<\/p>\n<p>        self.conv2 = nn.Conv2d(6, 16, 5)<\/p>\n<p>        self.fc1 = nn.Linear(16 * 5 * 5, 120)<\/p>\n<p>        self.fc2 = nn.Linear(120, 84)<\/p>\n<p>        self.fc3 = nn.Linear(84, 10)<\/p>\n<p>    def forward(self, x):<\/p>\n<p>        x = self.pool(F.relu(self.conv1(x)))<\/p>\n<p>        x = self.pool(F.relu(self.conv2(x)))<\/p>\n<p>        x = x.view(-1, 16 * 5 * 5)<\/p>\n<p>        x = F.relu(self.fc1(x))<\/p>\n<p>        x = F.relu(self.fc2(x))<\/p>\n<p>        x = self.fc3(x)<\/p>\n<p>        return x<\/p>\n<p>net = Net()<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u5b9a\u4e49\u635f\u5931\u51fd\u6570\u548c\u4f18\u5316\u5668<\/strong><\/p>\n<\/p>\n<p><p>\u4f7f\u7528\u4ea4\u53c9\u71b5\u635f\u5931\u548c\u968f\u673a\u68af\u5ea6\u4e0b\u964d\u4f18\u5316\u5668\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import torch.optim as optim<\/p>\n<p>criterion = nn.CrossEntropyLoss()<\/p>\n<p>optimizer = optim.SGD(net.parameters(), lr=0.001, momentum=0.9)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u8bad\u7ec3\u6a21\u578b<\/strong><\/p>\n<\/p>\n<p><p>\u8bad\u7ec3\u6a21\u578b\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">for epoch in range(2):  # loop over the dataset multiple times<\/p>\n<p>    running_loss = 0.0<\/p>\n<p>    for i, data in enumerate(trainloader, 0):<\/p>\n<p>        # get the inputs; data is a list of [inputs, labels]<\/p>\n<p>        inputs, labels = data<\/p>\n<p>        # zero the parameter gradients<\/p>\n<p>        optimizer.zero_grad()<\/p>\n<p>        # forward + backward + optimize<\/p>\n<p>        outputs = net(inputs)<\/p>\n<p>        loss = criterion(outputs, labels)<\/p>\n<p>        loss.backward()<\/p>\n<p>        optimizer.step()<\/p>\n<p>        # print statistics<\/p>\n<p>        
running_loss += loss.item()<\/p>\n<p>        if i % 2000 == 1999:    # print every 2000 mini-batches<\/p>\n<p>            print(&#39;[%d, %5d] loss: %.3f&#39; %<\/p>\n<p>                  (epoch + 1, i + 1, running_loss \/ 2000))<\/p>\n<p>            running_loss = 0.0<\/p>\n<p>print(&#39;Finished Training&#39;)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<li>\n<p><strong>\u8bc4\u4f30\u6a21\u578b<\/strong><\/p>\n<\/p>\n<p><p>\u4f7f\u7528\u6d4b\u8bd5\u6570\u636e\u8bc4\u4f30\u6a21\u578b\uff1a<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">correct = 0<\/p>\n<p>total = 0<\/p>\n<p>with torch.no_grad():<\/p>\n<p>    for data in testloader:<\/p>\n<p>        images, labels = data<\/p>\n<p>        outputs = net(images)<\/p>\n<p>        _, predicted = torch.max(outputs.data, 1)<\/p>\n<p>        total += labels.size(0)<\/p>\n<p>        correct += (predicted == labels).sum().item()<\/p>\n<p>print(&#39;Accuracy of the network on the 10000 test images: %d %%&#39; % (<\/p>\n<p>    100 * correct \/ total))<\/p>\n<p><\/code><\/pre>\n<\/p>\n<\/li>\n<\/ol>\n<p><p>\u901a\u8fc7\u4ee5\u4e0a\u4ecb\u7ecd\uff0c\u53ef\u4ee5\u770b\u5230Python\u63d0\u4f9b\u4e86\u4e30\u5bcc\u7684\u56fe\u50cf\u5206\u6790\u5e93\u548c\u5de5\u5177\uff0c\u6bcf\u4e2a\u5e93\u90fd\u6709\u5176\u72ec\u7279\u7684\u529f\u80fd\u548c\u4f7f\u7528\u573a\u666f\u3002\u9009\u62e9\u5408\u9002\u7684\u5e93\u6765\u8fdb\u884c\u56fe\u50cf\u5206\u6790\uff0c\u53ef\u4ee5\u5927\u5927\u63d0\u9ad8\u5de5\u4f5c\u6548\u7387\u548c\u5206\u6790\u6548\u679c\u3002<\/p>\n<\/p>\n<h2><strong>\u76f8\u5173\u95ee\u7b54FAQs\uff1a<\/strong><\/h2>\n<p> <strong>\u5982\u4f55\u5f00\u59cb\u4f7f\u7528Python\u8fdb\u884c\u56fe\u50cf\u5206\u6790\uff1f<\/strong><br 
\/>\u8981\u5f00\u59cb\u4f7f\u7528Python\u8fdb\u884c\u56fe\u50cf\u5206\u6790\uff0c\u60a8\u9700\u8981\u5b89\u88c5\u4e00\u4e9b\u57fa\u7840\u5e93\uff0c\u5982OpenCV\u548cPillow\u3002\u8fd9\u4e9b\u5e93\u63d0\u4f9b\u4e86\u5f3a\u5927\u7684\u56fe\u50cf\u5904\u7406\u529f\u80fd\u3002\u60a8\u53ef\u4ee5\u901a\u8fc7pip\u547d\u4ee4\u8f7b\u677e\u5b89\u88c5\u5b83\u4eec\uff1a<code>pip install opencv-python pillow<\/code>\u3002\u63a5\u7740\uff0c\u60a8\u53ef\u4ee5\u4f7f\u7528\u8fd9\u4e9b\u5e93\u52a0\u8f7d\u56fe\u50cf\u3001\u8fdb\u884c\u57fa\u672c\u5904\u7406\uff0c\u5982\u8c03\u6574\u5927\u5c0f\u3001\u88c1\u526a\u548c\u8fc7\u6ee4\u7b49\u3002<\/p>\n<p><strong>Python\u56fe\u50cf\u5206\u6790\u4e2d\u5e38\u7528\u7684\u6280\u672f\u6709\u54ea\u4e9b\uff1f<\/strong><br \/>\u5728\u56fe\u50cf\u5206\u6790\u4e2d\uff0c\u5e38\u7528\u7684\u6280\u672f\u5305\u62ec\u8fb9\u7f18\u68c0\u6d4b\u3001\u7279\u5f81\u63d0\u53d6\u3001\u56fe\u50cf\u5206\u5272\u548c\u6a21\u5f0f\u8bc6\u522b\u3002\u8fb9\u7f18\u68c0\u6d4b\u53ef\u4ee5\u5e2e\u52a9\u8bc6\u522b\u56fe\u50cf\u4e2d\u7684\u7269\u4f53\u8f6e\u5ed3\uff0c\u800c\u7279\u5f81\u63d0\u53d6\u5219\u6709\u52a9\u4e8e\u63d0\u53d6\u91cd\u8981\u4fe1\u606f\u4ee5\u4f9b\u540e\u7eed\u5206\u6790\u3002\u56fe\u50cf\u5206\u5272\u6280\u672f\u5e38\u7528\u4e8e\u5c06\u56fe\u50cf\u5206\u5272\u6210\u591a\u4e2a\u90e8\u5206\uff0c\u4ee5\u4fbf\u66f4\u597d\u5730\u5206\u6790\u6bcf\u4e00\u90e8\u5206\u3002<\/p>\n<p><strong>\u6709\u54ea\u4e9b\u5b9e\u7528\u7684Python\u5e93\u53ef\u4ee5\u7528\u4e8e\u56fe\u50cf\u5206\u6790\uff1f<\/strong><br 
\/>\u9664\u4e86OpenCV\u548cPillow\uff0c\u5176\u4ed6\u4e00\u4e9b\u6709\u7528\u7684Python\u5e93\u5305\u62ecscikit-image\u3001TensorFlow\u548cKeras\u3002scikit-image\u4e13\u6ce8\u4e8e\u56fe\u50cf\u5904\u7406\u548c\u8ba1\u7b97\u673a\u89c6\u89c9\u4efb\u52a1\uff0c\u800cTensorFlow\u548cKeras\u5219\u63d0\u4f9b\u4e86\u6df1\u5ea6\u5b66\u4e60\u6846\u67b6\uff0c\u53ef\u4ee5\u7528\u4e8e\u66f4\u590d\u6742\u7684\u56fe\u50cf\u5206\u6790\u4efb\u52a1\uff0c\u5982\u56fe\u50cf\u5206\u7c7b\u548c\u5bf9\u8c61\u68c0\u6d4b\u3002\u8fd9\u4e9b\u5e93\u7684\u7ed3\u5408\u80fd\u591f\u6ee1\u8db3\u4e0d\u540c\u5c42\u6b21\u7684\u56fe\u50cf\u5206\u6790\u9700\u6c42\u3002<\/p>\n","protected":false},"excerpt":{"rendered":"\u8981\u7528Python\u5206\u6790\u56fe\u7247\uff0c\u53ef\u4ee5\u4f7f\u7528OpenCV\u3001PIL\u3001scikit-image\u3001TensorFlow\u3001Ker [&hellip;]","protected":false},"author":3,"featured_media":1021978,"comment_status":"closed","ping_status":"","sticky":false,"template":"","format":"standard","meta":{"_acf_changed":false,"footnotes":""},"categories":[37],"tags":[],"acf":[],"_links":{"self":[{"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/posts\/1021969"}],"collection":[{"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/users\/3"}],"replies":[{"embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/comments?post=1021969"}],"version-history":[{"count":"1","href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/posts\/1021969\/revisions"}],"predecessor-version":[{"id":1021980,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/posts\/1021969\/revisions\/1021980"}],"wp:featuredmedia":[{"embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/media\/1021978"}],"wp:attachment":[{"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/media?parent=1021969"}],"wp:term":[{"taxonomy":"category","
embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/categories?post=1021969"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/tags?post=1021969"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}