{"id":322,"date":"2024-05-30T17:40:03","date_gmt":"2024-05-30T09:40:03","guid":{"rendered":"http:\/\/106.52.213.145:21080\/?p=322"},"modified":"2024-06-25T11:03:29","modified_gmt":"2024-06-25T03:03:29","slug":"youhua-yolov8-shipinliu-gongzuoliuchengtishengzairusudu","status":"publish","type":"post","link":"https:\/\/apifj.com\/index.php\/2024\/05\/30\/youhua-yolov8-shipinliu-gongzuoliuchengtishengzairusudu\/","title":{"rendered":"[\u4ee3\u7801\u4f18\u5316]\u4f18\u5316 yolov8 \u89c6\u9891\u6d41 \u5de5\u4f5c\u6d41\u7a0b\uff0c\u63d0\u5347\u8f7d\u5165\u901f\u5ea6"},"content":{"rendered":"<h2>1.\u539f\u4ee3\u7801\u6d41\u7a0b\u89e3\u91ca<\/h2>\n<p>\u539f\u4ee3\u7801\u5728\u8f7d\u5165\u89c6\u9891\u7684\u65f6\u5019\uff0c\u4f1a\u5f00\u542fn\u4e2a\u591a\u7ebf\u7a0b\u6765\u8f7d\u5165\u89c6\u9891\u5e27\uff0c\u5176\u4e2dn\u662f\u89c6\u9891\u6d41\u7684\u6570\u91cf\u3002\u7136\u540e\u5c06\u89c6\u9891\u5e27\u9001\u5165\u5230\u8bc6\u522b\u7ebf\u7a0b\u4e2d\uff0c\u7ecf\u8fc7\u9884\u5904\u7406\u3001\u8bc6\u522b\u3001\u540e\u5904\u7406\u3002\u6d41\u7a0b\u56fe\u5982\u4e0b\uff1a<br \/>\n<img decoding=\"async\" src=\"\/wp-content\/uploads\/2024\/05\/\u622a\u5716-2024-05-30-\u4e0b\u53485.28.25-1024x716.png\" alt=\"\" \/><br \/>\n\u6211\u4eec\u53ef\u4ee5\u770b\u5230\uff0c\u6211\u4eec\u5f00\u8bbe\u4e86n\u4e2a\u7ebf\u7a0b\u6765\u5904\u7406n\u4e2a\u6d41\uff0c\u7136\u540e\u6bcf\u4e00\u4e2a\u7ebf\u7a0b\u5c06\u7f16\u7801\u7684\u7ed3\u679c\u653e\u5165\u5230BUFFER\u4e2d\uff0c\u800c\u540e\u518d\u6b21\u8fdb\u5165\u7f16\u7801\u7ebf\u7a0b\u4e2d\u6355\u83b7\u4e0b\u4e00\u5e27\uff0c\u5982\u679cBUFFER\u4e2d\u6709\u56fe\u7247\uff0c\u76f4\u63a5\u8986\u76d6\u4e0a\u4e00\u5e27\u6240\u6355\u83b7\u7684\u56fe\u7247\u3002\u53e6\u5916\u4e00\u4e2a\u7ebf\u7a0b\u4eceBUFFER\u5185\u53d6\u5f97\u6570\u636e\uff0c\u7136\u540e\u7ecf\u8fc7Pre-Processing \u6a21\u5757\uff0cPre-Processing\u6a21\u5757\u4e2d\u5c06 raw data 
\u9010\u5f20\u6267\u884cletterbox\u5904\u7406\u5e76\u6253\u5305\u4e3abatch\u7684tensor\uff0c\u5e76\u4f20\u5165cuda\u8bbe\u5907\u4e2d\uff0cDetection\u4f7f\u7528GPU\u5e76\u884c\u68c0\u6d4b\u540e\u518d\u505aNMS\u3002<\/p>\n<h2>2. \u7f3a\u70b9\u5206\u6790\u4e0e\u89e3\u51b3\u65b9\u6848<\/h2>\n<h3>1. \u65e0\u6548\u6570\u636e\u5904\u7406<\/h3>\n<p>\u5728\u7f16\u7801\u540e\uff0c\u6211\u4eec\u653e\u5165\u5230BUFFER\u4e2d\uff0c\u5982\u679c\u8bc6\u522b\u7ebf\u7a0b\u8fd8\u6ca1\u6709\u6765\u5f97\u53ca\u53d6\u8d70BUFFER\u4e2d\u7684\u56fe\u7247\uff0c\u90a3\u4e48\u7a0b\u5e8f\u4f1a\u518d\u6b21\u8c03\u7528CPU\u6765\u7f16\u7801\u89c6\u9891\u6d41\uff0c\u7136\u540e\u8986\u76d6BUFFER\u4e2d\u7684\u56fe\u7247\uff0c\u90a3\u4e48\u6211\u4eec\u4e0a\u4e00\u6b21\u505a\u7684\u7f16\u7801\u5c31\u6d6a\u8d39\u4e86\u3002<\/p>\n<p>\u6240\u4ee5\u6211\u4eec\u5e94\u8be5\u5728BUFFER\u6ca1\u88ab\u53d6\u8d70\u7684\u65f6\u5019\u505a\u4e00\u4e2a\u7ebf\u7a0b\u963b\u585e\uff0c\u8fd9\u6837\u5c31\u4e0d\u4f1a\u5360\u7528CPU\u8d44\u6e90\uff0c\u4ece\u800c\u53ef\u4ee5\u628a\u8d44\u6e90\u91ca\u653e\u51fa\u6765\u7ed9\u8bc6\u522b\u7ebf\u7a0b\uff0c\u53ef\u4ee5\u4e00\u5b9a\u7a0b\u5ea6\u4e0a\u52a0\u901f\u8bc6\u522b\u8fc7\u7a0b\u3002<\/p>\n<h3>2. \u4e32\u884c\u7684\u9884\u5904\u7406<\/h3>\n<p>\u5728Pre-Processing\u5904\u7406\u9636\u6bb5\uff0c\u539f\u4ee3\u7801\u5c06 raw data \u9010\u5f20\u9001\u5165letterBox\u4e2d\u4f5c\u5904\u7406\uff0c\u8fd9\u662f\u4e32\u884c\u5904\u7406\u7684\u8fc7\u7a0b\uff0c\u8fd9\u4e5f\u662f\u5bfc\u81f4\u6027\u80fd\u5dee\u7684\u539f\u56e0\u4e4b\u4e00\u3002<\/p>\n<p>\u6240\u4ee5\u6211\u4eec\u5c06\u9884\u5904\u7406\u6d41\u7a0b\u52a0\u5165\u5230\u591a\u7ebf\u7a0b\u7f16\u7801\u89c6\u9891\u6d41\u8fc7\u7a0b\u4e4b\u540e\uff0c\u4ece\u800c\u5b9e\u73b0\u5229\u7528\u591a\u7ebf\u7a0b\u52a0\u901f\u5904\u7406\u3002<\/p>\n<h2>3. 
\u67b6\u6784\u5206\u6790<\/h2>\n<p>\u5728\u6211\u4eec\u4f18\u5316\u7684\u8fc7\u7a0b\u4e2d\uff0c\u6211\u4eec\u4f18\u5316\u4e86\u5904\u7406\u7684\u6d41\u7a0b\u67b6\u6784\uff0c\u6211\u4eec\u5c06\u9884\u5904\u7406\u79fb\u52a8\u5230\u4e86\u591a\u7ebf\u7a0b\u7684\u89c6\u9891\u6d41\u7f16\u7801\u540e\uff0c\u5229\u7528\u591a\u7ebf\u7a0b\u6765\u8fdb\u884c\u9884\u5904\u7406\uff0c\u540c\u65f6\u907f\u514d\u4e86\u9891\u7e41\u521b\u5efa\u7ebf\u7a0b\u6240\u5e26\u6765\u7684\u65f6\u95f4\u6d88\u8017\u3002 \u53e6\u5916\u5728\u5904\u7406\u5b8c\u6210\u4e4b\u540e\u5224\u65adBUFFER\u662f\u5426\u4e3a\u7a7a\uff0c\u5982\u679c\u4e3a\u7a7a\u6211\u4eec\u5c31\u963b\u585e\u7ebf\u7a0b\uff0c\u4ece\u800c\u91ca\u653eCPU\u8d44\u6e90\u7ed9\u8bc6\u522b\u7ebf\u7a0b\uff0c\u4ece\u800c\u63d0\u9ad8\u6548\u7387\u3002<br \/>\n<img decoding=\"async\" src=\"\/wp-content\/uploads\/2024\/05\/\u622a\u5716-2024-05-30-\u4e0b\u53485.37.59-1024x749.png\" alt=\"\" \/><\/p>\n<h2>4. \u4ee3\u7801\u4fee\u8ba2\u5904<\/h2>\n<p>\u4ee588\u670d\u52a1\u5668\u4e3a\u4f8b\uff0c\u6211\u4eec\u9996\u5148\u8f93\u5165\u4e0b\u9762\u4ee3\u7801\u627e\u5230\u6587\u4ef6\u76ee\u5f55\u4f4d\u7f6e<\/p>\n<pre><code class=\"language-python\">>&gt;&gt; import ultralytics\n>&gt;&gt; ultralytics.__file__\n&#039;\/usr\/local\/soft\/conda3\/envs\/yolo8\/lib\/python3.8\/site-packages\/ultralytics\/__init__.py&#039;<\/code><\/pre>\n<p>\u5176\u4e2d\uff0c&#8217;\/usr\/local\/soft\/conda3\/envs\/yolo8\/lib\/python3.8\/site-packages\/ultralytics&#8217; \u4fbf\u662f\u76ee\u5f55\u6587\u4ef6\u5939\u3002<\/p>\n<h3>4.1 loaders.py\u7684\u6587\u4ef6\u4fee\u6539<\/h3>\n<p>\u6211\u4eec\u6253\u5f00\u670d\u52a1\u5668\u7684\u8fd9\u4e2a\u6587\u4ef6\u5939\uff0c\u6253\u5f00.\/data\/loaders.py,\u5982\u4e0b\u56fe\u6240\u793a\uff1a<br \/>\n<img decoding=\"async\" src=\"\/wp-content\/uploads\/2024\/05\/\u622a\u5716-2024-05-30-\u4e0b\u53485.38.40-300x229.png\" alt=\"\" \/><br 
\/>\n\u9996\u5148\uff0c\u6211\u4eec\u7ed9LoadStreams\u7c7b(34\u884c\u5de6\u53f3)\u7684init\u521b\u5efa\u4e00\u4e2a\u7684letterbox\u65b9\u6cd5\uff0c\u4e5f\u5c31\u662f\u9884\u5904\u7406\u51fd\u6570\uff0c\u4ee3\u7801\u5982\u4e0b\uff1a<\/p>\n<pre><code class=\"language-Python\">from ultralytics.data.augment import LetterBox\nclass LoadStreams:\n  def __init__(self, sources=&quot;file.streams&quot;, vid_stride=1, buffer=False):\n      &quot;&quot;&quot;Initialize instance variables and check for consistent input stream shapes.&quot;&quot;&quot;\n      torch.backends.cudnn.benchmark = True  # faster for fixed-size inference\n      self.buffer = buffer  # buffer input streams\n      self.running = True  # running flag for Thread\n      self.mode = &quot;stream&quot;\n      self.vid_stride = vid_stride  # video frame-rate stride\n\n      # ************ create preprocessing function ************ #\n      self.letterbox = LetterBox([736, 1280],auto =  False,stride= 32)<\/code><\/pre>\n<p>\u7136\u540e\u4fee\u6539LoadStreams\u7c7b\u4e0b\u7684update\u65b9\u6cd5\uff08\u5927\u7ea6\u5728119\u884c\u5de6\u53f3\uff09\uff0c\u4fee\u6539\u4e3a\u5982\u4e0b\u65b9\u6cd5\uff1a<\/p>\n<pre><code class=\"language-Python\">    def update(self, i, cap, stream):\n        &quot;&quot;&quot;Read stream `i` frames in daemon thread.&quot;&quot;&quot;\n        n, f = 0, self.frames[i]  # frame number, frame array\n        while self.running and cap.isOpened() and n &lt; (f - 1):\n            if len(self.imgs[i]) &lt; 2:  # keep a &lt;=30-image buffer\n                n += 1\n                cap.grab()  # .read() = .grab() followed by .retrieve()\n                if n % self.vid_stride == 0:\n                    success, im = cap.retrieve()\n\n                    # ************ PreProcessing while encode video  ************ #\n                    im = self.letterbox(image = im)\n                    im = torch.from_numpy(im).to(&#039;cuda&#039;)\n                    # 
*********************************************************** #\n\n                    if not success:\n                        im = np.zeros(self.shape[i], dtype=np.uint8)\n                        LOGGER.warning(&quot;WARNING \u26a0\ufe0f Video stream unresponsive, please check your IP camera connection.&quot;)\n                        cap.open(stream)  # re-open stream if signal was lost\n                    if self.buffer:\n                        self.imgs[i].append(im)\n                    else:\n                        self.imgs[i] = [im]\n            else:\n                # LOGGER.warning(&quot;WARNING \u274c OUT OFF BUFF&quot;)\n                time.sleep(0.01)  # wait until the buffer is empty\n<\/code><\/pre>\n<h3>4.2 predictor.py\u7684\u6587\u4ef6\u4fee\u6539<\/h3>\n<p>\u63a5\u7740\u6253\u5f00.\/engine\/predictor.py,\u5982\u4e0b\u56fe\u6240\u793a\uff1a<br \/>\n<img decoding=\"async\" src=\"\/wp-content\/uploads\/2024\/05\/\u622a\u5716-2024-05-30-\u4e0b\u53485.39.39-300x229.png\" alt=\"\" \/><br \/>\n\u4fee\u6539BasePredictor\u7c7b\uff0865\u884c\u5de6\u53f3\uff09\u4e0b\u7684preprocess\u65b9\u6cd5\uff08117\u884c\u5de6\u53f3\uff09\u4e3a\u5982\u4e0b\uff1a<\/p>\n<pre><code class=\"language-Python\">def preprocess(self, im):\n    &quot;&quot;&quot;\n    Prepares input image before inference.\n\n    Args:\n        im (torch.Tensor | List(np.ndarray)): BCHW for tensor, [(HWC) x B] for list.\n    &quot;&quot;&quot;\n    not_tensor = not isinstance(im, torch.Tensor)\n    is_list = not isinstance(im[0], torch.Tensor)\n    if not_tensor:\n        if is_list:\n            # ensure other type can be preprocessing as before\n            im = np.stack(self.pre_transform(im))\n            im = im[..., ::-1].transpose((0, 3, 1, 2))  # BGR to RGB, BHWC to BCHW, (n, 3, h, w)\n            im = np.ascontiguousarray(im)  # contiguous\n            im = torch.from_numpy(im)\n        else:\n            # if is video stream, we don&#039;t preprocessing in here,one change BHWC to 
BCHW\n            im = torch.stack(im).permute(0, 3, 1, 2)\n    im = im.to(self.device)\n    im = im.half() if self.model.fp16 else im.float()  # uint8 to fp16\/32\n    if not_tensor:\n        im \/= 255  # 0 - 255 to 0.0 - 1.0\n    return im<\/code><\/pre>\n","protected":false},"excerpt":{"rendered":"<p>1.\u539f\u4ee3\u7801\u6d41\u7a0b\u89e3\u91ca \u539f\u4ee3\u7801\u5728\u8f7d\u5165\u89c6\u9891\u7684\u65f6\u5019\uff0c\u4f1a\u5f00\u542fn\u4e2a\u591a\u7ebf\u7a0b\u6765\u8f7d\u5165\u89c6\u9891\u5e27\uff0c\u5176\u4e2dn\u662f\u89c6\u9891\u6d41\u7684\u6570\u91cf\u3002\u7136\u540e\u5c06\u89c6\u9891\u5e27\u9001&#8230; &raquo; <a class=\"read-more-link\" href=\"https:\/\/apifj.com\/index.php\/2024\/05\/30\/youhua-yolov8-shipinliu-gongzuoliuchengtishengzairusudu\/\">\u9605\u8bfb\u5168\u6587<\/a><\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"open","ping_status":"closed","sticky":false,"template":"","format":"standard","meta":{"footnotes":""},"categories":[1,7],"tags":[],"class_list":["post-322","post","type-post","status-publish","format-standard","hentry","category-dl","category-xuexibiji"],"_links":{"self":[{"href":"https:\/\/apifj.com\/index.php\/wp-json\/wp\/v2\/posts\/322","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/apifj.com\/index.php\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/apifj.com\/index.php\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/apifj.com\/index.php\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/apifj.com\/index.php\/wp-json\/wp\/v2\/comments?post=322"}],"version-history":[{"count":7,"href":"https:\/\/apifj.com\/index.php\/wp-json\/wp\/v2\/posts\/322\/revisions"}],"predecessor-version":[{"id":333,"href":"https:\/\/apifj.com\/index.php\/wp-json\/wp\/v2\/posts\/322\/revisions\/333"}],"wp:attachment":[{"href":"https:\/\/apifj.com\/index.php\/wp-json\/wp\/v2\/media?parent=322"}],"wp:term":[{"taxonomy":"category","embeddable":true,"h
ref":"https:\/\/apifj.com\/index.php\/wp-json\/wp\/v2\/categories?post=322"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/apifj.com\/index.php\/wp-json\/wp\/v2\/tags?post=322"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}