日韩性视频-久久久蜜桃-www中文字幕-在线中文字幕av-亚洲欧美一区二区三区四区-撸久久-香蕉视频一区-久久无码精品丰满人妻-国产高潮av-激情福利社-日韩av网址大全-国产精品久久999-日本五十路在线-性欧美在线-久久99精品波多结衣一区-男女午夜免费视频-黑人极品ⅴideos精品欧美棵-人人妻人人澡人人爽精品欧美一区-日韩一区在线看-欧美a级在线免费观看

歡迎訪問 生活随笔!

生活随笔

當前位置: 首頁 > 编程语言 > python >内容正文

python

python Intel Realsense udp协议 局域网传输实时视频流并通过窗口显示 (opencv压缩解码)

發布時間:2025/3/20 python 31 豆豆
生活随笔 收集整理的這篇文章主要介紹了 python Intel Realsense udp协议 局域网传输实时视频流并通过窗口显示 (opencv压缩解码) 小編覺得挺不錯的,現在分享給大家,幫大家做個參考.

文章目錄

    • 發送端
    • 接收端
    • 執行結果
      • 發送端
      • 接收端

發送端

# -*- coding: utf-8 -*-
"""Sender side: stream Intel RealSense frames over UDP on a LAN.

Each frame is JPEG-compressed with OpenCV, announced with a 4-byte
length header (struct format 'i'), and then sent in datagrams no larger
than the caller-chosen payload size.

@File    : 200103_obstacle_detection_發送數據_測試udp傳輸上限.py
@Time    : 2020/1/3 14:28
@Author  : Dontla
@Email   : sxana@qq.com
@Software: PyCharm
"""
import socket
import struct
import time
import sys

import numpy as np
import pyrealsense2 as rs
import cv2
from numba import jit


def udp_send_image(img, pack_size, socket, ip_port):
    """JPEG-encode *img* and send it over UDP in *pack_size*-byte chunks.

    A 4-byte header carrying the total payload length is sent first so
    the receiver knows how many bytes to reassemble.

    :param img: BGR image (numpy array) to transmit.
    :param pack_size: maximum UDP payload size per datagram, in bytes.
    :param socket: an already-created ``socket.socket`` (UDP).  NOTE: the
        parameter shadows the stdlib module name; kept for caller
        compatibility.
    :param ip_port: ``(host, port)`` tuple of the receiver.
    """
    _, img_encode = cv2.imencode('.jpg', img)
    data = img_encode.tobytes()
    # JPEG frames regularly exceed 65535 bytes, hence the chunking below.
    # 'i' is a native C int (4 bytes) holding the payload length.
    fhead = struct.pack('i', len(data))
    socket.sendto(fhead, ip_port)
    # Number of datagrams needed.  When len(data) is an exact multiple of
    # pack_size this sends one trailing empty datagram; the receiver's
    # recv_times formula mirrors this, so the protocol stays in sync.
    send_times = len(data) // pack_size + 1
    for count in range(send_times):
        if count < send_times - 1:
            socket.sendto(data[pack_size * count:pack_size * (count + 1)], ip_port)
        else:
            socket.sendto(data[pack_size * count:], ip_port)


# NOTE(review): the author reports @jit did not work / gave no speedup
# here, so the function stays plain Python.
def filter_alpha(depth_image, filter_alpha):
    """Scale the lower half of *depth_image* linearly up to *filter_alpha*.

    The top half of the image is left untouched (factor 1); the bottom
    half ramps from 1 at mid-image to *filter_alpha* at the bottom row.
    This pushes near-ground depths away from the alarm threshold so the
    cotton canopy directly below the camera is not flagged as an
    obstacle.  Assumes an even image height — TODO confirm for odd h.

    :param depth_image: 2-D depth array (rows = image height).
    :param filter_alpha: bottom-row scale factor; values <= 1 disable
        the filter and the input is returned unchanged.
    :return: scaled copy (or the original array when filter_alpha <= 1).
    """
    if filter_alpha > 1:
        h = depth_image.shape[0]
        # Upper half keeps its depths unchanged.
        filter_upper = np.full(h // 2, 1)
        # BUG FIX: np.linspace requires an integer sample count; the
        # original passed h / 2 (a float), which raises TypeError on
        # NumPy >= 1.18 (deprecated since 1.12).
        filter_lower = np.linspace(1, filter_alpha, h // 2)
        # One scale factor per image row; renamed from `filter`, which
        # shadowed the builtin.
        row_factors = np.r_[filter_upper, filter_lower]
        # Transpose so the per-row factors broadcast along axis 0.
        return (depth_image.T * row_factors).T
    else:
        return depth_image
# If cotton close below the camera causes false detections, a 2-D
# gradient filter could be applied instead; for now only the upper part
# of the frame is trusted for the dangerous-pixel decision.
@jit
def traversing_pixels(depth_image, threshold_dangerous_distance):
    """Count dangerous pixels and build a binary segmentation image.

    A pixel is "dangerous" when its depth is non-zero and closer than
    *threshold_dangerous_distance* (mm).  Dangerous pixels become 0 and
    safe ones 6000 in the returned segmentation image, which colormaps
    to a clear two-colour split.

    :return: ``(num_all_pixels, num_dangerous, depth_image_segmentation)``
    """
    num_dangerous = 0
    num_all_pixels = 0
    depth_image_ravel = depth_image.ravel()
    depth_image_segmentation_ravel = []
    for pixel in depth_image_ravel:
        num_all_pixels += 1
        # Zero depth means "no reading" on RealSense, not "very close",
        # so it must be excluded from the dangerous count.
        if pixel < threshold_dangerous_distance and pixel != 0:
            num_dangerous += 1
            depth_image_segmentation_ravel.append(0)
        else:
            depth_image_segmentation_ravel.append(6000)
    depth_image_segmentation = np.array(depth_image_segmentation_ravel).reshape(depth_image.shape)
    return num_all_pixels, num_dangerous, depth_image_segmentation


class ObstacleDetection(object):
    """Detect near obstacles in RealSense depth frames and alert over UDP."""

    def __init__(self):
        # Serial numbers of the cameras this instance streams from.
        self.cam_serials = ['838212073161']
        self.cam_width, self.cam_height = 640, 360
        # Dangerous-distance threshold (mm).
        self.threshold_dangerous_distance = 3000
        # Vertical distance from camera to the cotton canopy top (mm).
        self.distance_cam_vertical_to_cotton_top = 260
        # Compensation factor pushing bottom-row depths away from the
        # critical value to avoid false positives.
        self.factor_compensation_dangerous_distance = 1.5
        # Alarm when this fraction of pixels falls in dangerous range.
        self.threshold_dangerous_scale = 0.05
        # Camera field of view in degrees (D435-class optics).
        self.FOV_width = 69.4
        self.FOV_height = 42.5
        self.FOV_scale = self.FOV_height / self.FOV_width  # 0.6123919308357348
        # Effective vertical FOV after cropping to the stream aspect ratio.
        if self.cam_height / self.cam_width < self.FOV_scale:
            self.FOV_height_actual = self.FOV_width * self.cam_height / self.cam_width
        else:
            self.FOV_height_actual = self.FOV_height
        # Depth seen at the very bottom of the image (nearest cotton).
        self.distance_min = self.distance_cam_vertical_to_cotton_top / (
            np.tan(self.FOV_height_actual / 2 * np.pi / 180))
        # Per-row depth scale for filter_alpha(); only applied when > 1.
        self.filter_alpha = (self.threshold_dangerous_distance / self.distance_min
                             * self.factor_compensation_dangerous_distance)
        # UDP endpoint of the remote monitor.
        self.ip_port = ('192.168.1.49', 9000)
        self.udp_server_client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Maximum UDP payload: 65535 minus IP/UDP headers.
        self.bytes_udp_pack = 65507

    @staticmethod
    def _wait_for_stable_camera_count(ctx, cam_num,
                                      continuous_stable_value=5,
                                      max_veri_times=100):
        """Poll until *cam_num* cameras are seen *continuous_stable_value*
        times in a row; exit the process after *max_veri_times* polls.

        Extracted helper: the original ran this identical loop twice
        (before and after the hardware resets).
        """
        print('\n', end='')
        print('開始連續(xù)驗證,連續(xù)驗證穩(wěn)定值:{},最大驗證次數(shù):{}:'.format(
            continuous_stable_value, max_veri_times))
        continuous_value = 0
        veri_times = 0
        while True:
            connected_cam_num = len(ctx.query_devices())
            print('攝像頭個數(shù):{}'.format(connected_cam_num))
            if connected_cam_num == cam_num:
                continuous_value += 1
                if continuous_value == continuous_stable_value:
                    break
            else:
                continuous_value = 0
            veri_times += 1
            if veri_times == max_veri_times:
                print("檢測超時,請檢查攝像頭連接!")
                sys.exit()

    def obstacle_detection(self):
        """Capture, analyse and display frames; alert the remote host over UDP.

        Runs forever; pipelines are stopped in ``finally`` on any exit.
        """
        # Total cameras expected on the host — NOTE(review): this is 6
        # while self.cam_serials lists only one; presumably other cameras
        # are attached but unused here.  Verify against the deployment.
        cam_num = 6
        ctx = rs.context()

        # Make sure enumeration is stable before touching any device.
        self._wait_for_stable_camera_count(ctx, cam_num)

        # Reset every camera we intend to use.  hardware_reset() needs a
        # settling delay, which the second verification pass provides.
        # Loop order matters: resetting inside the serial loop avoids
        # re-querying a device that was just reset.
        print('\n', end='')
        print('開始初始化攝像頭:')
        for dev in ctx.query_devices():
            # Cache the serial so we don't hit the device repeatedly.
            dev_serial = dev.get_info(rs.camera_info.serial_number)
            for serial in self.cam_serials:
                if serial == dev_serial:
                    dev.hardware_reset()
                    # Accessing info right after the reset happens not to
                    # raise (the handle apparently stays readable).
                    print('攝像頭{}初始化成功'.format(dev.get_info(rs.camera_info.serial_number)))

        # Wait again for the devices to re-enumerate after the resets.
        self._wait_for_stable_camera_count(ctx, cam_num)

        # One pipeline + align object per camera.  BUG FIX: the original
        # stored these via locals()['pipeline' + str(i)] = ... — writing
        # through locals() is a CPython implementation detail and stops
        # persisting under PEP 667 (Python 3.13).  Plain dicts keyed by
        # camera index are equivalent and portable.
        pipelines = {}
        aligns = {}
        for i, serial in enumerate(self.cam_serials):
            pipeline = rs.pipeline(ctx)
            config = rs.config()
            config.enable_device(serial)
            # 1280x720 reportedly fails even on USB 3.0; stick to the
            # configured 640x360 streams.
            config.enable_stream(rs.stream.depth, self.cam_width, self.cam_height,
                                 rs.format.z16, 30)
            config.enable_stream(rs.stream.color, self.cam_width, self.cam_height,
                                 rs.format.bgr8, 30)
            pipeline.start(config)
            pipelines[i] = pipeline
            # Align depth onto the colour stream.
            aligns[i] = rs.align(rs.stream.color)

        try:
            while True:
                for i in range(len(self.cam_serials)):
                    frames = pipelines[i].wait_for_frames()
                    aligned_frames = aligns[i].process(frames)
                    aligned_depth_frame = aligned_frames.get_depth_frame()
                    color_frame = aligned_frames.get_color_frame()
                    if not aligned_depth_frame or not color_frame:
                        continue

                    # Colour-stream intrinsics (computed as in the
                    # original; not used further in this loop).
                    color_profile = color_frame.get_profile()
                    cvsprofile = rs.video_stream_profile(color_profile)
                    color_intrin = cvsprofile.get_intrinsics()
                    color_intrin_part = [color_intrin.ppx, color_intrin.ppy,
                                         color_intrin.fx, color_intrin.fy]

                    color_image = np.asanyarray(color_frame.get_data())
                    depth_image = np.asanyarray(aligned_depth_frame.get_data())

                    # Alpha filter: de-emphasise the near ground plane.
                    depth_image_alpha_filter = filter_alpha(depth_image, self.filter_alpha)

                    # Count pixels closer than the dangerous threshold.
                    num_all_pixels, num_dangerous, depth_image_segmentation = traversing_pixels(
                        depth_image_alpha_filter, self.threshold_dangerous_distance)
                    print('num_all_pixels:{}'.format(num_all_pixels))
                    print('num_dangerous:{}'.format(num_dangerous))
                    dangerous_scale = num_dangerous / num_all_pixels
                    print('危險比例:{}'.format(dangerous_scale))

                    depth_colormap = cv2.applyColorMap(
                        cv2.convertScaleAbs(depth_image_segmentation, alpha=0.0425),
                        cv2.COLORMAP_JET)
                    image = np.hstack((color_image, depth_colormap))
                    # Window names deliberately avoid non-ASCII characters
                    # (OpenCV titles garble otherwise).
                    cv2.imshow('win{}:{}'.format(i, self.cam_serials[i]), image)
                    cv2.waitKey(1)

                    # Alert the remote host with the annotated frame when
                    # too many pixels are dangerously close.
                    if dangerous_scale > self.threshold_dangerous_scale:
                        print("距離警告,向遠端發(fā)送告警信息!")
                        udp_send_image(image, self.bytes_udp_pack,
                                       self.udp_server_client, self.ip_port)
        finally:
            for i in range(len(self.cam_serials)):
                pipelines[i].stop()


if __name__ == '__main__':
    ObstacleDetection().obstacle_detection()

接收端

# -*- encoding: utf-8 -*-
"""Receiver side: reassemble JPEG frames sent over UDP and display them.

Protocol (must match the sender): a 4-byte struct 'i' length header,
followed by the JPEG payload split into datagrams of at most BUFSIZE
bytes — the sender always emits ``length // BUFSIZE + 1`` of them.

@File    : 201003_避障程序信號及圖像接收端_測試udp傳輸上限.py
@Time    : 2020/1/3 14:32
@Author  : Dontla
@Email   : sxana@qq.com
@Software: PyCharm
"""
import socket
import struct
import time

import cv2
import numpy as np

# Local endpoint to listen on.
ip_port = ('192.168.1.49', 9000)
# Largest possible UDP payload: 65535 minus IP/UDP headers.
BUFSIZE = 65507

udp_server_client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
udp_server_client.bind(ip_port)


def receive():
    """Receive frames forever, decoding and showing each one in a window.

    Frames with a missing or reordered datagram are dropped and the loop
    resynchronises on the next 4-byte length header.
    """
    while True:
        try:
            while True:
                # A 4-byte datagram is the length header; anything else
                # is a stray payload chunk arriving out of sync — skip it
                # and wait for the next header.
                buffer, _ = udp_server_client.recvfrom(BUFSIZE)
                if len(buffer) == 4:
                    # Unpack returns a 1-tuple, e.g. (64282,).
                    data_size = struct.unpack('i', buffer)[0]
                else:
                    print('不是struct頭,繼續(xù)下次循環(huán)!')
                    continue

                # Mirror of the sender's chunk count.
                recv_times = data_size // BUFSIZE + 1
                data_total = b''
                for _count in range(recv_times):
                    data, _ = udp_server_client.recvfrom(BUFSIZE)
                    data_total += data

                # Size mismatch => a datagram was lost; drop the frame.
                if len(data_total) != data_size:
                    print('一定又是哪接收出錯了,導致沒接收上,繼續(xù)下輪循環(huán)!')
                    continue

                print('received!')
                # BUG FIX: np.fromstring on binary data is deprecated and
                # removed in modern NumPy; np.frombuffer is the documented
                # zero-copy replacement.
                nparr = np.frombuffer(data_total, np.uint8)
                img_decode = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
                cv2.imshow('win', img_decode)
                cv2.waitKey(1)
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit and made the receiver
            # unstoppable.  Keep it alive only across ordinary errors.
            print('出現(xiàn)異常,繼續(xù)調(diào)用receive()函數(shù)!')


if __name__ == '__main__':
    receive()

執行結果

發送端

接收端

參考文章1:python通過udp傳輸圖片

參考文章2:python 網(wǎng)絡編程 問題記錄

總結

以上是生活随笔為你收集整理的python Intel Realsense udp协议 局域网传输实时视频流并通过窗口显示 (opencv压缩解码)的全部內容,希望文章能夠幫你解決所遇到的問題。

如果覺得生活随笔網站內容還不錯,歡迎將生活随笔推薦給好友。