websocket传输canvas图像数据给C++服务端opencv图像实现web在线实时图像处理
為什么80%的碼農都做不了架構師?>>>
前后端的耦合想了很久,上下課都在思考怎么做,然后終于憋出來了。這是之前搞的一個視覺計算的項目,boss叫對接到前端,于是就產生了這樣一個詭異的需求,就是前端打開攝像頭,同時需要把攝像頭的數據回傳到后端進行圖像處理(比如美顏啊腦袋上加個裝飾品之類),這就需要涉及到前端和服務端的數據編碼耦合,想了想既然任何圖像在內存里面都是一個uchar矩陣,于是琢磨了這個東西出來。
一般情況下,圖像在內存里的表達都是個uchar串,或者說byte流,因為我經常需要寫跨語言調用的玩意兒,所以一般在內存里我都是用字符串和比特流進行交互,這里我采用了同樣的思想,我們把opencv的圖像進行編碼為png,然后再一次編碼為base64,通過websocket傳輸給前端。大致過程如下。
首先假設我們的前端打開websocket連接后端,連接上了以后前端打開攝像頭取攝像頭數據傳輸給后端,后端通過一系列的圖像處理機器學習以后編碼圖像回傳給前端。
前端代碼:
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>Title</title>
</head>
<body>
<video id="video" style="display: none" width="480" height="320" controls></video>
<canvas id="canvas" width="480" height="320"></canvas>
<img id="target" width="480" height="320">
<script>
    var video = document.getElementById('video');
    var canvas = document.getElementById('canvas');
    var image = document.getElementById('target');
    var context = canvas.getContext('2d');

    var ws = new WebSocket("ws://127.0.0.1:9002");
    ws.binaryType = "arraybuffer";

    ws.onopen = function () {
        ws.send("I'm client");
    };

    // The backend answers with the processed frame as a base64 string.
    ws.onmessage = function (evt) {
        console.log("resive");
        try {
            // BUGFIX: show the returned base64 image through a data URI.
            // The MIME type is image/bmp because the C++ side encodes the
            // frame as BMP before base64-encoding it. (The original line had
            // been mangled by the blog engine into an unrelated image URL.)
            image.src = "data:image/bmp;base64," + evt.data;
            console.log(evt.data);
        } catch (e) {
        }
    };

    ws.onclose = function () {
        alert("Closed");
    };
    ws.onerror = function (err) {
        alert("Error: " + err);
    };

    // Thin wrapper over the standard mediaDevices API.
    function getUserMedia(constraints, success, error) {
        if (navigator.mediaDevices.getUserMedia) {
            navigator.mediaDevices.getUserMedia(constraints).then(success).catch(error);
        }
    }

    // Success callback: route the camera stream into the hidden <video>.
    function success(stream) {
        video.srcObject = stream;
        video.play();
    }

    function error(error) {
        console.log('訪問用戶媒體失敗:', error.name, error.message);
    }

    // Convert a canvas data URI into a binary Blob suitable for ws.send().
    function dataURItoBlob(dataURI) {
        // convert base64/URLEncoded data component to raw binary data held in a string
        var byteString;
        if (dataURI.split(',')[0].indexOf('base64') >= 0)
            byteString = atob(dataURI.split(',')[1]);
        else
            byteString = unescape(dataURI.split(',')[1]);
        // separate out the mime component
        var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
        // write the bytes of the string to a typed array
        var ia = new Uint8Array(byteString.length);
        for (var i = 0; i < byteString.length; i++) {
            ia[i] = byteString.charCodeAt(i);
        }
        return new Blob([ia], {type: mimeString});
    }

    if (navigator.mediaDevices.getUserMedia || navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia) {
        // Open the camera.
        getUserMedia({video: {width: 480, height: 320}}, success, error);
        // Grab a frame every 100 ms, JPEG-encode it and send it to the backend.
        // The interval gives a slow backend time to keep up — tune it to your
        // server-side processing latency. (The original comment said 50 ms,
        // which did not match the actual 100 ms interval.)
        timer = setInterval(function () {
            context.drawImage(video, 0, 0, 480, 320);
            var data = canvas.toDataURL('image/jpeg', 1.0);
            newblob = dataURItoBlob(data);
            ws.send(newblob);
        }, 100);
    } else {
        alert('不支持訪問用戶媒體');
    }
</script>
</body>
</html>
C++服務器端(這里需要使用到websocket++讀者請自行編譯)
opencv_websocket_server.h
// // Created by Pulsar on 2019/4/16. //#ifndef WEBSOCKETPP_OPENCV_WEBSOCKET_H #define WEBSOCKETPP_OPENCV_WEBSOCKET_H#include <opencv2/opencv.hpp>#include <boost/thread/thread.hpp> //#include <boost/bind.hpp> #include <boost/thread/mutex.hpp> #include <websocketpp/config/asio_no_tls.hpp> #include <websocketpp/server.hpp>typedef websocketpp::server<websocketpp::config::asio> WebsocketServer; typedef WebsocketServer::message_ptr message_ptr;class opencv_websocket { public:opencv_websocket(std::string file_path) ;void Run(int port);~opencv_websocket(); };#endif //WEBSOCKETPP_OPENCV_WEBSOCKET_Hopencv_websocket_server.cpp
// // Created by Pulsar on 2019/4/16. //#include <opencv_websocket.h> //using websocketpp::lib::placeholders::_1; //using websocketpp::lib::placeholders::_2; //using websocketpp::lib::bind; boost::shared_mutex read_write_mutex; boost::mutex lock; cv::CascadeClassifier cascade; //解碼base64數據 static std::string base64Decode(const char *Data, int DataByte) {//解碼表const char DecodeTable[] ={0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,62, // '+'0, 0, 0,63, // '/'52, 53, 54, 55, 56, 57, 58, 59, 60, 61, // '0'-'9'0, 0, 0, 0, 0, 0, 0,0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, // 'A'-'Z'0, 0, 0, 0, 0, 0,26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, // 'a'-'z'};std::string strDecode;int nValue;int i = 0;while (i < DataByte) {if (*Data != '\r' && *Data != '\n') {nValue = DecodeTable[*Data++] << 18;nValue += DecodeTable[*Data++] << 12;strDecode += (nValue & 0x00FF0000) >> 16;if (*Data != '=') {nValue += DecodeTable[*Data++] << 6;strDecode += (nValue & 0x0000FF00) >> 8;if (*Data != '=') {nValue += DecodeTable[*Data++];strDecode += nValue & 0x000000FF;}}i += 4;} else {Data++;i++;}}return strDecode; }//編碼base64數據 static std::string base64Encode(const unsigned char *Data, int DataByte) {//編碼表const char EncodeTable[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";//返回值std::string strEncode;unsigned char Tmp[4] = {0};int LineLength = 0;for (int i = 0; i < (int) (DataByte / 3); i++) {Tmp[1] = *Data++;Tmp[2] = *Data++;Tmp[3] = *Data++;strEncode += EncodeTable[Tmp[1] >> 2];strEncode += EncodeTable[((Tmp[1] << 4) | (Tmp[2] >> 4)) & 0x3F];strEncode += EncodeTable[((Tmp[2] << 2) | (Tmp[3] >> 6)) & 0x3F];strEncode += EncodeTable[Tmp[3] & 0x3F];if (LineLength += 4, LineLength == 76) {strEncode += "\r\n";LineLength = 0;}}//對剩余數據進行編碼int Mod = DataByte % 3;if (Mod == 1) {Tmp[1] = 
*Data++;strEncode += EncodeTable[(Tmp[1] & 0xFC) >> 2];strEncode += EncodeTable[((Tmp[1] & 0x03) << 4)];strEncode += "==";} else if (Mod == 2) {Tmp[1] = *Data++;Tmp[2] = *Data++;strEncode += EncodeTable[(Tmp[1] & 0xFC) >> 2];strEncode += EncodeTable[((Tmp[1] & 0x03) << 4) | ((Tmp[2] & 0xF0) >> 4)];strEncode += EncodeTable[((Tmp[2] & 0x0F) << 2)];strEncode += "=";}return strEncode; }//imgType 包括png bmp jpg jpeg等opencv能夠進行編碼解碼的文件 static std::string Mat2Base64(const cv::Mat &img, std::string imgType) {//Mat轉base64std::string img_data;std::vector<uchar> vecImg;std::vector<int> vecCompression_params;vecCompression_params.push_back(CV_IMWRITE_JPEG_QUALITY);vecCompression_params.push_back(90);imgType = "." + imgType;//重點來了,它是負責把圖像從opencv的Mat變成編碼好的圖像比特流的重要函數cv::imencode(imgType, img, vecImg, vecCompression_params);img_data = base64Encode(vecImg.data(), vecImg.size());return img_data; }//base64轉Mat static cv::Mat Base2Mat(std::string &base64_data) {cv::Mat img;std::string s_mat;s_mat = base64Decode(base64_data.data(), base64_data.size());std::vector<char> base64_img(s_mat.begin(), s_mat.end());img = cv::imdecode(base64_img, CV_LOAD_IMAGE_COLOR);return img; }void OnOpen(WebsocketServer *server, websocketpp::connection_hdl hdl) {std::cout << "have client connected" << std::endl; }void OnClose(WebsocketServer *server, websocketpp::connection_hdl hdl) {std::cout << "have client disconnected" << std::endl; }void OnMessage(WebsocketServer *server, websocketpp::connection_hdl hdl, message_ptr msg) {std::string image_str = msg->get_payload();std::vector<char> img_vec(image_str.begin(), image_str.end());try {//把前端傳來的圖像字符串進行解碼cv::Mat img = cv::imdecode(img_vec, CV_LOAD_IMAGE_COLOR);if (!img.empty()) { // cv::imshow("", img);std::vector<cv::Rect> faces;lock.lock(); // cascade.detectMultiScale(img, faces, 1.1, 3, 0, cv::Size(30, 30)); // for (size_t t = 0; t < faces.size(); t++){ // cv::rectangle(img, faces[t], cv::Scalar(0, 0, 255), 2, 8); // }lock.unlock();cv::Mat output = img;if 
(!output.empty()) {//把你處理完的圖像轉換為字符串返回給前端std::string strRespon = Mat2Base64(output, "bmp");server->send(hdl, strRespon, websocketpp::frame::opcode::text);} // cv::waitKey(10);}}catch (const std::exception &) {std::cout << " 解碼異常" << std::endl;} }opencv_websocket::opencv_websocket(std::string file_path) {//訓練好的文件名稱,放置在可執行文件同目錄下if(!cascade.load(file_path))perror("Load Model Error"); }opencv_websocket::~opencv_websocket() {}void opencv_websocket::Run(int port) {WebsocketServer server;server.set_access_channels(websocketpp::log::alevel::all);server.clear_access_channels(websocketpp::log::alevel::frame_payload);// Initialize Asioserver.init_asio();// Register our message handlerserver.set_open_handler(websocketpp::lib::bind(&OnOpen, &server, ::websocketpp::lib::placeholders::_1));server.set_close_handler(websocketpp::lib::bind(&OnClose, &server, websocketpp::lib::placeholders::_1));server.set_message_handler(websocketpp::lib::bind(OnMessage, &server, websocketpp::lib::placeholders::_1, websocketpp::lib::placeholders::_2));// Listen on port 9002server.listen(port);// Start the server accept loopserver.start_accept();// Start the ASIO io_service run loopserver.run(); }int main(int argc, char **argv) {std::cout<<"[INFO] load model"<<std::endl;opencv_websocket opencv_websocket_server("haarcascade_frontalface_alt.xml");std::cout<<"[INFO] start server"<<std::endl;opencv_websocket_server.Run(9002);std::cout<<"[INFO] listen"<<std::endl;getchar();return 0; }上述工程地址:
https://gitee.com/Luciferearth/websocketpp
example\opencv_websocket_server下
注意websocket在Windows下需要改動編譯依賴
去掉
iostream_server
testee_server
testee_client
utility_client
的Cmake(直接全部注釋)
CmakeLists.txt
set (WEBSOCKETPP_LIB ${WEBSOCKETPP_BUILD_ROOT}/lib)后面加入以下編譯命令
#########################################OpenSSL#######################################
set(OPENSSL_INCLUDE_DIR D:/pgsql/include)
set(OPENSSL_LIBRARIES D:/pgsql/lib/ssleay32MD.lib;D:/pgsql/lib/libeay32MD.lib)
#######################################################################################
##########################Windows 下對Boost的引用######################################
set(BUILD_EXAMPLES ON)
set(Boost_FOUND TRUE)
set(Boost_INCLUDE_DIRS E:/local/boost_1_67_0)
set(Boost_INCLUDE_DIR E:/local/boost_1_67_0)
set(Boost_LIBRARY_DIRS E:/local/boost_1_67_0/lib64-msvc-14.0)
# BUGFIX: the library names were mangled together with the variable name in the
# original post; each .lib must be a separate list entry.
set(Boost_LIBRARIES
    boost_filesystem-vc140-mt-x64-1_67.lib
    boost_filesystem-vc140-mt-gd-x64-1_67.lib
    libboost_zlib-vc140-mt-gd-x64-1_67.lib
    libboost_zlib-vc140-mt-x64-1_67.lib
    boost_system-vc140-mt-gd-x64-1_67.lib
    boost_system-vc140-mt-x64-1_67.lib
    libboost_chrono-vc140-mt-s-x64-1_67.lib
    libboost_chrono-vc140-mt-gd-x64-1_67.lib
    boost_thread-vc140-mt-gd-x64-1_67.lib
    boost_thread-vc140-mt-x64-1_67.lib)
###################################################opencv-server
file(GLOB SOURCE_FILES *.cpp)
file(GLOB HEADER_FILES *.hpp)

set(OPENCV_INCLUDE_DIR F:/Smart_Classroom/3rdparty/ALLPLATHFORM/opencv/include)
message(${OPENCV_INCLUDE_DIR})
set(OPENCV_LIB_DIR F:/Smart_Classroom/3rdparty/ALLPLATHFORM/opencv/x64/vc14/lib)
message(${OPENCV_LIB_DIR})
include_directories(${OPENCV_INCLUDE_DIR})
link_directories(${OPENCV_LIB_DIR})

init_target(opencv_websocket_server)
build_executable(${TARGET_NAME} ${SOURCE_FILES} ${HEADER_FILES})
# Copy the trained cascade model next to the executable.
file(COPY haarcascade_frontalface_alt.xml DESTINATION ${CMAKE_BINARY_DIR}/bin/)
# link_boost ()
final_target()
# BUGFIX: the original linked the release AND debug OpenCV libraries
# unconditionally, which conflicts; select per configuration instead.
target_link_libraries(opencv_websocket_server
    optimized opencv_world341.lib
    debug opencv_world341d.lib)
# set_target_properties(${TARGET_NAME} PROPERTIES FOLDER "examples")
代碼難免在打字的時候打錯,有什么問題聯系筆者。整個服務端的實現難點無非在于編碼與解碼的方法保持客戶端和服務端數據耦合性,這個東西也琢磨了我好幾天才琢磨透,再接再厲把,io真的是一個神奇的東西,當你把它深刻的理解到內存的時候,它就像個聽話的孩子。
轉載于:https://my.oschina.net/VenusV/blog/3030001
總結
以上是生活随笔為你收集整理的websocket传输canvas图像数据给C++服务端opencv图像实现web在线实时图像处理的全部內容,希望文章能夠幫你解決所遇到的問題。
- 上一篇: PHP处理图片(orientation)
- 下一篇: java.util.concurrent