Learning the H264 PTZ Network Camera
The RTSP Interaction Process
C denotes the RTSP client and S the RTSP server. One round of this exchange is sketched after the list.
1. C->S: OPTIONS request   // the client asks which methods the server supports
1. S->C: OPTIONS response  // the reply lists all of the methods the server provides
2. C->S: DESCRIBE request  // the client asks for the server's media initialization description
2. S->C: DESCRIBE response // the reply carries the media initialization description, chiefly the SDP
3. C->S: SETUP request     // the client sets the session properties and transport mode, asking the server to create a session
3. S->C: SETUP response    // the server creates the session and returns a session identifier plus related session information
4. C->S: PLAY request      // the client requests playback
4. S->C: PLAY response     // the server acknowledges the request
   S->C: the server streams the media data
5. C->S: TEARDOWN request  // the client requests that the session be closed
5. S->C: TEARDOWN response // the server acknowledges the request
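
For reference, the first round of that exchange looks roughly like the request and reply below. The address, port and Public header are purely illustrative (not captured from a specific camera); only the rtsp://<ip>:<port>/H264 URL format matches the code later in this post.

    OPTIONS rtsp://192.168.1.100:554/H264 RTSP/1.0
    CSeq: 1

    RTSP/1.0 200 OK
    CSeq: 1
    Public: OPTIONS, DESCRIBE, SETUP, TEARDOWN, PLAY

Each subsequent request increments CSeq by one, and every request after SETUP (PLAY, TEARDOWN) must carry the Session value returned in the SETUP response.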


Utility file for RTSP connect, disconnect, video play and stop
/*
 * RTSPVideoUtil.h
 *
 *  Created on: 2012-10-4
 */
#include <sys/socket.h>
#include <sys/time.h>
#include <unistd.h>
#include <ctype.h>
#include <arpa/inet.h>
#include <pthread.h>
#include <string.h>
#include <strings.h>
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <jni.h>
#include "include/rtppackage.h"
#include "include/type.h"
#include "include/DecodeFrame.h"

// Global variables
JNIEnv *env_play;
int rtsp_Socket = -1;
int size = 1024;
int mWidth = 352, mHeight = 288;           // display size of the video
char Session[100];
uint8_t videoDecodeData[352 * 288 * 2];    // decoded-frame buffer, sized to match mWidth and mHeight
jbyteArray jarrayVideo;
char *IP;
int Port;

/**
 * Connect via RTSP.
 * Returns 0 on failure, 1 on success.
 */
int getConnectRTSP(char *ip, int port) {
    //LOGD("begin to connect rtsp\n");
    char szResponse[1024];
    char szSendBuf[1024];
    int nRet = 0;
    IP = ip;
    Port = port;

    rtsp_Socket = socket(AF_INET, SOCK_STREAM, 0);  // create the TCP socket
    if (rtsp_Socket < 0) {  // report the failure to the Java layer
        return 0;
    }
    // 5-second receive timeout, set after the socket exists; on Linux/Android
    // SO_RCVTIMEO takes a struct timeval rather than an integer of milliseconds
    struct timeval nTimeOut = {5, 0};
    setsockopt(rtsp_Socket, SOL_SOCKET, SO_RCVTIMEO, (char *) &nTimeOut, sizeof(nTimeOut));

    // connect the socket
    struct sockaddr_in addrSrv;
    addrSrv.sin_addr.s_addr = inet_addr(ip);
    addrSrv.sin_family = AF_INET;
    addrSrv.sin_port = htons(port);
    if (-1 == connect(rtsp_Socket, (struct sockaddr *) &addrSrv, sizeof(struct sockaddr))) {
        // report the failure to the Java layer
        LOGD("connect failed\n");
        close(rtsp_Socket);
        rtsp_Socket = -1;
        return 0;
    }

    // OPTIONS request
    char lstrOptions[1024] = "OPTIONS rtsp://%s:%d/H264 RTSP/1.0\r\n"
            "CSeq: 1\r\n"
            "\r\n";
    sprintf(szSendBuf, lstrOptions, ip, port);
    nRet = send(rtsp_Socket, szSendBuf, strlen(szSendBuf), 0);
    if (nRet == -1) {
        return 0;
    }
    LOGD("send1 nRet=%d,buf=%s", nRet, szSendBuf);
    memset(szResponse, 0, sizeof(szResponse));
    nRet = recv(rtsp_Socket, &szResponse[0], 1024, 0);
    LOGD("recv1 nRet=%d,response=%s", nRet, szResponse);
    if (nRet == -1) {
        return 0;
    }

    // DESCRIBE request
    char lstrDescribe[1024] = "DESCRIBE rtsp://%s:%d/H264 RTSP/1.0\r\n"
            "CSeq: 2\r\n"
            "Content-Length: 0\r\n"
            "\r\n";
    memset(szSendBuf, 0, sizeof(szSendBuf));
    sprintf(szSendBuf, lstrDescribe, ip, port);
    nRet = send(rtsp_Socket, szSendBuf, strlen(szSendBuf), 0);
    if (nRet == -1) {
        return 0;
    }
    //LOGD("send2 nRet=%d,buf=%s", nRet, szSendBuf);
    memset(szResponse, 0, sizeof(szResponse));
    nRet = recv(rtsp_Socket, &szResponse[0], 1024, 0);
    if (nRet == -1) {
        return 0;
    }
    //LOGD("recv2 nRet=%d,response=%s", nRet, szResponse);

    // SETUP request
    char lstrSetup[1024] = "SETUP rtsp://%s:%d/H264 RTSP/1.0\r\n"
            "CSeq: 3\r\n"
            "Transport: RTP/AVP;unicast;client_port=6666-6667\r\n"
            "\r\n";
    memset(szSendBuf, 0, sizeof(szSendBuf));
    sprintf(szSendBuf, lstrSetup, ip, port);
    nRet = send(rtsp_Socket, szSendBuf, strlen(szSendBuf), 0);
    if (nRet == -1) {
        return 0;
    }
    //LOGD("send3 nRet=%d,buf=%s", nRet, szSendBuf);
    memset(szResponse, 0, sizeof(szResponse));
    nRet = recv(rtsp_Socket, &szResponse[0], 1024, 0);
    if (nRet == -1) {
        return 0;
    }
    //LOGD("recv3 nRet=%d,response=%s", nRet, szResponse);

    // extract the session identifier from the "Session: " header of the SETUP response
    int i = 0, j = 0;
    for (; i < (int) strlen(szResponse) - 9; i++) {
        if (strncmp(&szResponse[i], "Session: ", 9) == 0) {
            while (szResponse[i + 9 + j] != '\r' && szResponse[i + 9 + j] != '\0') {
                Session[j] = szResponse[i + 9 + j];
                j++;
            }
            Session[j] = '\0';
            break;
        }
    }

    // PLAY request
    char lstrPlay[1024] = "PLAY rtsp://%s:%d/H264 RTSP/1.0\r\n"
            "CSeq: 4\r\n"
            "Content-Length: 0\r\n"
            "Session: %s\r\n"
            "Range: npt=0.000-\r\n"
            "\r\n";
    memset(szSendBuf, 0, sizeof(szSendBuf));
    sprintf(szSendBuf, lstrPlay, ip, port, Session);
    nRet = send(rtsp_Socket, szSendBuf, strlen(szSendBuf), 0);
    if (nRet == -1) {
        return 0;
    }
    //LOGD("send4 nRet=%d,buf=%s", nRet, szSendBuf);
    memset(szResponse, 0, sizeof(szResponse));
    nRet = recv(rtsp_Socket, &szResponse[0], 1024, 0);
    if (nRet == -1) {
        return 0;
    }
    //LOGD("recv4 nRet=%d,response=%s", nRet, szResponse);
    //LOGD("connect rtsp success\n");
    return 1;
}

/**
 * Release video playback resources.
 */
void releaseResource() {
    close(rtsp_Socket);
    // release the memory backing jarrayVideo / videoDecodeData
    (env_play)->ReleaseByteArrayElements(jarrayVideo, (jbyte *) videoDecodeData, 0);
}

/**
 * Disconnect the RTSP session.
 */
int DisconnectRTSP() {
    char szResponse[1024];
    char szSendBuf[1024];
    int nRet;

    // TEARDOWN request
    char teardown[1024] = "TEARDOWN rtsp://%s:%d/H264 RTSP/1.0\r\n"
            "CSeq: 5\r\n"
            "Session: %s\r\n"
            "\r\n";
    sprintf(szSendBuf, teardown, IP, Port, Session);
    nRet = send(rtsp_Socket, szSendBuf, strlen(szSendBuf), 0);
    if (nRet == -1) {
        return 0;
    }
    //LOGD("send1 nRet=%d,buf=%s", nRet, szSendBuf);
    memset(szResponse, 0, sizeof(szResponse));
    nRet = recv(rtsp_Socket, &szResponse[0], 1024, 0);
    //LOGD("recv1 nRet=%d,response=%s", nRet, szResponse);
    if (nRet == -1) {
        return 0;
    }
    releaseResource();
    return 1;
}

/**
 * Hand a decoded frame to the Android SurfaceView for display.
 */
void displayVideo(int status, char *pRGBBuf, int nWidth, int nHeight, int nDepth) {
    // attach the current native thread to the JVM so JNI calls can be made from it
    (jvm_save)->AttachCurrentThread(&env_play, NULL);
    jarrayVideo = (env_play)->NewByteArray(nWidth * nHeight * 2);
    // copy the decoded data into the Java byte array jarrayVideo
    (env_play)->SetByteArrayRegion(jarrayVideo, 0, nWidth * nHeight * 2, (const jbyte *) pRGBBuf);
    // call the Java-side SurfaceView display method; displayVideo_methodId must be resolved in JniHelp.cpp
    (env_play)->CallStaticVoidMethod(cls_save, displayVideo_methodId, status, jarrayVideo, nWidth, nHeight, nDepth);
    // release the memory backing jarrayVideo / videoDecodeData
    (env_play)->ReleaseByteArrayElements(jarrayVideo, (jbyte *) videoDecodeData, 0);
    (jvm_save)->DetachCurrentThread();
}

/**
 * Receive data, reassemble frames, decode and display.
 */
void do_echo(int sockfd, struct sockaddr *pcliaddr, socklen_t client) {
    int hr, n, outSize = 0;
    int isFindIFrame = 0;
    CH264_RTP_UNPACK *unpack = new CH264_RTP_UNPACK(hr);
    socklen_t len = client;
    unsigned char mesg[20480];
    long timebase = 0;
    InitDecoder(mWidth, mHeight);
    while (true) {
        /* wait for data */
        memset(mesg, 0, sizeof(mesg));
        n = recvfrom(sockfd, mesg, 2048, 0, pcliaddr, &len);
        outSize = 0;
        // reassemble an H.264 frame from the received RTP packet
        BYTE *pFrame = unpack->Parse_RTP_Packet(mesg, n, &outSize);
        // drop packets whose timestamp is older than the current timebase
        if (timebase == 0 || timebase <= unpack->m_RTP_Header.ts) {
            timebase = unpack->m_RTP_Header.ts;
        } else {
            continue;
        }
        if (pFrame != NULL) {
            // skip everything until the first (large) I-frame has arrived
            if (isFindIFrame == 0) {
                if (outSize > 1456) {
                    isFindIFrame = 1;
                } else {
                    continue;
                }
            }
            int outLen = outSize;
            // decode and display; c is the decoder context (assumed to come from DecodeFrame.h)
            if (DecoderNal(pFrame, outLen, videoDecodeData) > 0) {
                displayVideo(0, (char *) videoDecodeData, c->width, c->height, 0);
            }
        }
    }
    // release the decoder (unreachable because of the endless loop above)
    UninitDecoder();
}

/**
 * Thread that receives and processes the RTP data.
 */
void *recvThread(void *lpParam) {
    int sockfd;
    struct sockaddr_in servaddr, cliaddr;
    sockfd = socket(AF_INET, SOCK_DGRAM, 0); /* create a UDP socket */
    /* init servaddr */
    bzero(&servaddr, sizeof(servaddr));
    servaddr.sin_family = AF_INET;
    servaddr.sin_addr.s_addr = htonl(INADDR_ANY);
    servaddr.sin_port = htons(6666);  // bind a port above 1024 (matches client_port in the SETUP request)
    /* bind address and port to socket */
    if (bind(sockfd, (struct sockaddr *) &servaddr, sizeof(servaddr)) == -1) {
        perror("bind error");
        exit(1);
    }
    do_echo(sockfd, (struct sockaddr *) &cliaddr, sizeof(cliaddr));
    return NULL;
}

/**
 * Entry point called from outside to start video playback.
 */
void displayRTSP() {
    int nSendBuf = 1024 * 10;  // 10 KB; declared but never applied to a socket
    pthread_t m_hRecvThread;
    pthread_create(&m_hRecvThread, NULL, recvThread, NULL);
}
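
This header only defines the native helpers; the JNI glue that exposes them to Java lives in JniHelp.cpp, which is not shown in this post. Below is a rough, hypothetical sketch of what that glue might look like, assuming a placeholder Java class com.example.RTSPPlayer with native methods naConnect/naPlay/naDisconnect (the package, class and method names are placeholders of mine, not from the original project), and omitting the caching of jvm_save, cls_save and displayVideo_methodId that displayVideo() also needs:

#include <jni.h>

// Declarations from RTSPVideoUtil.h (assumed visible to this translation unit).
extern int  getConnectRTSP(char *ip, int port);
extern void displayRTSP();
extern int  DisconnectRTSP();

// Hypothetical JNI entry points; the class and method names are placeholders.
extern "C" jint Java_com_example_RTSPPlayer_naConnect(JNIEnv *env, jclass, jstring jip, jint port) {
    const char *ip = env->GetStringUTFChars(jip, NULL);
    int ok = getConnectRTSP((char *) ip, port);   // OPTIONS/DESCRIBE/SETUP/PLAY handshake
    env->ReleaseStringUTFChars(jip, ip);
    return ok;                                    // 1 on success, 0 on failure
}

extern "C" void Java_com_example_RTSPPlayer_naPlay(JNIEnv *, jclass) {
    displayRTSP();                                // starts recvThread, which listens on UDP port 6666
}

extern "C" jint Java_com_example_RTSPPlayer_naDisconnect(JNIEnv *, jclass) {
    return DisconnectRTSP();                      // sends TEARDOWN and releases resources
}

The Java side would call naConnect first, then naPlay to start the receive/decode thread, and naDisconnect when playback should stop.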
