![](/img/trans.png)
[英]Can RTSP (Real Time Streaming Protocol) be used to send live video stream from iPhone to a media server?
[英]How to connect to a media server using Real Time Streaming Protocol (RTSP) from an iOS application?
我是iOS編程的新手,正在研究iPhone應用程序以捕獲視頻並通過媒體服務器實時流式傳輸視頻。 我從此鏈接中獲得了示例代碼。 但是此代碼使iPhone成為RTSP服務器,而媒體服務器必須從iPhone中提取流。 即使當前的設置工作正常,我還是希望iPhone將實時流推送到媒體服務器,因為我認為讓服務器實時地從每一個iPhone用戶那里拉取流並不可行。 連接應該反過來建立——由iPhone主動發起並推送到服務器——才能正常工作(如果我錯了,請糾正我)。 請告訴我執行此操作的正確方法是什么。 還請告訴我Apple提供了哪些框架或內置功能,可用於將實時流發送到服務器,從而使iPhone作為客戶端工作並啟動連接。
請檢查代碼:
頭文件
#import <Foundation/Foundation.h>
#import "RTSPServer.h"

// Public interface for one RTSP client connection accepted by RTSPServer.
// The server creates one instance per accepted TCP control socket.
@interface RTSPClientConnection : NSObject

// Factory: wraps a freshly-accepted native socket handle in a connection
// object owned by `server`. Returns nil on failure.
+ (RTSPClientConnection*) createWithSocket:(CFSocketNativeHandle) s server:(RTSPServer*) server;

// Delivers encoded video for RTP packetization at presentation time `pts`.
// NOTE(review): element type of `data` is not visible in this listing —
// presumably NSData-wrapped NAL units; confirm against the caller.
- (void) onVideoData:(NSArray*) data time:(double) pts;

// Tears down the session and closes the control socket.
- (void) shutdown;

@end
實施文件
#import "RTSPClientConnection.h"
#import "RTSPMessage.h"
#import "NALUnit.h"
#import "arpa/inet.h"
// Stores the 16-bit value `s` at `p` — per the name, presumably in network
// (big-endian) byte order for RTP/RTCP header fields. Body elided in this
// listing; confirm against the original source.
void tonet_short(uint8_t* p, unsigned short s)
{
//Code
}
// Stores the 32-bit value `l` at `p` — presumably in network (big-endian)
// byte order, mirroring tonet_short. Body elided in this listing; confirm
// against the original source.
void tonet_long(uint8_t* p, unsigned long l)
{
//Code
}
// Lookup table for the standard Base64 alphabet (RFC 4648), used by the
// encode helpers below.
static const char* Base64Mapping = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
// Maximum RTP packet payload, in bytes — presumably kept below a typical
// Ethernet MTU to avoid IP fragmentation; confirm against the sender code.
static const int max_packet_size = 1200;
// Presumably encodes `val` as a Base64 string padded to `nPad` characters.
// Body elided in this listing; confirm against the original source.
NSString* encodeLong(unsigned long val, int nPad)
{
//Code
}
// Presumably Base64-encodes `data` using Base64Mapping (e.g. for SDP
// parameter sets). Body elided in this listing; confirm against the
// original source.
NSString* encodeToBase64(NSData* data)
{
//Code
}
// RTSP session state machine for a single client connection.
// NS_ENUM (instead of a plain C enum) fixes the underlying type, enables
// switch-exhaustiveness warnings, and makes `ServerState` usable as a type
// name in plain Objective-C (the bare `enum` tag only works as a type in
// Objective-C++). Values are unchanged.
typedef NS_ENUM(NSInteger, ServerState)
{
    ServerIdle,   // no session negotiated yet
    Setup,        // SETUP received: transport negotiated, not yet streaming
    Playing,      // PLAY received: RTP packets are being sent
};
// Class extension: private state and methods for one client connection,
// kept out of the public header.
@interface RTSPClientConnection ()
{
CFSocketRef _s;            // RTSP control (TCP) socket for this client
RTSPServer* _server;       // owning server (lifetime managed by server)
CFRunLoopSourceRef _rls;   // run-loop source for _s on the main run loop
CFDataRef _addrRTP;        // client address for RTP packets
CFSocketRef _sRTP;         // UDP socket used to send RTP
CFDataRef _addrRTCP;       // client address for RTCP packets
CFSocketRef _sRTCP;        // UDP socket used to send RTCP
NSString* _session;        // RTSP session identifier
ServerState _state;        // current point in the RTSP state machine
long _packets;             // RTP packets sent so far
long _bytesSent;           // RTP payload bytes sent so far
long _ssrc;                // RTP synchronization source identifier
BOOL _bFirst;              // presumably true until the first packet is sent — confirm
// time mapping using NTP
uint64_t _ntpBase;         // NTP timestamp base for RTCP sender reports
uint64_t _rtpBase;         // RTP timestamp corresponding to _ntpBase
double _ptsBase;           // presentation timestamp corresponding to the bases
// RTCP stats
long _packetsReported;     // packet count as of the last sender report
long _bytesReported;       // byte count as of the last sender report
NSDate* _sentRTCP;         // when the last RTCP sender report went out
// reader reports
CFSocketRef _recvRTCP;     // socket receiving RTCP receiver reports
CFRunLoopSourceRef _rlsRTCP; // run-loop source for _recvRTCP
}
// Designated initializer backing +createWithSocket:server:.
- (RTSPClientConnection*) initWithSocket:(CFSocketNativeHandle) s Server:(RTSPServer*) server;
// Handles inbound RTSP request bytes from the control socket.
- (void) onSocketData:(CFDataRef)data;
// Handles inbound RTCP receiver reports.
- (void) onRTCP:(CFDataRef) data;
@end
// CFSocket callback for the RTSP control socket. The socket context's
// `info` pointer carries the owning RTSPClientConnection; data callbacks
// are forwarded to it and any other event type is merely logged.
static void onSocket(CFSocketRef s,
                     CFSocketCallBackType callbackType,
                     CFDataRef address,
                     const void *data,
                     void *info)
{
    RTSPClientConnection* connection = (__bridge RTSPClientConnection*)info;
    if (callbackType == kCFSocketDataCallBack)
    {
        [connection onSocketData:(CFDataRef)data];
    }
    else
    {
        NSLog(@"unexpected socket event");
    }
}
// CFSocket callback for the RTCP receive socket. Mirrors onSocket(): the
// connection object is recovered from the context's `info` pointer and
// data callbacks are forwarded; anything else is logged.
static void onRTCP(CFSocketRef s,
                   CFSocketCallBackType callbackType,
                   CFDataRef address,
                   const void *data,
                   void *info)
{
    RTSPClientConnection* connection = (__bridge RTSPClientConnection*)info;
    if (callbackType == kCFSocketDataCallBack)
    {
        [connection onRTCP:(CFDataRef)data];
    }
    else
    {
        NSLog(@"unexpected socket event");
    }
}
@implementation RTSPClientConnection

// Factory: wraps a freshly-accepted native TCP socket in a connection
// object owned by `server`. Returns nil on failure.
+ (RTSPClientConnection*) createWithSocket:(CFSocketNativeHandle) s server:(RTSPServer*) server
{
    // Fix: the original called [RTSPClientConnection alloc] and then
    // discarded the return value of init. alloc/init must be paired and
    // init's result used — init may legitimately return nil or a
    // different object.
    return [[RTSPClientConnection alloc] initWithSocket:s Server:server];
}

// Designated initializer. Wraps the native socket in a CFSocket and
// registers it with the main run loop so RTSP requests arrive via the
// onSocket() callback.
- (RTSPClientConnection*) initWithSocket:(CFSocketNativeHandle)s Server:(RTSPServer *)server
{
    self = [super init];    // fix: the original never chained to super's init
    if (self != nil)
    {
        _state = ServerIdle;
        _server = server;
        CFSocketContext info;
        memset(&info, 0, sizeof(info));
        // NOTE(review): this +1 bridging retain keeps the connection alive
        // for the callback, but it is never balanced with CFBridgingRelease,
        // so the object leaks after shutdown — confirm and balance during
        // teardown.
        info.info = (void*)CFBridgingRetain(self);
        _s = CFSocketCreateWithNative(nil, s, kCFSocketDataCallBack, onSocket, &info);
        _rls = CFSocketCreateRunLoopSource(nil, _s, 0);
        CFRunLoopAddSource(CFRunLoopGetMain(), _rls, kCFRunLoopCommonModes);
    }
    return self;
}

// Parses inbound RTSP request bytes from the client. (Body elided in this
// listing.)
- (void) onSocketData:(CFDataRef)data
{
//Code
}

// Builds the SDP description returned in the DESCRIBE response. (Body
// elided in this listing.)
- (NSString*) makeSDP
{
//Code
}

// Called by the server with encoded video to packetize and send over RTP
// at presentation time `pts`. (Body elided in this listing.)
- (void) onVideoData:(NSArray*) data time:(double) pts
{
//Code
}

// Presumably writes the RTP header into `packet` (marker bit and timestamp
// derived from `pts`) — body elided in this listing; confirm.
- (void) writeHeader:(uint8_t*) packet marker:(BOOL) bMarker time:(double) pts
{
//Code
}

// Presumably sends `cBytes` bytes of `packet` over the RTP socket — body
// elided in this listing; confirm.
- (void) sendPacket:(uint8_t*) packet length:(int) cBytes
{
//Code
}

// Receiver reports are currently ignored.
- (void) onRTCP:(CFDataRef) data
{
    // NSLog(@"RTCP recv");
}

// Releases the RTP/RTCP transport resources. (Body elided in this listing.)
- (void) tearDown
{
    // Code
}

// Tears down the session and closes the control socket. Safe to call more
// than once.
- (void) shutdown
{
    [self tearDown];
    @synchronized(self)
    {
        if (_s != NULL)   // fix: guard against double shutdown
        {
            CFSocketInvalidate(_s);
            CFRelease(_s);   // fix: balance the Create-rule reference taken in init
            _s = NULL;       // fix: CFSocketRef is a CF type — use NULL, not nil
        }
    }
}
@end
謝謝。
最近四個月我一直在努力,沒有簡單的方法可以做到。 您必須編寫自己的RTCP協議。
編輯:我聽說過RTPDump,但我從未嘗試過
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.