![](/img/trans.png)
[英]Can RTSP (Real Time Streaming Protocol) be used to send live video stream from iPhone to a media server?
[英]How to connect to a media server using Real Time Streaming Protocol (RTSP) from an iOS application?
我是iOS编程的新手,正在研究iPhone应用程序以捕获视频并通过媒体服务器实时流式传输视频。 我从此链接中获得了示例代码。 但是此代码使iPhone成为RTSP服务器,而媒体服务器必须从iPhone中提取流。 即使当前的设置工作正常,我还是希望iPhone将实时流推送到媒体服务器,因为我相信服务器不会实时尝试从所有iPhone用户那里获取流。 连接应该以其他方式进行以使其正常工作(如果我错了,请纠正我)。 请告诉我执行此操作的正确方法是什么。 还请告诉我Apple提供了哪些框架或内置功能,可用于将实时流发送到服务器,从而使iPhone作为客户端工作并启动连接。
请检查代码:
头文件
#import <Foundation/Foundation.h>
#import "RTSPServer.h"
// One client connection accepted by the RTSP server. Created per incoming
// TCP connection; receives encoded video frames via -onVideoData:time: and
// (judging from the RTP ivars/methods in the .m) packetizes them as RTP.
@interface RTSPClientConnection : NSObject
// Factory: wraps the accepted native socket handle `s`. `server` is the
// owning RTSPServer. Returns nil on initialization failure.
+ (RTSPClientConnection*) createWithSocket:(CFSocketNativeHandle) s server:(RTSPServer*) server;
// Delivers one frame's NAL units (`data`) with presentation timestamp `pts`
// in seconds — assumed units; TODO confirm against the caller.
- (void) onVideoData:(NSArray*) data time:(double) pts;
// Tears down streaming state and invalidates the control socket.
- (void) shutdown;
@end
实现文件
#import "RTSPClientConnection.h"
#import "RTSPMessage.h"
#import "NALUnit.h"
#import "arpa/inet.h"
// Writes the 16-bit value `s` into buffer `p` in network (big-endian) byte
// order. NOTE(review): body elided in the original post ("//Code");
// presumably p[0] = high byte, p[1] = low byte — confirm with the full sample.
void tonet_short(uint8_t* p, unsigned short s)
{
//Code
}
// Writes the low 32 bits of `l` into buffer `p` in network (big-endian) byte
// order. NOTE(review): body elided in the original post ("//Code") — confirm
// width and byte order with the full sample.
void tonet_long(uint8_t* p, unsigned long l)
{
//Code
}
// Standard Base64 alphabet (RFC 4648 table), used by encodeToBase64.
static const char* Base64Mapping = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
// Upper bound on RTP packet payload size in bytes — presumably chosen to stay
// under a typical Ethernet MTU and avoid IP fragmentation; TODO confirm.
static const int max_packet_size = 1200;
// Formats `val` as a string padded to `nPad` characters — likely hex/decimal
// padding for SDP/RTSP fields, but the body is elided in the original post
// ("//Code"); confirm format against the full sample.
NSString* encodeLong(unsigned long val, int nPad)
{
//Code
}
// Base64-encodes `data` using the Base64Mapping alphabet above (used for
// embedding SPS/PPS in the SDP, judging from the surrounding RTSP code).
// Body elided in the original post ("//Code").
NSString* encodeToBase64(NSData* data)
{
//Code
}
// Connection lifecycle state for the RTSP session state machine.
// Converted from a plain C enum to NS_ENUM: gives an explicit underlying
// type, enables switch-exhaustiveness warnings, and makes the bare
// `ServerState` type name valid in plain Objective-C (the original relied on
// Objective-C++ for `ServerState _state;` to compile). Member names and
// ordering are unchanged, so all existing uses remain valid.
typedef NS_ENUM(NSInteger, ServerState)
{
    ServerIdle,   // no SETUP received yet
    Setup,        // SETUP completed, awaiting PLAY
    Playing,      // actively streaming
};
// Class extension: private state and methods for RTSPClientConnection.
@interface RTSPClientConnection ()
{
    CFSocketRef _s;              // RTSP control (TCP) socket
    RTSPServer* _server;         // owning server
    CFRunLoopSourceRef _rls;     // run-loop source for _s
    CFDataRef _addrRTP;          // client address for RTP packets
    CFSocketRef _sRTP;           // outbound RTP socket
    CFDataRef _addrRTCP;         // client address for RTCP packets
    CFSocketRef _sRTCP;          // outbound RTCP socket
    NSString* _session;          // RTSP session identifier
    ServerState _state;          // current session state (see ServerState)
    long _packets;               // RTP packets sent
    long _bytesSent;             // RTP payload bytes sent
    long _ssrc;                  // RTP synchronization source identifier
    BOOL _bFirst;                // presumably "first packet/frame" flag — confirm
    // time mapping using NTP
    uint64_t _ntpBase;           // NTP timestamp base for RTCP sender reports
    uint64_t _rtpBase;           // RTP timestamp base paired with _ntpBase
    double _ptsBase;             // presentation-time base paired with the above
    // RTCP stats
    long _packetsReported;       // packet count as of the last sender report
    long _bytesReported;         // byte count as of the last sender report
    NSDate* _sentRTCP;           // time the last RTCP sender report was sent
    // reader reports
    CFSocketRef _recvRTCP;       // inbound RTCP (receiver report) socket
    CFRunLoopSourceRef _rlsRTCP; // run-loop source for _recvRTCP
}
// Designated initializer used by +createWithSocket:server:.
- (RTSPClientConnection*) initWithSocket:(CFSocketNativeHandle) s Server:(RTSPServer*) server;
// Handles raw bytes arriving on the RTSP control connection.
- (void) onSocketData:(CFDataRef)data;
// Handles an incoming RTCP receiver report.
- (void) onRTCP:(CFDataRef) data;
@end
// CFSocket callback for the RTSP control connection. `info` carries the
// (__bridge) RTSPClientConnection installed via CFSocketContext at creation;
// data callbacks are forwarded to the connection, anything else is logged.
static void onSocket(CFSocketRef s,
                     CFSocketCallBackType callbackType,
                     CFDataRef address,
                     const void *data,
                     void *info)
{
    RTSPClientConnection* connection = (__bridge RTSPClientConnection*)info;
    if (callbackType == kCFSocketDataCallBack)
    {
        [connection onSocketData:(CFDataRef)data];
    }
    else
    {
        NSLog(@"unexpected socket event");
    }
}
// CFSocket callback for the inbound RTCP (receiver report) socket. Mirrors
// onSocket(): `info` is the (__bridge) RTSPClientConnection; data callbacks
// are forwarded, anything else is logged.
static void onRTCP(CFSocketRef s,
                   CFSocketCallBackType callbackType,
                   CFDataRef address,
                   const void *data,
                   void *info)
{
    RTSPClientConnection* connection = (__bridge RTSPClientConnection*)info;
    if (callbackType == kCFSocketDataCallBack)
    {
        [connection onRTCP:(CFDataRef)data];
    }
    else
    {
        NSLog(@"unexpected socket event");
    }
}
@implementation RTSPClientConnection

// Factory: wraps an accepted native socket in a new connection object.
// Returns nil when initialization fails.
+ (RTSPClientConnection*) createWithSocket:(CFSocketNativeHandle) s server:(RTSPServer*) server
{
    // Fix: the original sent -initWithSocket:Server: to the +alloc result but
    // returned the raw alloc'd pointer, discarding whatever init returned.
    // init may legitimately return nil or a substituted object, so always use
    // the value init returns (the canonical [[X alloc] init...] pattern).
    return [[RTSPClientConnection alloc] initWithSocket:s Server:server];
}

// Designated initializer. Attaches a CFSocket (with this object as its
// context) to the main run loop so RTSP requests arrive via onSocket().
- (RTSPClientConnection*) initWithSocket:(CFSocketNativeHandle)s Server:(RTSPServer *)server
{
    // Fix: the original never invoked [super init]; use the canonical
    // self = [super init] pattern so NSObject initialization runs.
    self = [super init];
    if (self != nil)
    {
        _state = ServerIdle;
        _server = server;

        CFSocketContext info;
        memset(&info, 0, sizeof(info));
        // NOTE(review): CFBridgingRetain hands ownership of self to the
        // socket context, but nothing visible here ever balances it with
        // CFBridgingRelease, which looks like a leak of the connection
        // object — confirm against the full sample and release it after
        // CFSocketInvalidate in -shutdown.
        info.info = (void*)CFBridgingRetain(self);

        _s = CFSocketCreateWithNative(nil, s, kCFSocketDataCallBack, onSocket, &info);
        _rls = CFSocketCreateRunLoopSource(nil, _s, 0);
        CFRunLoopAddSource(CFRunLoopGetMain(), _rls, kCFRunLoopCommonModes);
    }
    return self;
}

// Handles raw bytes arriving on the RTSP control connection (body elided in
// the original post).
- (void) onSocketData:(CFDataRef)data
{
//Code
}

// Builds the SDP session description, presumably for the DESCRIBE response
// (body elided in the original post).
- (NSString*) makeSDP
{
//Code
}

// Receives one frame's NAL units with presentation timestamp `pts` and,
// judging by the RTP state in the class extension, packetizes them as RTP
// (body elided in the original post).
- (void) onVideoData:(NSArray*) data time:(double) pts
{
//Code
}

// Writes an RTP header into `packet`; `bMarker` presumably sets the RTP
// marker bit and `pts` supplies the timestamp (body elided in the original
// post — confirm).
- (void) writeHeader:(uint8_t*) packet marker:(BOOL) bMarker time:(double) pts
{
//Code
}

// Sends `cBytes` bytes of `packet`, presumably over the RTP socket (body
// elided in the original post).
- (void) sendPacket:(uint8_t*) packet length:(int) cBytes
{
//Code
}

// Inbound RTCP (receiver report) handler; intentionally a no-op here.
- (void) onRTCP:(CFDataRef) data
{
    // NSLog(@"RTCP recv");
}

// Releases RTP/RTCP streaming state (body elided in the original post).
- (void) tearDown
{
    // Code
}

// Shuts the connection down: tears down streaming state, then invalidates
// the control socket. @synchronized guards the socket against concurrent
// invalidation/use.
- (void) shutdown
{
    [self tearDown];
    @synchronized(self)
    {
        CFSocketInvalidate(_s);
        _s = nil;
    }
}
@end
谢谢。
最近四个月我一直在努力,没有简单的方法可以做到。 您必须编写自己的RTCP协议。
编辑:我听说过RTPDump,但我从未尝试过
声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.