I am new to iOS programming and am building an iPhone app that captures video and streams it live through a media server. I got sample code from this link, but that code turns the iPhone into an RTSP server, so the media server has to pull the stream from the iPhone. The current setup works well, but I would prefer the iPhone to push the live stream to the media server, since I don't think the server should have to pull streams from every iPhone user in real time; the connection ought to be made in the other direction (please correct me if I'm wrong). What is the right way to do this? Also, which frameworks or built-in features does Apple provide for sending a live stream to a server, with the iPhone acting as the client that initiates the connection? (A rough sketch of what I mean is after the code below.)
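For context, the connection class below is driven from the device's camera. As far as I understand the sample, the capture side is an AVFoundation AVCaptureSession whose frames eventually reach onVideoData:time:. Here is a minimal sketch of that capture side; the CaptureSource class name and the queue label are my own placeholders, and error handling is omitted:

#import <AVFoundation/AVFoundation.h>

// Placeholder capture class: delivers raw camera frames to a delegate callback.
@interface CaptureSource : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, strong) AVCaptureSession* session;
@end

@implementation CaptureSource

- (void) start
{
    self.session = [[AVCaptureSession alloc] init];

    // camera input
    AVCaptureDevice* camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:nil];
    if ([self.session canAddInput:input])
        [self.session addInput:input];

    // uncompressed frames are delivered to the delegate method below on a serial queue
    AVCaptureVideoDataOutput* output = [[AVCaptureVideoDataOutput alloc] init];
    [output setSampleBufferDelegate:self
                              queue:dispatch_queue_create("capture.queue", DISPATCH_QUEUE_SERIAL)];
    if ([self.session canAddOutput:output])
        [self.session addOutput:output];

    [self.session startRunning];
}

- (void) captureOutput:(AVCaptureOutput*) captureOutput
 didOutputSampleBuffer:(CMSampleBufferRef) sampleBuffer
        fromConnection:(AVCaptureConnection*) connection
{
    // as I understand the sample, the frames are H264-encoded and the resulting
    // NAL units are what get passed to -[RTSPClientConnection onVideoData:time:]
}

@end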
Please see the code:
Header file
#import <Foundation/Foundation.h>
#import "RTSPServer.h"
@interface RTSPClientConnection : NSObject
+ (RTSPClientConnection*) createWithSocket:(CFSocketNativeHandle) s server:(RTSPServer*) server;
- (void) onVideoData:(NSArray*) data time:(double) pts;
- (void) shutdown;
@end
Implementation file
#import "RTSPClientConnection.h"
#import "RTSPMessage.h"
#import "NALUnit.h"
#import "arpa/inet.h"
// write the value into the buffer in network (big-endian) byte order
void tonet_short(uint8_t* p, unsigned short s)
{
    //Code
}

// write the value into the buffer in network (big-endian) byte order
void tonet_long(uint8_t* p, unsigned long l)
{
    //Code
}
static const char* Base64Mapping = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
static const int max_packet_size = 1200;
// Base64-encode an integer value, padded to nPad characters
NSString* encodeLong(unsigned long val, int nPad)
{
    //Code
}

// Base64-encode the contents of an NSData blob using the mapping above
NSString* encodeToBase64(NSData* data)
{
    //Code
}
enum ServerState
{
    ServerIdle,
    Setup,
    Playing,
};
@interface RTSPClientConnection ()
{
    CFSocketRef _s;
    RTSPServer* _server;
    CFRunLoopSourceRef _rls;

    CFDataRef _addrRTP;
    CFSocketRef _sRTP;
    CFDataRef _addrRTCP;
    CFSocketRef _sRTCP;
    NSString* _session;
    ServerState _state;
    long _packets;
    long _bytesSent;
    long _ssrc;
    BOOL _bFirst;

    // time mapping using NTP
    uint64_t _ntpBase;
    uint64_t _rtpBase;
    double _ptsBase;

    // RTCP stats
    long _packetsReported;
    long _bytesReported;
    NSDate* _sentRTCP;

    // reader reports
    CFSocketRef _recvRTCP;
    CFRunLoopSourceRef _rlsRTCP;
}
- (RTSPClientConnection*) initWithSocket:(CFSocketNativeHandle) s Server:(RTSPServer*) server;
- (void) onSocketData:(CFDataRef)data;
- (void) onRTCP:(CFDataRef) data;
@end
// C callback for the RTSP TCP socket: bridge back to the Objective-C connection object
static void onSocket(CFSocketRef s,
                     CFSocketCallBackType callbackType,
                     CFDataRef address,
                     const void *data,
                     void *info)
{
    RTSPClientConnection* conn = (__bridge RTSPClientConnection*)info;
    switch (callbackType)
    {
        case kCFSocketDataCallBack:
            [conn onSocketData:(CFDataRef) data];
            break;
        default:
            NSLog(@"unexpected socket event");
            break;
    }
}

// C callback for the RTCP socket: forward receiver reports to the connection object
static void onRTCP(CFSocketRef s,
                   CFSocketCallBackType callbackType,
                   CFDataRef address,
                   const void *data,
                   void *info)
{
    RTSPClientConnection* conn = (__bridge RTSPClientConnection*)info;
    switch (callbackType)
    {
        case kCFSocketDataCallBack:
            [conn onRTCP:(CFDataRef) data];
            break;
        default:
            NSLog(@"unexpected socket event");
            break;
    }
}
@implementation RTSPClientConnection
+ (RTSPClientConnection*) createWithSocket:(CFSocketNativeHandle) s server:(RTSPServer*) server
{
    // return a fully initialised connection for the accepted socket (or nil on failure)
    return [[RTSPClientConnection alloc] initWithSocket:s Server:server];
}

- (RTSPClientConnection*) initWithSocket:(CFSocketNativeHandle)s Server:(RTSPServer *)server
{
    self = [super init];
    if (self)
    {
        _state = ServerIdle;
        _server = server;

        // wrap the accepted native socket in a CFSocket and listen for data on the main run loop
        CFSocketContext info;
        memset(&info, 0, sizeof(info));
        info.info = (void*)CFBridgingRetain(self);
        _s = CFSocketCreateWithNative(nil, s, kCFSocketDataCallBack, onSocket, &info);

        _rls = CFSocketCreateRunLoopSource(nil, _s, 0);
        CFRunLoopAddSource(CFRunLoopGetMain(), _rls, kCFRunLoopCommonModes);
    }
    return self;
}
- (void) onSocketData:(CFDataRef)data
{
    // handle an incoming RTSP request from the connected client
    //Code
}

- (NSString*) makeSDP
{
    // build the SDP description of the video stream
    //Code
}

- (void) onVideoData:(NSArray*) data time:(double) pts
{
    // called with encoded video NAL units; packetize them as RTP and send
    //Code
}

- (void) writeHeader:(uint8_t*) packet marker:(BOOL) bMarker time:(double) pts
{
    // fill in the RTP header (sequence number, timestamp, marker bit) for this packet
    //Code
}

- (void) sendPacket:(uint8_t*) packet length:(int) cBytes
{
    // send one RTP packet and update the byte/packet statistics
    //Code
}

- (void) onRTCP:(CFDataRef) data
{
    // NSLog(@"RTCP recv");
}

- (void) tearDown
{
    // close the RTP/RTCP sockets for this session
    // Code
}

- (void) shutdown
{
    [self tearDown];
    @synchronized(self)
    {
        CFSocketInvalidate(_s);
        _s = nil;
    }
}
@end
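To make the question more concrete, what I have in mind for the push model is that the app itself opens an outbound connection to the media server and writes the encoded frames to it, roughly like the sketch below. The host name, the port, and how the video data would need to be framed (RTP, RTMP, MPEG-TS, ...) are placeholders; that framing is exactly the part I am unsure about.

// Sketch of the push direction: the phone connects out to the server
// (placeholder host/port) instead of waiting for the server to connect to the phone.
#import <Foundation/Foundation.h>

@interface PushConnection : NSObject <NSStreamDelegate>
@property (nonatomic, strong) NSOutputStream* output;
@end

@implementation PushConnection

- (void) connect
{
    CFReadStreamRef readStream = NULL;
    CFWriteStreamRef writeStream = NULL;

    // open an outbound TCP connection from the phone to the server
    CFStreamCreatePairWithSocketToHost(kCFAllocatorDefault,
                                       CFSTR("media.example.com"),   // placeholder host
                                       1935,                         // placeholder port
                                       &readStream,
                                       &writeStream);

    self.output = (__bridge_transfer NSOutputStream*)writeStream;
    if (readStream) CFRelease(readStream);

    [self.output setDelegate:self];
    [self.output scheduleInRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode];
    [self.output open];
}

- (void) sendVideoData:(NSData*) nalUnit
{
    // how the bytes must be wrapped (RTP over TCP? RTMP? something else?)
    // depends on what the media server expects; this is the open question
    if ([self.output hasSpaceAvailable])
        [self.output write:(const uint8_t*)nalUnit.bytes maxLength:nalUnit.length];
}

@end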
Thanks.