
Is it possible to upload frames of captured video using ASIFormDataRequest?

I have this code:

    - (void)viewDidLoad
    {
        [super viewDidLoad];

        frameCount = 0;
        [myQueue cancelAllOperations];
        [myQueue setDelegate:self];
        NSString *urlString = @"url";
        NSURL *url = [NSURL URLWithString:urlString];
        request = [ASIFormDataRequest requestWithURL:url];

        objReturner = [[ReturnerClass alloc] init];
        NSError *deviceError;
        AVCaptureDevice *cameraDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        AVCaptureDeviceInput *inputDevice = [AVCaptureDeviceInput deviceInputWithDevice:cameraDevice error:&deviceError];

        // make the output device
        AVCaptureVideoDataOutput *outputDevice = [[AVCaptureVideoDataOutput alloc] init];
        outputDevice.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        [outputDevice setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

        // initialize the capture session
        self.captureSession = [[AVCaptureSession alloc] init];
        [self.captureSession addInput:inputDevice];
        [self.captureSession addOutput:outputDevice];

        // make a preview layer and add it so that the camera's view is displayed on screen
        AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
        previewLayer.frame = self.previewLayer.bounds;
        [self.previewLayer.layer addSublayer:previewLayer];
        [self.captureSession startRunning];
    }

    - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
    {
        //[connection setVideoMinFrameDuration:CMTimeMake(1, 30)];

        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

        /* Lock the image buffer */
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        /* Get information about the image */
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);

        /* Create a CGImageRef from the CVImageBufferRef */
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef newImage = CGBitmapContextCreateImage(newContext);

        /* Release the context and color space */
        CGContextRelease(newContext);
        CGColorSpaceRelease(colorSpace);

        /* Note: the sample buffer delegate was set up with dispatch_get_main_queue()
           in viewDidLoad, so this callback already runs on the main thread and UIKit
           calls are safe here; the commented-out performSelectorOnMainThread
           workarounds below are not needed in this setup. */
        //[self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject:(id)newImage waitUntilDone:YES];

        /* Rotate the image so the video is displayed in the correct orientation */
        UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];

        /* Release the CGImageRef */
        CGImageRelease(newImage);

        //[self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];

        /* Compress the frame to JPEG data (note: JPEG, even though it is uploaded under a .png name below) */
        NSData *imageData = UIImageJPEGRepresentation(image, 0.1);

        [self sendFrameToServer:imageData];

        /* Unlock the image buffer */
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    }


    - (void)sendFrameToServer:(NSData *)imageData
    {
        [request setData:imageData withFileName:[NSString stringWithFormat:@"%i_%@.png", frameCount, [objReturner returnTimeStamp]] andContentType:@"image/png" forKey:@"imagetoupload"];
        [request startSynchronous];
        [myQueue addOperation:request];
        NSLog(@"Value: %@", [request responseString]);
        frameCount++;
    }

myQueue and the request object are declared in my .h file.

The problem I am running into: the NSLog of the response always shows the first image that was sent to the server and never changes. For example, "Value: file 0_2013-03-14 19:27:34.png has been uploaded" stays the same the whole time, even though frameCount keeps increasing and the seconds in the timestamp never go up.

Is there a solution?


1 Answer


Try using ASIHTTPRequest's delegate methods instead, and create a fresh request for each upload. The request object built once in viewDidLoad is being started over and over, but an ASIHTTPRequest is a one-shot NSOperation and keeps reporting its first response, which is why the logged value never changes. For example:

    - (void)sendFrameToServer:(NSData *)imageData
    {
        NSURL *url = [NSURL URLWithString:@"YOUR_URL_TO_POST"];

        // build a brand-new request for every frame -- a request object cannot be reused
        ASIFormDataRequest *request = [ASIFormDataRequest requestWithURL:url];
        [request setUseKeychainPersistence:YES];
        [request setData:imageData withFileName:[NSString stringWithFormat:@"%i_%@.png", frameCount, [objReturner returnTimeStamp]] andContentType:@"image/png" forKey:@"imagetoupload"];
        [request setDelegate:self];
        [request setDidFinishSelector:@selector(uploadRequestFinished:)];
        [request setDidFailSelector:@selector(uploadRequestFailed:)];
        [request startAsynchronous];

        frameCount++;   // keep the counter advancing, as in the original method
    }
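One detail worth noting: UIImageJPEGRepresentation produces JPEG data, while the code above uploads it under a .png name with an image/png content type. A minimal sketch of the matching call, reusing the frameCount and objReturner names from the question:

    // label the JPEG data as JPEG (sketch; assumes the question's frameCount/objReturner ivars)
    [request setData:imageData
        withFileName:[NSString stringWithFormat:@"%i_%@.jpg", frameCount, [objReturner returnTimeStamp]]
      andContentType:@"image/jpeg"
              forKey:@"imagetoupload"];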

Now write your delegate methods:

    - (void)uploadRequestFinished:(ASIHTTPRequest *)request
    {
        NSString *responseString = [request responseString];
        NSLog(@"response--%@", responseString);
    }

    - (void)uploadRequestFailed:(ASIHTTPRequest *)request
    {
        NSLog(@"Error - Statistics file upload failed: \"%@\"", [[request error] localizedDescription]);
    }
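Also keep in mind that captureOutput:didOutputSampleBuffer:fromConnection: fires at the capture frame rate (up to roughly 30 fps), so starting one asynchronous upload per frame can flood the connection. A sketch of one way to throttle, again assuming the frameCount and objReturner ivars from the question; the modulus of 30 (about one upload per second) is an arbitrary choice:

    - (void)sendFrameToServer:(NSData *)imageData
    {
        // skip every frame whose index is not a multiple of 30,
        // so only about one frame per second is uploaded at ~30 fps
        if (frameCount++ % 30 != 0) {
            return;
        }

        NSURL *url = [NSURL URLWithString:@"YOUR_URL_TO_POST"];
        ASIFormDataRequest *request = [ASIFormDataRequest requestWithURL:url];
        [request setData:imageData withFileName:[NSString stringWithFormat:@"%i_%@.jpg", frameCount, [objReturner returnTimeStamp]] andContentType:@"image/jpeg" forKey:@"imagetoupload"];
        [request setDelegate:self];
        [request setDidFinishSelector:@selector(uploadRequestFinished:)];
        [request setDidFailSelector:@selector(uploadRequestFailed:)];
        [request startAsynchronous];
    }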

Hope it helps...

Answered 2013-03-15T05:02:32.977