
Hi folks, for the past few days I have been writing code to draw a waveform visualization of a song. I render the waveform using AVAssetReader; can anyone tell me which method I should call while my song is playing so that the visualization is shown? Here is my code.

First, a generic rendering method that takes a pointer to averaged sample data and returns a UIImage. Note that these samples are not playable audio samples.

-(UIImage *) audioImageGraph:(SInt16 *) samples
            normalizeMax:(SInt16) normalizeMax
             sampleCount:(NSInteger) sampleCount 
            channelCount:(NSInteger) channelCount
             imageHeight:(float) imageHeight {

CGSize imageSize = CGSizeMake(sampleCount, imageHeight);
UIGraphicsBeginImageContext(imageSize);
CGContextRef context = UIGraphicsGetCurrentContext();

CGContextSetFillColorWithColor(context, [UIColor blackColor].CGColor);
CGContextSetAlpha(context,1.0);
CGRect rect;
rect.size = imageSize;
rect.origin.x = 0;
rect.origin.y = 0;

CGColorRef leftcolor = [[UIColor whiteColor] CGColor];
CGColorRef rightcolor = [[UIColor redColor] CGColor];

CGContextFillRect(context, rect);

CGContextSetLineWidth(context, 1.0);

float halfGraphHeight = (imageHeight / 2) / (float) channelCount ;
float centerLeft = halfGraphHeight;
float centerRight = (halfGraphHeight*3) ; 
float sampleAdjustmentFactor = (imageHeight/ (float) channelCount) / (float) normalizeMax;

for (NSInteger intSample = 0 ; intSample < sampleCount ; intSample ++ ) {
    SInt16 left = *samples++;
    float pixels = (float) left;
    pixels *= sampleAdjustmentFactor;
    CGContextMoveToPoint(context, intSample, centerLeft-pixels);
    CGContextAddLineToPoint(context, intSample, centerLeft+pixels);
    CGContextSetStrokeColorWithColor(context, leftcolor);
    CGContextStrokePath(context);

    if (channelCount==2) {
        SInt16 right = *samples++;
        float pixels = (float) right;
        pixels *= sampleAdjustmentFactor;
        CGContextMoveToPoint(context, intSample, centerRight - pixels);
        CGContextAddLineToPoint(context, intSample, centerRight + pixels);
        CGContextSetStrokeColorWithColor(context, rightcolor);
        CGContextStrokePath(context); 
    }
}

// Create new image
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();

// Tidy up
UIGraphicsEndImageContext();   

return newImage;

}

Next, a method that takes an AVURLAsset and returns PNG image data:

- (NSData *) renderPNGAudioPictogramForAssett:(AVURLAsset *)songAsset
 {

NSError * error = nil;


AVAssetReader * reader = [[AVAssetReader alloc] initWithAsset:songAsset error:&error];

AVAssetTrack * songTrack = [songAsset.tracks objectAtIndex:0];

NSDictionary* outputSettingsDict = [[NSDictionary alloc] initWithObjectsAndKeys:

                                    [NSNumber     numberWithInt:kAudioFormatLinearPCM],AVFormatIDKey,
                                    //     [NSNumber     numberWithInt:44100.0],AVSampleRateKey, /*Not Supported*/
                                    //     [NSNumber numberWithInt: 2],AVNumberOfChannelsKey,    /*Not Supported*/

                                    [NSNumber numberWithInt:16],AVLinearPCMBitDepthKey,
                                    [NSNumber numberWithBool:NO],AVLinearPCMIsBigEndianKey,
                                    [NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
                                    [NSNumber numberWithBool:NO],AVLinearPCMIsNonInterleaved,

                                    nil];


AVAssetReaderTrackOutput* output = [[AVAssetReaderTrackOutput alloc] initWithTrack:songTrack outputSettings:outputSettingsDict];

[reader addOutput:output];
[output release];
[outputSettingsDict release]; // release the alloc'd settings dictionary to avoid a leak

UInt32 sampleRate,channelCount;

NSArray* formatDesc = songTrack.formatDescriptions;
for(unsigned int i = 0; i < [formatDesc count]; ++i) {
    CMAudioFormatDescriptionRef item = (CMAudioFormatDescriptionRef)[formatDesc objectAtIndex:i];
    const AudioStreamBasicDescription* fmtDesc = CMAudioFormatDescriptionGetStreamBasicDescription (item);
    if(fmtDesc ) {

        sampleRate = fmtDesc->mSampleRate;
        channelCount = fmtDesc->mChannelsPerFrame;

        //    NSLog(@"channels:%u, bytes/packet: %u, sampleRate %f",fmtDesc->mChannelsPerFrame, fmtDesc->mBytesPerPacket,fmtDesc->mSampleRate);
    }
}


UInt32 bytesPerSample = 2 * channelCount;
SInt16 normalizeMax = 0;

NSMutableData * fullSongData = [[NSMutableData alloc] init];
[reader startReading];


UInt64 totalBytes = 0; 


SInt64 totalLeft = 0;
SInt64 totalRight = 0;
NSInteger sampleTally = 0;

NSInteger samplesPerPixel = sampleRate / 50;


while (reader.status == AVAssetReaderStatusReading){

    AVAssetReaderTrackOutput * trackOutput = (AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
    CMSampleBufferRef sampleBufferRef = [trackOutput copyNextSampleBuffer];

    if (sampleBufferRef){
        CMBlockBufferRef blockBufferRef = CMSampleBufferGetDataBuffer(sampleBufferRef);

        size_t length = CMBlockBufferGetDataLength(blockBufferRef);
        totalBytes += length;


        NSAutoreleasePool *wader = [[NSAutoreleasePool alloc] init];

        NSMutableData * data = [NSMutableData dataWithLength:length];
        CMBlockBufferCopyDataBytes(blockBufferRef, 0, length, data.mutableBytes);


        SInt16 * samples = (SInt16 *) data.mutableBytes;
        int sampleCount = length / bytesPerSample;
        for (int i = 0; i < sampleCount ; i ++) {

            SInt16 left = *samples++;

            totalLeft  += left;



            SInt16 right;
            if (channelCount==2) {
                right = *samples++;

                totalRight += right;
            }

            sampleTally++;

            if (sampleTally > samplesPerPixel) {

                left  = totalLeft / sampleTally; 

                SInt16 fix = abs(left);
                if (fix > normalizeMax) {
                    normalizeMax = fix;
                }


                [fullSongData appendBytes:&left length:sizeof(left)];

                if (channelCount==2) {
                    right = totalRight / sampleTally; 


                    SInt16 fix = abs(right);
                    if (fix > normalizeMax) {
                        normalizeMax = fix;
                    }


                    [fullSongData appendBytes:&right length:sizeof(right)];
                }

                totalLeft   = 0;
                totalRight  = 0;
                sampleTally = 0;

            }
        }



        [wader drain];


        CMSampleBufferInvalidate(sampleBufferRef);

        CFRelease(sampleBufferRef);
    }
}


NSData * finalData = nil;

if (reader.status == AVAssetReaderStatusFailed || reader.status == AVAssetReaderStatusUnknown){
    // Something went wrong. return nil

    return nil;
}

if (reader.status == AVAssetReaderStatusCompleted){

    NSLog(@"rendering output graphics using normalizeMax %d",normalizeMax);

    UIImage *test = [self audioImageGraph:(SInt16 *) 
                     fullSongData.bytes 
                             normalizeMax:normalizeMax 
                              sampleCount:fullSongData.length / 4 
                             channelCount:2
                              imageHeight:100];

    finalData = imageToData(test);
}




[fullSongData release];
[reader release];

return finalData;

}
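
This method (and the log version further down) also calls an imageToData() helper that is not shown in the question. A minimal sketch, assuming it simply wraps the PNG encoder:

    // Hypothetical helper -- not part of the posted code.
    static NSData * imageToData(UIImage *image) {
        return UIImagePNGRepresentation(image);
    }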

Logarithmic versions of the averaging and rendering methods follow.
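
These log methods rely on a noiseFloor constant and the decibel() / minMaxX() helpers, which are not defined anywhere in the listing. A sketch of plausible definitions (the -50 dB floor is an assumption; adjust to taste):

    // Assumed helper macros -- not shown in the posted code. log10 comes from <math.h>.
    #define absX(x) ((x) < 0 ? 0 - (x) : (x))
    #define minMaxX(x,mn,mx) ((x) <= (mn) ? (mn) : ((x) >= (mx) ? (mx) : (x)))
    #define noiseFloor (-50.0)
    // Converts a 16-bit sample amplitude to dBFS (0 dB = full scale).
    #define decibel(amplitude) (20.0 * log10(absX(amplitude) / 32767.0))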

 -(UIImage *) audioImageLogGraph:(Float32 *) samples
               normalizeMax:(Float32) normalizeMax
                sampleCount:(NSInteger) sampleCount 
               channelCount:(NSInteger) channelCount
                imageHeight:(float) imageHeight {

CGSize imageSize = CGSizeMake(sampleCount, imageHeight);
UIGraphicsBeginImageContext(imageSize);
CGContextRef context = UIGraphicsGetCurrentContext();

CGContextSetFillColorWithColor(context, [UIColor blackColor].CGColor);
CGContextSetAlpha(context,1.0);
CGRect rect;
rect.size = imageSize;
rect.origin.x = 0;
rect.origin.y = 0;

CGColorRef leftcolor = [[UIColor whiteColor] CGColor];
CGColorRef rightcolor = [[UIColor redColor] CGColor];

CGContextFillRect(context, rect);

CGContextSetLineWidth(context, 1.0);

float halfGraphHeight = (imageHeight / 2) / (float) channelCount ;
float centerLeft = halfGraphHeight;
float centerRight = (halfGraphHeight*3) ; 
float sampleAdjustmentFactor = (imageHeight / (float) channelCount) / (normalizeMax - noiseFloor) / 2;

for (NSInteger intSample = 0 ; intSample < sampleCount ; intSample ++ ) {
    Float32 left = *samples++;
    float pixels = (left - noiseFloor) * sampleAdjustmentFactor;
    CGContextMoveToPoint(context, intSample, centerLeft-pixels);
    CGContextAddLineToPoint(context, intSample, centerLeft+pixels);
    CGContextSetStrokeColorWithColor(context, leftcolor);
    CGContextStrokePath(context);

    if (channelCount==2) {
        Float32 right = *samples++;
        float pixels = (right - noiseFloor) * sampleAdjustmentFactor;
        CGContextMoveToPoint(context, intSample, centerRight - pixels);
        CGContextAddLineToPoint(context, intSample, centerRight + pixels);
        CGContextSetStrokeColorWithColor(context, rightcolor);
        CGContextStrokePath(context); 
    }
}

// Create new image
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();

// Tidy up
UIGraphicsEndImageContext();   

return newImage;

}

 - (NSData *) renderPNGAudioPictogramLogForAssett:(AVURLAsset *)songAsset {

NSError * error = nil;


AVAssetReader * reader = [[AVAssetReader alloc] initWithAsset:songAsset error:&error];

AVAssetTrack * songTrack = [songAsset.tracks objectAtIndex:0];

NSDictionary* outputSettingsDict = [[NSDictionary alloc] initWithObjectsAndKeys:

                                    [NSNumber numberWithInt:kAudioFormatLinearPCM],AVFormatIDKey,
                                    //     [NSNumber numberWithInt:44100.0],AVSampleRateKey, /*Not Supported*/
                                    //     [NSNumber numberWithInt: 2],AVNumberOfChannelsKey,    /*Not Supported*/

                                    [NSNumber numberWithInt:16],AVLinearPCMBitDepthKey,
                                    [NSNumber numberWithBool:NO],AVLinearPCMIsBigEndianKey,
                                    [NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
                                    [NSNumber numberWithBool:NO],AVLinearPCMIsNonInterleaved,

                                    nil];


AVAssetReaderTrackOutput* output = [[AVAssetReaderTrackOutput alloc] initWithTrack:songTrack outputSettings:outputSettingsDict];

[reader addOutput:output];
[output release];
[outputSettingsDict release]; // release the alloc'd settings dictionary to avoid a leak

UInt32 sampleRate,channelCount;

NSArray* formatDesc = songTrack.formatDescriptions;
for(unsigned int i = 0; i < [formatDesc count]; ++i) {
    CMAudioFormatDescriptionRef item = (CMAudioFormatDescriptionRef)[formatDesc objectAtIndex:i];
    const AudioStreamBasicDescription* fmtDesc = CMAudioFormatDescriptionGetStreamBasicDescription (item);
    if(fmtDesc ) {

        sampleRate = fmtDesc->mSampleRate;
        channelCount = fmtDesc->mChannelsPerFrame;

        //    NSLog(@"channels:%u, bytes/packet: %u, sampleRate %f",fmtDesc->mChannelsPerFrame, fmtDesc->mBytesPerPacket,fmtDesc->mSampleRate);
    }
}


UInt32 bytesPerSample = 2 * channelCount;
Float32 normalizeMax = noiseFloor;
NSLog(@"normalizeMax = %f",normalizeMax);
NSMutableData * fullSongData = [[NSMutableData alloc] init];
[reader startReading];


UInt64 totalBytes = 0; 


Float64 totalLeft = 0;
Float64 totalRight = 0;
Float32 sampleTally = 0;

NSInteger samplesPerPixel = sampleRate / 50;


while (reader.status == AVAssetReaderStatusReading){

    AVAssetReaderTrackOutput * trackOutput = (AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
    CMSampleBufferRef sampleBufferRef = [trackOutput copyNextSampleBuffer];

    if (sampleBufferRef){
        CMBlockBufferRef blockBufferRef = CMSampleBufferGetDataBuffer(sampleBufferRef);

        size_t length = CMBlockBufferGetDataLength(blockBufferRef);
        totalBytes += length;


        NSAutoreleasePool *wader = [[NSAutoreleasePool alloc] init];

        NSMutableData * data = [NSMutableData dataWithLength:length];
        CMBlockBufferCopyDataBytes(blockBufferRef, 0, length, data.mutableBytes);


        SInt16 * samples = (SInt16 *) data.mutableBytes;
        int sampleCount = length / bytesPerSample;
        for (int i = 0; i < sampleCount ; i ++) {

            Float32 left = (Float32) *samples++;
            left = decibel(left);
            left = minMaxX(left,noiseFloor,0);

            totalLeft  += left;



            Float32 right;
            if (channelCount==2) {
                right = (Float32) *samples++;
                right = decibel(right);
                right = minMaxX(right,noiseFloor,0);

                totalRight += right;
            }

            sampleTally++;

            if (sampleTally > samplesPerPixel) {

                left  = totalLeft / sampleTally; 
                if (left > normalizeMax) {
                    normalizeMax = left;
                }

                // NSLog(@"left average = %f, normalizeMax = %f",left,normalizeMax);


                [fullSongData appendBytes:&left length:sizeof(left)];

                if (channelCount==2) {
                    right = totalRight / sampleTally; 


                    if (right > normalizeMax) {
                        normalizeMax = right;
                    }


                    [fullSongData appendBytes:&right length:sizeof(right)];
                }

                totalLeft   = 0;
                totalRight  = 0;
                sampleTally = 0;

            }
        }



        [wader drain];


        CMSampleBufferInvalidate(sampleBufferRef);

        CFRelease(sampleBufferRef);
    }
}


NSData * finalData = nil;

if (reader.status == AVAssetReaderStatusFailed || reader.status == AVAssetReaderStatusUnknown){
    // Something went wrong. Handle it.
}

if (reader.status == AVAssetReaderStatusCompleted){
    // You're done. It worked.

    NSLog(@"rendering output graphics using normalizeMax %f",normalizeMax);

    UIImage *test = [self audioImageLogGraph:(Float32 *) fullSongData.bytes 
                                normalizeMax:normalizeMax 
                                 sampleCount:fullSongData.length / (sizeof(Float32) * 2) 
                                channelCount:2
                                 imageHeight:100];

    finalData = imageToData(test);
}




[fullSongData release];
[reader release];

 return finalData;

}

  **Now the init method that does "the business"**


 - (id) initWithMPMediaItem:(MPMediaItem*) item 
       completionBlock:(void (^)(UIImage* delayedImagePreparation))completionBlock 
{

 NSFileManager *fman = [NSFileManager defaultManager];
 NSString *assetPictogramFilepath = [[self class] cachedAudioPictogramPathForMPMediaItem:item];

if ([fman fileExistsAtPath:assetPictogramFilepath]) {

    NSLog(@"Returning cached waveform pictogram: %@",[assetPictogramFilepath lastPathComponent]);

    self = [self initWithContentsOfFile:assetPictogramFilepath];
    return self;

}


NSString *assetFilepath = [[self class] cachedAudioFilepathForMPMediaItem:item];

NSURL *assetFileURL = [NSURL fileURLWithPath:assetFilepath];

if ([fman fileExistsAtPath:assetFilepath]) {

    NSLog(@"scanning cached audio data to create UIImage file: %@",[assetFilepath lastPathComponent]);

    [assetFileURL retain];
    [assetPictogramFilepath retain];

    [NSThread MCSM_performBlockInBackground: ^{


        AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:assetFileURL options:nil];
        NSData *waveFormData = [self renderPNGAudioPictogramForAssett:asset]; 

        [waveFormData writeToFile:assetPictogramFilepath atomically:YES];

        [assetFileURL release];

        [assetPictogramFilepath release];

        if (completionBlock) {

            [waveFormData retain];
            [NSThread MCSM_performBlockOnMainThread:^{


                 UIImage *result = [UIImage imageWithData:waveFormData];

                  NSLog(@"returning rendered pictogram on main thread (%d bytes %@ data in UIImage %0.0f x %0.0f pixels)",waveFormData.length, [imgExt uppercaseString], result.size.width, result.size.height);

                completionBlock(result);

                [waveFormData release];

            }];

        }


    }];

    return nil;

} else {



   NSString *assetFolder = [[self class] assetCacheFolder];

    [fman createDirectoryAtPath:assetFolder withIntermediateDirectories:YES attributes:nil error:nil];

    NSLog(@"Preparing to import audio asset data %@",[assetFilepath lastPathComponent]);

    [assetPictogramFilepath retain];
    [assetFileURL retain];

    TSLibraryImport* import = [[TSLibraryImport alloc] init];
    NSURL *assetURL = [item valueForProperty:MPMediaItemPropertyAssetURL];

    [import importAsset:assetURL toURL:assetFileURL completionBlock:^(TSLibraryImport* import) {
        //check the status and error properties of
        //TSLibraryImport


        if (import.error) {

            NSLog (@"audio data import failed:%@",import.error);


        } else{
            NSLog (@"Creating waveform pictogram file: %@", [assetPictogramFilepath lastPathComponent]);
            AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:assetFileURL options:nil];
            NSData *waveFormData = [self renderPNGAudioPictogramForAssett:asset]; 

            [waveFormData writeToFile:assetPictogramFilepath atomically:YES];


            if (completionBlock) {
                [waveFormData retain];
                [NSThread MCSM_performBlockOnMainThread:^
                {

                    UIImage *result = [UIImage imageWithData:waveFormData];
                    NSLog(@"returning rendered pictogram on main thread (%d bytes %@ data in UIImage %0.0f x %0.0f pixels)",waveFormData.length,[imgExt uppercaseString],result.size.width,result.size.height);

                    completionBlock(result);

                    [waveFormData release];

                }];

            }
        }

        [assetPictogramFilepath release];
        [assetFileURL release];


    }  ];

    return nil;
}

}
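
The init method references several cache-path class methods (assetCacheFolder, cachedAudioFilepathForMPMediaItem:, cachedAudioPictogramPathForMPMediaItem:, cachedAudioURLForMPMediaItem:) and an imgExt string that are not included in the question. A minimal sketch of what they might look like, assuming the files are keyed by the item's persistent ID and stored under Caches (paths and extensions are assumptions):

    // Assumed cache helpers -- not part of the posted code.
    static NSString * const imgExt = @"png";  // extension logged via [imgExt uppercaseString] above

    + (NSString *) assetCacheFolder {
        NSString *caches = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES) objectAtIndex:0];
        return [caches stringByAppendingPathComponent:@"audio"];
    }

    + (NSString *) cachedAudioFilepathForMPMediaItem:(MPMediaItem *)item {
        NSNumber *itemID = [item valueForProperty:MPMediaItemPropertyPersistentID];
        // .caf is a guess; TSLibraryImport may require a different extension for the exported file.
        return [[self assetCacheFolder] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.caf", itemID]];
    }

    + (NSURL *) cachedAudioURLForMPMediaItem:(MPMediaItem *)item {
        return [NSURL fileURLWithPath:[self cachedAudioFilepathForMPMediaItem:item]];
    }

    + (NSString *) cachedAudioPictogramPathForMPMediaItem:(MPMediaItem *)item {
        NSNumber *itemID = [item valueForProperty:MPMediaItemPropertyPersistentID];
        return [[self assetCacheFolder] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.%@", itemID, imgExt]];
    }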

An example of calling the initWithMPMediaItem: method:

  -(void) importMediaItem 

{

MPMediaItem* item = [self mediaItem];

// since we will be needing this for playback, save the url to the cached audio.
[url release];
url = [[UIImage cachedAudioURLForMPMediaItem:item] retain];


[waveFormImage release];

waveFormImage = [[UIImage alloc ] initWithMPMediaItem:item completionBlock:^(UIImage* delayedImagePreparation){

    waveFormImage = [delayedImagePreparation retain];
    [self displayWaveFormImage];

}];

if (waveFormImage) {
    [waveFormImage retain];
    [self displayWaveFormImage];
}

}

I have added all of the class methods, the libraries, and the NSThread category to my project, and everything compiles, but I still can't work out which method to call, and it is really confusing.
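
For reference, the MCSM_performBlockInBackground: / MCSM_performBlockOnMainThread: calls used above come from an NSThread category in the MCSMFoundation library. If that library isn't in your project, GCD-based stand-ins could look like this (a sketch, not the library's actual implementation; the category name is hypothetical):

    // Stand-ins for the MCSMFoundation NSThread category methods used above.
    @implementation NSThread (WaveformAdditions)

    + (void) MCSM_performBlockInBackground:(void (^)(void))block {
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), block);
    }

    + (void) MCSM_performBlockOnMainThread:(void (^)(void))block {
        dispatch_async(dispatch_get_main_queue(), block);
    }

    @end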


**1 Answer**


Create a view controller and add the importMediaItem method to it.

Make sure the media item is not nil. You can get the current track from the iPod player like this:

MPMusicPlayerController* player = [MPMusicPlayerController iPodMusicPlayer];
MPMediaItem *item = [player nowPlayingItem];

In the viewDidAppear method, call importMediaItem.

Add the displayWaveFormImage callback method and create a UIImageView from the returned image.

Make sure the iPod player is playing before you run the app.

Then everything works.
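
A minimal sketch of that view-controller glue, assuming the controller has a mediaItem property that importMediaItem reads and a waveFormImage ivar set by its completion block (as in the question's code); names like waveFormView are hypothetical:

    // Hypothetical wiring -- adjust property and ivar names to your project.
    - (void) viewDidAppear:(BOOL)animated {
        [super viewDidAppear:animated];

        MPMusicPlayerController *player = [MPMusicPlayerController iPodMusicPlayer];
        MPMediaItem *item = [player nowPlayingItem];
        if (item == nil) {
            NSLog(@"No track playing; start the iPod player first.");
            return;
        }

        self.mediaItem = item;    // assumed property read by importMediaItem
        [self importMediaItem];   // kicks off (or reuses) the waveform rendering
    }

    - (void) displayWaveFormImage {
        // Show the rendered pictogram; waveFormImage is set by importMediaItem or its completion block.
        UIImageView *waveFormView = [[UIImageView alloc] initWithImage:waveFormImage];
        waveFormView.frame = CGRectMake(0, 0, self.view.bounds.size.width, 100);
        waveFormView.contentMode = UIViewContentModeScaleAspectFit;
        [self.view addSubview:waveFormView];
        [waveFormView release];   // manual retain/release, matching the rest of the code
    }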
