0

我正在尝试更改文件输入的输入剪辑开始时间码和结束时间码,并将剪辑的视频保存到S3 存储桶中的文件输出目标

目前,我可以使用以下代码执行操作:

 using System;
 using System.Threading.Tasks;
 using Amazon.MediaConvert;
 using Amazon.MediaConvert.Model;

namespace MediaConvertNET
{

class Program
{
    /// <summary>
    /// Creates an AWS Elemental MediaConvert job that clips a file input
    /// (via <see cref="InputClipping"/> start/end timecodes) and encodes the
    /// clipped region to an MP4 written to an S3 File Group destination.
    /// </summary>
    /// <remarks>
    /// NOTE(review): credentials are hard-coded below for illustration only.
    /// In real code use the SDK default credential chain (environment
    /// variables, shared profile, or an instance/task role) instead.
    /// </remarks>
    static async Task MainAsync()
    {
        string mediaConvertRole = "Your AWS Elemental MediaConvert role ARN";
        string fileInput = "s3://yourinputfile";
        string fileOutput = "s3://youroutputdestination";
        string mediaConvertEndpoint = "";

        // MediaConvert requires a customer-specific endpoint; discover it
        // once via DescribeEndpoints if we do not already have it.
        if (string.IsNullOrEmpty(mediaConvertEndpoint))
        {
            AmazonMediaConvertClient client = new AmazonMediaConvertClient("AccessKey", "AccessSecret", Amazon.RegionEndpoint.USWest1);
            DescribeEndpointsRequest describeRequest = new DescribeEndpointsRequest();

            DescribeEndpointsResponse describeResponse = await client.DescribeEndpointsAsync(describeRequest);
            mediaConvertEndpoint = describeResponse.Endpoints[0].Url;
        }

        // Since we have a service URL for MediaConvert, we do not need to
        // set RegionEndpoint. If we did, the ServiceURL would be overwritten.
        AmazonMediaConvertConfig mcConfig = new AmazonMediaConvertConfig
        {
            ServiceURL = mediaConvertEndpoint,
        };

        AmazonMediaConvertClient mcClient = new AmazonMediaConvertClient("AccessKey", "AccessSecret", mcConfig);
        CreateJobRequest createJobRequest = new CreateJobRequest();

        // Role the MediaConvert service assumes to read/write your S3 buckets.
        createJobRequest.Role = mediaConvertRole;
        createJobRequest.UserMetadata.Add("Customer", "Amazon");

        #region Create job settings
        JobSettings jobSettings = new JobSettings();
        jobSettings.AdAvailOffset = 0;
        // EMBEDDED: read timecodes carried inside the input stream itself.
        jobSettings.TimecodeConfig = new TimecodeConfig();
        jobSettings.TimecodeConfig.Source = TimecodeSource.EMBEDDED;
        createJobRequest.Settings = jobSettings;

        #region OutputGroup
        // A single File Group output group writing directly to S3.
        OutputGroup ofg = new OutputGroup();
        ofg.Name = "File Group";
        ofg.OutputGroupSettings = new OutputGroupSettings();
        ofg.OutputGroupSettings.Type = OutputGroupType.FILE_GROUP_SETTINGS;
        ofg.OutputGroupSettings.FileGroupSettings = new FileGroupSettings();
        ofg.OutputGroupSettings.FileGroupSettings.Destination = fileOutput;

        Output output = new Output();
        // Appended to the input file name to form the output object key.
        output.NameModifier = "_1";

        #region VideoDescription
        // H.264 video encode settings: 30 fps progressive CBR at 2 Mbit/s,
        // Main profile, square pixels.
        VideoDescription vdes = new VideoDescription();
        output.VideoDescription = vdes;
        vdes.ScalingBehavior = ScalingBehavior.DEFAULT;
        vdes.TimecodeInsertion = VideoTimecodeInsertion.DISABLED;
        vdes.AntiAlias = AntiAlias.ENABLED;
        vdes.Sharpness = 50;
        vdes.AfdSignaling = AfdSignaling.NONE;
        vdes.DropFrameTimecode = DropFrameTimecode.ENABLED;
        vdes.RespondToAfd = RespondToAfd.NONE;
        vdes.ColorMetadata = ColorMetadata.INSERT;
        vdes.CodecSettings = new VideoCodecSettings();
        vdes.CodecSettings.Codec = VideoCodec.H_264;
        H264Settings h264 = new H264Settings();
        h264.InterlaceMode = H264InterlaceMode.PROGRESSIVE;
        h264.NumberReferenceFrames = 3;
        h264.Syntax = H264Syntax.DEFAULT;
        h264.Softness = 0;
        h264.GopClosedCadence = 1;
        h264.GopSize = 90;
        h264.Slices = 1;
        h264.GopBReference = H264GopBReference.DISABLED;
        h264.SlowPal = H264SlowPal.DISABLED;
        h264.SpatialAdaptiveQuantization = H264SpatialAdaptiveQuantization.ENABLED;
        h264.TemporalAdaptiveQuantization = H264TemporalAdaptiveQuantization.ENABLED;
        h264.FlickerAdaptiveQuantization = H264FlickerAdaptiveQuantization.DISABLED;
        h264.EntropyEncoding = H264EntropyEncoding.CABAC;
        h264.Bitrate = 2000000;
        h264.FramerateControl = H264FramerateControl.SPECIFIED;
        h264.RateControlMode = H264RateControlMode.CBR;
        h264.CodecProfile = H264CodecProfile.MAIN;
        h264.Telecine = H264Telecine.NONE;
        h264.MinIInterval = 0;
        h264.AdaptiveQuantization = H264AdaptiveQuantization.HIGH;
        h264.CodecLevel = H264CodecLevel.AUTO;
        h264.FieldEncoding = H264FieldEncoding.PAFF;
        h264.SceneChangeDetect = H264SceneChangeDetect.ENABLED;
        h264.QualityTuningLevel = H264QualityTuningLevel.SINGLE_PASS;
        h264.FramerateConversionAlgorithm = H264FramerateConversionAlgorithm.DUPLICATE_DROP;
        h264.UnregisteredSeiTimecode = H264UnregisteredSeiTimecode.DISABLED;
        h264.GopSizeUnits = H264GopSizeUnits.FRAMES;
        h264.ParControl = H264ParControl.SPECIFIED;
        h264.NumberBFramesBetweenReferenceFrames = 2;
        h264.RepeatPps = H264RepeatPps.DISABLED;
        h264.FramerateNumerator = 30;
        h264.FramerateDenominator = 1;
        h264.ParNumerator = 1;
        h264.ParDenominator = 1;
        output.VideoDescription.CodecSettings.H264Settings = h264;
        #endregion VideoDescription

        #region AudioDescription
        // AAC-LC stereo at 48 kHz / 64 kbit/s.
        AudioDescription ades = new AudioDescription();
        ades.LanguageCodeControl = AudioLanguageCodeControl.FOLLOW_INPUT;
        // This name matches the selector registered on the Input below.
        ades.AudioSourceName = "Audio Selector 1";
        ades.CodecSettings = new AudioCodecSettings();
        ades.CodecSettings.Codec = AudioCodec.AAC;
        AacSettings aac = new AacSettings();
        aac.AudioDescriptionBroadcasterMix = AacAudioDescriptionBroadcasterMix.NORMAL;
        aac.RateControlMode = AacRateControlMode.CBR;
        aac.CodecProfile = AacCodecProfile.LC;
        aac.CodingMode = AacCodingMode.CODING_MODE_2_0;
        aac.RawFormat = AacRawFormat.NONE;
        aac.SampleRate = 48000;
        aac.Specification = AacSpecification.MPEG4;
        aac.Bitrate = 64000;
        ades.CodecSettings.AacSettings = aac;
        output.AudioDescriptions.Add(ades);
        #endregion AudioDescription

        #region Mp4 Container
        // PROGRESSIVE_DOWNLOAD places the moov atom at the front so the
        // file can start playing before it is fully downloaded.
        output.ContainerSettings = new ContainerSettings();
        output.ContainerSettings.Container = ContainerType.MP4;
        Mp4Settings mp4 = new Mp4Settings();
        mp4.CslgAtom = Mp4CslgAtom.INCLUDE;
        mp4.FreeSpaceBox = Mp4FreeSpaceBox.EXCLUDE;
        mp4.MoovPlacement = Mp4MoovPlacement.PROGRESSIVE_DOWNLOAD;
        output.ContainerSettings.Mp4Settings = mp4;
        #endregion Mp4 Container

        ofg.Outputs.Add(output);
        createJobRequest.Settings.OutputGroups.Add(ofg);
        #endregion OutputGroup

        #region Input
        Input input = new Input();

        // Clip the first five seconds of the input (timecodes are
        // HH:MM:SS:FF, interpreted against the ZEROBASED source below).
        InputClipping ip = new InputClipping();
        ip.StartTimecode = "00:00:00:00";
        ip.EndTimecode = "00:00:05:00";

        input.FilterEnable = InputFilterEnable.AUTO;
        input.PsiControl = InputPsiControl.USE_PSI;
        input.FilterStrength = 0;
        input.DeblockFilter = InputDeblockFilter.DISABLED;
        input.DenoiseFilter = InputDenoiseFilter.DISABLED;
        input.TimecodeSource = InputTimecodeSource.ZEROBASED;
        input.InputClippings.Add(ip);
        input.FileInput = fileInput;

        // Select audio track 1; the key must match AudioSourceName above.
        AudioSelector audsel = new AudioSelector();
        audsel.Offset = 0;
        audsel.DefaultSelection = AudioDefaultSelection.NOT_DEFAULT;
        audsel.ProgramSelection = 1;
        audsel.SelectorType = AudioSelectorType.TRACK;
        audsel.Tracks.Add(1);
        input.AudioSelectors.Add("Audio Selector 1", audsel);

        input.VideoSelector = new VideoSelector();
        input.VideoSelector.ColorSpace = ColorSpace.FOLLOW;

        createJobRequest.Settings.Inputs.Add(input);
        #endregion Input
        #endregion Create job settings

        try
        {
            CreateJobResponse createJobResponse = await mcClient.CreateJobAsync(createJobRequest);
            Console.WriteLine("Job Id: {0}", createJobResponse.Job.Id);
        }
        catch (BadRequestException bre)
        {
            // If the endpoint was bad, the exception message contains the
            // correct customer-specific endpoint; extract it.
            if (bre.Message.StartsWith("You must use the customer-"))
            {
                mediaConvertEndpoint = bre.Message.Split('\'')[1];
                // NOTE(review): the corrected endpoint is captured but the
                // request is not actually retried here; a retry with a new
                // client built from this endpoint still needs to be added.
            }
        }
    }

    static void Main(string[] args)
    {
        // Blocking on the task is acceptable only at a console entry point.
        // The previous Task.Run(...) wrapper added nothing (MainAsync is
        // already asynchronous) and has been removed.
        MainAsync().GetAwaiter().GetResult();
    }
}

}

我想知道几点:

  1. 是否必须创建 VideoDescription 对象和 AudioDescription 对象,因为我只想执行剪辑操作

     InputClipping ip = new InputClipping();
     ip.StartTimecode= "00:00:00:00";
     ip.EndTimecode= "00:00:05:00";
    

2. CreateJobResponse createJobResponse =await mcClient.CreateJobAsync(createJobRequest); 如何检查我的工作流程是否完成

  3. 如果作业过程完成,作为回报,我如何获取 S3 存储桶新输出文件创建的 URL,因为我想将该 URL 保存到我的数据库中。
4

1 回答 1

0

对于问题 1:根据您的工作流程,您输出的对象必须包含以下描述组合:

  • VideoDescription 和 AudioDescription(视频与音频混合)
  • 仅 VideoDescription(仅视频)
  • 仅 AudioDescription(仅音频)

这将确保您的输出只有视频/视频和音频/音频。

MediaConvert 将对您定义的剪辑区域中的输入进行编码。该服务不会将视频或音频传递到输出(有时在视频社区中称为 transmuxing)。将 MediaConvert 的输出视为一个全新的文件。

问题 2:我建议使用 CloudWatch Events 来监控作业进度。请参阅以下文档: https://docs.aws.amazon.com/mediaconvert/latest/ug/how-mediaconvert-jobs-progress.html
https://docs.aws.amazon.com/mediaconvert/latest/ug/cloudwatch_events.html

问题 3:请参阅我在 MediaConvert 中完成工作后如何检索编码文件和路径列表的帖子?

您可以通过收集 COMPLETE CloudWatch 事件来获取此信息。

于 2021-05-05T16:37:13.723 回答