![](/img/trans.png)
[英]AWS Elemental Media Converter How To Create Job Thumbnail using .Net C#
[英]AWS Elemental MediaConvert CreateJob Example Using the AWS SDK for .NET
我正在嘗試更改文件輸入的Input Clipping StartTimecode 和 end timecode ,並將剪輯的視頻保存到S3 存儲桶中的文件 output Destination
目前,我可以使用以下代碼執行操作:
using System;
using System.Threading.Tasks;
using Amazon.MediaConvert;
using Amazon.MediaConvert.Model;
namespace MediaConvertNET
{
class Program
{
    // Placeholder credentials used throughout the sample.
    // NOTE(review): hard-coded credentials are a security risk — prefer the
    // default AWS credential chain (environment variables, shared credentials
    // file, or an IAM role) by using the parameterless client constructors.
    const string AccessKey = "AccessKey";
    const string AccessSecret = "AccessSecret";

    /// <summary>
    /// Discovers the customer-specific MediaConvert endpoint, builds a job that
    /// clips the first five seconds of the input, and submits it. The output is
    /// a single H.264/AAC MP4 written to the S3 file-group destination.
    /// </summary>
    static async Task MainAsync()
    {
        String mediaConvertRole = "Your AWS Elemental MediaConvert role ARN";
        String fileInput = "s3://yourinputfile";
        String fileOutput = "s3://youroutputdestination";

        // MediaConvert requires an account-specific endpoint; look it up first.
        string mediaConvertEndpoint = await GetMediaConvertEndpointAsync();

        CreateJobRequest createJobRequest =
            BuildCreateJobRequest(mediaConvertRole, fileInput, fileOutput);

        try
        {
            CreateJobResponse createJobResponse =
                await SubmitJobAsync(mediaConvertEndpoint, createJobRequest);
            Console.WriteLine("Job Id: {0}", createJobResponse.Job.Id);
        }
        catch (BadRequestException bre)
        {
            // If the endpoint was bad, the exception message contains the
            // correct customer-specific endpoint in single quotes. Extract it
            // and retry once (the original sample extracted it but never
            // actually retried).
            if (bre.Message.StartsWith("You must use the customer-"))
            {
                mediaConvertEndpoint = bre.Message.Split('\'')[1];
                CreateJobResponse retryResponse =
                    await SubmitJobAsync(mediaConvertEndpoint, createJobRequest);
                Console.WriteLine("Job Id: {0}", retryResponse.Job.Id);
            }
            else
            {
                throw; // Unrelated bad request: let it surface to the caller.
            }
        }
    }

    // Sends the CreateJob request to the given service endpoint.
    // Since we supply a ServiceURL we must NOT also set RegionEndpoint —
    // doing so would overwrite the ServiceURL.
    static async Task<CreateJobResponse> SubmitJobAsync(string endpoint, CreateJobRequest request)
    {
        AmazonMediaConvertConfig mcConfig = new AmazonMediaConvertConfig
        {
            ServiceURL = endpoint,
        };
        using (AmazonMediaConvertClient mcClient =
            new AmazonMediaConvertClient(AccessKey, AccessSecret, mcConfig))
        {
            return await mcClient.CreateJobAsync(request);
        }
    }

    // Obtains the account-specific MediaConvert endpoint via DescribeEndpoints.
    static async Task<string> GetMediaConvertEndpointAsync()
    {
        using (AmazonMediaConvertClient client =
            new AmazonMediaConvertClient(AccessKey, AccessSecret, Amazon.RegionEndpoint.USWest1))
        {
            DescribeEndpointsResponse describeResponse =
                await client.DescribeEndpointsAsync(new DescribeEndpointsRequest());
            return describeResponse.Endpoints[0].Url;
        }
    }

    // Assembles the full CreateJob request: embedded-timecode config plus one
    // file-group output group and one clipped input.
    static CreateJobRequest BuildCreateJobRequest(string mediaConvertRole, string fileInput, string fileOutput)
    {
        CreateJobRequest createJobRequest = new CreateJobRequest();
        createJobRequest.Role = mediaConvertRole;
        createJobRequest.UserMetadata.Add("Customer", "Amazon");

        JobSettings jobSettings = new JobSettings();
        jobSettings.AdAvailOffset = 0;
        jobSettings.TimecodeConfig = new TimecodeConfig();
        jobSettings.TimecodeConfig.Source = TimecodeSource.EMBEDDED;
        createJobRequest.Settings = jobSettings;

        createJobRequest.Settings.OutputGroups.Add(BuildOutputGroup(fileOutput));
        createJobRequest.Settings.Inputs.Add(BuildInput(fileInput));
        return createJobRequest;
    }

    // One FILE_GROUP output group writing a single H.264/AAC MP4 to S3.
    static OutputGroup BuildOutputGroup(string fileOutput)
    {
        OutputGroup ofg = new OutputGroup();
        ofg.Name = "File Group";
        ofg.OutputGroupSettings = new OutputGroupSettings();
        ofg.OutputGroupSettings.Type = OutputGroupType.FILE_GROUP_SETTINGS;
        ofg.OutputGroupSettings.FileGroupSettings = new FileGroupSettings();
        ofg.OutputGroupSettings.FileGroupSettings.Destination = fileOutput;

        Output output = new Output();
        output.NameModifier = "_1";
        output.VideoDescription = BuildVideoDescription();
        output.AudioDescriptions.Add(BuildAudioDescription());

        // MP4 container tuned for progressive download (moov atom up front).
        output.ContainerSettings = new ContainerSettings();
        output.ContainerSettings.Container = ContainerType.MP4;
        Mp4Settings mp4 = new Mp4Settings();
        mp4.CslgAtom = Mp4CslgAtom.INCLUDE;
        mp4.FreeSpaceBox = Mp4FreeSpaceBox.EXCLUDE;
        mp4.MoovPlacement = Mp4MoovPlacement.PROGRESSIVE_DOWNLOAD;
        output.ContainerSettings.Mp4Settings = mp4;

        ofg.Outputs.Add(output);
        return ofg;
    }

    // 2 Mbps CBR H.264 main-profile progressive video at 30 fps, square pixels.
    static VideoDescription BuildVideoDescription()
    {
        VideoDescription vdes = new VideoDescription();
        vdes.ScalingBehavior = ScalingBehavior.DEFAULT;
        vdes.TimecodeInsertion = VideoTimecodeInsertion.DISABLED;
        vdes.AntiAlias = AntiAlias.ENABLED;
        vdes.Sharpness = 50;
        vdes.AfdSignaling = AfdSignaling.NONE;
        vdes.DropFrameTimecode = DropFrameTimecode.ENABLED;
        vdes.RespondToAfd = RespondToAfd.NONE;
        vdes.ColorMetadata = ColorMetadata.INSERT;

        vdes.CodecSettings = new VideoCodecSettings();
        vdes.CodecSettings.Codec = VideoCodec.H_264;
        H264Settings h264 = new H264Settings();
        h264.InterlaceMode = H264InterlaceMode.PROGRESSIVE;
        h264.NumberReferenceFrames = 3;
        h264.Syntax = H264Syntax.DEFAULT;
        h264.Softness = 0;
        h264.GopClosedCadence = 1;
        h264.GopSize = 90;
        h264.Slices = 1;
        h264.GopBReference = H264GopBReference.DISABLED;
        h264.SlowPal = H264SlowPal.DISABLED;
        h264.SpatialAdaptiveQuantization = H264SpatialAdaptiveQuantization.ENABLED;
        h264.TemporalAdaptiveQuantization = H264TemporalAdaptiveQuantization.ENABLED;
        h264.FlickerAdaptiveQuantization = H264FlickerAdaptiveQuantization.DISABLED;
        h264.EntropyEncoding = H264EntropyEncoding.CABAC;
        h264.Bitrate = 2000000;
        h264.FramerateControl = H264FramerateControl.SPECIFIED;
        h264.RateControlMode = H264RateControlMode.CBR;
        h264.CodecProfile = H264CodecProfile.MAIN;
        h264.Telecine = H264Telecine.NONE;
        h264.MinIInterval = 0;
        h264.AdaptiveQuantization = H264AdaptiveQuantization.HIGH;
        h264.CodecLevel = H264CodecLevel.AUTO;
        h264.FieldEncoding = H264FieldEncoding.PAFF;
        h264.SceneChangeDetect = H264SceneChangeDetect.ENABLED;
        h264.QualityTuningLevel = H264QualityTuningLevel.SINGLE_PASS;
        h264.FramerateConversionAlgorithm = H264FramerateConversionAlgorithm.DUPLICATE_DROP;
        h264.UnregisteredSeiTimecode = H264UnregisteredSeiTimecode.DISABLED;
        h264.GopSizeUnits = H264GopSizeUnits.FRAMES;
        h264.ParControl = H264ParControl.SPECIFIED;
        h264.NumberBFramesBetweenReferenceFrames = 2;
        h264.RepeatPps = H264RepeatPps.DISABLED;
        h264.FramerateNumerator = 30;
        h264.FramerateDenominator = 1;
        h264.ParNumerator = 1;
        h264.ParDenominator = 1;
        vdes.CodecSettings.H264Settings = h264;
        return vdes;
    }

    // 64 kbps stereo AAC-LC at 48 kHz, fed by "Audio Selector 1" on the input.
    static AudioDescription BuildAudioDescription()
    {
        AudioDescription ades = new AudioDescription();
        ades.LanguageCodeControl = AudioLanguageCodeControl.FOLLOW_INPUT;
        // This name matches the selector declared on the Input below.
        ades.AudioSourceName = "Audio Selector 1";
        ades.CodecSettings = new AudioCodecSettings();
        ades.CodecSettings.Codec = AudioCodec.AAC;
        AacSettings aac = new AacSettings();
        aac.AudioDescriptionBroadcasterMix = AacAudioDescriptionBroadcasterMix.NORMAL;
        aac.RateControlMode = AacRateControlMode.CBR;
        aac.CodecProfile = AacCodecProfile.LC;
        aac.CodingMode = AacCodingMode.CODING_MODE_2_0;
        aac.RawFormat = AacRawFormat.NONE;
        aac.SampleRate = 48000;
        aac.Specification = AacSpecification.MPEG4;
        aac.Bitrate = 64000;
        ades.CodecSettings.AacSettings = aac;
        return ades;
    }

    // The S3 file input, clipped to 00:00:00:00 - 00:00:05:00 (zero-based
    // timecode), with one track-based audio selector and a follow-input
    // video selector.
    static Input BuildInput(string fileInput)
    {
        Input input = new Input();
        InputClipping ip = new InputClipping();
        ip.StartTimecode = "00:00:00:00";
        ip.EndTimecode = "00:00:05:00";
        input.FilterEnable = InputFilterEnable.AUTO;
        input.PsiControl = InputPsiControl.USE_PSI;
        input.FilterStrength = 0;
        input.DeblockFilter = InputDeblockFilter.DISABLED;
        input.DenoiseFilter = InputDenoiseFilter.DISABLED;
        input.TimecodeSource = InputTimecodeSource.ZEROBASED;
        input.InputClippings.Add(ip);
        input.FileInput = fileInput;

        AudioSelector audsel = new AudioSelector();
        audsel.Offset = 0;
        audsel.DefaultSelection = AudioDefaultSelection.NOT_DEFAULT;
        audsel.ProgramSelection = 1;
        audsel.SelectorType = AudioSelectorType.TRACK;
        audsel.Tracks.Add(1);
        input.AudioSelectors.Add("Audio Selector 1", audsel);

        input.VideoSelector = new VideoSelector();
        input.VideoSelector.ColorSpace = ColorSpace.FOLLOW;
        return input;
    }

    // Console entry point. Blocking on the task directly is fine here (no
    // synchronization context in a console app); the original wrapped the
    // call in a needless Task.Run.
    static void Main(string[] args)
    {
        MainAsync().GetAwaiter().GetResult();
    }
}
}
我想知道幾點:
是否必須創建 VideoDescription Object 和 AudioDescription 對象,因為我只想執行剪輯操作
InputClipping ip = new InputClipping(); ip.StartTimecode= "00:00:00:00"; ip.EndTimecode= "00:00:05:00";
2. CreateJobResponse createJobResponse =await mcClient.CreateJobAsync(createJobRequest); 如何檢查我的工作流程是否完成
對於問題 1:根據您的工作流程,您的 output object 必須包含以下描述組合:
這將確保您的 output 僅具有視頻/視頻和音頻/音頻。
MediaConvert 將對您定義的剪輯區域中的輸入進行重新編碼。 該服務不會將視頻或音頻原樣直接透傳到 output(這種直接透傳在視頻社區中有時稱為 transmuxing)。 請將 MediaConvert 產生的 output 視為全新的文件。
問題 2:我建議使用 CloudWatch Events 來監控工作進度。 請參閱以下文檔: https://docs.aws.amazon.com/mediaconvert/latest/ug/how-mediaconvert-jobs-progress.html
https://docs.aws.amazon.com/mediaconvert/latest/ug/cloudwatch_events.html
問題 3:請參閱我的另一篇帖子:「在 MediaConvert 中完成工作後,如何檢索編碼文件和路徑列表?」
您可以通過收集 COMPLETE CloudWatch 事件來獲取此信息。
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.