// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT. package elastictranscoder import ( "fmt" "github.com/aws/aws-sdk-go-v2/aws" "github.com/aws/aws-sdk-go-v2/internal/awsutil" "github.com/aws/aws-sdk-go-v2/private/protocol" ) var _ aws.Config var _ = awsutil.Prettify // The file to be used as album art. There can be multiple artworks associated // with an audio file, to a maximum of 20. // // To remove artwork or leave the artwork empty, you can either set Artwork // to null, or set the Merge Policy to "Replace" and use an empty Artwork array. // // To pass through existing artwork unchanged, set the Merge Policy to "Prepend", // "Append", or "Fallback", and use an empty Artwork array. type Artwork struct { _ struct{} `type:"structure"` // The format of album art, if any. Valid formats are .jpg and .png. AlbumArtFormat *string `type:"string"` // The encryption settings, if any, that you want Elastic Transcoder to apply // to your artwork. Encryption *Encryption `type:"structure"` // The name of the file to be used as album art. To determine which Amazon S3 // bucket contains the specified file, Elastic Transcoder checks the pipeline // specified by PipelineId; the InputBucket object in that pipeline identifies // the bucket. // // If the file name includes a prefix, for example, cooking/pie.jpg, include // the prefix in the key. If the file isn't in the specified bucket, Elastic // Transcoder returns an error. InputKey *string `min:"1" type:"string"` // The maximum height of the output album art in pixels. If you specify auto, // Elastic Transcoder uses 600 as the default value. If you specify a numeric // value, enter an even integer between 32 and 3072, inclusive. MaxHeight *string `type:"string"` // The maximum width of the output album art in pixels. If you specify auto, // Elastic Transcoder uses 600 as the default value. If you specify a numeric // value, enter an even integer between 32 and 4096, inclusive. MaxWidth *string `type:"string"` // When you set PaddingPolicy to Pad, Elastic Transcoder may add white bars // to the top and bottom and/or left and right sides of the output album art // to make the total size of the output art match the values that you specified // for MaxWidth and MaxHeight. PaddingPolicy *string `type:"string"` // Specify one of the following values to control scaling of the output album // art: // // * Fit: Elastic Transcoder scales the output art so it matches the value // that you specified in either MaxWidth or MaxHeight without exceeding the // other value. // // * Fill: Elastic Transcoder scales the output art so it matches the value // that you specified in either MaxWidth or MaxHeight and matches or exceeds // the other value. Elastic Transcoder centers the output art and then crops // it in the dimension (if any) that exceeds the maximum value. // // * Stretch: Elastic Transcoder stretches the output art to match the values // that you specified for MaxWidth and MaxHeight. If the relative proportions // of the input art and the output art are different, the output art will // be distorted. // // * Keep: Elastic Transcoder does not scale the output art. If either dimension // of the input art exceeds the values that you specified for MaxWidth and // MaxHeight, Elastic Transcoder crops the output art. // // * ShrinkToFit: Elastic Transcoder scales the output art down so that its // dimensions match the values that you specified for at least one of MaxWidth // and MaxHeight without exceeding either value. 
If you specify this option, // Elastic Transcoder does not scale the art up. // // * ShrinkToFill Elastic Transcoder scales the output art down so that its // dimensions match the values that you specified for at least one of MaxWidth // and MaxHeight without dropping below either value. If you specify this // option, Elastic Transcoder does not scale the art up. SizingPolicy *string `type:"string"` } // String returns the string representation func (s Artwork) String() string { return awsutil.Prettify(s) } // Validate inspects the fields of the type to determine if they are valid. func (s *Artwork) Validate() error { invalidParams := aws.ErrInvalidParams{Context: "Artwork"} if s.InputKey != nil && len(*s.InputKey) < 1 { invalidParams.Add(aws.NewErrParamMinLen("InputKey", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s Artwork) MarshalFields(e protocol.FieldEncoder) error { if s.AlbumArtFormat != nil { v := *s.AlbumArtFormat metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "AlbumArtFormat", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Encryption != nil { v := s.Encryption metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "Encryption", v, metadata) } if s.InputKey != nil { v := *s.InputKey metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "InputKey", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.MaxHeight != nil { v := *s.MaxHeight metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "MaxHeight", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.MaxWidth != nil { v := *s.MaxWidth metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "MaxWidth", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.PaddingPolicy != nil { v := *s.PaddingPolicy metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "PaddingPolicy", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.SizingPolicy != nil { v := *s.SizingPolicy metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "SizingPolicy", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // Options associated with your audio codec. type AudioCodecOptions struct { _ struct{} `type:"structure"` // You can only choose an audio bit depth when you specify flac or pcm for the // value of Audio:Codec. // // The bit depth of a sample is how many bits of information are included in // the audio samples. The higher the bit depth, the better the audio, but the // larger the file. // // Valid values are 16 and 24. // // The most common bit depth is 24. BitDepth *string `type:"string"` // You can only choose an audio bit order when you specify pcm for the value // of Audio:Codec. // // The order the bits of a PCM sample are stored in. // // The supported value is LittleEndian. BitOrder *string `type:"string"` // You can only choose an audio profile when you specify AAC for the value of // Audio:Codec. // // Specify the AAC profile for the output file. Elastic Transcoder supports // the following profiles: // // * auto: If you specify auto, Elastic Transcoder selects the profile based // on the bit rate selected for the output file. // // * AAC-LC: The most common AAC profile. Use for bit rates larger than 64 // kbps. // // * HE-AAC: Not supported on some older players and devices. 
Use for bit // rates between 40 and 80 kbps. // // * HE-AACv2: Not supported on some players and devices. Use for bit rates // less than 48 kbps. // // All outputs in a Smooth playlist must have the same value for Profile. // // If you created any presets before AAC profiles were added, Elastic Transcoder // automatically updated your presets to use AAC-LC. You can change the value // as required. Profile *string `type:"string"` // You can only choose whether an audio sample is signed when you specify pcm // for the value of Audio:Codec. // // Whether audio samples are represented with negative and positive numbers // (signed) or only positive numbers (unsigned). // // The supported value is Signed. Signed *string `type:"string"` } // String returns the string representation func (s AudioCodecOptions) String() string { return awsutil.Prettify(s) } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s AudioCodecOptions) MarshalFields(e protocol.FieldEncoder) error { if s.BitDepth != nil { v := *s.BitDepth metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "BitDepth", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.BitOrder != nil { v := *s.BitOrder metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "BitOrder", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Profile != nil { v := *s.Profile metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Profile", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Signed != nil { v := *s.Signed metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Signed", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // Parameters required for transcoding audio. type AudioParameters struct { _ struct{} `type:"structure"` // The method of organizing audio channels and tracks. Use Audio:Channels to // specify the number of channels in your output, and Audio:AudioPackingMode // to specify the number of tracks and their relation to the channels. If you // do not specify an Audio:AudioPackingMode, Elastic Transcoder uses SingleTrack. // // The following values are valid: // // SingleTrack, OneChannelPerTrack, and OneChannelPerTrackWithMosTo8Tracks // // When you specify SingleTrack, Elastic Transcoder creates a single track for // your output. The track can have up to eight channels. Use SingleTrack for // all non-mxf containers. // // The outputs of SingleTrack for a specific channel value and inputs are as // follows: // // * 0 channels with any input: Audio omitted from the output // // * 1, 2, or auto channels with no audio input: Audio omitted from the output // // * 1 channel with any input with audio: One track with one channel, downmixed // if necessary // // * 2 channels with one track with one channel: One track with two identical // channels // // * 2 or auto channels with two tracks with one channel each: One track // with two channels // // * 2 or auto channels with one track with two channels: One track with // two channels // // * 2 channels with one track with multiple channels: One track with two // channels // // * auto channels with one track with one channel: One track with one channel // // * auto channels with one track with multiple channels: One track with // multiple channels // // When you specify OneChannelPerTrack, Elastic Transcoder creates a new track // for every channel in your output. 
Your output can have up to eight single-channel // tracks. // // The outputs of OneChannelPerTrack for a specific channel value and inputs // are as follows: // // * 0 channels with any input: Audio omitted from the output // // * 1, 2, or auto channels with no audio input: Audio omitted from the output // // * 1 channel with any input with audio: One track with one channel, downmixed // if necessary // // * 2 channels with one track with one channel: Two tracks with one identical // channel each // // * 2 or auto channels with two tracks with one channel each: Two tracks // with one channel each // // * 2 or auto channels with one track with two channels: Two tracks with // one channel each // // * 2 channels with one track with multiple channels: Two tracks with one // channel each // // * auto channels with one track with one channel: One track with one channel // // * auto channels with one track with multiple channels: Up to eight tracks // with one channel each // // When you specify OneChannelPerTrackWithMosTo8Tracks, Elastic Transcoder creates // eight single-channel tracks for your output. All tracks that do not contain // audio data from an input channel are MOS, or Mit Out Sound, tracks. // // The outputs of OneChannelPerTrackWithMosTo8Tracks for a specific channel // value and inputs are as follows: // // * 0 channels with any input: Audio omitted from the output // // * 1, 2, or auto channels with no audio input: Audio omitted from the output // // * 1 channel with any input with audio: One track with one channel, downmixed // if necessary, plus six MOS tracks // // * 2 channels with one track with one channel: Two tracks with one identical // channel each, plus six MOS tracks // // * 2 or auto channels with two tracks with one channel each: Two tracks // with one channel each, plus six MOS tracks // // * 2 or auto channels with one track with two channels: Two tracks with // one channel each, plus six MOS tracks // // * 2 channels with one track with multiple channels: Two tracks with one // channel each, plus six MOS tracks // // * auto channels with one track with one channel: One track with one channel, // plus seven MOS tracks // // * auto channels with one track with multiple channels: Up to eight tracks // with one channel each, plus MOS tracks until there are eight tracks in // all AudioPackingMode *string `type:"string"` // The bit rate of the audio stream in the output file, in kilobits/second. // Enter an integer between 64 and 320, inclusive. BitRate *string `type:"string"` // The number of audio channels in the output file. The following values are // valid: // // auto, 0, 1, 2 // // One channel carries the information played by a single speaker. For example, // a stereo track with two channels sends one channel to the left speaker, and // the other channel to the right speaker. The output channels are organized // into tracks. If you want Elastic Transcoder to automatically detect the number // of audio channels in the input file and use that value for the output file, // select auto. // // The output of a specific channel value and inputs are as follows: // // * auto channel specified, with any input: Pass through up to eight input // channels. // // * 0 channels specified, with any input: Audio omitted from the output. // // * 1 channel specified, with at least one input channel: Mono sound. // // * 2 channels specified, with any input: Two identical mono channels or // stereo. For more information about tracks, see Audio:AudioPackingMode. 
// // For more information about how Elastic Transcoder organizes channels and // tracks, see Audio:AudioPackingMode. Channels *string `type:"string"` // The audio codec for the output file. Valid values include aac, flac, mp2, // mp3, pcm, and vorbis. Codec *string `type:"string"` // If you specified AAC for Audio:Codec, this is the AAC compression profile // to use. Valid values include: // // auto, AAC-LC, HE-AAC, HE-AACv2 // // If you specify auto, Elastic Transcoder chooses a profile based on the bit // rate of the output file. CodecOptions *AudioCodecOptions `type:"structure"` // The sample rate of the audio stream in the output file, in Hertz. Valid values // include: // // auto, 22050, 32000, 44100, 48000, 96000 // // If you specify auto, Elastic Transcoder automatically detects the sample // rate. SampleRate *string `type:"string"` } // String returns the string representation func (s AudioParameters) String() string { return awsutil.Prettify(s) } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s AudioParameters) MarshalFields(e protocol.FieldEncoder) error { if s.AudioPackingMode != nil { v := *s.AudioPackingMode metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "AudioPackingMode", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.BitRate != nil { v := *s.BitRate metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "BitRate", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Channels != nil { v := *s.Channels metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Channels", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Codec != nil { v := *s.Codec metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Codec", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.CodecOptions != nil { v := s.CodecOptions metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "CodecOptions", v, metadata) } if s.SampleRate != nil { v := *s.SampleRate metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "SampleRate", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // The file format of the output captions. If you leave this value blank, Elastic // Transcoder returns an error. type CaptionFormat struct { _ struct{} `type:"structure"` // The encryption settings, if any, that you want Elastic Transcoder to apply // to your caption formats. Encryption *Encryption `type:"structure"` // The format you specify determines whether Elastic Transcoder generates an // embedded or sidecar caption for this output. // // * Valid Embedded Caption Formats: for FLAC: None For MP3: None For MP4: // mov-text For MPEG-TS: None For ogg: None For webm: None // // * Valid Sidecar Caption Formats: Elastic Transcoder supports dfxp (first // div element only), scc, srt, and webvtt. If you want ttml or smpte-tt // compatible captions, specify dfxp as your output format. For FMP4: dfxp // Non-FMP4 outputs: All sidecar types fmp4 captions have an extension of // .ismt Format *string `type:"string"` // The prefix for caption filenames, in the form description-{language}, where: // // * description is a description of the video. // // * {language} is a literal value that Elastic Transcoder replaces with // the two- or three-letter code for the language of the caption in the output // file names. 
	//
	// If you don't include {language} in the file name pattern, Elastic Transcoder
	// automatically appends "{language}" to the value that you specify for the
	// description. In addition, Elastic Transcoder automatically appends the count
	// to the end of the segment files.
	//
	// For example, suppose you're transcoding into srt format. When you enter "Sydney-{language}-sunrise",
	// and the language of the captions is English (en), the name of the first caption
	// file is Sydney-en-sunrise00000.srt.
	Pattern *string `type:"string"`
}

// String returns the string representation
func (s CaptionFormat) String() string {
	return awsutil.Prettify(s)
}

// MarshalFields encodes the AWS API shape using the passed in protocol encoder.
func (s CaptionFormat) MarshalFields(e protocol.FieldEncoder) error {
	if s.Encryption != nil {
		v := s.Encryption

		metadata := protocol.Metadata{}
		e.SetFields(protocol.BodyTarget, "Encryption", v, metadata)
	}
	if s.Format != nil {
		v := *s.Format

		metadata := protocol.Metadata{}
		e.SetValue(protocol.BodyTarget, "Format", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)
	}
	if s.Pattern != nil {
		v := *s.Pattern

		metadata := protocol.Metadata{}
		e.SetValue(protocol.BodyTarget, "Pattern", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)
	}
	return nil
}

// A source file for the input sidecar captions used during the transcoding
// process.
type CaptionSource struct {
	_ struct{} `type:"structure"`

	// The encryption settings, if any, that Elastic Transcoder needs to decrypt
	// your caption sources, or that you want Elastic Transcoder to apply to your
	// caption sources.
	Encryption *Encryption `type:"structure"`

	// The name of the sidecar caption file that you want Elastic Transcoder to
	// include in the output file.
	Key *string `min:"1" type:"string"`

	// The label of the caption shown in the player when choosing a language. We
	// recommend that you put the caption language name here, in the language of
	// the captions.
	Label *string `min:"1" type:"string"`

	// A string that specifies the language of the caption. If you specified multiple
	// inputs with captions, the caption language must match in order to be included
	// in the output. Specify this as one of:
	//
	//    * 2-character ISO 639-1 code
	//
	//    * 3-character ISO 639-2 code
	//
	// For more information on ISO language codes and language names, see the List
	// of ISO 639-1 codes.
	Language *string `min:"1" type:"string"`

	// For clip generation or captions that do not start at the same time as the
	// associated video file, the TimeOffset tells Elastic Transcoder how much of
	// the video to encode before including captions.
	//
	// Specify the TimeOffset in the form [+-]SS.sss or [+-]HH:mm:SS.ss.
	TimeOffset *string `type:"string"`
}

// String returns the string representation
func (s CaptionSource) String() string {
	return awsutil.Prettify(s)
}

// Validate inspects the fields of the type to determine if they are valid.
func (s *CaptionSource) Validate() error {
	invalidParams := aws.ErrInvalidParams{Context: "CaptionSource"}
	if s.Key != nil && len(*s.Key) < 1 {
		invalidParams.Add(aws.NewErrParamMinLen("Key", 1))
	}
	if s.Label != nil && len(*s.Label) < 1 {
		invalidParams.Add(aws.NewErrParamMinLen("Label", 1))
	}
	if s.Language != nil && len(*s.Language) < 1 {
		invalidParams.Add(aws.NewErrParamMinLen("Language", 1))
	}

	if invalidParams.Len() > 0 {
		return invalidParams
	}
	return nil
}

// MarshalFields encodes the AWS API shape using the passed in protocol encoder.
func (s CaptionSource) MarshalFields(e protocol.FieldEncoder) error { if s.Encryption != nil { v := s.Encryption metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "Encryption", v, metadata) } if s.Key != nil { v := *s.Key metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Key", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Label != nil { v := *s.Label metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Label", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Language != nil { v := *s.Language metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Language", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.TimeOffset != nil { v := *s.TimeOffset metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "TimeOffset", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // The captions to be created, if any. type Captions struct { _ struct{} `type:"structure"` // The array of file formats for the output captions. If you leave this value // blank, Elastic Transcoder returns an error. CaptionFormats []CaptionFormat `type:"list"` // Source files for the input sidecar captions used during the transcoding process. // To omit all sidecar captions, leave CaptionSources blank. CaptionSources []CaptionSource `deprecated:"true" type:"list"` // A policy that determines how Elastic Transcoder handles the existence of // multiple captions. // // * MergeOverride: Elastic Transcoder transcodes both embedded and sidecar // captions into outputs. If captions for a language are embedded in the // input file and also appear in a sidecar file, Elastic Transcoder uses // the sidecar captions and ignores the embedded captions for that language. // // * MergeRetain: Elastic Transcoder transcodes both embedded and sidecar // captions into outputs. If captions for a language are embedded in the // input file and also appear in a sidecar file, Elastic Transcoder uses // the embedded captions and ignores the sidecar captions for that language. // If CaptionSources is empty, Elastic Transcoder omits all sidecar captions // from the output files. // // * Override: Elastic Transcoder transcodes only the sidecar captions that // you specify in CaptionSources. // // MergePolicy cannot be null. MergePolicy *string `deprecated:"true" type:"string"` } // String returns the string representation func (s Captions) String() string { return awsutil.Prettify(s) } // Validate inspects the fields of the type to determine if they are valid. func (s *Captions) Validate() error { invalidParams := aws.ErrInvalidParams{Context: "Captions"} if s.CaptionSources != nil { for i, v := range s.CaptionSources { if err := v.Validate(); err != nil { invalidParams.AddNested(fmt.Sprintf("%s[%v]", "CaptionSources", i), err.(aws.ErrInvalidParams)) } } } if invalidParams.Len() > 0 { return invalidParams } return nil } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. 
func (s Captions) MarshalFields(e protocol.FieldEncoder) error { if s.CaptionFormats != nil { v := s.CaptionFormats metadata := protocol.Metadata{} ls0 := e.List(protocol.BodyTarget, "CaptionFormats", metadata) ls0.Start() for _, v1 := range v { ls0.ListAddFields(v1) } ls0.End() } if s.CaptionSources != nil { v := s.CaptionSources metadata := protocol.Metadata{} ls0 := e.List(protocol.BodyTarget, "CaptionSources", metadata) ls0.Start() for _, v1 := range v { ls0.ListAddFields(v1) } ls0.End() } if s.MergePolicy != nil { v := *s.MergePolicy metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "MergePolicy", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // Settings for one clip in a composition. All jobs in a playlist must have // the same clip settings. type Clip struct { _ struct{} `deprecated:"true" type:"structure"` // Settings that determine when a clip begins and how long it lasts. TimeSpan *TimeSpan `type:"structure"` } // String returns the string representation func (s Clip) String() string { return awsutil.Prettify(s) } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s Clip) MarshalFields(e protocol.FieldEncoder) error { if s.TimeSpan != nil { v := s.TimeSpan metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "TimeSpan", v, metadata) } return nil } // The CreateJobOutput structure. type CreateJobOutputResult struct { _ struct{} `type:"structure"` // Information about the album art that you want Elastic Transcoder to add to // the file during transcoding. You can specify up to twenty album artworks // for each output. Settings for each artwork must be defined in the job for // the current output. AlbumArt *JobAlbumArt `type:"structure"` // You can configure Elastic Transcoder to transcode captions, or subtitles, // from one format to another. All captions must be in UTF-8. Elastic Transcoder // supports two types of captions: // // * Embedded: Embedded captions are included in the same file as the audio // and video. Elastic Transcoder supports only one embedded caption per language, // to a maximum of 300 embedded captions per file. Valid input values include: // CEA-608 (EIA-608, first non-empty channel only), CEA-708 (EIA-708, first // non-empty channel only), and mov-text Valid outputs include: mov-text // Elastic Transcoder supports a maximum of one embedded format per output. // // * Sidecar: Sidecar captions are kept in a separate metadata file from // the audio and video data. Sidecar captions require a player that is capable // of understanding the relationship between the video file and the sidecar // file. Elastic Transcoder supports only one sidecar caption per language, // to a maximum of 20 sidecar captions per file. Valid input values include: // dfxp (first div element only), ebu-tt, scc, smpt, srt, ttml (first div // element only), and webvtt Valid outputs include: dfxp (first div element // only), scc, srt, and webvtt. // // If you want ttml or smpte-tt compatible captions, specify dfxp as your output // format. // // Elastic Transcoder does not support OCR (Optical Character Recognition), // does not accept pictures as a valid input for captions, and is not available // for audio-only transcoding. Elastic Transcoder does not preserve text formatting // (for example, italics) during the transcoding process. // // To remove captions or leave the captions empty, set Captions to null. 
	// To pass through existing captions unchanged, set the MergePolicy to MergeRetain,
	// and pass in a null CaptionSources array.
	//
	// For more information on embedded files, see the Subtitles Wikipedia page.
	//
	// For more information on sidecar files, see the Extensible Metadata Platform
	// and Sidecar file Wikipedia pages.
	Captions *Captions `type:"structure"`

	// You can create an output file that contains an excerpt from the input file.
	// This excerpt, called a clip, can come from the beginning, middle, or end
	// of the file. The Composition object contains settings for the clips that
	// make up an output file. For the current release, you can only specify settings
	// for a single clip per output file. The Composition object cannot be null.
	Composition []Clip `deprecated:"true" type:"list"`

	// You can specify encryption settings for any output files that you want to
	// use for a transcoding job. This includes the output file and any watermarks,
	// thumbnails, album art, or captions that you want to use. You must specify
	// encryption settings for each file individually.
	Encryption *Encryption `type:"structure"`

	// The name to assign to the transcoded file. Elastic Transcoder saves the file
	// in the Amazon S3 bucket specified by the OutputBucket object in the pipeline
	// that is specified by the pipeline ID. If a file with the specified name already
	// exists in the output bucket, the job fails.
	Key *string `min:"1" type:"string"`

	// The Id of the preset to use for this job. The preset determines the audio,
	// video, and thumbnail settings that Elastic Transcoder uses for transcoding.
	PresetId *string `type:"string"`

	// The number of degrees clockwise by which you want Elastic Transcoder to rotate
	// the output relative to the input. Enter one of the following values: auto,
	// 0, 90, 180, 270. The value auto generally works only if the file that you're
	// transcoding contains rotation metadata.
	Rotate *string `type:"string"`

	// (Outputs in Fragmented MP4 or MPEG-TS format only.)
	//
	// If you specify a preset in PresetId for which the value of Container is fmp4
	// (Fragmented MP4) or ts (MPEG-TS), SegmentDuration is the target maximum duration
	// of each segment in seconds. For HLSv3 format playlists, each media segment
	// is stored in a separate .ts file. For HLSv4 and Smooth playlists, all media
	// segments for an output are stored in a single file. Each segment is approximately
	// the length of the SegmentDuration, though individual segments might be shorter
	// or longer.
	//
	// The range of valid values is 1 to 60 seconds. If the duration of the video
	// is not evenly divisible by SegmentDuration, the duration of the last segment
	// is the remainder of total length/SegmentDuration.
	//
	// Elastic Transcoder creates an output-specific playlist for each HLS output
	// that you specify in OutputKeys. To add an output to the master playlist
	// for this job, include it in the OutputKeys of the associated playlist.
	SegmentDuration *string `type:"string"`

	// The encryption settings, if any, that you want Elastic Transcoder to apply
	// to your thumbnail.
	ThumbnailEncryption *Encryption `type:"structure"`

	// Whether you want Elastic Transcoder to create thumbnails for your videos
	// and, if so, how you want Elastic Transcoder to name the files.
	//
	// If you don't want Elastic Transcoder to create thumbnails, specify "".
	//
	// If you do want Elastic Transcoder to create thumbnails, specify the information
	// that you want to include in the file name for each thumbnail.
You can specify // the following values in any sequence: // // * {count} (Required): If you want to create thumbnails, you must include // {count} in the ThumbnailPattern object. Wherever you specify {count}, // Elastic Transcoder adds a five-digit sequence number (beginning with 00001) // to thumbnail file names. The number indicates where a given thumbnail // appears in the sequence of thumbnails for a transcoded file. If you specify // a literal value and/or {resolution} but you omit {count}, Elastic Transcoder // returns a validation error and does not create the job. // // * Literal values (Optional): You can specify literal values anywhere in // the ThumbnailPattern object. For example, you can include them as a file // name prefix or as a delimiter between {resolution} and {count}. // // * {resolution} (Optional): If you want Elastic Transcoder to include the // resolution in the file name, include {resolution} in the ThumbnailPattern // object. // // When creating thumbnails, Elastic Transcoder automatically saves the files // in the format (.jpg or .png) that appears in the preset that you specified // in the PresetID value of CreateJobOutput. Elastic Transcoder also appends // the applicable file name extension. ThumbnailPattern *string `type:"string"` // Information about the watermarks that you want Elastic Transcoder to add // to the video during transcoding. You can specify up to four watermarks for // each output. Settings for each watermark must be defined in the preset for // the current output. Watermarks []JobWatermark `type:"list"` } // String returns the string representation func (s CreateJobOutputResult) String() string { return awsutil.Prettify(s) } // Validate inspects the fields of the type to determine if they are valid. func (s *CreateJobOutputResult) Validate() error { invalidParams := aws.ErrInvalidParams{Context: "CreateJobOutputResult"} if s.Key != nil && len(*s.Key) < 1 { invalidParams.Add(aws.NewErrParamMinLen("Key", 1)) } if s.AlbumArt != nil { if err := s.AlbumArt.Validate(); err != nil { invalidParams.AddNested("AlbumArt", err.(aws.ErrInvalidParams)) } } if s.Captions != nil { if err := s.Captions.Validate(); err != nil { invalidParams.AddNested("Captions", err.(aws.ErrInvalidParams)) } } if s.Watermarks != nil { for i, v := range s.Watermarks { if err := v.Validate(); err != nil { invalidParams.AddNested(fmt.Sprintf("%s[%v]", "Watermarks", i), err.(aws.ErrInvalidParams)) } } } if invalidParams.Len() > 0 { return invalidParams } return nil } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. 
func (s CreateJobOutputResult) MarshalFields(e protocol.FieldEncoder) error { if s.AlbumArt != nil { v := s.AlbumArt metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "AlbumArt", v, metadata) } if s.Captions != nil { v := s.Captions metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "Captions", v, metadata) } if s.Composition != nil { v := s.Composition metadata := protocol.Metadata{} ls0 := e.List(protocol.BodyTarget, "Composition", metadata) ls0.Start() for _, v1 := range v { ls0.ListAddFields(v1) } ls0.End() } if s.Encryption != nil { v := s.Encryption metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "Encryption", v, metadata) } if s.Key != nil { v := *s.Key metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Key", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.PresetId != nil { v := *s.PresetId metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "PresetId", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Rotate != nil { v := *s.Rotate metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Rotate", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.SegmentDuration != nil { v := *s.SegmentDuration metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "SegmentDuration", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.ThumbnailEncryption != nil { v := s.ThumbnailEncryption metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "ThumbnailEncryption", v, metadata) } if s.ThumbnailPattern != nil { v := *s.ThumbnailPattern metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "ThumbnailPattern", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Watermarks != nil { v := s.Watermarks metadata := protocol.Metadata{} ls0 := e.List(protocol.BodyTarget, "Watermarks", metadata) ls0.Start() for _, v1 := range v { ls0.ListAddFields(v1) } ls0.End() } return nil } // Information about the master playlist. type CreateJobPlaylist struct { _ struct{} `type:"structure"` // The format of the output playlist. Valid formats include HLSv3, HLSv4, and // Smooth. Format *string `type:"string"` // The HLS content protection settings, if any, that you want Elastic Transcoder // to apply to the output files associated with this playlist. HlsContentProtection *HlsContentProtection `type:"structure"` // The name that you want Elastic Transcoder to assign to the master playlist, // for example, nyc-vacation.m3u8. If the name includes a / character, the section // of the name before the last / must be identical for all Name objects. If // you create more than one master playlist, the values of all Name objects // must be unique. // // Elastic Transcoder automatically appends the relevant file extension to the // file name (.m3u8 for HLSv3 and HLSv4 playlists, and .ism and .ismc for Smooth // playlists). If you include a file extension in Name, the file name will have // two extensions. Name *string `min:"1" type:"string"` // For each output in this job that you want to include in a master playlist, // the value of the Outputs:Key object. 
// // * If your output is not HLS or does not have a segment duration set, the // name of the output file is a concatenation of OutputKeyPrefix and Outputs:Key: // OutputKeyPrefixOutputs:Key // // * If your output is HLSv3 and has a segment duration set, or is not included // in a playlist, Elastic Transcoder creates an output playlist file with // a file extension of .m3u8, and a series of .ts files that include a five-digit // sequential counter beginning with 00000: OutputKeyPrefixOutputs:Key.m3u8 // OutputKeyPrefixOutputs:Key00000.ts // // * If your output is HLSv4, has a segment duration set, and is included // in an HLSv4 playlist, Elastic Transcoder creates an output playlist file // with a file extension of _v4.m3u8. If the output is video, Elastic Transcoder // also creates an output file with an extension of _iframe.m3u8: OutputKeyPrefixOutputs:Key_v4.m3u8 // OutputKeyPrefixOutputs:Key_iframe.m3u8 OutputKeyPrefixOutputs:Key.ts // // Elastic Transcoder automatically appends the relevant file extension to the // file name. If you include a file extension in Output Key, the file name will // have two extensions. // // If you include more than one output in a playlist, any segment duration settings, // clip settings, or caption settings must be the same for all outputs in the // playlist. For Smooth playlists, the Audio:Profile, Video:Profile, and Video:FrameRate // to Video:KeyframesMaxDist ratio must be the same for all outputs. OutputKeys []string `type:"list"` // The DRM settings, if any, that you want Elastic Transcoder to apply to the // output files associated with this playlist. PlayReadyDrm *PlayReadyDrm `type:"structure"` } // String returns the string representation func (s CreateJobPlaylist) String() string { return awsutil.Prettify(s) } // Validate inspects the fields of the type to determine if they are valid. func (s *CreateJobPlaylist) Validate() error { invalidParams := aws.ErrInvalidParams{Context: "CreateJobPlaylist"} if s.Name != nil && len(*s.Name) < 1 { invalidParams.Add(aws.NewErrParamMinLen("Name", 1)) } if s.PlayReadyDrm != nil { if err := s.PlayReadyDrm.Validate(); err != nil { invalidParams.AddNested("PlayReadyDrm", err.(aws.ErrInvalidParams)) } } if invalidParams.Len() > 0 { return invalidParams } return nil } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s CreateJobPlaylist) MarshalFields(e protocol.FieldEncoder) error { if s.Format != nil { v := *s.Format metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Format", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.HlsContentProtection != nil { v := s.HlsContentProtection metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "HlsContentProtection", v, metadata) } if s.Name != nil { v := *s.Name metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Name", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.OutputKeys != nil { v := s.OutputKeys metadata := protocol.Metadata{} ls0 := e.List(protocol.BodyTarget, "OutputKeys", metadata) ls0.Start() for _, v1 := range v { ls0.ListAddValue(protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v1)}) } ls0.End() } if s.PlayReadyDrm != nil { v := s.PlayReadyDrm metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "PlayReadyDrm", v, metadata) } return nil } // The detected properties of the input file. Elastic Transcoder identifies // these values from the input file. 
type DetectedProperties struct { _ struct{} `type:"structure"` // The detected duration of the input file, in milliseconds. DurationMillis *int64 `type:"long"` // The detected file size of the input file, in bytes. FileSize *int64 `type:"long"` // The detected frame rate of the input file, in frames per second. FrameRate *string `type:"string"` // The detected height of the input file, in pixels. Height *int64 `type:"integer"` // The detected width of the input file, in pixels. Width *int64 `type:"integer"` } // String returns the string representation func (s DetectedProperties) String() string { return awsutil.Prettify(s) } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s DetectedProperties) MarshalFields(e protocol.FieldEncoder) error { if s.DurationMillis != nil { v := *s.DurationMillis metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "DurationMillis", protocol.Int64Value(v), metadata) } if s.FileSize != nil { v := *s.FileSize metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "FileSize", protocol.Int64Value(v), metadata) } if s.FrameRate != nil { v := *s.FrameRate metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "FrameRate", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Height != nil { v := *s.Height metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Height", protocol.Int64Value(v), metadata) } if s.Width != nil { v := *s.Width metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Width", protocol.Int64Value(v), metadata) } return nil } // The encryption settings, if any, that are used for decrypting your input // files or encrypting your output files. If your input file is encrypted, you // must specify the mode that Elastic Transcoder uses to decrypt your file, // otherwise you must specify the mode you want Elastic Transcoder to use to // encrypt your output files. type Encryption struct { _ struct{} `type:"structure"` // The series of random bits created by a random bit generator, unique for every // encryption operation, that you used to encrypt your input files or that you // want Elastic Transcoder to use to encrypt your output files. The initialization // vector must be base64-encoded, and it must be exactly 16 bytes long before // being base64-encoded. InitializationVector *string `type:"string"` // The data encryption key that you want Elastic Transcoder to use to encrypt // your output file, or that was used to encrypt your input file. The key must // be base64-encoded and it must be one of the following bit lengths before // being base64-encoded: // // 128, 192, or 256. // // The key must also be encrypted by using the Amazon Key Management Service. Key *string `type:"string"` // The MD5 digest of the key that you used to encrypt your input file, or that // you want Elastic Transcoder to use to encrypt your output file. Elastic Transcoder // uses the key digest as a checksum to make sure your key was not corrupted // in transit. The key MD5 must be base64-encoded, and it must be exactly 16 // bytes long before being base64-encoded. KeyMd5 *string `type:"string"` // The specific server-side encryption mode that you want Elastic Transcoder // to use when decrypting your input files or encrypting your output files. // Elastic Transcoder supports the following options: // // * s3: Amazon S3 creates and manages the keys used for encrypting your // files. 
	//
	//    * s3-aws-kms: Amazon S3 calls the Amazon Key Management Service, which
	//    creates and manages the keys that are used for encrypting your files.
	//    If you specify s3-aws-kms and you don't want to use the default key, you
	//    must add the AWS-KMS key that you want to use to your pipeline.
	//
	//    * aes-cbc-pkcs7: A padded cipher-block mode of operation originally used
	//    for HLS files.
	//
	//    * aes-ctr: AES Counter Mode.
	//
	//    * aes-gcm: AES Galois Counter Mode, a mode of operation that is an authenticated
	//    encryption format, meaning that a file, key, or initialization vector
	//    that has been tampered with fails the decryption process.
	//
	// For all three AES options, you must provide the following settings, which
	// must be base64-encoded:
	//
	//    * Key
	//
	//    * Key MD5
	//
	//    * Initialization Vector
	//
	// For the AES modes, your private encryption keys and your unencrypted data
	// are never stored by AWS; therefore, it is important that you safely manage
	// your encryption keys. If you lose them, you won't be able to decrypt your
	// data.
	Mode *string `type:"string"`
}

// String returns the string representation
func (s Encryption) String() string {
	return awsutil.Prettify(s)
}

// MarshalFields encodes the AWS API shape using the passed in protocol encoder.
func (s Encryption) MarshalFields(e protocol.FieldEncoder) error {
	if s.InitializationVector != nil {
		v := *s.InitializationVector

		metadata := protocol.Metadata{}
		e.SetValue(protocol.BodyTarget, "InitializationVector", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)
	}
	if s.Key != nil {
		v := *s.Key

		metadata := protocol.Metadata{}
		e.SetValue(protocol.BodyTarget, "Key", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)
	}
	if s.KeyMd5 != nil {
		v := *s.KeyMd5

		metadata := protocol.Metadata{}
		e.SetValue(protocol.BodyTarget, "KeyMd5", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)
	}
	if s.Mode != nil {
		v := *s.Mode

		metadata := protocol.Metadata{}
		e.SetValue(protocol.BodyTarget, "Mode", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)
	}
	return nil
}

// The HLS content protection settings, if any, that you want Elastic Transcoder
// to apply to your output files.
type HlsContentProtection struct {
	_ struct{} `type:"structure"`

	// If Elastic Transcoder is generating your key for you, you must leave this
	// field blank.
	//
	// The series of random bits created by a random bit generator, unique for every
	// encryption operation, that you want Elastic Transcoder to use to encrypt
	// your output files. The initialization vector must be base64-encoded, and
	// it must be exactly 16 bytes before being base64-encoded.
	InitializationVector *string `type:"string"`

	// If you want Elastic Transcoder to generate a key for you, leave this field
	// blank.
	//
	// If you choose to supply your own key, you must encrypt the key by using AWS
	// KMS. The key must be base64-encoded, and it must be one of the following
	// bit lengths before being base64-encoded:
	//
	// 128, 192, or 256.
	Key *string `type:"string"`

	// If Elastic Transcoder is generating your key for you, you must leave this
	// field blank.
	//
	// The MD5 digest of the key that you want Elastic Transcoder to use to encrypt
	// your output file, and that you want Elastic Transcoder to use as a checksum
	// to make sure your key was not corrupted in transit. The key MD5 must be base64-encoded,
	// and it must be exactly 16 bytes before being base64-encoded.
KeyMd5 *string `type:"string"` // Specify whether you want Elastic Transcoder to write your HLS license key // to an Amazon S3 bucket. If you choose WithVariantPlaylists, LicenseAcquisitionUrl // must be left blank and Elastic Transcoder writes your data key into the same // bucket as the associated playlist. KeyStoragePolicy *string `type:"string"` // The location of the license key required to decrypt your HLS playlist. The // URL must be an absolute path, and is referenced in the URI attribute of the // EXT-X-KEY metadata tag in the playlist file. LicenseAcquisitionUrl *string `type:"string"` // The content protection method for your output. The only valid value is: aes-128. // // This value is written into the method attribute of the EXT-X-KEY metadata // tag in the output playlist. Method *string `type:"string"` } // String returns the string representation func (s HlsContentProtection) String() string { return awsutil.Prettify(s) } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s HlsContentProtection) MarshalFields(e protocol.FieldEncoder) error { if s.InitializationVector != nil { v := *s.InitializationVector metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "InitializationVector", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Key != nil { v := *s.Key metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Key", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.KeyMd5 != nil { v := *s.KeyMd5 metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "KeyMd5", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.KeyStoragePolicy != nil { v := *s.KeyStoragePolicy metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "KeyStoragePolicy", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.LicenseAcquisitionUrl != nil { v := *s.LicenseAcquisitionUrl metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "LicenseAcquisitionUrl", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Method != nil { v := *s.Method metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Method", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // The captions to be created, if any. type InputCaptions struct { _ struct{} `type:"structure"` // Source files for the input sidecar captions used during the transcoding process. // To omit all sidecar captions, leave CaptionSources blank. CaptionSources []CaptionSource `type:"list"` // A policy that determines how Elastic Transcoder handles the existence of // multiple captions. // // * MergeOverride: Elastic Transcoder transcodes both embedded and sidecar // captions into outputs. If captions for a language are embedded in the // input file and also appear in a sidecar file, Elastic Transcoder uses // the sidecar captions and ignores the embedded captions for that language. // // * MergeRetain: Elastic Transcoder transcodes both embedded and sidecar // captions into outputs. If captions for a language are embedded in the // input file and also appear in a sidecar file, Elastic Transcoder uses // the embedded captions and ignores the sidecar captions for that language. // If CaptionSources is empty, Elastic Transcoder omits all sidecar captions // from the output files. // // * Override: Elastic Transcoder transcodes only the sidecar captions that // you specify in CaptionSources. 
// // MergePolicy cannot be null. MergePolicy *string `type:"string"` } // String returns the string representation func (s InputCaptions) String() string { return awsutil.Prettify(s) } // Validate inspects the fields of the type to determine if they are valid. func (s *InputCaptions) Validate() error { invalidParams := aws.ErrInvalidParams{Context: "InputCaptions"} if s.CaptionSources != nil { for i, v := range s.CaptionSources { if err := v.Validate(); err != nil { invalidParams.AddNested(fmt.Sprintf("%s[%v]", "CaptionSources", i), err.(aws.ErrInvalidParams)) } } } if invalidParams.Len() > 0 { return invalidParams } return nil } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s InputCaptions) MarshalFields(e protocol.FieldEncoder) error { if s.CaptionSources != nil { v := s.CaptionSources metadata := protocol.Metadata{} ls0 := e.List(protocol.BodyTarget, "CaptionSources", metadata) ls0.Start() for _, v1 := range v { ls0.ListAddFields(v1) } ls0.End() } if s.MergePolicy != nil { v := *s.MergePolicy metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "MergePolicy", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // A section of the response body that provides information about the job that // is created. type Job struct { _ struct{} `type:"structure"` // The Amazon Resource Name (ARN) for the job. Arn *string `type:"string"` // The identifier that Elastic Transcoder assigned to the job. You use this // value to get settings for the job or to delete the job. Id *string `type:"string"` // A section of the request or response body that provides information about // the file that is being transcoded. Input *JobInput `type:"structure"` // Information about the files that you're transcoding. If you specified multiple // files for this job, Elastic Transcoder stitches the files together to make // one output. Inputs []JobInput `type:"list"` // If you specified one output for a job, information about that output. If // you specified multiple outputs for a job, the Output object lists information // about the first output. This duplicates the information that is listed for // the first output in the Outputs object. // // Outputs recommended instead. // // A section of the request or response body that provides information about // the transcoded (target) file. Output *JobOutput `type:"structure"` // The value, if any, that you want Elastic Transcoder to prepend to the names // of all files that this job creates, including output files, thumbnails, and // playlists. We recommend that you add a / or some other delimiter to the end // of the OutputKeyPrefix. OutputKeyPrefix *string `min:"1" type:"string"` // Information about the output files. We recommend that you use the Outputs // syntax for all jobs, even when you want Elastic Transcoder to transcode a // file into only one format. Do not use both the Outputs and Output syntaxes // in the same request. You can create a maximum of 30 outputs per job. // // If you specify more than one output for a job, Elastic Transcoder creates // the files for each output in the order in which you specify them in the job. Outputs []JobOutput `type:"list"` // The Id of the pipeline that you want Elastic Transcoder to use for transcoding. // The pipeline determines several settings, including the Amazon S3 bucket // from which Elastic Transcoder gets the files to transcode and the bucket // into which Elastic Transcoder puts the transcoded files. 
PipelineId *string `type:"string"` // // Outputs in Fragmented MP4 or MPEG-TS format only. // // If you specify a preset in PresetId for which the value of Container is fmp4 // (Fragmented MP4) or ts (MPEG-TS), Playlists contains information about the // master playlists that you want Elastic Transcoder to create. // // The maximum number of master playlists in a job is 30. Playlists []Playlist `type:"list"` // The status of the job: Submitted, Progressing, Complete, Canceled, or Error. Status *string `type:"string"` // Details about the timing of a job. Timing *Timing `type:"structure"` // User-defined metadata that you want to associate with an Elastic Transcoder // job. You specify metadata in key/value pairs, and you can add up to 10 key/value // pairs per job. Elastic Transcoder does not guarantee that key/value pairs // are returned in the same order in which you specify them. // // Metadata keys and values must use characters from the following list: // // * 0-9 // // * A-Z and a-z // // * Space // // * The following symbols: _.:/=+-%@ UserMetadata map[string]string `type:"map"` } // String returns the string representation func (s Job) String() string { return awsutil.Prettify(s) } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s Job) MarshalFields(e protocol.FieldEncoder) error { if s.Arn != nil { v := *s.Arn metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Arn", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Id != nil { v := *s.Id metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Id", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Input != nil { v := s.Input metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "Input", v, metadata) } if s.Inputs != nil { v := s.Inputs metadata := protocol.Metadata{} ls0 := e.List(protocol.BodyTarget, "Inputs", metadata) ls0.Start() for _, v1 := range v { ls0.ListAddFields(v1) } ls0.End() } if s.Output != nil { v := s.Output metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "Output", v, metadata) } if s.OutputKeyPrefix != nil { v := *s.OutputKeyPrefix metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "OutputKeyPrefix", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Outputs != nil { v := s.Outputs metadata := protocol.Metadata{} ls0 := e.List(protocol.BodyTarget, "Outputs", metadata) ls0.Start() for _, v1 := range v { ls0.ListAddFields(v1) } ls0.End() } if s.PipelineId != nil { v := *s.PipelineId metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "PipelineId", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Playlists != nil { v := s.Playlists metadata := protocol.Metadata{} ls0 := e.List(protocol.BodyTarget, "Playlists", metadata) ls0.Start() for _, v1 := range v { ls0.ListAddFields(v1) } ls0.End() } if s.Status != nil { v := *s.Status metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Status", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Timing != nil { v := s.Timing metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "Timing", v, metadata) } if s.UserMetadata != nil { v := s.UserMetadata metadata := protocol.Metadata{} ms0 := e.Map(protocol.BodyTarget, "UserMetadata", metadata) ms0.Start() for k1, v1 := range v { ms0.MapSetValue(k1, protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v1)}) } ms0.End() } return nil } // The .jpg 
or .png file associated with an audio file. type JobAlbumArt struct { _ struct{} `type:"structure"` // The file to be used as album art. There can be multiple artworks associated // with an audio file, to a maximum of 20. Valid formats are .jpg and .png Artwork []Artwork `type:"list"` // A policy that determines how Elastic Transcoder handles the existence of // multiple album artwork files. // // * Replace: The specified album art replaces any existing album art. // // * Prepend: The specified album art is placed in front of any existing // album art. // // * Append: The specified album art is placed after any existing album art. // // * Fallback: If the original input file contains artwork, Elastic Transcoder // uses that artwork for the output. If the original input does not contain // artwork, Elastic Transcoder uses the specified album art file. MergePolicy *string `type:"string"` } // String returns the string representation func (s JobAlbumArt) String() string { return awsutil.Prettify(s) } // Validate inspects the fields of the type to determine if they are valid. func (s *JobAlbumArt) Validate() error { invalidParams := aws.ErrInvalidParams{Context: "JobAlbumArt"} if s.Artwork != nil { for i, v := range s.Artwork { if err := v.Validate(); err != nil { invalidParams.AddNested(fmt.Sprintf("%s[%v]", "Artwork", i), err.(aws.ErrInvalidParams)) } } } if invalidParams.Len() > 0 { return invalidParams } return nil } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s JobAlbumArt) MarshalFields(e protocol.FieldEncoder) error { if s.Artwork != nil { v := s.Artwork metadata := protocol.Metadata{} ls0 := e.List(protocol.BodyTarget, "Artwork", metadata) ls0.Start() for _, v1 := range v { ls0.ListAddFields(v1) } ls0.End() } if s.MergePolicy != nil { v := *s.MergePolicy metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "MergePolicy", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // Information about the file that you're transcoding. type JobInput struct { _ struct{} `type:"structure"` // The aspect ratio of the input file. If you want Elastic Transcoder to automatically // detect the aspect ratio of the input file, specify auto. If you want to specify // the aspect ratio for the output file, enter one of the following values: // // 1:1, 4:3, 3:2, 16:9 // // If you specify a value other than auto, Elastic Transcoder disables automatic // detection of the aspect ratio. AspectRatio *string `type:"string"` // The container type for the input file. If you want Elastic Transcoder to // automatically detect the container type of the input file, specify auto. // If you want to specify the container type for the input file, enter one of // the following values: // // 3gp, aac, asf, avi, divx, flv, m4a, mkv, mov, mp3, mp4, mpeg, mpeg-ps, mpeg-ts, // mxf, ogg, vob, wav, webm Container *string `type:"string"` // The detected properties of the input file. DetectedProperties *DetectedProperties `type:"structure"` // The encryption settings, if any, that are used for decrypting your input // files. If your input file is encrypted, you must specify the mode that Elastic // Transcoder uses to decrypt your file. Encryption *Encryption `type:"structure"` // The frame rate of the input file. If you want Elastic Transcoder to automatically // detect the frame rate of the input file, specify auto. 
If you want to specify // the frame rate for the input file, enter one of the following values: // // 10, 15, 23.97, 24, 25, 29.97, 30, 60 // // If you specify a value other than auto, Elastic Transcoder disables automatic // detection of the frame rate. FrameRate *string `type:"string"` // You can configure Elastic Transcoder to transcode captions, or subtitles, // from one format to another. All captions must be in UTF-8. Elastic Transcoder // supports two types of captions: // // * Embedded: Embedded captions are included in the same file as the audio // and video. Elastic Transcoder supports only one embedded caption per language, // to a maximum of 300 embedded captions per file. Valid input values include: // CEA-608 (EIA-608, first non-empty channel only), CEA-708 (EIA-708, first // non-empty channel only), and mov-text Valid outputs include: mov-text // Elastic Transcoder supports a maximum of one embedded format per output. // // * Sidecar: Sidecar captions are kept in a separate metadata file from // the audio and video data. Sidecar captions require a player that is capable // of understanding the relationship between the video file and the sidecar // file. Elastic Transcoder supports only one sidecar caption per language, // to a maximum of 20 sidecar captions per file. Valid input values include: // dfxp (first div element only), ebu-tt, scc, smpt, srt, ttml (first div // element only), and webvtt Valid outputs include: dfxp (first div element // only), scc, srt, and webvtt. // // If you want ttml or smpte-tt compatible captions, specify dfxp as your output // format. // // Elastic Transcoder does not support OCR (Optical Character Recognition), // does not accept pictures as a valid input for captions, and is not available // for audio-only transcoding. Elastic Transcoder does not preserve text formatting // (for example, italics) during the transcoding process. // // To remove captions or leave the captions empty, set Captions to null. To // pass through existing captions unchanged, set the MergePolicy to MergeRetain, // and pass in a null CaptionSources array. // // For more information on embedded files, see the Subtitles Wikipedia page. // // For more information on sidecar files, see the Extensible Metadata Platform // and Sidecar file Wikipedia pages. InputCaptions *InputCaptions `type:"structure"` // Whether the input file is interlaced. If you want Elastic Transcoder to automatically // detect whether the input file is interlaced, specify auto. If you want to // specify whether the input file is interlaced, enter one of the following // values: // // true, false // // If you specify a value other than auto, Elastic Transcoder disables automatic // detection of interlacing. Interlaced *string `type:"string"` // The name of the file to transcode. Elsewhere in the body of the JSON block // is the ID of the pipeline to use for processing the job. The InputBucket // object in that pipeline tells Elastic Transcoder which Amazon S3 bucket to // get the file from. // // If the file name includes a prefix, such as cooking/lasagna.mpg, include // the prefix in the key. If the file isn't in the specified bucket, Elastic // Transcoder returns an error. Key *string `min:"1" type:"string"` // This value must be auto, which causes Elastic Transcoder to automatically // detect the resolution of the input file. Resolution *string `type:"string"` // Settings for clipping an input. Each input can have different clip settings.
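//
// As an illustrative sketch only (not part of the generated code), a clipped
// input might be built as follows. It assumes the TimeSpan type defined
// elsewhere in this package exposes StartTime and Duration string fields, and
// the S3 key is a hypothetical placeholder:
//
//	in := JobInput{
//		Key:       aws.String("inputs/lasagna.mpg"),
//		Container: aws.String("auto"),
//		TimeSpan: &TimeSpan{
//			StartTime: aws.String("00:00:10.000"), // start 10 seconds into the source
//			Duration:  aws.String("00:00:30.000"), // keep 30 seconds
//		},
//	}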
TimeSpan *TimeSpan `type:"structure"` } // String returns the string representation func (s JobInput) String() string { return awsutil.Prettify(s) } // Validate inspects the fields of the type to determine if they are valid. func (s *JobInput) Validate() error { invalidParams := aws.ErrInvalidParams{Context: "JobInput"} if s.Key != nil && len(*s.Key) < 1 { invalidParams.Add(aws.NewErrParamMinLen("Key", 1)) } if s.InputCaptions != nil { if err := s.InputCaptions.Validate(); err != nil { invalidParams.AddNested("InputCaptions", err.(aws.ErrInvalidParams)) } } if invalidParams.Len() > 0 { return invalidParams } return nil } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s JobInput) MarshalFields(e protocol.FieldEncoder) error { if s.AspectRatio != nil { v := *s.AspectRatio metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "AspectRatio", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Container != nil { v := *s.Container metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Container", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.DetectedProperties != nil { v := s.DetectedProperties metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "DetectedProperties", v, metadata) } if s.Encryption != nil { v := s.Encryption metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "Encryption", v, metadata) } if s.FrameRate != nil { v := *s.FrameRate metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "FrameRate", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.InputCaptions != nil { v := s.InputCaptions metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "InputCaptions", v, metadata) } if s.Interlaced != nil { v := *s.Interlaced metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Interlaced", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Key != nil { v := *s.Key metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Key", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Resolution != nil { v := *s.Resolution metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Resolution", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.TimeSpan != nil { v := s.TimeSpan metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "TimeSpan", v, metadata) } return nil } // // Outputs recommended instead. // // If you specified one output for a job, information about that output. If // you specified multiple outputs for a job, the Output object lists information // about the first output. This duplicates the information that is listed for // the first output in the Outputs object. type JobOutput struct { _ struct{} `type:"structure"` // The album art to be associated with the output file, if any. AlbumArt *JobAlbumArt `type:"structure"` // If Elastic Transcoder used a preset with a ColorSpaceConversionMode to transcode // the output file, the AppliedColorSpaceConversion parameter shows the conversion // used. If no ColorSpaceConversionMode was defined in the preset, this parameter // is not included in the job response. AppliedColorSpaceConversion *string `type:"string"` // You can configure Elastic Transcoder to transcode captions, or subtitles, // from one format to another. All captions must be in UTF-8.
Elastic Transcoder // supports two types of captions: // // * Embedded: Embedded captions are included in the same file as the audio // and video. Elastic Transcoder supports only one embedded caption per language, // to a maximum of 300 embedded captions per file. Valid input values include: // CEA-608 (EIA-608, first non-empty channel only), CEA-708 (EIA-708, first // non-empty channel only), and mov-text Valid outputs include: mov-text // Elastic Transcoder supports a maximum of one embedded format per output. // // * Sidecar: Sidecar captions are kept in a separate metadata file from // the audio and video data. Sidecar captions require a player that is capable // of understanding the relationship between the video file and the sidecar // file. Elastic Transcoder supports only one sidecar caption per language, // to a maximum of 20 sidecar captions per file. Valid input values include: // dfxp (first div element only), ebu-tt, scc, smpt, srt, ttml (first div // element only), and webvtt Valid outputs include: dfxp (first div element // only), scc, srt, and webvtt. // // If you want ttml or smpte-tt compatible captions, specify dfxp as your output // format. // // Elastic Transcoder does not support OCR (Optical Character Recognition), // does not accept pictures as a valid input for captions, and is not available // for audio-only transcoding. Elastic Transcoder does not preserve text formatting // (for example, italics) during the transcoding process. // // To remove captions or leave the captions empty, set Captions to null. To // pass through existing captions unchanged, set the MergePolicy to MergeRetain, // and pass in a null CaptionSources array. // // For more information on embedded files, see the Subtitles Wikipedia page. // // For more information on sidecar files, see the Extensible Metadata Platform // and Sidecar file Wikipedia pages. Captions *Captions `type:"structure"` // You can create an output file that contains an excerpt from the input file. // This excerpt, called a clip, can come from the beginning, middle, or end // of the file. The Composition object contains settings for the clips that // make up an output file. For the current release, you can only specify settings // for a single clip per output file. The Composition object cannot be null. Composition []Clip `deprecated:"true" type:"list"` // Duration of the output file, in seconds. Duration *int64 `type:"long"` // Duration of the output file, in milliseconds. DurationMillis *int64 `type:"long"` // The encryption settings, if any, that you want Elastic Transcoder to apply // to your output files. If you choose to use encryption, you must specify a // mode to use. If you choose not to use encryption, Elastic Transcoder writes // an unencrypted file to your Amazon S3 bucket. Encryption *Encryption `type:"structure"` // File size of the output file, in bytes. FileSize *int64 `type:"long"` // Frame rate of the output file, in frames per second. FrameRate *string `type:"string"` // Height of the output file, in pixels. Height *int64 `type:"integer"` // A sequential counter, starting with 1, that identifies an output among the // outputs from the current job. In the Output syntax, this value is always // 1. Id *string `type:"string"` // The name to assign to the transcoded file. Elastic Transcoder saves the file // in the Amazon S3 bucket specified by the OutputBucket object in the pipeline // that is specified by the pipeline ID. 
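//
// As an illustrative sketch only (not part of the generated code), the final
// object name is the job-level OutputKeyPrefix concatenated with this Key;
// both values below are hypothetical placeholders:
//
//	prefix, key := "recipes/", "lasagna-480p.mp4"
//	fullKey := prefix + key // saved as recipes/lasagna-480p.mp4 in the pipeline's output bucket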
Key *string `min:"1" type:"string"` // The value of the Id object for the preset that you want to use for this job. // The preset determines the audio, video, and thumbnail settings that Elastic // Transcoder uses for transcoding. To use a preset that you created, specify // the preset ID that Elastic Transcoder returned in the response when you created // the preset. You can also use the Elastic Transcoder system presets, which // you can get with ListPresets. PresetId *string `type:"string"` // The number of degrees clockwise by which you want Elastic Transcoder to rotate // the output relative to the input. Enter one of the following values: // // auto, 0, 90, 180, 270 // // The value auto generally works only if the file that you're transcoding contains // rotation metadata. Rotate *string `type:"string"` // // Outputs in Fragmented MP4 or MPEG-TS format only. // // If you specify a preset in PresetId for which the value of Container is fmp4 // (Fragmented MP4) or ts (MPEG-TS), SegmentDuration is the target maximum duration // of each segment in seconds. For HLSv3 format playlists, each media segment // is stored in a separate .ts file. For HLSv4, MPEG-DASH, and Smooth playlists, // all media segments for an output are stored in a single file. Each segment // is approximately the length of the SegmentDuration, though individual segments // might be shorter or longer. // // The range of valid values is 1 to 60 seconds. If the duration of the video // is not evenly divisible by SegmentDuration, the duration of the last segment // is the remainder of total length/SegmentDuration. // // Elastic Transcoder creates an output-specific playlist for each HLS // output that you specify in OutputKeys. To add an output to the master playlist // for this job, include it in the OutputKeys of the associated playlist. SegmentDuration *string `type:"string"` // The status of one output in a job. If you specified only one output for the // job, Outputs:Status is always the same as Job:Status. If you specified more // than one output: // // * Job:Status and Outputs:Status for all of the outputs is Submitted until // Elastic Transcoder starts to process the first output. // // * When Elastic Transcoder starts to process the first output, Outputs:Status // for that output and Job:Status both change to Progressing. For each output, // the value of Outputs:Status remains Submitted until Elastic Transcoder // starts to process the output. // // * Job:Status remains Progressing until all of the outputs reach a terminal // status, either Complete or Error. // // * When all of the outputs reach a terminal status, Job:Status changes // to Complete only if Outputs:Status for all of the outputs is Complete. // If Outputs:Status for one or more outputs is Error, the terminal status // for Job:Status is also Error. // // The value of Status is one of the following: Submitted, Progressing, Complete, // Canceled, or Error. Status *string `type:"string"` // Information that further explains Status. StatusDetail *string `type:"string"` // The encryption settings, if any, that you want Elastic Transcoder to apply // to your thumbnail. ThumbnailEncryption *Encryption `type:"structure"` // Whether you want Elastic Transcoder to create thumbnails for your videos // and, if so, how you want Elastic Transcoder to name the files. // // If you don't want Elastic Transcoder to create thumbnails, specify "".
// // If you do want Elastic Transcoder to create thumbnails, specify the information // that you want to include in the file name for each thumbnail. You can specify // the following values in any sequence: // // * {count} (Required): If you want to create thumbnails, you must include // {count} in the ThumbnailPattern object. Wherever you specify {count}, // Elastic Transcoder adds a five-digit sequence number (beginning with 00001) // to thumbnail file names. The number indicates where a given thumbnail // appears in the sequence of thumbnails for a transcoded file. If you specify // a literal value and/or {resolution} but you omit {count}, Elastic Transcoder // returns a validation error and does not create the job. // // * Literal values (Optional): You can specify literal values anywhere in // the ThumbnailPattern object. For example, you can include them as a file // name prefix or as a delimiter between {resolution} and {count}. // // * {resolution} (Optional): If you want Elastic Transcoder to include the // resolution in the file name, include {resolution} in the ThumbnailPattern // object. // // When creating thumbnails, Elastic Transcoder automatically saves the files // in the format (.jpg or .png) that appears in the preset that you specified // in the PresetID value of CreateJobOutput. Elastic Transcoder also appends // the applicable file name extension. ThumbnailPattern *string `type:"string"` // Information about the watermarks that you want Elastic Transcoder to add // to the video during transcoding. You can specify up to four watermarks for // each output. Settings for each watermark must be defined in the preset that // you specify in Preset for the current output. // // Watermarks are added to the output video in the sequence in which you list // them in the job output—the first watermark in the list is added to the // output video first, the second watermark in the list is added next, and so // on. As a result, if the settings in a preset cause Elastic Transcoder to // place all watermarks in the same location, the second watermark that you // add covers the first one, the third one covers the second, and the fourth // one covers the third. Watermarks []JobWatermark `type:"list"` // Specifies the width of the output file in pixels. Width *int64 `type:"integer"` } // String returns the string representation func (s JobOutput) String() string { return awsutil.Prettify(s) } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. 
func (s JobOutput) MarshalFields(e protocol.FieldEncoder) error { if s.AlbumArt != nil { v := s.AlbumArt metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "AlbumArt", v, metadata) } if s.AppliedColorSpaceConversion != nil { v := *s.AppliedColorSpaceConversion metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "AppliedColorSpaceConversion", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Captions != nil { v := s.Captions metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "Captions", v, metadata) } if s.Composition != nil { v := s.Composition metadata := protocol.Metadata{} ls0 := e.List(protocol.BodyTarget, "Composition", metadata) ls0.Start() for _, v1 := range v { ls0.ListAddFields(v1) } ls0.End() } if s.Duration != nil { v := *s.Duration metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Duration", protocol.Int64Value(v), metadata) } if s.DurationMillis != nil { v := *s.DurationMillis metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "DurationMillis", protocol.Int64Value(v), metadata) } if s.Encryption != nil { v := s.Encryption metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "Encryption", v, metadata) } if s.FileSize != nil { v := *s.FileSize metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "FileSize", protocol.Int64Value(v), metadata) } if s.FrameRate != nil { v := *s.FrameRate metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "FrameRate", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Height != nil { v := *s.Height metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Height", protocol.Int64Value(v), metadata) } if s.Id != nil { v := *s.Id metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Id", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Key != nil { v := *s.Key metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Key", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.PresetId != nil { v := *s.PresetId metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "PresetId", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Rotate != nil { v := *s.Rotate metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Rotate", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.SegmentDuration != nil { v := *s.SegmentDuration metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "SegmentDuration", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Status != nil { v := *s.Status metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Status", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.StatusDetail != nil { v := *s.StatusDetail metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "StatusDetail", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.ThumbnailEncryption != nil { v := s.ThumbnailEncryption metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "ThumbnailEncryption", v, metadata) } if s.ThumbnailPattern != nil { v := *s.ThumbnailPattern metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "ThumbnailPattern", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Watermarks != nil { v := s.Watermarks metadata := protocol.Metadata{} ls0 := e.List(protocol.BodyTarget, 
"Watermarks", metadata) ls0.Start() for _, v1 := range v { ls0.ListAddFields(v1) } ls0.End() } if s.Width != nil { v := *s.Width metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Width", protocol.Int64Value(v), metadata) } return nil } // Watermarks can be in .png or .jpg format. If you want to display a watermark // that is not rectangular, use the .png format, which supports transparency. type JobWatermark struct { _ struct{} `type:"structure"` // The encryption settings, if any, that you want Elastic Transcoder to apply // to your watermarks. Encryption *Encryption `type:"structure"` // The name of the .png or .jpg file that you want to use for the watermark. // To determine which Amazon S3 bucket contains the specified file, Elastic // Transcoder checks the pipeline specified by Pipeline; the Input Bucket object // in that pipeline identifies the bucket. // // If the file name includes a prefix, for example, logos/128x64.png, include // the prefix in the key. If the file isn't in the specified bucket, Elastic // Transcoder returns an error. InputKey *string `min:"1" type:"string"` // The ID of the watermark settings that Elastic Transcoder uses to add watermarks // to the video during transcoding. The settings are in the preset specified // by Preset for the current output. In that preset, the value of Watermarks // Id tells Elastic Transcoder which settings to use. PresetWatermarkId *string `min:"1" type:"string"` } // String returns the string representation func (s JobWatermark) String() string { return awsutil.Prettify(s) } // Validate inspects the fields of the type to determine if they are valid. func (s *JobWatermark) Validate() error { invalidParams := aws.ErrInvalidParams{Context: "JobWatermark"} if s.InputKey != nil && len(*s.InputKey) < 1 { invalidParams.Add(aws.NewErrParamMinLen("InputKey", 1)) } if s.PresetWatermarkId != nil && len(*s.PresetWatermarkId) < 1 { invalidParams.Add(aws.NewErrParamMinLen("PresetWatermarkId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s JobWatermark) MarshalFields(e protocol.FieldEncoder) error { if s.Encryption != nil { v := s.Encryption metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "Encryption", v, metadata) } if s.InputKey != nil { v := *s.InputKey metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "InputKey", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.PresetWatermarkId != nil { v := *s.PresetWatermarkId metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "PresetWatermarkId", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // The Amazon Simple Notification Service (Amazon SNS) topic or topics to notify // in order to report job status. // // To receive notifications, you must also subscribe to the new topic in the // Amazon SNS console. type Notifications struct { _ struct{} `type:"structure"` // The Amazon SNS topic that you want to notify when Elastic Transcoder has // finished processing the job. Completed *string `type:"string"` // The Amazon SNS topic that you want to notify when Elastic Transcoder encounters // an error condition. Error *string `type:"string"` // The Amazon Simple Notification Service (Amazon SNS) topic that you want to // notify when Elastic Transcoder has started to process the job. 
Progressing *string `type:"string"` // The Amazon SNS topic that you want to notify when Elastic Transcoder encounters // a warning condition. Warning *string `type:"string"` } // String returns the string representation func (s Notifications) String() string { return awsutil.Prettify(s) } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s Notifications) MarshalFields(e protocol.FieldEncoder) error { if s.Completed != nil { v := *s.Completed metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Completed", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Error != nil { v := *s.Error metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Error", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Progressing != nil { v := *s.Progressing metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Progressing", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Warning != nil { v := *s.Warning metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Warning", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // The Permission structure. type Permission struct { _ struct{} `type:"structure"` // The permission that you want to give to the AWS user that is listed in Grantee. // Valid values include: // // * READ: The grantee can read the thumbnails and metadata for thumbnails // that Elastic Transcoder adds to the Amazon S3 bucket. // // * READ_ACP: The grantee can read the object ACL for thumbnails that Elastic // Transcoder adds to the Amazon S3 bucket. // // * WRITE_ACP: The grantee can write the ACL for the thumbnails that Elastic // Transcoder adds to the Amazon S3 bucket. // // * FULL_CONTROL: The grantee has READ, READ_ACP, and WRITE_ACP permissions // for the thumbnails that Elastic Transcoder adds to the Amazon S3 bucket. Access []string `type:"list"` // The AWS user or group that you want to have access to transcoded files and // playlists. To identify the user or group, you can specify the canonical user // ID for an AWS account, an origin access identity for a CloudFront distribution, // the registered email address of an AWS account, or a predefined Amazon S3 // group. Grantee *string `min:"1" type:"string"` // The type of value that appears in the Grantee object: // // * Canonical: Either the canonical user ID for an AWS account or an origin // access identity for an Amazon CloudFront distribution. A canonical user // ID is not the same as an AWS account number. // // * Email: The registered email address of an AWS account. // // * Group: One of the following predefined Amazon S3 groups: AllUsers, AuthenticatedUsers, // or LogDelivery. GranteeType *string `type:"string"` } // String returns the string representation func (s Permission) String() string { return awsutil.Prettify(s) } // Validate inspects the fields of the type to determine if they are valid. func (s *Permission) Validate() error { invalidParams := aws.ErrInvalidParams{Context: "Permission"} if s.Grantee != nil && len(*s.Grantee) < 1 { invalidParams.Add(aws.NewErrParamMinLen("Grantee", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. 
func (s Permission) MarshalFields(e protocol.FieldEncoder) error { if s.Access != nil { v := s.Access metadata := protocol.Metadata{} ls0 := e.List(protocol.BodyTarget, "Access", metadata) ls0.Start() for _, v1 := range v { ls0.ListAddValue(protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v1)}) } ls0.End() } if s.Grantee != nil { v := *s.Grantee metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Grantee", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.GranteeType != nil { v := *s.GranteeType metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "GranteeType", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // The pipeline (queue) that is used to manage jobs. type Pipeline struct { _ struct{} `type:"structure"` // The Amazon Resource Name (ARN) for the pipeline. Arn *string `type:"string"` // The AWS Key Management Service (AWS KMS) key that you want to use with this // pipeline. // // If you use either s3 or s3-aws-kms as your Encryption:Mode, you don't need // to provide a key with your job because a default key, known as an AWS-KMS // key, is created for you automatically. You need to provide an AWS-KMS key // only if you want to use a non-default AWS-KMS key, or if you are using an // Encryption:Mode of aes-cbc-pkcs7, aes-ctr, or aes-gcm. AwsKmsKeyArn *string `type:"string"` // Information about the Amazon S3 bucket in which you want Elastic Transcoder // to save transcoded files and playlists. Either you specify both ContentConfig // and ThumbnailConfig, or you specify OutputBucket. // // * Bucket: The Amazon S3 bucket in which you want Elastic Transcoder to // save transcoded files and playlists. // // * Permissions: A list of the users and/or predefined Amazon S3 groups // you want to have access to transcoded files and playlists, and the type // of access that you want them to have. GranteeType: The type of value that // appears in the Grantee object: Canonical: Either the canonical user ID // for an AWS account or an origin access identity for an Amazon CloudFront // distribution. Email: The registered email address of an AWS account. Group: // One of the following predefined Amazon S3 groups: AllUsers, AuthenticatedUsers, // or LogDelivery. Grantee: The AWS user or group that you want to have access // to transcoded files and playlists. Access: The permission that you want // to give to the AWS user that is listed in Grantee. Valid values include: // READ: The grantee can read the objects and metadata for objects that Elastic // Transcoder adds to the Amazon S3 bucket. READ_ACP: The grantee can read // the object ACL for objects that Elastic Transcoder adds to the Amazon // S3 bucket. WRITE_ACP: The grantee can write the ACL for the objects that // Elastic Transcoder adds to the Amazon S3 bucket. FULL_CONTROL: The grantee // has READ, READ_ACP, and WRITE_ACP permissions for the objects that Elastic // Transcoder adds to the Amazon S3 bucket. // // * StorageClass: The Amazon S3 storage class, Standard or ReducedRedundancy, // that you want Elastic Transcoder to assign to the video files and playlists // that it stores in your Amazon S3 bucket. ContentConfig *PipelineOutputConfig `type:"structure"` // The identifier for the pipeline. You use this value to identify the pipeline // in which you want to perform a variety of operations, such as creating a // job or a preset. 
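//
// As an illustrative sketch only (not part of the generated code), a
// PipelineOutputConfig of the kind used for the ContentConfig and
// ThumbnailConfig fields might be built as follows; the bucket name and
// canonical user ID are hypothetical placeholders, and the access value
// follows the list documented on the Permission type:
//
//	cfg := PipelineOutputConfig{
//		Bucket:       aws.String("my-transcoded-media"),
//		StorageClass: aws.String("Standard"),
//		Permissions: []Permission{{
//			GranteeType: aws.String("Canonical"),
//			Grantee:     aws.String("<canonical-user-id>"),
//			Access:      []string{"READ"},
//		}},
//	}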
Id *string `type:"string"` // The Amazon S3 bucket from which Elastic Transcoder gets media files for transcoding // and the graphics files, if any, that you want to use for watermarks. InputBucket *string `type:"string"` // The name of the pipeline. We recommend that the name be unique within the // AWS account, but uniqueness is not enforced. // // Constraints: Maximum 40 characters Name *string `min:"1" type:"string"` // The Amazon Simple Notification Service (Amazon SNS) topic that you want to // notify to report job status. // // To receive notifications, you must also subscribe to the new topic in the // Amazon SNS console. // // * Progressing (optional): The Amazon Simple Notification Service (Amazon // SNS) topic that you want to notify when Elastic Transcoder has started // to process the job. // // * Complete (optional): The Amazon SNS topic that you want to notify when // Elastic Transcoder has finished processing the job. // // * Warning (optional): The Amazon SNS topic that you want to notify when // Elastic Transcoder encounters a warning condition. // // * Error (optional): The Amazon SNS topic that you want to notify when // Elastic Transcoder encounters an error condition. Notifications *Notifications `type:"structure"` // The Amazon S3 bucket in which you want Elastic Transcoder to save transcoded // files, thumbnails, and playlists. Either you specify this value, or you specify // both ContentConfig and ThumbnailConfig. OutputBucket *string `type:"string"` // The IAM Amazon Resource Name (ARN) for the role that Elastic Transcoder uses // to transcode jobs for this pipeline. Role *string `type:"string"` // The current status of the pipeline: // // * Active: The pipeline is processing jobs. // // * Paused: The pipeline is not currently processing jobs. Status *string `type:"string"` // Information about the Amazon S3 bucket in which you want Elastic Transcoder // to save thumbnail files. Either you specify both ContentConfig and ThumbnailConfig, // or you specify OutputBucket. // // * Bucket: The Amazon S3 bucket in which you want Elastic Transcoder to // save thumbnail files. // // * Permissions: A list of the users and/or predefined Amazon S3 groups // you want to have access to thumbnail files, and the type of access that // you want them to have. GranteeType: The type of value that appears in // the Grantee object: Canonical: Either the canonical user ID for an AWS // account or an origin access identity for an Amazon CloudFront distribution. // A canonical user ID is not the same as an AWS account number. Email: The // registered email address of an AWS account. Group: One of the following // predefined Amazon S3 groups: AllUsers, AuthenticatedUsers, or LogDelivery. // Grantee: The AWS user or group that you want to have access to thumbnail // files. Access: The permission that you want to give to the AWS user that // is listed in Grantee. Valid values include: READ: The grantee can read // the thumbnails and metadata for thumbnails that Elastic Transcoder adds // to the Amazon S3 bucket. READ_ACP: The grantee can read the object ACL // for thumbnails that Elastic Transcoder adds to the Amazon S3 bucket. WRITE_ACP: // The grantee can write the ACL for the thumbnails that Elastic Transcoder // adds to the Amazon S3 bucket. FULL_CONTROL: The grantee has READ, READ_ACP, // and WRITE_ACP permissions for the thumbnails that Elastic Transcoder adds // to the Amazon S3 bucket. 
// // * StorageClass: The Amazon S3 storage class, Standard or ReducedRedundancy, // that you want Elastic Transcoder to assign to the thumbnails that it stores // in your Amazon S3 bucket. ThumbnailConfig *PipelineOutputConfig `type:"structure"` } // String returns the string representation func (s Pipeline) String() string { return awsutil.Prettify(s) } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s Pipeline) MarshalFields(e protocol.FieldEncoder) error { if s.Arn != nil { v := *s.Arn metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Arn", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.AwsKmsKeyArn != nil { v := *s.AwsKmsKeyArn metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "AwsKmsKeyArn", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.ContentConfig != nil { v := s.ContentConfig metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "ContentConfig", v, metadata) } if s.Id != nil { v := *s.Id metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Id", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.InputBucket != nil { v := *s.InputBucket metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "InputBucket", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Name != nil { v := *s.Name metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Name", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Notifications != nil { v := s.Notifications metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "Notifications", v, metadata) } if s.OutputBucket != nil { v := *s.OutputBucket metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "OutputBucket", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Role != nil { v := *s.Role metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Role", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Status != nil { v := *s.Status metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Status", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.ThumbnailConfig != nil { v := s.ThumbnailConfig metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "ThumbnailConfig", v, metadata) } return nil } // The PipelineOutputConfig structure. type PipelineOutputConfig struct { _ struct{} `type:"structure"` // The Amazon S3 bucket in which you want Elastic Transcoder to save the transcoded // files. Specify this value when all of the following are true: // // * You want to save transcoded files, thumbnails (if any), and playlists // (if any) together in one bucket. // // * You do not want to specify the users or groups who have access to the // transcoded files, thumbnails, and playlists. // // * You do not want to specify the permissions that Elastic Transcoder grants // to the files. // // * You want to associate the transcoded files and thumbnails with the Amazon // S3 Standard storage class. // // If you want to save transcoded files and playlists in one bucket and thumbnails // in another bucket, specify which users can access the transcoded files or // the permissions the users have, or change the Amazon S3 storage class, omit // OutputBucket and specify values for ContentConfig and ThumbnailConfig instead. Bucket *string `type:"string"` // Optional. 
The Permissions object specifies which users and/or predefined // Amazon S3 groups you want to have access to transcoded files and playlists, // and the type of access you want them to have. You can grant permissions to // a maximum of 30 users and/or predefined Amazon S3 groups. // // If you include Permissions, Elastic Transcoder grants only the permissions // that you specify. It does not grant full permissions to the owner of the // role specified by Role. If you want that user to have full control, you must // explicitly grant full control to the user. // // If you omit Permissions, Elastic Transcoder grants full control over the // transcoded files and playlists to the owner of the role specified by Role, // and grants no other permissions to any other user or group. Permissions []Permission `type:"list"` // The Amazon S3 storage class, Standard or ReducedRedundancy, that you want // Elastic Transcoder to assign to the video files and playlists that it stores // in your Amazon S3 bucket. StorageClass *string `type:"string"` } // String returns the string representation func (s PipelineOutputConfig) String() string { return awsutil.Prettify(s) } // Validate inspects the fields of the type to determine if they are valid. func (s *PipelineOutputConfig) Validate() error { invalidParams := aws.ErrInvalidParams{Context: "PipelineOutputConfig"} if s.Permissions != nil { for i, v := range s.Permissions { if err := v.Validate(); err != nil { invalidParams.AddNested(fmt.Sprintf("%s[%v]", "Permissions", i), err.(aws.ErrInvalidParams)) } } } if invalidParams.Len() > 0 { return invalidParams } return nil } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s PipelineOutputConfig) MarshalFields(e protocol.FieldEncoder) error { if s.Bucket != nil { v := *s.Bucket metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Bucket", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Permissions != nil { v := s.Permissions metadata := protocol.Metadata{} ls0 := e.List(protocol.BodyTarget, "Permissions", metadata) ls0.Start() for _, v1 := range v { ls0.ListAddFields(v1) } ls0.End() } if s.StorageClass != nil { v := *s.StorageClass metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "StorageClass", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // The PlayReady DRM settings, if any, that you want Elastic Transcoder to apply // to the output files associated with this playlist. // // PlayReady DRM encrypts your media files using aes-ctr encryption. // // If you use DRM for an HLSv3 playlist, your outputs must have a master playlist. type PlayReadyDrm struct { _ struct{} `type:"structure"` // The type of DRM, if any, that you want Elastic Transcoder to apply to the // output files associated with this playlist. Format *string `type:"string"` // The series of random bits created by a random bit generator, unique for every // encryption operation, that you want Elastic Transcoder to use to encrypt // your files. The initialization vector must be base64-encoded, and it must // be exactly 8 bytes long before being base64-encoded. If no initialization // vector is provided, Elastic Transcoder generates one for you. InitializationVector *string `type:"string"` // The DRM key for your file, provided by your DRM license provider. The key // must be base64-encoded, and it must be one of the following bit lengths before // being base64-encoded: // // 128, 192, or 256. 
// // The key must also be encrypted by using AWS KMS. Key *string `type:"string"` // The ID for your DRM key, so that your DRM license provider knows which key // to provide. // // The key ID must be provided in big endian, and Elastic Transcoder converts // it to little endian before inserting it into the PlayReady DRM headers. If // you are unsure whether your license server provides your key ID in big or // little endian, check with your DRM provider. KeyId *string `type:"string"` // The MD5 digest of the key used for DRM on your file, and that you want Elastic // Transcoder to use as a checksum to make sure your key was not corrupted in // transit. The key MD5 must be base64-encoded, and it must be exactly 16 bytes // before being base64-encoded. KeyMd5 *string `type:"string"` // The location of the license key required to play DRM content. The URL must // be an absolute path, and is referenced by the PlayReady header. The PlayReady // header is referenced in the protection header of the client manifest for // Smooth Streaming outputs, and in the EXT-X-DXDRM and EXT-XDXDRMINFO metadata // tags for HLS playlist outputs. An example URL looks like this: https://www.example.com/exampleKey/ LicenseAcquisitionUrl *string `min:"1" type:"string"` } // String returns the string representation func (s PlayReadyDrm) String() string { return awsutil.Prettify(s) } // Validate inspects the fields of the type to determine if they are valid. func (s *PlayReadyDrm) Validate() error { invalidParams := aws.ErrInvalidParams{Context: "PlayReadyDrm"} if s.LicenseAcquisitionUrl != nil && len(*s.LicenseAcquisitionUrl) < 1 { invalidParams.Add(aws.NewErrParamMinLen("LicenseAcquisitionUrl", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s PlayReadyDrm) MarshalFields(e protocol.FieldEncoder) error { if s.Format != nil { v := *s.Format metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Format", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.InitializationVector != nil { v := *s.InitializationVector metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "InitializationVector", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Key != nil { v := *s.Key metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Key", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.KeyId != nil { v := *s.KeyId metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "KeyId", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.KeyMd5 != nil { v := *s.KeyMd5 metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "KeyMd5", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.LicenseAcquisitionUrl != nil { v := *s.LicenseAcquisitionUrl metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "LicenseAcquisitionUrl", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // Use Only for Fragmented MP4 or MPEG-TS Outputs. If you specify a preset for // which the value of Container is fmp4 (Fragmented MP4) or ts (MPEG-TS), Playlists // contains information about the master playlists that you want Elastic Transcoder // to create. We recommend that you create only one master playlist per output // format. The maximum number of master playlists in a job is 30. 
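//
// As an illustrative sketch only (not part of the generated code), a playlist
// of this shape might be built as follows; the name and output keys are
// hypothetical placeholders, and Elastic Transcoder appends the .m3u8
// extension to the name for HLS formats:
//
//	p := Playlist{
//		Format:     aws.String("HLSv3"),
//		Name:       aws.String("nyc-vacation"),
//		OutputKeys: []string{"hls-1m/nyc-vacation", "hls-2m/nyc-vacation"},
//	}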
type Playlist struct { _ struct{} `type:"structure"` // The format of the output playlist. Valid formats include HLSv3, HLSv4, and // Smooth. Format *string `type:"string"` // The HLS content protection settings, if any, that you want Elastic Transcoder // to apply to the output files associated with this playlist. HlsContentProtection *HlsContentProtection `type:"structure"` // The name that you want Elastic Transcoder to assign to the master playlist, // for example, nyc-vacation.m3u8. If the name includes a / character, the section // of the name before the last / must be identical for all Name objects. If // you create more than one master playlist, the values of all Name objects // must be unique. // // Elastic Transcoder automatically appends the relevant file extension to the // file name (.m3u8 for HLSv3 and HLSv4 playlists, and .ism and .ismc for Smooth // playlists). If you include a file extension in Name, the file name will have // two extensions. Name *string `min:"1" type:"string"` // For each output in this job that you want to include in a master playlist, // the value of the Outputs:Key object. // // * If your output is not HLS or does not have a segment duration set, the // name of the output file is a concatenation of OutputKeyPrefix and Outputs:Key: // OutputKeyPrefixOutputs:Key // // * If your output is HLSv3 and has a segment duration set, or is not included // in a playlist, Elastic Transcoder creates an output playlist file with // a file extension of .m3u8, and a series of .ts files that include a five-digit // sequential counter beginning with 00000: OutputKeyPrefixOutputs:Key.m3u8 // OutputKeyPrefixOutputs:Key00000.ts // // * If your output is HLSv4, has a segment duration set, and is included // in an HLSv4 playlist, Elastic Transcoder creates an output playlist file // with a file extension of _v4.m3u8. If the output is video, Elastic Transcoder // also creates an output file with an extension of _iframe.m3u8: OutputKeyPrefixOutputs:Key_v4.m3u8 // OutputKeyPrefixOutputs:Key_iframe.m3u8 OutputKeyPrefixOutputs:Key.ts // // Elastic Transcoder automatically appends the relevant file extension to the // file name. If you include a file extension in Output Key, the file name will // have two extensions. // // If you include more than one output in a playlist, any segment duration settings, // clip settings, or caption settings must be the same for all outputs in the // playlist. For Smooth playlists, the Audio:Profile, Video:Profile, and Video:FrameRate // to Video:KeyframesMaxDist ratio must be the same for all outputs. OutputKeys []string `type:"list"` // The DRM settings, if any, that you want Elastic Transcoder to apply to the // output files associated with this playlist. PlayReadyDrm *PlayReadyDrm `type:"structure"` // The status of the job with which the playlist is associated. Status *string `type:"string"` // Information that further explains the status. StatusDetail *string `type:"string"` } // String returns the string representation func (s Playlist) String() string { return awsutil.Prettify(s) } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. 
func (s Playlist) MarshalFields(e protocol.FieldEncoder) error { if s.Format != nil { v := *s.Format metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Format", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.HlsContentProtection != nil { v := s.HlsContentProtection metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "HlsContentProtection", v, metadata) } if s.Name != nil { v := *s.Name metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Name", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.OutputKeys != nil { v := s.OutputKeys metadata := protocol.Metadata{} ls0 := e.List(protocol.BodyTarget, "OutputKeys", metadata) ls0.Start() for _, v1 := range v { ls0.ListAddValue(protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v1)}) } ls0.End() } if s.PlayReadyDrm != nil { v := s.PlayReadyDrm metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "PlayReadyDrm", v, metadata) } if s.Status != nil { v := *s.Status metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Status", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.StatusDetail != nil { v := *s.StatusDetail metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "StatusDetail", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // Presets are templates that contain most of the settings for transcoding media // files from one format to another. Elastic Transcoder includes some default // presets for common formats, for example, several iPod and iPhone versions. // You can also create your own presets for formats that aren't included among // the default presets. You specify which preset you want to use when you create // a job. type Preset struct { _ struct{} `type:"structure"` // The Amazon Resource Name (ARN) for the preset. Arn *string `type:"string"` // A section of the response body that provides information about the audio // preset values. Audio *AudioParameters `type:"structure"` // The container type for the output file. Valid values include flac, flv, fmp4, // gif, mp3, mp4, mpg, mxf, oga, ogg, ts, and webm. Container *string `type:"string"` // A description of the preset. Description *string `type:"string"` // Identifier for the new preset. You use this value to get settings for the // preset or to delete it. Id *string `type:"string"` // The name of the preset. Name *string `min:"1" type:"string"` // A section of the response body that provides information about the thumbnail // preset values, if any. Thumbnails *Thumbnails `type:"structure"` // Whether the preset is a default preset provided by Elastic Transcoder (System) // or a preset that you have defined (Custom). Type *string `type:"string"` // A section of the response body that provides information about the video // preset values. Video *VideoParameters `type:"structure"` } // String returns the string representation func (s Preset) String() string { return awsutil.Prettify(s) } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. 
func (s Preset) MarshalFields(e protocol.FieldEncoder) error { if s.Arn != nil { v := *s.Arn metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Arn", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Audio != nil { v := s.Audio metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "Audio", v, metadata) } if s.Container != nil { v := *s.Container metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Container", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Description != nil { v := *s.Description metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Description", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Id != nil { v := *s.Id metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Id", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Name != nil { v := *s.Name metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Name", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Thumbnails != nil { v := s.Thumbnails metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "Thumbnails", v, metadata) } if s.Type != nil { v := *s.Type metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Type", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Video != nil { v := s.Video metadata := protocol.Metadata{} e.SetFields(protocol.BodyTarget, "Video", v, metadata) } return nil } // Settings for the size, location, and opacity of graphics that you want Elastic // Transcoder to overlay over videos that are transcoded using this preset. // You can specify settings for up to four watermarks. Watermarks appear in // the specified size and location, and with the specified opacity for the duration // of the transcoded video. // // Watermarks can be in .png or .jpg format. If you want to display a watermark // that is not rectangular, use the .png format, which supports transparency. // // When you create a job that uses this preset, you specify the .png or .jpg // graphics that you want Elastic Transcoder to include in the transcoded videos. // You can specify fewer graphics in the job than you specify watermark settings // in the preset, which allows you to use the same preset for up to four watermarks // that have different dimensions. type PresetWatermark struct { _ struct{} `type:"structure"` // The horizontal position of the watermark unless you specify a non-zero value // for HorizontalOffset: // // * Left: The left edge of the watermark is aligned with the left border // of the video. // // * Right: The right edge of the watermark is aligned with the right border // of the video. // // * Center: The watermark is centered between the left and right borders. HorizontalAlign *string `type:"string"` // The amount by which you want the horizontal position of the watermark to // be offset from the position specified by HorizontalAlign: // // * number of pixels (px): The minimum value is 0 pixels, and the maximum // value is the value of MaxWidth. // // * integer percentage (%): The range of valid values is 0 to 100. // // For example, if you specify Left for HorizontalAlign and 5px for HorizontalOffset, // the left side of the watermark appears 5 pixels from the left border of the // output video. // // HorizontalOffset is only valid when the value of HorizontalAlign is Left // or Right. 
If you specify an offset that causes the watermark to extend beyond // the left or right border and Elastic Transcoder has not added black bars, // the watermark is cropped. If Elastic Transcoder has added black bars, the // watermark extends into the black bars. If the watermark extends beyond the // black bars, it is cropped. // // Use the value of Target to specify whether you want to include the black // bars that are added by Elastic Transcoder, if any, in the offset calculation. HorizontalOffset *string `type:"string"` // A unique identifier for the settings for one watermark. The value of Id can // be up to 40 characters long. Id *string `min:"1" type:"string"` // The maximum height of the watermark in one of the following formats: // // * number of pixels (px): The minimum value is 16 pixels, and the maximum // value is the value of MaxHeight. // // * integer percentage (%): The range of valid values is 0 to 100. Use the // value of Target to specify whether you want Elastic Transcoder to include // the black bars that are added by Elastic Transcoder, if any, in the calculation. // // If you specify the value in pixels, it must be less than or equal to the // value of MaxHeight. MaxHeight *string `type:"string"` // The maximum width of the watermark in one of the following formats: // // * number of pixels (px): The minimum value is 16 pixels, and the maximum // value is the value of MaxWidth. // // * integer percentage (%): The range of valid values is 0 to 100. Use the // value of Target to specify whether you want Elastic Transcoder to include // the black bars that are added by Elastic Transcoder, if any, in the calculation. // If you specify the value in pixels, it must be less than or equal to the // value of MaxWidth. MaxWidth *string `type:"string"` // A percentage that indicates how much you want a watermark to obscure the // video in the location where it appears. Valid values are 0 (the watermark // is invisible) to 100 (the watermark completely obscures the video in the // specified location). The datatype of Opacity is float. // // Elastic Transcoder supports transparent .png graphics. If you use a transparent // .png, the transparent portion of the video appears as if you had specified // a value of 0 for Opacity. The .jpg file format doesn't support transparency. Opacity *string `type:"string"` // A value that controls scaling of the watermark: // // * Fit: Elastic Transcoder scales the watermark so it matches the value // that you specified in either MaxWidth or MaxHeight without exceeding the // other value. // // * Stretch: Elastic Transcoder stretches the watermark to match the values // that you specified for MaxWidth and MaxHeight. If the relative proportions // of the watermark and the values of MaxWidth and MaxHeight are different, // the watermark will be distorted. // // * ShrinkToFit: Elastic Transcoder scales the watermark down so that its // dimensions match the values that you specified for at least one of MaxWidth // and MaxHeight without exceeding either value. If you specify this option, // Elastic Transcoder does not scale the watermark up. SizingPolicy *string `type:"string"` // A value that determines how Elastic Transcoder interprets values that you // specified for HorizontalOffset, VerticalOffset, MaxWidth, and MaxHeight: // // * Content: HorizontalOffset and VerticalOffset values are calculated based // on the borders of the video excluding black bars added by Elastic Transcoder, // if any. 
In addition, MaxWidth and MaxHeight, if specified as a percentage, // are calculated based on the borders of the video excluding black bars // added by Elastic Transcoder, if any. // // * Frame: HorizontalOffset and VerticalOffset values are calculated based // on the borders of the video including black bars added by Elastic Transcoder, // if any. In addition, MaxWidth and MaxHeight, if specified as a percentage, // are calculated based on the borders of the video including black bars // added by Elastic Transcoder, if any. Target *string `type:"string"` // The vertical position of the watermark unless you specify a non-zero value // for VerticalOffset: // // * Top: The top edge of the watermark is aligned with the top border of // the video. // // * Bottom: The bottom edge of the watermark is aligned with the bottom // border of the video. // // * Center: The watermark is centered between the top and bottom borders. VerticalAlign *string `type:"string"` // VerticalOffset // // The amount by which you want the vertical position of the watermark to be // offset from the position specified by VerticalAlign: // // * number of pixels (px): The minimum value is 0 pixels, and the maximum // value is the value of MaxHeight. // // * integer percentage (%): The range of valid values is 0 to 100. // // For example, if you specify Top for VerticalAlign and 5px for VerticalOffset, // the top of the watermark appears 5 pixels from the top border of the output // video. // // VerticalOffset is only valid when the value of VerticalAlign is Top or Bottom. // // If you specify an offset that causes the watermark to extend beyond the top // or bottom border and Elastic Transcoder has not added black bars, the watermark // is cropped. If Elastic Transcoder has added black bars, the watermark extends // into the black bars. If the watermark extends beyond the black bars, it is // cropped. // // Use the value of Target to specify whether you want Elastic Transcoder to // include the black bars that are added by Elastic Transcoder, if any, in the // offset calculation. VerticalOffset *string `type:"string"` } // String returns the string representation func (s PresetWatermark) String() string { return awsutil.Prettify(s) } // Validate inspects the fields of the type to determine if they are valid. func (s *PresetWatermark) Validate() error { invalidParams := aws.ErrInvalidParams{Context: "PresetWatermark"} if s.Id != nil && len(*s.Id) < 1 { invalidParams.Add(aws.NewErrParamMinLen("Id", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. 
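
// Illustrative sketch (not generated code): building a PresetWatermark that
// pins a small, semi-transparent logo to the bottom-right corner of the
// frame. Every literal below is an assumption chosen for demonstration.
func examplePresetWatermark() (*PresetWatermark, error) {
	w := &PresetWatermark{
		Id:               aws.String("bottom-right-logo"),
		HorizontalAlign:  aws.String("Right"),
		HorizontalOffset: aws.String("10px"),
		VerticalAlign:    aws.String("Bottom"),
		VerticalOffset:   aws.String("10px"),
		MaxWidth:         aws.String("10%"),
		MaxHeight:        aws.String("10%"),
		SizingPolicy:     aws.String("ShrinkToFit"),
		Opacity:          aws.String("50"),
		// Content: offsets and percentage sizes are measured against the
		// video frame excluding any black bars Elastic Transcoder adds.
		Target: aws.String("Content"),
	}
	// Validate only checks the structural constraints encoded in the shape
	// (here, the minimum length of Id); the service enforces the rest.
	if err := w.Validate(); err != nil {
		return nil, err
	}
	return w, nil
}
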
func (s PresetWatermark) MarshalFields(e protocol.FieldEncoder) error { if s.HorizontalAlign != nil { v := *s.HorizontalAlign metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "HorizontalAlign", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.HorizontalOffset != nil { v := *s.HorizontalOffset metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "HorizontalOffset", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Id != nil { v := *s.Id metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Id", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.MaxHeight != nil { v := *s.MaxHeight metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "MaxHeight", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.MaxWidth != nil { v := *s.MaxWidth metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "MaxWidth", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Opacity != nil { v := *s.Opacity metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Opacity", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.SizingPolicy != nil { v := *s.SizingPolicy metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "SizingPolicy", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Target != nil { v := *s.Target metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Target", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.VerticalAlign != nil { v := *s.VerticalAlign metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "VerticalAlign", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.VerticalOffset != nil { v := *s.VerticalOffset metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "VerticalOffset", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // Thumbnails for videos. type Thumbnails struct { _ struct{} `type:"structure"` // // To better control resolution and aspect ratio of thumbnails, we recommend // that you use the values MaxWidth, MaxHeight, SizingPolicy, and PaddingPolicy // instead of Resolution and AspectRatio. The two groups of settings are mutually // exclusive. Do not use them together. // // The aspect ratio of thumbnails. Valid values include: // // auto, 1:1, 4:3, 3:2, 16:9 // // If you specify auto, Elastic Transcoder tries to preserve the aspect ratio // of the video in the output file. AspectRatio *string `type:"string"` // The format of thumbnails, if any. Valid values are jpg and png. // // You specify whether you want Elastic Transcoder to create thumbnails when // you create a job. Format *string `type:"string"` // The approximate number of seconds between thumbnails. Specify an integer // value. Interval *string `type:"string"` // The maximum height of thumbnails in pixels. If you specify auto, Elastic // Transcoder uses 1080 (Full HD) as the default value. If you specify a numeric // value, enter an even integer between 32 and 3072. MaxHeight *string `type:"string"` // The maximum width of thumbnails in pixels. If you specify auto, Elastic Transcoder // uses 1920 (Full HD) as the default value. If you specify a numeric value, // enter an even integer between 32 and 4096. 
MaxWidth *string `type:"string"` // When you set PaddingPolicy to Pad, Elastic Transcoder may add black bars // to the top and bottom and/or left and right sides of thumbnails to make the // total size of the thumbnails match the values that you specified for thumbnail // MaxWidth and MaxHeight settings. PaddingPolicy *string `type:"string"` // // To better control resolution and aspect ratio of thumbnails, we recommend // that you use the values MaxWidth, MaxHeight, SizingPolicy, and PaddingPolicy // instead of Resolution and AspectRatio. The two groups of settings are mutually // exclusive. Do not use them together. // // The width and height of thumbnail files in pixels. Specify a value in the // format width x height where both values are even integers. The values cannot // exceed the width and height that you specified in the Video:Resolution object. Resolution *string `type:"string"` // Specify one of the following values to control scaling of thumbnails: // // * Fit: Elastic Transcoder scales thumbnails so they match the value that // you specified in thumbnail MaxWidth or MaxHeight settings without exceeding // the other value. // // * Fill: Elastic Transcoder scales thumbnails so they match the value that // you specified in thumbnail MaxWidth or MaxHeight settings and matches // or exceeds the other value. Elastic Transcoder centers the image in thumbnails // and then crops in the dimension (if any) that exceeds the maximum value. // // * Stretch: Elastic Transcoder stretches thumbnails to match the values // that you specified for thumbnail MaxWidth and MaxHeight settings. If the // relative proportions of the input video and thumbnails are different, // the thumbnails will be distorted. // // * Keep: Elastic Transcoder does not scale thumbnails. If either dimension // of the input video exceeds the values that you specified for thumbnail // MaxWidth and MaxHeight settings, Elastic Transcoder crops the thumbnails. // // * ShrinkToFit: Elastic Transcoder scales thumbnails down so that their // dimensions match the values that you specified for at least one of thumbnail // MaxWidth and MaxHeight without exceeding either value. If you specify // this option, Elastic Transcoder does not scale thumbnails up. // // * ShrinkToFill: Elastic Transcoder scales thumbnails down so that their // dimensions match the values that you specified for at least one of MaxWidth // and MaxHeight without dropping below either value. If you specify this // option, Elastic Transcoder does not scale thumbnails up. SizingPolicy *string `type:"string"` } // String returns the string representation func (s Thumbnails) String() string { return awsutil.Prettify(s) } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. 
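
// Illustrative sketch (not generated code): a Thumbnails configuration that
// uses the MaxWidth/MaxHeight/SizingPolicy/PaddingPolicy group rather than
// the mutually exclusive Resolution/AspectRatio pair. Values are assumptions.
func exampleThumbnails() Thumbnails {
	return Thumbnails{
		Format:        aws.String("png"),
		Interval:      aws.String("60"), // roughly one thumbnail per minute
		MaxWidth:      aws.String("auto"),
		MaxHeight:     aws.String("auto"),
		SizingPolicy:  aws.String("ShrinkToFit"),
		PaddingPolicy: aws.String("Pad"),
	}
}
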
func (s Thumbnails) MarshalFields(e protocol.FieldEncoder) error { if s.AspectRatio != nil { v := *s.AspectRatio metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "AspectRatio", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Format != nil { v := *s.Format metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Format", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Interval != nil { v := *s.Interval metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Interval", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.MaxHeight != nil { v := *s.MaxHeight metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "MaxHeight", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.MaxWidth != nil { v := *s.MaxWidth metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "MaxWidth", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.PaddingPolicy != nil { v := *s.PaddingPolicy metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "PaddingPolicy", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Resolution != nil { v := *s.Resolution metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Resolution", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.SizingPolicy != nil { v := *s.SizingPolicy metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "SizingPolicy", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // Settings that determine when a clip begins and how long it lasts. type TimeSpan struct { _ struct{} `type:"structure"` // The duration of the clip. The format can be either HH:mm:ss.SSS (maximum // value: 23:59:59.999; SSS is thousandths of a second) or sssss.SSS (maximum // value: 86399.999). If you don't specify a value, Elastic Transcoder creates // an output file from StartTime to the end of the file. // // If you specify a value longer than the duration of the input file, Elastic // Transcoder transcodes the file and returns a warning message. Duration *string `type:"string"` // The place in the input file where you want a clip to start. The format can // be either HH:mm:ss.SSS (maximum value: 23:59:59.999; SSS is thousandths of // a second) or sssss.SSS (maximum value: 86399.999). If you don't specify a // value, Elastic Transcoder starts at the beginning of the input file. StartTime *string `type:"string"` } // String returns the string representation func (s TimeSpan) String() string { return awsutil.Prettify(s) } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s TimeSpan) MarshalFields(e protocol.FieldEncoder) error { if s.Duration != nil { v := *s.Duration metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Duration", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.StartTime != nil { v := *s.StartTime metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "StartTime", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil } // Details about the timing of a job. type Timing struct { _ struct{} `type:"structure"` // The time the job finished transcoding, in epoch milliseconds. FinishTimeMillis *int64 `type:"long"` // The time the job began transcoding, in epoch milliseconds. 
StartTimeMillis *int64 `type:"long"` // The time the job was submitted to Elastic Transcoder, in epoch milliseconds. SubmitTimeMillis *int64 `type:"long"` } // String returns the string representation func (s Timing) String() string { return awsutil.Prettify(s) } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s Timing) MarshalFields(e protocol.FieldEncoder) error { if s.FinishTimeMillis != nil { v := *s.FinishTimeMillis metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "FinishTimeMillis", protocol.Int64Value(v), metadata) } if s.StartTimeMillis != nil { v := *s.StartTimeMillis metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "StartTimeMillis", protocol.Int64Value(v), metadata) } if s.SubmitTimeMillis != nil { v := *s.SubmitTimeMillis metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "SubmitTimeMillis", protocol.Int64Value(v), metadata) } return nil } // The VideoParameters structure. type VideoParameters struct { _ struct{} `type:"structure"` // // To better control resolution and aspect ratio of output videos, we recommend // that you use the values MaxWidth, MaxHeight, SizingPolicy, PaddingPolicy, // and DisplayAspectRatio instead of Resolution and AspectRatio. The two groups // of settings are mutually exclusive. Do not use them together. // // The display aspect ratio of the video in the output file. Valid values include: // // auto, 1:1, 4:3, 3:2, 16:9 // // If you specify auto, Elastic Transcoder tries to preserve the aspect ratio // of the input file. // // If you specify an aspect ratio for the output file that differs from aspect // ratio of the input file, Elastic Transcoder adds pillarboxing (black bars // on the sides) or letterboxing (black bars on the top and bottom) to maintain // the aspect ratio of the active region of the video. AspectRatio *string `type:"string"` // The bit rate of the video stream in the output file, in kilobits/second. // Valid values depend on the values of Level and Profile. If you specify auto, // Elastic Transcoder uses the detected bit rate of the input source. If you // specify a value other than auto, we recommend that you specify a value less // than or equal to the maximum H.264-compliant value listed for your level // and profile: // // Level - Maximum video bit rate in kilobits/second (baseline and main Profile) // : maximum video bit rate in kilobits/second (high Profile) // // * 1 - 64 : 80 // // * 1b - 128 : 160 // // * 1.1 - 192 : 240 // // * 1.2 - 384 : 480 // // * 1.3 - 768 : 960 // // * 2 - 2000 : 2500 // // * 3 - 10000 : 12500 // // * 3.1 - 14000 : 17500 // // * 3.2 - 20000 : 25000 // // * 4 - 20000 : 25000 // // * 4.1 - 50000 : 62500 BitRate *string `type:"string"` // The video codec for the output file. Valid values include gif, H.264, mpeg2, // vp8, and vp9. You can only specify vp8 and vp9 when the container type is // webm, gif when the container type is gif, and mpeg2 when the container type // is mpg. Codec *string `type:"string"` // Profile (H.264/VP8/VP9 Only) // // The H.264 profile that you want to use for the output file. Elastic Transcoder // supports the following profiles: // // * baseline: The profile most commonly used for videoconferencing and for // mobile applications. // // * main: The profile used for standard-definition digital TV broadcasts. // // * high: The profile used for high-definition digital TV broadcasts and // for Blu-ray discs. // // Level (H.264 Only) // // The H.264 level that you want to use for the output file. 
Elastic Transcoder // supports the following levels: // // 1, 1b, 1.1, 1.2, 1.3, 2, 2.1, 2.2, 3, 3.1, 3.2, 4, 4.1 // // MaxReferenceFrames (H.264 Only) // // Applicable only when the value of Video:Codec is H.264. The maximum number // of previously decoded frames to use as a reference for decoding future frames. // Valid values are integers 0 through 16, but we recommend that you not use // a value greater than the following: // // Min(Floor(Maximum decoded picture buffer in macroblocks * 256 / (Width in // pixels * Height in pixels)), 16) // // where Width in pixels and Height in pixels represent either MaxWidth and // MaxHeight, or Resolution. Maximum decoded picture buffer in macroblocks depends // on the value of the Level object. See the list below. (A macroblock is a // block of pixels measuring 16x16.) // // * 1 - 396 // // * 1b - 396 // // * 1.1 - 900 // // * 1.2 - 2376 // // * 1.3 - 2376 // // * 2 - 2376 // // * 2.1 - 4752 // // * 2.2 - 8100 // // * 3 - 8100 // // * 3.1 - 18000 // // * 3.2 - 20480 // // * 4 - 32768 // // * 4.1 - 32768 // // MaxBitRate (Optional, H.264/MPEG2/VP8/VP9 only) // // The maximum number of bits per second in a video buffer; the size of the // buffer is specified by BufferSize. Specify a value between 16 and 62,500. // You can reduce the bandwidth required to stream a video by reducing the maximum // bit rate, but this also reduces the quality of the video. // // BufferSize (Optional, H.264/MPEG2/VP8/VP9 only) // // The maximum number of bits in any x seconds of the output video. This window // is commonly 10 seconds, the standard segment duration when you're using FMP4 // or MPEG-TS for the container type of the output video. Specify an integer // greater than 0. If you specify MaxBitRate and omit BufferSize, Elastic Transcoder // sets BufferSize to 10 times the value of MaxBitRate. // // InterlacedMode (Optional, H.264/MPEG2 Only) // // The interlace mode for the output video. // // Interlaced video is used to double the perceived frame rate for a video by // interlacing two fields (one field on every other line, the other field on // the other lines) so that the human eye registers multiple pictures per frame. // Interlacing reduces the bandwidth required for transmitting a video, but // can result in blurred images and flickering. // // Valid values include Progressive (no interlacing, top to bottom), TopFirst // (top field first), BottomFirst (bottom field first), and Auto. // // If InterlaceMode is not specified, Elastic Transcoder uses Progressive for // the output. If Auto is specified, Elastic Transcoder interlaces the output. // // ColorSpaceConversionMode (Optional, H.264/MPEG2 Only) // // The color space conversion Elastic Transcoder applies to the output video. // Color spaces are the algorithms used by the computer to store information // about how to render color. Bt.601 is the standard for standard definition // video, while Bt.709 is the standard for high definition video. // // Valid values include None, Bt709toBt601, Bt601toBt709, and Auto. 
// // If you chose Auto for ColorSpaceConversionMode and your output is interlaced, // your frame rate is one of 23.97, 24, 25, 29.97, 50, or 60, your SegmentDuration // is null, and you are using one of the resolution changes from the list below, // Elastic Transcoder applies the following color space conversions: // // * Standard to HD, 720x480 to 1920x1080 - Elastic Transcoder applies Bt601ToBt709 // // * Standard to HD, 720x576 to 1920x1080 - Elastic Transcoder applies Bt601ToBt709 // // * HD to Standard, 1920x1080 to 720x480 - Elastic Transcoder applies Bt709ToBt601 // // * HD to Standard, 1920x1080 to 720x576 - Elastic Transcoder applies Bt709ToBt601 // // Elastic Transcoder may change the behavior of the ColorspaceConversionMode // Auto mode in the future. All outputs in a playlist must use the same ColorSpaceConversionMode. // // If you do not specify a ColorSpaceConversionMode, Elastic Transcoder does // not change the color space of a file. If you are unsure what ColorSpaceConversionMode // was applied to your output file, you can check the AppliedColorSpaceConversion // parameter included in your job response. If your job does not have an AppliedColorSpaceConversion // in its response, no ColorSpaceConversionMode was applied. // // ChromaSubsampling // // The sampling pattern for the chroma (color) channels of the output video. // Valid values include yuv420p and yuv422p. // // yuv420p samples the chroma information of every other horizontal and every // other vertical line, yuv422p samples the color information of every horizontal // line and every other vertical line. // // LoopCount (Gif Only) // // The number of times you want the output gif to loop. Valid values include // Infinite and integers between 0 and 100, inclusive. CodecOptions map[string]string `type:"map"` // The value that Elastic Transcoder adds to the metadata in the output file. DisplayAspectRatio *string `type:"string"` // Applicable only when the value of Video:Codec is one of H.264, MPEG2, or // VP8. // // Whether to use a fixed value for FixedGOP. Valid values are true and false: // // * true: Elastic Transcoder uses the value of KeyframesMaxDist for the // distance between key frames (the number of frames in a group of pictures, // or GOP). // // * false: The distance between key frames can vary. // // FixedGOP must be set to true for fmp4 containers. FixedGOP *string `type:"string"` // The frames per second for the video stream in the output file. Valid values // include: // // auto, 10, 15, 23.97, 24, 25, 29.97, 30, 60 // // If you specify auto, Elastic Transcoder uses the detected frame rate of the // input source. If you specify a frame rate, we recommend that you perform // the following calculation: // // Frame rate = maximum recommended decoding speed in luma samples/second / // (width in pixels * height in pixels) // // where: // // * width in pixels and height in pixels represent the Resolution of the // output video. // // * maximum recommended decoding speed in Luma samples/second is less than // or equal to the maximum value listed in the following table, based on // the value that you specified for Level. 
// // The maximum recommended decoding speed in Luma samples/second for each level // is described in the following list (Level - Decoding speed): // // * 1 - 380160 // // * 1b - 380160 // // * 1.1 - 76800 // // * 1.2 - 1536000 // // * 1.3 - 3041280 // // * 2 - 3041280 // // * 2.1 - 5068800 // // * 2.2 - 5184000 // // * 3 - 10368000 // // * 3.1 - 27648000 // // * 3.2 - 55296000 // // * 4 - 62914560 // // * 4.1 - 62914560 FrameRate *string `type:"string"` // Applicable only when the value of Video:Codec is one of H.264, MPEG2, or // VP8. // // The maximum number of frames between key frames. Key frames are fully encoded // frames; the frames between key frames are encoded based, in part, on the // content of the key frames. The value is an integer formatted as a string; // valid values are between 1 (every frame is a key frame) and 100000, inclusive. // A higher value results in higher compression but may also discernibly decrease // video quality. // // For Smooth outputs, the FrameRate must have a constant ratio to the KeyframesMaxDist. // This allows Smooth playlists to switch between different quality levels while // the file is being played. // // For example, an input file can have a FrameRate of 30 with a KeyframesMaxDist // of 90. The output file then needs to have a ratio of 1:3. Valid outputs would // have FrameRate of 30, 25, and 10, and KeyframesMaxDist of 90, 75, and 30, // respectively. // // Alternately, this can be achieved by setting FrameRate to auto and having // the same values for MaxFrameRate and KeyframesMaxDist. KeyframesMaxDist *string `type:"string"` // If you specify auto for FrameRate, Elastic Transcoder uses the frame rate // of the input video for the frame rate of the output video. Specify the maximum // frame rate that you want Elastic Transcoder to use when the frame rate of // the input video is greater than the desired maximum frame rate of the output // video. Valid values include: 10, 15, 23.97, 24, 25, 29.97, 30, 60. MaxFrameRate *string `type:"string"` // The maximum height of the output video in pixels. If you specify auto, Elastic // Transcoder uses 1080 (Full HD) as the default value. If you specify a numeric // value, enter an even integer between 96 and 3072. MaxHeight *string `type:"string"` // The maximum width of the output video in pixels. If you specify auto, Elastic // Transcoder uses 1920 (Full HD) as the default value. If you specify a numeric // value, enter an even integer between 128 and 4096. MaxWidth *string `type:"string"` // When you set PaddingPolicy to Pad, Elastic Transcoder may add black bars // to the top and bottom and/or left and right sides of the output video to // make the total size of the output video match the values that you specified // for MaxWidth and MaxHeight. PaddingPolicy *string `type:"string"` // // To better control resolution and aspect ratio of output videos, we recommend // that you use the values MaxWidth, MaxHeight, SizingPolicy, PaddingPolicy, // and DisplayAspectRatio instead of Resolution and AspectRatio. The two groups // of settings are mutually exclusive. Do not use them together. // // The width and height of the video in the output file, in pixels. Valid values // are auto and width x height: // // * auto: Elastic Transcoder attempts to preserve the width and height of // the input file, subject to the following rules. // // * width x height : The width and height of the output video in pixels. 
// // Note the following about specifying the width and height: // // * The width must be an even integer between 128 and 4096, inclusive. // // * The height must be an even integer between 96 and 3072, inclusive. // // * If you specify a resolution that is less than the resolution of the // input file, Elastic Transcoder rescales the output file to the lower resolution. // // * If you specify a resolution that is greater than the resolution of the // input file, Elastic Transcoder rescales the output to the higher resolution. // // * We recommend that you specify a resolution for which the product of // width and height is less than or equal to the applicable value in the // following list (List - Max width x height value): 1 - 25344 1b - 25344 // 1.1 - 101376 1.2 - 101376 1.3 - 101376 2 - 101376 2.1 - 202752 2.2 - 404720 // 3 - 404720 3.1 - 921600 3.2 - 1310720 4 - 2097152 4.1 - 2097152 Resolution *string `type:"string"` // Specify one of the following values to control scaling of the output video: // // * Fit: Elastic Transcoder scales the output video so it matches the value // that you specified in either MaxWidth or MaxHeight without exceeding the // other value. // // * Fill: Elastic Transcoder scales the output video so it matches the value // that you specified in either MaxWidth or MaxHeight and matches or exceeds // the other value. Elastic Transcoder centers the output video and then // crops it in the dimension (if any) that exceeds the maximum value. // // * Stretch: Elastic Transcoder stretches the output video to match the // values that you specified for MaxWidth and MaxHeight. If the relative // proportions of the input video and the output video are different, the // output video will be distorted. // // * Keep: Elastic Transcoder does not scale the output video. If either // dimension of the input video exceeds the values that you specified for // MaxWidth and MaxHeight, Elastic Transcoder crops the output video. // // * ShrinkToFit: Elastic Transcoder scales the output video down so that // its dimensions match the values that you specified for at least one of // MaxWidth and MaxHeight without exceeding either value. If you specify // this option, Elastic Transcoder does not scale the video up. // // * ShrinkToFill: Elastic Transcoder scales the output video down so that // its dimensions match the values that you specified for at least one of // MaxWidth and MaxHeight without dropping below either value. If you specify // this option, Elastic Transcoder does not scale the video up. SizingPolicy *string `type:"string"` // Settings for the size, location, and opacity of graphics that you want Elastic // Transcoder to overlay over videos that are transcoded using this preset. // You can specify settings for up to four watermarks. Watermarks appear in // the specified size and location, and with the specified opacity for the duration // of the transcoded video. // // Watermarks can be in .png or .jpg format. If you want to display a watermark // that is not rectangular, use the .png format, which supports transparency. // // When you create a job that uses this preset, you specify the .png or .jpg // graphics that you want Elastic Transcoder to include in the transcoded videos. // You can specify fewer graphics in the job than you specify watermark settings // in the preset, which allows you to use the same preset for up to four watermarks // that have different dimensions. 
Watermarks []PresetWatermark `type:"list"` } // String returns the string representation func (s VideoParameters) String() string { return awsutil.Prettify(s) } // Validate inspects the fields of the type to determine if they are valid. func (s *VideoParameters) Validate() error { invalidParams := aws.ErrInvalidParams{Context: "VideoParameters"} if s.Watermarks != nil { for i, v := range s.Watermarks { if err := v.Validate(); err != nil { invalidParams.AddNested(fmt.Sprintf("%s[%v]", "Watermarks", i), err.(aws.ErrInvalidParams)) } } } if invalidParams.Len() > 0 { return invalidParams } return nil } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s VideoParameters) MarshalFields(e protocol.FieldEncoder) error { if s.AspectRatio != nil { v := *s.AspectRatio metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "AspectRatio", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.BitRate != nil { v := *s.BitRate metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "BitRate", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Codec != nil { v := *s.Codec metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Codec", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.CodecOptions != nil { v := s.CodecOptions metadata := protocol.Metadata{} ms0 := e.Map(protocol.BodyTarget, "CodecOptions", metadata) ms0.Start() for k1, v1 := range v { ms0.MapSetValue(k1, protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v1)}) } ms0.End() } if s.DisplayAspectRatio != nil { v := *s.DisplayAspectRatio metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "DisplayAspectRatio", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.FixedGOP != nil { v := *s.FixedGOP metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "FixedGOP", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.FrameRate != nil { v := *s.FrameRate metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "FrameRate", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.KeyframesMaxDist != nil { v := *s.KeyframesMaxDist metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "KeyframesMaxDist", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.MaxFrameRate != nil { v := *s.MaxFrameRate metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "MaxFrameRate", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.MaxHeight != nil { v := *s.MaxHeight metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "MaxHeight", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.MaxWidth != nil { v := *s.MaxWidth metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "MaxWidth", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.PaddingPolicy != nil { v := *s.PaddingPolicy metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "PaddingPolicy", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Resolution != nil { v := *s.Resolution metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Resolution", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.SizingPolicy != nil { v := *s.SizingPolicy metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "SizingPolicy", 
protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Watermarks != nil { v := s.Watermarks metadata := protocol.Metadata{} ls0 := e.List(protocol.BodyTarget, "Watermarks", metadata) ls0.Start() for _, v1 := range v { ls0.ListAddFields(v1) } ls0.End() } return nil } // Elastic Transcoder returns a warning if the resources used by your pipeline // are not in the same region as the pipeline. // // Using resources in the same region, such as your Amazon S3 buckets, Amazon // SNS notification topics, and AWS KMS key, reduces processing time and prevents // cross-regional charges. type Warning struct { _ struct{} `type:"structure"` // The code of the cross-regional warning. Code *string `type:"string"` // The message explaining what resources are in a different region from the // pipeline. // // AWS KMS keys must be in the same region as the pipeline. Message *string `type:"string"` } // String returns the string representation func (s Warning) String() string { return awsutil.Prettify(s) } // MarshalFields encodes the AWS API shape using the passed in protocol encoder. func (s Warning) MarshalFields(e protocol.FieldEncoder) error { if s.Code != nil { v := *s.Code metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Code", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } if s.Message != nil { v := *s.Message metadata := protocol.Metadata{} e.SetValue(protocol.BodyTarget, "Message", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata) } return nil }
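
// Illustrative sketch (not generated code): deriving the elapsed transcode
// time in seconds from the epoch-millisecond timestamps on Timing. The
// timestamp literals are placeholder assumptions.
func exampleTranscodeSeconds() float64 {
	t := Timing{
		SubmitTimeMillis: aws.Int64(1500000000000),
		StartTimeMillis:  aws.Int64(1500000005000),
		FinishTimeMillis: aws.Int64(1500000065000),
	}
	// A job that has not finished yet will have a nil FinishTimeMillis.
	if t.StartTimeMillis == nil || t.FinishTimeMillis == nil {
		return 0
	}
	return float64(*t.FinishTimeMillis-*t.StartTimeMillis) / 1000.0
}
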
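
// Illustrative sketch (not generated code): an H.264 VideoParameters value of
// the kind documented above, including the CodecOptions string map and a
// nested watermark. The codec options, dimensions, and bit rate are
// assumptions chosen to stay within the documented limits for level 3.1.
func exampleVideoParameters() (*VideoParameters, error) {
	v := &VideoParameters{
		Codec: aws.String("H.264"),
		// Per-codec settings travel as a flat string map; these keys match
		// the Profile, Level, and MaxReferenceFrames options documented on
		// CodecOptions.
		CodecOptions: map[string]string{
			"Profile":            "main",
			"Level":              "3.1",
			"MaxReferenceFrames": "3",
		},
		BitRate:            aws.String("2200"),
		FrameRate:          aws.String("auto"),
		MaxFrameRate:       aws.String("30"),
		KeyframesMaxDist:   aws.String("90"),
		FixedGOP:           aws.String("true"),
		MaxWidth:           aws.String("1280"),
		MaxHeight:          aws.String("720"),
		SizingPolicy:       aws.String("ShrinkToFit"),
		PaddingPolicy:      aws.String("Pad"),
		DisplayAspectRatio: aws.String("auto"),
		Watermarks: []PresetWatermark{
			{
				Id:              aws.String("title-card"),
				HorizontalAlign: aws.String("Left"),
				VerticalAlign:   aws.String("Top"),
				MaxWidth:        aws.String("20%"),
				MaxHeight:       aws.String("20%"),
				SizingPolicy:    aws.String("ShrinkToFit"),
				Opacity:         aws.String("100"),
				Target:          aws.String("Content"),
			},
		},
	}
	// Validate recurses into the nested watermark settings as well.
	if err := v.Validate(); err != nil {
		return nil, err
	}
	return v, nil
}
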