// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.

package kinesisvideoarchivedmedia

import (
	"context"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/internal/awsutil"
	"github.com/aws/aws-sdk-go-v2/private/protocol"
)

type GetDASHStreamingSessionURLInput struct {
	_ struct{} `type:"structure"`

	// The time range of the requested fragment and the source of the timestamps.
	//
	// This parameter is required if PlaybackMode is ON_DEMAND or LIVE_REPLAY. This
	// parameter is optional if PlaybackMode is LIVE. If PlaybackMode is LIVE, the
	// FragmentSelectorType can be set, but the TimestampRange should not be set.
	// If PlaybackMode is ON_DEMAND or LIVE_REPLAY, both FragmentSelectorType and
	// TimestampRange must be set.
	DASHFragmentSelector *DASHFragmentSelector `type:"structure"`

	// Fragments are identified in the manifest file based on their sequence number
	// in the session. If DisplayFragmentNumber is set to ALWAYS, the Kinesis Video
	// Streams fragment number is added to each S element in the manifest file with
	// the attribute name “kvs:fn”. These fragment numbers can be used for logging
	// or for use with other APIs (for example, GetMedia and GetMediaForFragmentList).
	// A custom MPEG-DASH media player is necessary to leverage this custom attribute.
	//
	// The default value is NEVER.
	DisplayFragmentNumber DASHDisplayFragmentNumber `type:"string" enum:"true"`

	// Per the MPEG-DASH specification, the wall-clock time of fragments in the
	// manifest file can be derived using attributes in the manifest itself. However,
	// typically, MPEG-DASH compatible media players do not properly handle gaps
	// in the media timeline. Kinesis Video Streams adjusts the media timeline in
	// the manifest file to enable playback of media with discontinuities. Therefore,
	// the wall-clock time derived from the manifest file may be inaccurate. If
	// DisplayFragmentTimestamp is set to ALWAYS, the accurate fragment timestamp
	// is added to each S element in the manifest file with the attribute name “kvs:ts”.
	// A custom MPEG-DASH media player is necessary to leverage this custom attribute.
	//
	// The default value is NEVER. When DASHFragmentSelector is SERVER_TIMESTAMP,
	// the timestamps will be the server start timestamps. Similarly, when DASHFragmentSelector
	// is PRODUCER_TIMESTAMP, the timestamps will be the producer start timestamps.
	DisplayFragmentTimestamp DASHDisplayFragmentTimestamp `type:"string" enum:"true"`

	// The time in seconds until the requested session expires. This value can be
	// between 300 (5 minutes) and 43200 (12 hours).
	//
	// When a session expires, no new calls to GetDASHManifest, GetMP4InitFragment,
	// or GetMP4MediaFragment can be made for that session.
	//
	// The default is 300 (5 minutes).
	Expires *int64 `min:"300" type:"integer"`

	// The maximum number of fragments that are returned in the MPEG-DASH manifest.
	//
	// When the PlaybackMode is LIVE, the most recent fragments are returned up
	// to this value. When the PlaybackMode is ON_DEMAND, the oldest fragments are
	// returned, up to this maximum number.
	//
	// When there are a higher number of fragments available in a live MPEG-DASH
	// manifest, video players often buffer content before starting playback. Increasing
	// the buffer size increases the playback latency, but it decreases the likelihood
	// that rebuffering will occur during playback. We recommend that a live MPEG-DASH
	// manifest have a minimum of 3 fragments and a maximum of 10 fragments.
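	//
	// For example, a latency-sensitive LIVE session might request a short manifest;
	// the value 3 below is a hypothetical choice within the recommended 3-10 range:
	//
	//    MaxManifestFragmentResults: aws.Int64(3),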
	//
	// The default is 5 fragments if PlaybackMode is LIVE or LIVE_REPLAY, and 1,000
	// if PlaybackMode is ON_DEMAND.
	//
	// The maximum value of 1,000 fragments corresponds to more than 16 minutes
	// of video on streams with 1-second fragments, and more than 2 1/2 hours of
	// video on streams with 10-second fragments.
	MaxManifestFragmentResults *int64 `min:"1" type:"long"`

	// Whether to retrieve live, live replay, or archived, on-demand data.
	//
	// Features of the three types of sessions include the following:
	//
	//    * LIVE : For sessions of this type, the MPEG-DASH manifest is continually
	//    updated with the latest fragments as they become available. We recommend
	//    that the media player retrieve a new manifest on a one-second interval.
	//    When this type of session is played in a media player, the user interface
	//    typically displays a "live" notification, with no scrubber control for
	//    choosing the position in the playback window to display. In LIVE mode,
	//    the newest available fragments are included in an MPEG-DASH manifest,
	//    even if there is a gap between fragments (that is, if a fragment is missing).
	//    A gap like this might cause a media player to halt or cause a jump in
	//    playback. In this mode, fragments are not added to the MPEG-DASH manifest
	//    if they are older than the newest fragment in the playlist. If the missing
	//    fragment becomes available after a subsequent fragment is added to the
	//    manifest, the older fragment is not added, and the gap is not filled.
	//
	//    * LIVE_REPLAY : For sessions of this type, the MPEG-DASH manifest is updated
	//    similarly to how it is updated for LIVE mode except that it starts by
	//    including fragments from a given start time. Instead of fragments being
	//    added as they are ingested, fragments are added as the duration of the
	//    next fragment elapses. For example, if the fragments in the session are
	//    two seconds long, then a new fragment is added to the manifest every two
	//    seconds. This mode is useful to be able to start playback from when an
	//    event is detected and continue live streaming media that has not yet been
	//    ingested as of the time of the session creation. This mode is also useful
	//    to stream previously archived media without being limited by the 1,000
	//    fragment limit in the ON_DEMAND mode.
	//
	//    * ON_DEMAND : For sessions of this type, the MPEG-DASH manifest contains
	//    all the fragments for the session, up to the number that is specified
	//    in MaxManifestFragmentResults. The manifest must be retrieved only
	//    once for each session. When this type of session is played in a media
	//    player, the user interface typically displays a scrubber control for choosing
	//    the position in the playback window to display.
	//
	// In all playback modes, if FragmentSelectorType is PRODUCER_TIMESTAMP, and
	// if there are multiple fragments with the same start timestamp, the fragment
	// that has the larger fragment number (that is, the newer fragment) is included
	// in the MPEG-DASH manifest. The other fragments are not included. Fragments
	// that have different timestamps but have overlapping durations are still included
	// in the MPEG-DASH manifest. This can lead to unexpected behavior in the media
	// player.
	//
	// The default is LIVE.
	PlaybackMode DASHPlaybackMode `type:"string" enum:"true"`

	// The Amazon Resource Name (ARN) of the stream for which to retrieve the MPEG-DASH
	// manifest URL.
	//
	// You must specify either the StreamName or the StreamARN.
	StreamARN *string `min:"1" type:"string"`

	// The name of the stream for which to retrieve the MPEG-DASH manifest URL.
	//
	// You must specify either the StreamName or the StreamARN.
	StreamName *string `min:"1" type:"string"`
}

// String returns the string representation
func (s GetDASHStreamingSessionURLInput) String() string {
	return awsutil.Prettify(s)
}

// Validate inspects the fields of the type to determine if they are valid.
func (s *GetDASHStreamingSessionURLInput) Validate() error {
	invalidParams := aws.ErrInvalidParams{Context: "GetDASHStreamingSessionURLInput"}
	if s.Expires != nil && *s.Expires < 300 {
		invalidParams.Add(aws.NewErrParamMinValue("Expires", 300))
	}
	if s.MaxManifestFragmentResults != nil && *s.MaxManifestFragmentResults < 1 {
		invalidParams.Add(aws.NewErrParamMinValue("MaxManifestFragmentResults", 1))
	}
	if s.StreamARN != nil && len(*s.StreamARN) < 1 {
		invalidParams.Add(aws.NewErrParamMinLen("StreamARN", 1))
	}
	if s.StreamName != nil && len(*s.StreamName) < 1 {
		invalidParams.Add(aws.NewErrParamMinLen("StreamName", 1))
	}

	if invalidParams.Len() > 0 {
		return invalidParams
	}
	return nil
}

// MarshalFields encodes the AWS API shape using the passed in protocol encoder.
func (s GetDASHStreamingSessionURLInput) MarshalFields(e protocol.FieldEncoder) error {
	e.SetValue(protocol.HeaderTarget, "Content-Type", protocol.StringValue("application/json"), protocol.Metadata{})

	if s.DASHFragmentSelector != nil {
		v := s.DASHFragmentSelector

		metadata := protocol.Metadata{}
		e.SetFields(protocol.BodyTarget, "DASHFragmentSelector", v, metadata)
	}
	if len(s.DisplayFragmentNumber) > 0 {
		v := s.DisplayFragmentNumber

		metadata := protocol.Metadata{}
		e.SetValue(protocol.BodyTarget, "DisplayFragmentNumber", protocol.QuotedValue{ValueMarshaler: v}, metadata)
	}
	if len(s.DisplayFragmentTimestamp) > 0 {
		v := s.DisplayFragmentTimestamp

		metadata := protocol.Metadata{}
		e.SetValue(protocol.BodyTarget, "DisplayFragmentTimestamp", protocol.QuotedValue{ValueMarshaler: v}, metadata)
	}
	if s.Expires != nil {
		v := *s.Expires

		metadata := protocol.Metadata{}
		e.SetValue(protocol.BodyTarget, "Expires", protocol.Int64Value(v), metadata)
	}
	if s.MaxManifestFragmentResults != nil {
		v := *s.MaxManifestFragmentResults

		metadata := protocol.Metadata{}
		e.SetValue(protocol.BodyTarget, "MaxManifestFragmentResults", protocol.Int64Value(v), metadata)
	}
	if len(s.PlaybackMode) > 0 {
		v := s.PlaybackMode

		metadata := protocol.Metadata{}
		e.SetValue(protocol.BodyTarget, "PlaybackMode", protocol.QuotedValue{ValueMarshaler: v}, metadata)
	}
	if s.StreamARN != nil {
		v := *s.StreamARN

		metadata := protocol.Metadata{}
		e.SetValue(protocol.BodyTarget, "StreamARN", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)
	}
	if s.StreamName != nil {
		v := *s.StreamName

		metadata := protocol.Metadata{}
		e.SetValue(protocol.BodyTarget, "StreamName", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)
	}
	return nil
}

type GetDASHStreamingSessionURLOutput struct {
	_ struct{} `type:"structure"`

	// The URL (containing the session token) that a media player can use to retrieve
	// the MPEG-DASH manifest.
	DASHStreamingSessionURL *string `type:"string"`
}

// String returns the string representation
func (s GetDASHStreamingSessionURLOutput) String() string {
	return awsutil.Prettify(s)
}
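
// Example: building a GetDASHStreamingSessionURLInput for a LIVE session and
// reading the manifest URL from the response. This is a minimal sketch: the
// stream name is a placeholder, the DASHPlaybackModeLive constant is assumed
// from this package's generated enum values, and the client is assumed to have
// been created against the endpoint returned by GetDataEndpoint for
// GET_DASH_STREAMING_SESSION_URL.
//
//    input := &GetDASHStreamingSessionURLInput{
//        StreamName:   aws.String("my-kvs-stream"),
//        PlaybackMode: DASHPlaybackModeLive,
//        Expires:      aws.Int64(3600),
//    }
//    if err := input.Validate(); err != nil {
//        // handle invalid parameters before making the request
//    }
//    resp, err := client.GetDASHStreamingSessionURLRequest(input).Send(context.TODO())
//    if err == nil && resp.DASHStreamingSessionURL != nil {
//        manifestURL := *resp.DASHStreamingSessionURL // hand this URL to an MPEG-DASH player
//        _ = manifestURL
//    }
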
// MarshalFields encodes the AWS API shape using the passed in protocol encoder.
func (s GetDASHStreamingSessionURLOutput) MarshalFields(e protocol.FieldEncoder) error {
	if s.DASHStreamingSessionURL != nil {
		v := *s.DASHStreamingSessionURL

		metadata := protocol.Metadata{}
		e.SetValue(protocol.BodyTarget, "DASHStreamingSessionURL", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)
	}
	return nil
}

const opGetDASHStreamingSessionURL = "GetDASHStreamingSessionURL"

// GetDASHStreamingSessionURLRequest returns a request value for making API operation for
// Amazon Kinesis Video Streams Archived Media.
//
// Retrieves an MPEG Dynamic Adaptive Streaming over HTTP (DASH) URL for the
// stream. You can then open the URL in a media player to view the stream contents.
//
// Both the StreamName and the StreamARN parameters are optional, but you must
// specify either the StreamName or the StreamARN when invoking this API operation.
//
// An Amazon Kinesis video stream has the following requirements for providing
// data through MPEG-DASH:
//
//    * The media must contain H.264 or H.265 encoded video and, optionally,
//    AAC or G.711 encoded audio. Specifically, the codec ID of track 1 should
//    be V_MPEG/ISO/AVC (for H.264) or V_MPEGH/ISO/HEVC (for H.265). Optionally,
//    the codec ID of track 2 should be A_AAC (for AAC) or A_MS/ACM (for G.711).
//
//    * Data retention must be greater than 0.
//
//    * The video track of each fragment must contain codec private data in
//    the Advanced Video Coding (AVC) format for H.264 or in the HEVC format
//    for H.265. For more information, see MPEG-4 specification ISO/IEC 14496-15
//    (https://www.iso.org/standard/55980.html). For information about adapting
//    stream data to a given format, see NAL Adaptation Flags
//    (http://docs.aws.amazon.com/kinesisvideostreams/latest/dg/producer-reference-nal.html).
//
//    * The audio track (if present) of each fragment must contain codec private
//    data in the AAC format (AAC specification ISO/IEC 13818-7 (https://www.iso.org/standard/43345.html))
//    or the MS Wave format (http://www-mmsp.ece.mcgill.ca/Documents/AudioFormats/WAVE/WAVE.html).
//
// The following procedure shows how to use MPEG-DASH with Kinesis Video Streams:
//
// Get an endpoint using GetDataEndpoint (http://docs.aws.amazon.com/kinesisvideostreams/latest/dg/API_GetDataEndpoint.html),
// specifying GET_DASH_STREAMING_SESSION_URL for the APIName parameter.
//
// Retrieve the MPEG-DASH URL using GetDASHStreamingSessionURL. Kinesis Video
// Streams creates an MPEG-DASH streaming session to be used for accessing content
// in a stream using the MPEG-DASH protocol. GetDASHStreamingSessionURL returns
// an authenticated URL (that includes an encrypted session token) for the session's
// MPEG-DASH manifest (the root resource needed for streaming with MPEG-DASH).
//
// Don't share or store this token where an unauthorized entity could access
// it. The token provides access to the content of the stream. Safeguard the
// token with the same measures that you would use with your AWS credentials.
//
// The media that is made available through the manifest consists only of the
// requested stream, time range, and format. No other media data (such as frames
// outside the requested window or alternate bitrates) is made available.
//
// Provide the URL (containing the encrypted session token) for the MPEG-DASH
// manifest to a media player that supports the MPEG-DASH protocol. Kinesis
// Video Streams makes the initialization fragment and media fragments available
// through the manifest URL.
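//
// A sketch of that procedure with this SDK generation (the aws/external config
// loader, the kinesisvideo service client, aws.ResolveWithEndpointURL, and the
// stream name are assumptions; the APIName value is cast from its string form
// to avoid relying on a specific generated constant name):
//
//    cfg, err := external.LoadDefaultAWSConfig()
//    if err != nil {
//        return
//    }
//    kv := kinesisvideo.New(cfg)
//    ep, err := kv.GetDataEndpointRequest(&kinesisvideo.GetDataEndpointInput{
//        StreamName: aws.String("my-kvs-stream"),
//        APIName:    kinesisvideo.APIName("GET_DASH_STREAMING_SESSION_URL"),
//    }).Send(context.TODO())
//    if err != nil {
//        return
//    }
//    cfg.EndpointResolver = aws.ResolveWithEndpointURL(*ep.DataEndpoint)
//    client := New(cfg)
//    session, err := client.GetDASHStreamingSessionURLRequest(&GetDASHStreamingSessionURLInput{
//        StreamName: aws.String("my-kvs-stream"),
//    }).Send(context.TODO())
//    // session.DASHStreamingSessionURL is the manifest URL to hand to the player.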
//
// The initialization fragment contains the codec private data for the stream,
// and other data needed to set up the video or audio decoder and renderer. The
// media fragments contain encoded video frames or encoded audio samples.
//
// The media player receives the authenticated URL and requests stream metadata
// and media data normally. When the media player requests data, it calls the
// following actions:
//
//    * GetDASHManifest: Retrieves an MPEG DASH manifest, which contains the
//    metadata for the media that you want to play back.
//
//    * GetMP4InitFragment: Retrieves the MP4 initialization fragment. The media
//    player typically loads the initialization fragment before loading any
//    media fragments. This fragment contains the "ftyp" and "moov" MP4 atoms,
//    and the child atoms that are needed to initialize the media player decoder.
//    The initialization fragment does not correspond to a fragment in a Kinesis
//    video stream. It contains only the codec private data for the stream and
//    respective track, which the media player needs to decode the media frames.
//
//    * GetMP4MediaFragment: Retrieves MP4 media fragments. These fragments
//    contain the "moof" and "mdat" MP4 atoms and their child atoms, containing
//    the encoded fragment's media frames and their timestamps. After the first
//    media fragment is made available in a streaming session, any fragments
//    that don't contain the same codec private data cause an error to be returned
//    when those different media fragments are loaded. Therefore, the codec
//    private data should not change between fragments in a session. This also
//    means that the session fails if the fragments in a stream change from
//    having only video to having both audio and video. Data retrieved with
//    this action is billable. See Pricing (https://aws.amazon.com/kinesis/video-streams/pricing/)
//    for details.
//
// The following restrictions apply to MPEG-DASH sessions:
//
//    * A streaming session URL should not be shared between players. The service
//    might throttle a session if multiple media players are sharing it. For
//    connection limits, see Kinesis Video Streams Limits (http://docs.aws.amazon.com/kinesisvideostreams/latest/dg/limits.html).
//
//    * A Kinesis video stream can have a maximum of ten active MPEG-DASH streaming
//    sessions. If a new session is created when the maximum number of sessions
//    is already active, the oldest (earliest created) session is closed. The
//    number of active GetMedia connections on a Kinesis video stream does not
//    count against this limit, and the number of active MPEG-DASH sessions
//    does not count against the active GetMedia connection limit. The maximum
//    limits for active HLS and MPEG-DASH streaming sessions are independent
//    of each other.
//
// You can monitor the amount of data that the media player consumes by monitoring
// the GetMP4MediaFragment.OutgoingBytes Amazon CloudWatch metric. For information
// about using CloudWatch to monitor Kinesis Video Streams, see Monitoring Kinesis
// Video Streams (http://docs.aws.amazon.com/kinesisvideostreams/latest/dg/monitoring.html).
// For pricing information, see Amazon Kinesis Video Streams Pricing (https://aws.amazon.com/kinesis/video-streams/pricing/)
// and AWS Pricing (https://aws.amazon.com/pricing/). Charges for both MPEG-DASH
// sessions and outgoing AWS data apply.
//
// For more information about HLS, see HTTP Live Streaming (https://developer.apple.com/streaming/)
// on the Apple Developer site (https://developer.apple.com).
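//
// A sketch of checking that metric with the SDK's CloudWatch client (a
// hypothetical illustration: the "AWS/KinesisVideo" namespace and "StreamName"
// dimension are documented for Kinesis Video Streams metrics, while the
// cloudwatch package usage below assumes the same request/Send pattern as this
// package and a "time" import):
//
//    cw := cloudwatch.New(cfg)
//    stats, err := cw.GetMetricStatisticsRequest(&cloudwatch.GetMetricStatisticsInput{
//        Namespace:  aws.String("AWS/KinesisVideo"),
//        MetricName: aws.String("GetMP4MediaFragment.OutgoingBytes"),
//        Dimensions: []cloudwatch.Dimension{{Name: aws.String("StreamName"), Value: aws.String("my-kvs-stream")}},
//        StartTime:  aws.Time(time.Now().Add(-1 * time.Hour)),
//        EndTime:    aws.Time(time.Now()),
//        Period:     aws.Int64(300),
//        Statistics: []cloudwatch.Statistic{cloudwatch.Statistic("Sum")},
//    }).Send(context.TODO())
//    // stats.Datapoints holds the per-period byte counts consumed by players.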
//
// If an error is thrown after invoking a Kinesis Video Streams archived media
// API, in addition to the HTTP status code and the response body, it includes
// the following pieces of information:
//
//    * x-amz-ErrorType HTTP header – contains a more specific error type
//    in addition to what the HTTP status code provides.
//
//    * x-amz-RequestId HTTP header – if you want to report an issue to AWS,
//    the support team can better diagnose the problem if given the Request
//    Id.
//
// Both the HTTP status code and the ErrorType header can be used to make
// programmatic decisions about whether errors are retryable and under what
// conditions, as well as to provide information on what actions the client
// programmer might need to take to successfully retry.
//
// For more information, see the Errors section at the bottom of this topic,
// as well as Common Errors (https://docs.aws.amazon.com/kinesisvideostreams/latest/dg/CommonErrors.html).
//
//    // Example sending a request using GetDASHStreamingSessionURLRequest.
//    req := client.GetDASHStreamingSessionURLRequest(params)
//    resp, err := req.Send(context.TODO())
//    if err == nil {
//        fmt.Println(resp)
//    }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/kinesis-video-archived-media-2017-09-30/GetDASHStreamingSessionURL
func (c *Client) GetDASHStreamingSessionURLRequest(input *GetDASHStreamingSessionURLInput) GetDASHStreamingSessionURLRequest {
	op := &aws.Operation{
		Name:       opGetDASHStreamingSessionURL,
		HTTPMethod: "POST",
		HTTPPath:   "/getDASHStreamingSessionURL",
	}

	if input == nil {
		input = &GetDASHStreamingSessionURLInput{}
	}

	req := c.newRequest(op, input, &GetDASHStreamingSessionURLOutput{})
	return GetDASHStreamingSessionURLRequest{Request: req, Input: input, Copy: c.GetDASHStreamingSessionURLRequest}
}

// GetDASHStreamingSessionURLRequest is the request type for the
// GetDASHStreamingSessionURL API operation.
type GetDASHStreamingSessionURLRequest struct {
	*aws.Request
	Input *GetDASHStreamingSessionURLInput
	Copy  func(*GetDASHStreamingSessionURLInput) GetDASHStreamingSessionURLRequest
}

// Send marshals and sends the GetDASHStreamingSessionURL API request.
func (r GetDASHStreamingSessionURLRequest) Send(ctx context.Context) (*GetDASHStreamingSessionURLResponse, error) {
	r.Request.SetContext(ctx)
	err := r.Request.Send()
	if err != nil {
		return nil, err
	}

	resp := &GetDASHStreamingSessionURLResponse{
		GetDASHStreamingSessionURLOutput: r.Request.Data.(*GetDASHStreamingSessionURLOutput),
		response:                         &aws.Response{Request: r.Request},
	}

	return resp, nil
}

// GetDASHStreamingSessionURLResponse is the response type for the
// GetDASHStreamingSessionURL API operation.
type GetDASHStreamingSessionURLResponse struct {
	*GetDASHStreamingSessionURLOutput

	response *aws.Response
}

// SDKResponseMetdata returns the response metadata for the
// GetDASHStreamingSessionURL request.
func (r *GetDASHStreamingSessionURLResponse) SDKResponseMetdata() *aws.Response {
	return r.response
}
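
// Example: inspecting a failed GetDASHStreamingSessionURL call. This is a
// minimal sketch; it assumes the awserr package
// (github.com/aws/aws-sdk-go-v2/aws/awserr) that this SDK generation uses for
// typed errors, which exposes the service error code, HTTP status code, and
// request ID described in the operation documentation above.
//
//    resp, err := client.GetDASHStreamingSessionURLRequest(input).Send(context.TODO())
//    if err != nil {
//        if reqErr, ok := err.(awserr.RequestFailure); ok {
//            // The error code gives the specific service error type, and the
//            // request ID can be quoted when contacting AWS Support.
//            log.Printf("code=%s status=%d requestID=%s", reqErr.Code(), reqErr.StatusCode(), reqErr.RequestID())
//        }
//        return
//    }
//    fmt.Println(*resp.DASHStreamingSessionURL)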