urls_downloaded_cb({"token":"dedicated","urls":[{"url":"additional/design/adaptive-demuxer.html#download-helper-and-thread","node_type":"p","page":"Adaptive Demuxers for DASH, HLS and Smooth Streaming","sections":["New design","Download helper and thread"],"context":{"gi-language":["default"]}},{"url":"additional/design/adaptive-demuxer.html#highlevel-overview-of-the-new-internal-adaptivedemux2-base-class","node_type":"ul","page":"Adaptive Demuxers for DASH, HLS and Smooth Streaming","sections":["New design","High-level overview of the new internal AdaptiveDemux2 base class:"],"context":{"gi-language":["default"]}},{"url":"additional/design/machine-learning-analytics.html#handling-multiple-tensors-simultaneously-in-a-tensor-decoder","node_type":"p","page":"Machine Learning Based Analytics","sections":["Machine Learning Based Analytics","Supporting Neural Network Inference","Tensor Decoders Need to Recognize Tensor(s) They Can Handle","Handling Multiple Tensors Simultaneously In A Tensor Decoder"],"context":{"gi-language":["default"]}},{"url":"additional/design/machine-learning-analytics.html#refinement-using-analytics-pipeline","node_type":"p","page":"Machine Learning Based Analytics","sections":["Machine Learning Based Analytics","Refinement Using Analytics Pipeline"],"context":{"gi-language":["default"]}},{"url":"additional/design/opengl.html#opengl-and-threads","node_type":"p","page":"OpenGL","sections":["OpenGL","Limits imposed by OpenGL","OpenGL and Threads"],"context":{"gi-language":["default"]}},{"url":"additional/design/subtitle-overlays.html#background","node_type":"p","page":"Subtitle Overlays and Hardware-Accelerated Playback","sections":["Subtitle Overlays and Hardware-Accelerated Playback","Background"],"context":{"gi-language":["default"]}},{"url":"application-development/introduction/motivation.html#high-performance","node_type":"ul","page":"Design principles","sections":["Design principles","High performance"],"context":{"gi-language":["default"]}},{"url":"gst-devtools/gst-validate-scenarios.html#gstvalidate-scenario-file-format","node_type":"p","page":"Scenarios","sections":["GstValidate Scenario File Format"],"context":{"gi-language":["default"]}},{"url":"tools/ges-launch.html#define-a-timeline-through-the-command-line","node_type":"p","page":"ges-launch-1.0","sections":["ges-launch-1.0","Define a timeline through the command line"],"context":{"gi-language":["default"]}},{"url":"tutorials/android/a-running-pipeline.html#a-pipeline-on-android-java-code","node_type":"p","page":"Android tutorial 2: A running pipeline","sections":["Android tutorial 2: A running pipeline","A pipeline on Android [Java code]"],"context":{"gi-language":["default"]}},{"url":"tutorials/basic/platform-specific-elements.html#iosavassetsrc","node_type":"p","page":"Basic tutorial 16: Platform-specific elements","sections":["Basic tutorial 16: Platform-specific elements","iOS","iosavassetsrc"],"context":{"gi-language":["default"]}},{"url":"tutorials/playback/hardware-accelerated-video-decoding.html#introduction","node_type":"p","page":"Playback tutorial 8: Hardware-accelerated video decoding","sections":["Playback tutorial 8: Hardware-accelerated video decoding","Introduction"],"context":{"gi-language":["default"]}},{"url":"video/gstcolorbalance.html#GstColorBalanceType","node_type":"p","page":"GstColorBalance","sections":["Enumerations"],"context":{"gi-language":["c","javascript","python"]}}]});