首页
学习
活动
专区
圈层
工具
发布
首页
学习
活动
专区
圈层
工具
MCP广场
社区首页 >问答首页 >Raspberry Pi上的gstreamer-1.0 :无法解码H.264流

Raspberry Pi上的gstreamer-1.0 :无法解码H.264流
EN

Stack Overflow用户
提问于 2016-10-20 04:59:14
回答 1查看 1K关注 0票数 0

我正在尝试在Raspberry Pi上运行一个gstreamer-1.0 python脚本(见下文,在ubuntu笔记本上运行良好)。但是,它似乎无法对流进行解码:

代码语言:javascript
运行
复制
0:00:11.237415476  9605 0xafb0cc60 ERROR            vaapidecode ../../../gst/vaapi/gstvaapidecode.c:1025:gst_vaapidecode_ensure_allowed_caps: failed to retrieve VA display
0:00:11.239490439  9605 0xafb0cc60 WARN               decodebin gstdecodebin2.c:2087:connect_pad:<decodebin0> Link failed on pad vaapidecode0:sink
0:00:11.244097356  9605 0xafb0cc60 WARN            uridecodebin gsturidecodebin.c:939:unknown_type_cb:<decoder> warning: No decoder available for type 'video/x-h264, stream-format=(string)byte-stream, alignment=(string)nal, width=(int)426, height=(int)240, framerate=(fraction)30/1, parsed=(boolean)true, pixel-aspect-ratio=(fraction)1/1, level=(string)2.1, profile=(string)main'.

我搜索了有关错误和警告的信息(结果没有启发我),但除了安装已经安装的gstreamer1.0-libav之外,我真的找不到更多的建议。因此,解码器应该是可用的。

这里可能出了什么问题,我该如何修复它?

脚本是这样的:

代码语言:javascript
运行
复制
#!/usr/bin/env python

# GST_DEBUG=3,python:5,gnl*:5 python 01_parsepipeline.py http://www.ustream.tv/channel/17074538 worst novideo.png

from __future__ import print_function

import sys

import gi

# gi.require_version() must run BEFORE the gi.repository import below;
# the call inside main() happens after this module-level import and is
# therefore too late to select the GStreamer API version.
gi.require_version("Gst", "1.0")
from gi.repository import GObject as gobject, Gst as gst

from livestreamer import Livestreamer, StreamError, PluginError, NoPluginError

import cv2
import numpy


def exit(msg):
    """Print *msg* to stderr and terminate the process with status 1.

    Bug fix: the original called bare ``sys.exit()``, which exits with
    status 0 — shells and supervisors could not tell this error path
    apart from a successful run.
    """
    print(msg, file=sys.stderr)
    sys.exit(1)


class Player(object):
    """Push a livestreamer stream into a GStreamer pipeline via appsrc and,
    for every decoded RGB video frame, print its L2 distance from the global
    reference image ``refArray`` (set up in ``main()``).
    """

    def __init__(self):
        # File-like stream object from livestreamer; assigned in play().
        self.fd = None
        self.mainloop = gobject.MainLoop()

        # uridecodebin with an appsrc:// URI lets us feed the stream bytes
        # ourselves (see on_source_setup / on_source_need_data). The video
        # branch is rate-limited to 1 fps, converted to RGB and handed to an
        # appsink; audio is decoded but discarded via fakesink.
        self.pipeline = gst.parse_launch('uridecodebin uri=appsrc:// name=decoder \
            decoder. ! videorate ! video/x-raw,framerate=1/1 ! tee name=t \
              t. ! queue ! videoconvert ! video/x-raw,format=RGB ! appsink name=appsink \
            decoder. ! queue ! audioconvert ! fakesink')
        if self.pipeline is None:
            exit("couldn't build pipeline")
        decoder = self.pipeline.get_by_name('decoder')
        if decoder is None:
            exit("couldn't get decoder")
        decoder.connect("source-setup", self.on_source_setup)

        vsink = self.pipeline.get_by_name('appsink')
        if vsink is None:
            exit("couldn't get sink")
        # emit-signals is required for "new-sample" to fire; max-buffers=1
        # keeps the sink from queueing frames faster than we consume them.
        vsink.set_property("emit-signals", True)
        vsink.set_property("max-buffers", 1)
        vsink.connect("new-sample", self.on_new_sample)

        # Bus callbacks for end-of-stream and errors.
        self.bus = self.pipeline.get_bus()
        self.bus.add_signal_watch()
        self.bus.connect("message::eos", self.on_eos)
        self.bus.connect("message::error", self.on_error)

    def on_new_sample(self, sink):
        """appsink "new-sample" callback: pull one RGB frame, compare it to
        the reference image and print the L2 difference.

        Bug fix: the original left ``diff`` unassigned when ``buf.map()``
        failed, raising NameError at the print below, and also called
        ``unmap()`` on a buffer that was never mapped.
        """
        sample = sink.emit("pull-sample")
        buf = sample.get_buffer()
        struct = sample.get_caps().get_structure(0)
        height = struct.get_value('height')
        width = struct.get_value('width')
        (result, mapinfo) = buf.map(gst.MapFlags.READ)
        if not result:
            # Mapping failed: skip this frame rather than crash.
            return gst.FlowReturn.OK
        try:
            arr = numpy.ndarray(
                (height, width, 3),
                buffer=buf.extract_dup(0, buf.get_size()),
                dtype=numpy.uint8)
            # Scale the reference to the frame's size before comparing.
            resized_refimage = cv2.resize(refArray, (width, height))
            diff = cv2.norm(arr, resized_refimage, cv2.NORM_L2)
        finally:
            buf.unmap(mapinfo)
        print("diff = " + str(diff))
        return gst.FlowReturn.OK

    def exit(self, msg):
        """Tear down the pipeline, then exit the process with *msg*."""
        self.stop()
        exit(msg)

    def stop(self):
        """Stop playback, quit the mainloop and close the stream."""
        self.pipeline.set_state(gst.State.NULL)
        self.mainloop.quit()

        if self.fd:
            self.fd.close()

    def play(self, stream):
        """Open *stream* (a livestreamer Stream) and run until EOS/error."""
        try:
            self.fd = stream.open()
        except StreamError as err:
            self.exit("Failed to open stream: {0}".format(err))

        self.pipeline.set_state(gst.State.PLAYING)
        # Blocks until stop() is called from a bus callback.
        self.mainloop.run()

    def on_source_setup(self, element, source):
        # uridecodebin created its internal appsrc; hook up the callback
        # that feeds it stream data on demand.
        print("source setup")
        source.connect("need-data", self.on_source_need_data)
        print("done")

    def on_pad_added(self, element, pad):
        # NOTE(review): this callback is never connected to any signal, and
        # self.vconverter is never assigned anywhere in this class — calling
        # this method would raise AttributeError. Looks like leftover code
        # from an earlier manual-linking version; confirm before relying on it.
        string = pad.query_caps(None).to_string()
        print(string)
        if string.startswith('video/'):
            pad.link(self.vconverter.get_static_pad("sink"))

    def on_source_need_data(self, source, length):
        """appsrc "need-data" callback: read *length* bytes from the stream
        and push them into the pipeline; signal EOS when the stream ends."""
        try:
            data = self.fd.read(length)
        except IOError as err:
            self.exit("Failed to read data from stream: {0}".format(err))

        if not data:
            # Empty read means end of stream.
            source.emit("end-of-stream")
            return

        # Wrap the Python bytes in a GStreamer buffer and hand it to appsrc.
        buf = gst.Buffer.new_wrapped(data)
        source.emit("push-buffer", buf)

    def on_eos(self, bus, msg):
        # End of stream: stop playback.
        self.stop()

    def on_error(self, bus, msg):
        # Print the pipeline error and exit.
        error = msg.parse_error()[1]
        self.exit(error)


def main():
    """Entry point: parse argv, load the reference image, resolve the
    stream with livestreamer and run the Player until it finishes."""
    if len(sys.argv) < 4:
        exit("Usage: {0} <url> <quality> <reference png image path>".format(sys.argv[0]))

    # Initialize and check GStreamer version.
    # NOTE(review): require_version() here runs AFTER the module-level
    # `from gi.repository import Gst`, so it is too late to choose the API
    # version — it must be called in the import block instead. Kept for
    # backward compatibility (it is harmless when the versions match).
    gi.require_version("Gst", "1.0")
    # threads_init() is deprecated and a no-op in PyGObject >= 3.10; kept
    # so the script still works with older PyGObject versions.
    gobject.threads_init()
    gst.init(None)

    # Collect arguments.
    url = sys.argv[1]
    quality = sys.argv[2]
    refImage = sys.argv[3]

    # Build the global reference frame used by Player.on_new_sample.
    global refArray
    image = cv2.imread(refImage)
    if image is None:
        # Bug fix: cv2.imread returns None on a missing/unreadable file,
        # and the cvtColor below would then fail with a cryptic error.
        exit("Could not read reference image '{0}'".format(refImage))
    refArray = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    refArray = cv2.blur(refArray, (3,3))

    # Create the Livestreamer session.
    livestreamer = Livestreamer()

    # Enable logging.
    livestreamer.set_loglevel("debug")
    livestreamer.set_logoutput(sys.stdout)

    # Attempt to fetch streams.
    try:
        streams = livestreamer.streams(url)
    except NoPluginError:
        exit("Livestreamer is unable to handle the URL '{0}'".format(url))
    except PluginError as err:
        exit("Plugin error: {0}".format(err))

    if not streams:
        exit("No streams found on URL '{0}'".format(url))

    # Look for the requested quality.
    if quality not in streams:
        exit("Unable to find '{0}' stream on URL '{1}'".format(quality, url))

    stream = streams[quality]

    # Create the player; play() blocks until playback is done.
    player = Player()
    player.play(stream)

if __name__ == "__main__":
    main()
EN

回答 1

Stack Overflow用户

发布于 2017-06-03 00:24:47

您设置的管道似乎试图调用一个 vaapi 解码器来硬件解码 h264——vaapi 在 Raspberry Pi 上不可用，因为闭源的 X 服务器没有实现它。您可以改用 gstreamer1.0-omx 包中的 omxh264dec。

否则，您也可以用软件解码 h264，但这会慢得多（在 Raspberry Pi 上可能慢到无法接受）。

票数 1
EN
页面原文内容由Stack Overflow提供。腾讯云小微IT领域专用引擎提供翻译支持
原文链接:

https://stackoverflow.com/questions/40141430

复制
相关文章

相似问题

领券
问题归档专栏文章快讯文章归档关键词归档开发者手册归档开发者手册 Section 归档