
Live555 live source and MediaSubsession

waston 2021-10-21
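
This post shares two pieces of code for streaming a live H.264 camera feed with live555: an H264DeviceSource that captures frames from a camera and encodes them, and a DeviceServerMediaSubsession that wires that source into the OnDemandServerMediaSubsession framework so it can be served over RTSP.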
/*
 * H264DeviceSource.hh
 *
 *  Created on: Jul 19, 2014
 *      Author: zjzhang
 */

#ifndef H264DEVICESOURCE_HH_
#define H264DEVICESOURCE_HH_
#include "DeviceSource.hh"

class H264DeviceSource : public DeviceSource {
public:
    static DeviceSource* createNew(UsageEnvironment& env, u_int8_t index = 1, u_int width = 352, u_int height = 288, u_int fps = 15, u_int kbps = 100);
protected:
    H264DeviceSource(UsageEnvironment& env, u_int8_t index, u_int width, u_int height, u_int fps, u_int kbps);
    virtual ~H264DeviceSource();
private:
    virtual void doGetNextFrame();
    virtual unsigned maxFrameSize() const;
    u_int8_t fIndex; // which camera to capture from
    int fHeight;
    int fWidth;
    void* fH264Encoder;
    u_int8_t* fBuffer;
    u_int fBufferSize;
};

#endif /* H264DEVICESOURCE_HH_ */
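
DeviceSource is the skeleton class that ships with live555 itself (liveMedia/DeviceSource.hh and DeviceSource.cpp); a subclass only needs to override doGetNextFrame() to produce data, and optionally maxFrameSize() so that downstream objects can size their buffers. openCamera(), getFrame(), openH264Encoder() and the other capture/encode calls come from the author's own H264Stream.h library.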


/*
 * H264DeviceSource.cpp
 *
 *  Created on: Jul 19, 2014
 *      Author: zjzhang
 */

#include "H264DeviceSource.hh"
#ifdef __cplusplus
extern "C" {
#endif
#include "H264Stream.h"
#ifdef __cplusplus
}
#endif
DeviceSource*
H264DeviceSource::createNew(UsageEnvironment& env, u_int8_t index, u_int width,
    u_int height, u_int fps, u_int kbps) {
    return new H264DeviceSource(env, index, width, height, fps, kbps);
}

H264DeviceSource::H264DeviceSource(UsageEnvironment& env, u_int8_t index,
    u_int width, u_int height, u_int fps, u_int kbps) :
    DeviceSource(env, DeviceParameters()), fIndex(index) {
    openCamera(fIndex);
    getFrame(fIndex); // grab one frame so the driver reports the actual geometry
    fHeight = getHeight(fIndex);
    fWidth = getWidth(fIndex);
    openH264Encoder(fWidth, fHeight, fps, kbps, &fH264Encoder);
    fBufferSize = fHeight * fWidth * 3 / 2; // one YUV420 frame
    fBuffer = new uint8_t[fBufferSize];
}

H264DeviceSource::~H264DeviceSource() {
    delete[] fBuffer;
    closeH264Encoder(fH264Encoder);
    closeCamera(fIndex);
}
unsigned H264DeviceSource::maxFrameSize() const {
    // Largest encoded frame we expect to deliver at once;
    // downstream objects use this to size their input buffers.
    return 4096;
}
void H264DeviceSource::doGetNextFrame() {
    if (!isCurrentlyAwaitingData())
        return; // we're not ready for the data yet

    unsigned char* rgbBuffer = getFrame(fIndex);
    ConvertRGB2YUV(fWidth, fHeight, rgbBuffer, fBuffer); // the encoder expects YUV420
    int newFrameSize = encodeFrame(fH264Encoder, fBuffer, fBufferSize);

    // Deliver the data here:
    if (newFrameSize < 0) {
        handleClosure();
        return;
    }
    if (newFrameSize > fMaxSize) {
        fFrameSize = fMaxSize;
        fNumTruncatedBytes = newFrameSize - fMaxSize;
    }
    else {
        fFrameSize = newFrameSize;
    }
    if (fFrameSize > 0) {
        // Drain the encoder's output packets (NAL units) back into fBuffer:
        int result = 0;
        int p = 0;
        do {
            unsigned long len = 0;
            result = getNextPacket(fH264Encoder, fBuffer + p, &len);
            p += len;
        } while (result > 0);
    }

    gettimeofday(&fPresentationTime, NULL); // If you have a more accurate time - e.g., from an encoder - then use that instead.
    // If the device is *not* a 'live source' (e.g., it comes instead from a file or buffer), then set "fDurationInMicroseconds" here.
    memmove(fTo, fBuffer, fFrameSize);

    FramedSource::afterGetting(this);
}
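
Delivery follows the standard FramedSource contract: copy at most fMaxSize bytes into fTo, record the actual size in fFrameSize and any overflow in fNumTruncatedBytes, stamp fPresentationTime, then call FramedSource::afterGetting(this) so the downstream H264VideoStreamFramer can consume the frame.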


#ifndef _DEVICE_SERVER_MEDIA_SUBSESSION_HH
#define _DEVICE_SERVER_MEDIA_SUBSESSION_HH

#ifndef _ON_DEMAND_SERVER_MEDIA_SUBSESSION_HH
#include "OnDemandServerMediaSubsession.hh"
#endif
class DeviceSource;
class DeviceServerMediaSubsession : public OnDemandServerMediaSubsession {
public:
    static DeviceServerMediaSubsession*
        createNew(UsageEnvironment& env,
            Boolean reuseFirstSource);

    // Used to implement "getAuxSDPLine()":
    void checkForAuxSDPLine1();
    void afterPlayingDummy1();
protected: // we're a virtual base class
    DeviceServerMediaSubsession(UsageEnvironment& env,
        Boolean reuseFirstSource);
    virtual ~DeviceServerMediaSubsession();

    void setDoneFlag() { fDoneFlag = ~0; }

protected: // redefined virtual functions
    virtual char const* getAuxSDPLine(RTPSink* rtpSink,
        FramedSource* inputSource);
    virtual FramedSource* createNewStreamSource(unsigned clientSessionId,
        unsigned& estBitrate);
    virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock,
        unsigned char rtpPayloadTypeIfDynamic,
        FramedSource* inputSource);

private:
    char* fAuxSDPLine;
    char fDoneFlag; // used when setting up "fAuxSDPLine"
    RTPSink* fDummyRTPSink; // ditto
};

#endif
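
The getAuxSDPLine() machinery is needed because, for H.264, the SDP "a=fmtp:" attribute ("profile-level-id" and "sprop-parameter-sets") can only be built once the sink has seen the stream's SPS/PPS NAL units. The subsession therefore starts a dummy RTPSink playing, re-checks auxSDPLine() every 100 ms, and spins a nested event loop on fDoneFlag until the line becomes available.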


#include "DeviceServerMediaSubsession.hh"
#include "H264VideoRTPSink.hh"
#include "DeviceSource.hh"
#include "H264VideoStreamFramer.hh"
#include "H264DeviceSource.hh"

DeviceServerMediaSubsession*
DeviceServerMediaSubsession::createNew(UsageEnvironment& env,
    Boolean reuseFirstSource) {
    return new DeviceServerMediaSubsession(env, reuseFirstSource);
}
DeviceServerMediaSubsession::DeviceServerMediaSubsession(UsageEnvironment& env,
    Boolean reuseFirstSource) :
    OnDemandServerMediaSubsession(env, reuseFirstSource),
    fAuxSDPLine(NULL), fDoneFlag(0), fDummyRTPSink(NULL) {
}

DeviceServerMediaSubsession::~DeviceServerMediaSubsession() {
    delete[] fAuxSDPLine; // strDup() allocates with new char[]
}

FramedSource* DeviceServerMediaSubsession::createNewStreamSource(
    unsigned /*clientSessionId*/, unsigned& estBitrate) {
    estBitrate = 100; // kbps; matches the device source's default encoder bitrate
    DeviceSource* source = H264DeviceSource::createNew(envir());
    return H264VideoStreamFramer::createNew(envir(), source);
}

RTPSink* DeviceServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock,
    unsigned char rtpPayloadTypeIfDynamic, FramedSource* /*inputSource*/) {
    return H264VideoRTPSink::createNew(envir(), rtpGroupsock,
        rtpPayloadTypeIfDynamic);
}

static void afterPlayingDummy(void* clientData) {
    DeviceServerMediaSubsession* subsess =
        (DeviceServerMediaSubsession*)clientData;
    subsess->afterPlayingDummy1();
}

void DeviceServerMediaSubsession::afterPlayingDummy1() {
    // Unschedule any pending 'checking' task:
    envir().taskScheduler().unscheduleDelayedTask(nextTask());
    // Signal the event loop that we're done:
    setDoneFlag();
}

static void checkForAuxSDPLine(void* clientData) {
    DeviceServerMediaSubsession* subsess =
        (DeviceServerMediaSubsession*)clientData;
    subsess->checkForAuxSDPLine1();
}

void DeviceServerMediaSubsession::checkForAuxSDPLine1() {
    char const* dasl;

    if (fAuxSDPLine != NULL) {
        // Signal the event loop that we're done:
        setDoneFlag();
    }
    else if (fDummyRTPSink != NULL
        && (dasl = fDummyRTPSink->auxSDPLine()) != NULL) {
        fAuxSDPLine = strDup(dasl);
        fDummyRTPSink = NULL;

        // Signal the event loop that we're done:
        setDoneFlag();
    }
    else if (!fDoneFlag) {
        // try again after a brief delay:
        int uSecsToDelay = 100000; // 100 ms
        nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay,
            (TaskFunc*)checkForAuxSDPLine, this);
    }
}
char const* DeviceServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink,
    FramedSource* inputSource) {

    if (fAuxSDPLine != NULL)
        return fAuxSDPLine; // it's already been set up (for a previous client)

    if (fDummyRTPSink == NULL) { // we're not already setting it up for another, concurrent stream
        // Note: For H264 video, the 'config' information ("profile-level-id" and "sprop-parameter-sets") isn't known
        // until we start reading from the source.  This means that "rtpSink"s "auxSDPLine()" will be NULL initially,
        // and we need to start reading data from the device until this changes.
        fDummyRTPSink = rtpSink;

        // Start reading from the device:
        fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);

        // Check whether the sink's 'auxSDPLine()' is ready:
        checkForAuxSDPLine(this);
    }

    envir().taskScheduler().doEventLoop(&fDoneFlag);

    return fAuxSDPLine;
}
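
To actually serve this subsession, it still has to be registered with an RTSPServer. Below is a minimal sketch of that wiring, assuming the standard live555 BasicUsageEnvironment setup; the port 8554 and the stream name "camera" are arbitrary choices, not part of the code above.

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "DeviceServerMediaSubsession.hh"

int main() {
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
    if (rtspServer == NULL) {
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
        return 1;
    }

    ServerMediaSession* sms = ServerMediaSession::createNew(*env, "camera",
        "camera", "Live H.264 camera stream");
    sms->addSubsession(DeviceServerMediaSubsession::createNew(*env, True));
    rtspServer->addServerMediaSession(sms);

    char* url = rtspServer->rtspURL(sms);
    *env << "Play this stream using the URL \"" << url << "\"\n";
    delete[] url;

    env->taskScheduler().doEventLoop(); // does not return
    return 0;
}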
