vot1120  posted on  2013-05-21 18:27:26

Has anyone implemented RTSP live streaming on HiSilicon?

 
Recently, on another platform, I implemented H.264 live streaming based on WIS-STREAMER, but the latency is very high and there are mosaic artifacts.

The plan is to implement it on the HiSilicon platform later.

Has anyone here already done this? Any advice would be appreciated.

8 answers

david 2013-05-23 11:23:17
Did you first build an RTSP server on a PC? On my side I don't see the high latency or mosaic artifacts you describe. Try playing the stream with the VLC player.

vot1120 2013-05-23 15:07:57
[quote]david 2013-5-23 11:23: Did you first build an RTSP server on a PC? On my side I don't see the high latency or mosaic artifacts you describe. Try playing the stream with the VLC player.[/quote]

I'm working on another hardware platform from our company that does 720p hardware encoding; my program wraps it and obtains the H.264 data through its encoder.
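The contract between that wrapper and the streaming code boils down to the sketch below. The full integration is in the code posted further down this thread; the exact prototype of procOneFrame is my assumption from how it is called there, and the buffer size is only illustrative. Each call returns one complete Annex-B access unit plus a keyframe flag and a capture timestamp.

// Sketch of the encoder-side contract assumed by the code below:
// one call, one complete Annex-B H.264 frame (start codes included).
#include <sys/time.h>
#include <stdio.h>

// Assumed prototype, reconstructed from the call site in readFromFile() below.
extern int procOneFrame(unsigned char* buf, int* isKeyframe, struct timeval* ts);

static unsigned char frameBuf[100 * 1024];   // illustrative size

void pollEncoderOnce() {
  int keyframe = 0;
  struct timeval ts;
  int len = procOneFrame(frameBuf, &keyframe, &ts);   // > 0 means one frame was returned
  if (len > 0) {
    // frameBuf[0..len) holds e.g. 00 00 00 01 27 .. 00 00 00 01 28 .. 00 00 00 01 25 .. (keyframe)
    // or                          00 00 00 01 21 ..                                      (P-frame)
    printf("frame: %d bytes, keyframe=%d\n", len, keyframe);
  }
}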


vot1120 2013-05-23 15:08:36
[quote]david 2013-5-23 11:23: Did you first build an RTSP server on a PC? On my side I don't see the high latency or mosaic artifacts you describe. Try playing the stream with the VLC player.[/quote]

Are you working directly on the HiSilicon platform? Could you tell me how you implemented it?


vot1120 2013-05-23 16:11:37
#include <sys/time.h> // for "gettimeofday()"
#include "WISInput.hh"
#include "Options.hh"
#include "Err.hh"
#include "capture.h"

#define VIDEO_WIDTH 176
#define VIDEO_HEIGHT 144
#define FRAME_PER_SEC 30.0
////////// WISOpenFileSource definition //////////

// A common "FramedSource" subclass, used for reading from an open file:

class WISOpenFileSource: public FramedSource {
protected:
  WISOpenFileSource(UsageEnvironment& env, WISInput& input);
  virtual ~WISOpenFileSource();

  virtual void readFromFile() = 0;

private: // redefined virtual functions:
  virtual void doGetNextFrame();

private:
  static void incomingDataHandler(WISOpenFileSource* source);
  void incomingDataHandler1();

protected:
  WISInput& fInput;
//  int fFileNo;
};


////////// WISVideoOpenFileSource definition //////////

class WISVideoOpenFileSource: public WISOpenFileSource {
public:
  WISVideoOpenFileSource(UsageEnvironment& env, WISInput& input);
  virtual ~WISVideoOpenFileSource();

  unsigned char frames[100*1024];
  int frameSize;
  int keyframe;
  int offset;
  FILE* fp;

protected: // redefined virtual functions:
  virtual void readFromFile();
};

////////// WISInput implementation //////////

WISInput* WISInput::createNew(UsageEnvironment& env) {
  if (!fHaveInitialized) {
         
    if (!initialize(env)) return NULL; // initialize the hardware encoder
    fHaveInitialized = True;
  }

  return new WISInput(env);
}

FramedSource* WISInput::videoSource() {
  if (fOurVideoSource == NULL) {
    fOurVideoSource = new WISVideoOpenFileSource(envir(), *this);
  }
  return fOurVideoSource;
}



WISInput::WISInput(UsageEnvironment& env)
  : Medium(env){
}

WISInput::~WISInput() {
  if (fOurVideoSource) {
    delete (WISVideoOpenFileSource*)fOurVideoSource;
    fOurVideoSource = NULL;
  }
}

Boolean WISInput::initialize(UsageEnvironment& env) { // initialize the hardware encoder
  int ret;
  do {
    if (fHaveInitialized == False) {
      setupOn2(VIDEO_WIDTH, VIDEO_HEIGHT, 15, 4000000);
      ret = startH264Encode(VIDEO_WIDTH, VIDEO_HEIGHT);
      if (ret < 0)
        break;
    }
    return True;
  } while (0);

  // An error occurred
  return False;
}

static int capture_start = 1;





char FrameBuff[1024*1024];

Boolean WISInput::fHaveInitialized = False;
FramedSource* WISInput::fOurVideoSource = NULL;

////////// WISOpenFileSource implementation //////////

WISOpenFileSource
::WISOpenFileSource(UsageEnvironment& env, WISInput& input)
  : FramedSource(env),
    fInput(input) {
           
}

WISOpenFileSource::~WISOpenFileSource() {
}

void WISOpenFileSource::doGetNextFrame() {
  // Kick off reading the next frame from the encoder:
  incomingDataHandler(this);
}

void WISOpenFileSource
::incomingDataHandler(WISOpenFileSource* source) {
  source->incomingDataHandler1();
}

void WISOpenFileSource::incomingDataHandler1() {
  // Read the data from the encoder into the client's buffer:
  readFromFile();

  // Schedule "afterGetting" to run after a 0-second delay:
  nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
      (TaskFunc*)FramedSource::afterGetting, this);
}


////////// WISVideoOpenFileSource implementation //////////

WISVideoOpenFileSource
::WISVideoOpenFileSource(UsageEnvironment& env, WISInput& input)
  : WISOpenFileSource(env, input) {
         
//          fp = fopen("cx7.264","wr");
}

WISVideoOpenFileSource::~WISVideoOpenFileSource() {
  fInput.fOurVideoSource = NULL;
}
static int count = 0;
void WISVideoOpenFileSource::readFromFile() {

        int len = 0;
        struct timeval        timestamp;
        frameSize = 0;
        memset(frames,0,100*1024);       
       
        /* Fetch data from the encoder; the format is one complete H.264 frame.
         * Keyframe: 00 00 00 01 27....00 00 00 01 28....00 00 00 01 25....
         * P-frame:  00 00 00 01 21 .......
         ********************************************/
        frameSize = procOneFrame(frames, &keyframe, &timestamp);

        if(frameSize > 0)       
        {
//                if(count < 1000)
//                        fwrite(frames,frameSize,1,fp);
//                if(count == 1000)
//                {
//                        fclose(fp);
//                        printf("-----------------------------------------done\n");
//                }
//                count++;       
                gettimeofday(&fPresentationTime, NULL);
//                fPresentationTime = timestamp;
                fFrameSize = frameSize;
                printf("fFrameSize = %d\n",fFrameSize);
                if (fFrameSize > fMaxSize) {
                       
//                        printf("fFrameSize = %d\n",fFrameSize);
//                        printf("fMaxSize = %d\n",fMaxSize);
                    fNumTruncatedBytes = fFrameSize - fMaxSize;
                    fFrameSize = fMaxSize;
//                        printf("Frame Truncated:%d\n",fNumTruncatedBytes);
                }
                else {
                    fNumTruncatedBytes = 0;
                }
               
                memmove(fTo, frames, fFrameSize);                       
        }                       
}
/*******************
 Fetch the SPS and PPS from the encoder
********************/
int GetSprop(u_int8_t* sps,int& spssize,u_int8_t* pps, int& ppssize)
{
        unsigned char nal[64];
        int offset = 0;
        int len = 0;
        unsigned char spspps[200];
        u_int8_t nal_unit_type=0;
        int i=0;
        int head_len = getSpsPpsPamaStr(spspps);
        i = head_len;
        while (i > 0)
        {
                fprintf(stderr, "%s .... calling:%d\n", __func__,i);
                len = GetAnnexbNALU(spspps+offset,nal,head_len-offset,&offset);
                i-=offset;
                if(len > 0)
                {
        //                                                fprintf(stderr,"[%d] %s .... calling:0x%.2x:0x%.2x:0x%.2x:0x%.2x:0x%.2x:0x%.2x,%d,offset=%d\n",gettid(), __func__,nal[0],nal[1],nal[2],nal[3],nal[4],nal[5],len,offset);
                        nal_unit_type = nal[0]&0x1f;
                        if(nal_unit_type == 7)
                        {
                                memmove(sps,nal,len);
                                spssize = len;
                        }
                               
                        else if(nal_unit_type == 8)
                        {
                                memmove(pps,nal,len);
                                ppssize = len;
                        }
                               
                }
                                               
        }
        return 1;
}
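
GetAnnexbNALU is called above but its implementation isn't included in the post. A minimal sketch of an Annex-B splitter with that interface, reconstructed here as an assumption rather than the original code, could look like this: it skips the leading 00 00 00 01 / 00 00 01 start code, copies the NALU payload up to the next start code (or end of buffer) into nal, returns the payload length, and reports how many bytes it consumed via the last argument (which the caller above receives in offset).

/* Sketch of an Annex-B NALU splitter matching the call
 *   len = GetAnnexbNALU(spspps + offset, nal, head_len - offset, &offset);
 * This is a reconstruction, not the poster's code. Note that the caller above
 * uses a 64-byte 'nal' buffer, so a real version should also bound 'len'. */
#include <string.h>

static int startCodeLen(const unsigned char* p, int n) {
  if (n >= 4 && p[0] == 0 && p[1] == 0 && p[2] == 0 && p[3] == 1) return 4;
  if (n >= 3 && p[0] == 0 && p[1] == 0 && p[2] == 1) return 3;
  return 0;
}

int GetAnnexbNALU(const unsigned char* buf, unsigned char* nal, int size, int* consumed) {
  int sc = startCodeLen(buf, size);
  if (sc == 0) { *consumed = size; return -1; }      // no start code found: give up on this buffer

  int pos = sc;                                      // first payload byte
  while (pos < size && startCodeLen(buf + pos, size - pos) == 0)
    pos++;                                           // advance until the next start code (or the end)

  int len = pos - sc;                                // NALU payload length, start code excluded
  memcpy(nal, buf + sc, len);
  *consumed = pos;                                   // bytes consumed from 'buf' (start code + payload)
  return len;
}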


vot1120 2013-05-23 16:12:16
#include "WISH264VideoServerMediaSubsession.hh"
#include <H264VideoRTPSink.hh>
#include <H264VideoStreamFramer.hh>

WISH264VideoServerMediaSubsession* WISH264VideoServerMediaSubsession
::createNew(UsageEnvironment& env, WISInput& wisInput, unsigned estimatedBitrate) {
  return new WISH264VideoServerMediaSubsession(env, wisInput, estimatedBitrate);
}

WISH264VideoServerMediaSubsession
::WISH264VideoServerMediaSubsession(UsageEnvironment& env, WISInput& wisInput,
                                     unsigned estimatedBitrate)
  : WISServerMediaSubsession(env, wisInput, estimatedBitrate) {
}

WISH264VideoServerMediaSubsession::~WISH264VideoServerMediaSubsession() {
}

static void afterPlayingDummy(void* clientData) {
  WISH264VideoServerMediaSubsession* subsess
    = (WISH264VideoServerMediaSubsession*)clientData;
  // Signal the event loop that we're done:
  subsess->setDoneFlag();
}

static void checkForAuxSDPLine(void* clientData) {
  WISH264VideoServerMediaSubsession* subsess
    = (WISH264VideoServerMediaSubsession*)clientData;
  subsess->checkForAuxSDPLine1();
}

void WISH264VideoServerMediaSubsession::checkForAuxSDPLine1() {
  if (fDummyRTPSink->auxSDPLine() != NULL) {
    // Signal the event loop that we're done:
    setDoneFlag();
  } else {
    // try again after a brief delay:
    int uSecsToDelay = 100000; // 100 ms
    nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay,
                              (TaskFunc*)checkForAuxSDPLine, this);
  }
}

char const* WISH264VideoServerMediaSubsession
::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) {
  // Note: For H.264 video, the SPS/PPS ('config') information isn't known
  // until we start reading from the buffer.  This means that "rtpSink"s
  // "auxSDPLine()" will be NULL initially, and we need to start reading
  // data from our buffer until this changes.
  fDummyRTPSink = rtpSink;

  // Start reading the buffer:
  fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);

  // Check whether the sink's 'auxSDPLine()' is ready:
  checkForAuxSDPLine(this);

  // Setting fDoneFlag non-zero makes doEventLoop() return immediately; that is
  // acceptable here because createNewRTPSink() below hands the SPS/PPS to the
  // sink directly, so auxSDPLine() is available without reading any frames.
  fDoneFlag = 1;
  envir().taskScheduler().doEventLoop(&fDoneFlag);

  char const* auxSDPLine = fDummyRTPSink->auxSDPLine();
  return auxSDPLine;
  //return NULL;
}

FramedSource* WISH264VideoServerMediaSubsession
::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) {
  estBitrate = fEstimatedKbps;

  // Create a framer for the Video Elementary Stream:
  return H264VideoStreamFramer::createNew(envir(), fWISInput.videoSource());
}

RTPSink* WISH264VideoServerMediaSubsession
::createNewRTPSink(Groupsock* rtpGroupsock,
                   unsigned char rtpPayloadTypeIfDynamic,
                   FramedSource* /*inputSource*/) {
  setVideoRTPSinkBufferSize();
  //return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);

  u_int8_t sps[64];
  u_int8_t pps[32];
  int spsSize;
  int ppsSize;

  extern int GetSprop(unsigned char* sps, int& spssize, unsigned char* pps, int& ppssize);
  GetSprop(sps, spsSize, pps, ppsSize);

  return H264VideoRTPSink::createNew(envir(), rtpGroupsock, 96, sps, spsSize, pps, ppsSize);
}
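
For completeness, the subsession above still has to be registered with an RTSP server; the posts here only show the subsession and input classes. The sketch below is my assumption of the usual live555 / wis-streamer glue, not the poster's code: the port, stream name, and bitrate value are arbitrary. Raising OutPacketBuffer::maxSize so the largest keyframe fits is also standard practice with live555 H.264 streaming, since truncated frames decode with exactly the kind of block artifacts described in the question.

#include <BasicUsageEnvironment.hh>
#include <liveMedia.hh>
#include "WISInput.hh"
#include "WISH264VideoServerMediaSubsession.hh"

int main() {
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  // Allow large encoded frames; live555 truncates frames bigger than this on the sink side.
  OutPacketBuffer::maxSize = 300000;

  WISInput* input = WISInput::createNew(*env);
  if (input == NULL) {
    *env << "Failed to initialize the encoder input\n";
    return 1;
  }

  RTSPServer* rtspServer = RTSPServer::createNew(*env, 554 /* port, assumed */);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    return 1;
  }

  ServerMediaSession* sms =
      ServerMediaSession::createNew(*env, "live", "live", "H.264 live stream");
  sms->addSubsession(
      WISH264VideoServerMediaSubsession::createNew(*env, *input, 4000 /* kbps, assumed */));
  rtspServer->addServerMediaSession(sms);

  char* url = rtspServer->rtspURL(sms);
  *env << "Stream ready at " << url << "\n";
  delete[] url;

  env->taskScheduler().doEventLoop(); // does not return
  return 0;
}

A client can then be pointed at the printed rtsp:// URL (for example with VLC, as suggested above) to check latency and image quality.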


ebaina_ 2013-05-23 21:56:44
microcreat has already written up a porting summary. Please refer to that forum member's write-up. :):)


hbrs_coder 2014-01-15 14:59:18
Moderator, do you have a link to it?


sxsong 2015-08-29 17:32:19
I've run into the same problem. Did the OP ever solve it? Thanks!