
JPEG streaming with live555

I want to stream JPEG images or a Motion JPEG file using live555. The problem, however, is that there is no usable JPEG implementation in live555. Can anyone help?

Answers


In case you haven't found it already, refer to this: Jpeg Streaming using live555. It covers streaming image/JPEGs, which is the same as what you asked; for MJPEG you have to follow the same process.


You can find an implementation that was posted to the live-devel mailing list: http://lists.live555.com/pipermail/live-devel/2012-February/014672.html. The code and a sample are available, but this modification was rejected by the live555 maintainer.

First you need to implement an MJPEGVideoSource that can feed a JPEGVideoRTPSink (the class itself is shown after the server code below). A simple RTSP server built around it looks like this:

#include "liveMedia.hh" 
#include "BasicUsageEnvironment.hh" 
#include "GroupsockHelper.hh" 
#include "MJPEGVideoSource.hh" 

char const* inputFileName = "test.mjpeg"; 

int main(int argc, char** argv) { 
    // Begin by setting up our usage environment: 
    TaskScheduler* scheduler = BasicTaskScheduler::createNew(); 
    UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler); 

    // Create 'groupsocks' for RTP and RTCP: 
    struct in_addr destinationAddress; 
    destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env); 

    const unsigned short rtpPortNum = 18888; 
    const unsigned short rtcpPortNum = rtpPortNum+1; 
    const unsigned char ttl = 255; 

    const Port rtpPort(rtpPortNum); 
    const Port rtcpPort(rtcpPortNum); 

    Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl); 
    rtpGroupsock.multicastSendOnly(); // we're a SSM source 
    Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl); 
    rtcpGroupsock.multicastSendOnly(); // we're a SSM source 

    // Create a 'JPEG Video RTP' sink from the RTP 'groupsock': 
    RTPSink* videoSink = JPEGVideoRTPSink::createNew(*env, &rtpGroupsock); 

    // Create (and start) a 'RTCP instance' for this RTP sink: 
    const unsigned estimatedSessionBandwidth = 5000; // in kbps; for RTCP b/w share 
    const unsigned maxCNAMElen = 100; 
    unsigned char CNAME[maxCNAMElen+1]; 
    gethostname((char*)CNAME, maxCNAMElen); 
    CNAME[maxCNAMElen] = '\0'; // just in case 
    RTCPInstance* rtcp = RTCPInstance::createNew(*env, &rtcpGroupsock, 
       estimatedSessionBandwidth, CNAME, 
       videoSink, NULL /* we're a server */, 
       True /* we're a SSM source */); 
    // Note: This starts RTCP running automatically 

    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554); 
    if (rtspServer == NULL) {
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
        exit(1);
    }
    ServerMediaSession* sms = ServerMediaSession::createNew(*env, "testStream", inputFileName,
        "Session streamed by \"testMJPEGVideoStreamer\"",
        True /*SSM*/);
    sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp)); 
    rtspServer->addServerMediaSession(sms); 

    char* url = rtspServer->rtspURL(sms); 
    *env << "Play this stream using the URL \"" << url << "\"\n"; 
    delete[] url; 

    // Start the streaming: 
    *env << "Beginning streaming...\n"; 
    // Open the input file as a 'byte-stream file source': 
    ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(*env, inputFileName); 
    if (fileSource == NULL) {
        *env << "Unable to open file \"" << inputFileName
             << "\" as a byte-stream file source\n";
        exit(1);
    }

    // Create the MJPEG video source: 
    MJPEGVideoSource* videoSource = MJPEGVideoSource::createNew(*env, fileSource); 

    // Finally, start playing: 
    *env << "Beginning to read from file...\n"; 
    videoSink->startPlaying(*videoSource, NULL, NULL); 

    env->taskScheduler().doEventLoop(); 

    return 0; 
} 
And the MJPEGVideoSource itself. It parses each JPEG frame, strips the headers, and extracts the parameters that JPEGVideoRTPSink needs:

#include "JPEGVideoSource.hh" 

class MJPEGVideoSource : public JPEGVideoSource 
{ 
     public: 
       static MJPEGVideoSource* createNew (UsageEnvironment& env, FramedSource* source) 
       { 
         return new MJPEGVideoSource(env,source); 
       } 
       virtual void doGetNextFrame() 
       { 
        if (m_inputSource) 
         m_inputSource->getNextFrame(fTo, fMaxSize, afterGettingFrameSub, this, FramedSource::handleClosure, this);      
       } 
       virtual void doStopGettingFrames() 
       { 
        FramedSource::doStopGettingFrames(); 
        if (m_inputSource) 
         m_inputSource->stopGettingFrames();      
       } 
       static void afterGettingFrameSub(void* clientData, unsigned frameSize,unsigned numTruncatedBytes,struct timeval presentationTime,unsigned durationInMicroseconds) 
       { 
           MJPEGVideoSource* source = (MJPEGVideoSource*)clientData; 
           source->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds); 
       }   
       void afterGettingFrame(unsigned frameSize,unsigned numTruncatedBytes,struct timeval presentationTime,unsigned durationInMicroseconds) 
       { 
        int headerSize = 0; 
        bool headerOk = false; 
        fFrameSize = 0; 

        for (unsigned int i = 0; i < frameSize ; ++i) 
        { 
         // SOF 
         if ((i+8) < frameSize && fTo[i] == 0xFF && fTo[i+1] == 0xC0) 
         { 
          m_height = (fTo[i+5]<<5)|(fTo[i+6]>>3); 
          m_width = (fTo[i+7]<<5)|(fTo[i+8]>>3); 
         } 
         // DQT 
         if ((i+5+64) < frameSize && fTo[i] == 0xFF && fTo[i+1] == 0xDB) 
         { 
          if (fTo[i+4] ==0) 
          { 
           memcpy(m_qTable, fTo + i + 5, 64); 
           m_qTable0Init = true; 
          } 
          else if (fTo[i+4] ==1) 
          { 
           memcpy(m_qTable + 64, fTo + i + 5, 64); 
           m_qTable1Init = true; 
          } 
         } 
         // End of header 
         if ((i+1) < frameSize && fTo[i] == 0x3F && fTo[i+1] == 0x00) 
         { 
          headerOk = true; 
          headerSize = i+2; 
          break; 
         } 
        } 

        if (headerOk) 
        { 
         fFrameSize = frameSize - headerSize; 
         memmove(fTo, fTo + headerSize, fFrameSize); 
        } 

        fNumTruncatedBytes = numTruncatedBytes; 
        fPresentationTime = presentationTime; 
        fDurationInMicroseconds = durationInMicroseconds; 
        afterGetting(this); 
       } 
       virtual u_int8_t type() { return 1; }; 
       virtual u_int8_t qFactor() { return 128; }; 
       virtual u_int8_t width() { return m_width; }; 
       virtual u_int8_t height() { return m_height; }; 
       u_int8_t const* quantizationTables(u_int8_t& precision, u_int16_t& length) 
       { 
        length = 0; 
        precision = 0; 
        if (m_qTable0Init && m_qTable1Init) 
        { 
         precision = 8; 
         length = sizeof(m_qTable); 
        } 
        return m_qTable;    
       } 

     protected: 
       MJPEGVideoSource(UsageEnvironment& env, FramedSource* source) : JPEGVideoSource(env), 
       m_inputSource(source), 
       m_width(0), 
       m_height(0), 
       m_qTable0Init(false), 
       m_qTable1Init(false) 
       { 
        memset(&m_qTable,0,sizeof(m_qTable)); 
       } 
       virtual ~MJPEGVideoSource() 
       { 
        Medium::close(m_inputSource); 
       } 

     protected: 
       FramedSource* m_inputSource; 
       u_int8_t  m_width; 
       u_int8_t  m_height; 
       u_int8_t  m_qTable[128]; 
       bool   m_qTable0Init; 
       bool   m_qTable1Init; 
}; 
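
One detail worth calling out: width() and height() return a single byte because RFC 2435 transmits frame dimensions in 8-pixel units, which is why the SOF0 parser shifts by 3, and why frames larger than 2040 pixels in either dimension cannot be streamed with this payload format. A minimal sketch of that packing (the helper name and guard are my own, not live555 API):

#include <cstdio>
#include <cstdint>

// Hypothetical helper: RFC 2435 carries a dimension as value/8 in one byte,
// so 2040 px is the maximum and non-multiples of 8 are not exactly representable.
static bool packDimension(unsigned pixels, uint8_t& packed)
{
    if (pixels == 0 || pixels > 2040 || (pixels % 8) != 0)
        return false;
    packed = (uint8_t)(pixels >> 3); // same shift the SOF0 parser performs
    return true;
}

int main()
{
    uint8_t w = 0;
    if (packDimension(1280, w))
        printf("1280 px -> %u on the wire -> %u px decoded\n", (unsigned)w, (unsigned)(w << 3));
    return 0;
}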

main() above then plugs this class in as its video source to build the simple RTSP server.
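
Assuming the defaults in main() above, the server announces its stream at rtsp://<your-host>:8554/testStream; it should be playable with an RTP/JPEG-capable client such as openRTSP or VLC, though client support for RTP/JPEG varies.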