
RTSP server build procedure (the code is in C, but look at my client code below to see how the GStreamer-java APIs are used; it should be fairly straightforward).

I modified the code from this URL:

http://www.ip-sense.com/linuxsense/how-to-develop-a-rtsp-server-in-linux-using-gstreamer/

/* GStreamer 
* Copyright (C) 2008 Wim Taymans <wim.taymans at gmail.com> 
* Copyright (c) 2012 enthusiasticgeek <[email protected]> 
* 
* This library is free software; you can redistribute it and/or 
* modify it under the terms of the GNU Library General Public 
* License as published by the Free Software Foundation; either 
* version 2 of the License, or (at your option) any later version. 
* 
* This library is distributed in the hope that it will be useful, 
* but WITHOUT ANY WARRANTY; without even the implied warranty of 
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 
* Library General Public License for more details. 
* 
* You should have received a copy of the GNU Library General Public 
* License along with this library; if not, write to the 
* Free Software Foundation, Inc., 59 Temple Place - Suite 330, 
* Boston, MA 02111-1307, USA. 
*/ 


//Edited by: enthusiasticgeek (c) 2012 for Stack Overflow Sept 11, 2012 

//########################################################################### 
//Important 
//########################################################################### 

//On ubuntu: sudo apt-get install libgstrtspserver-0.10-0 libgstrtspserver-0.10-dev 

//Play with VLC 
//rtsp://localhost:8554/test 

//video decode only: gst-launch -v rtspsrc location="rtsp://localhost:8554/test" ! rtph264depay ! ffdec_h264 ! autovideosink 
//audio and video: 
//gst-launch -v rtspsrc location="rtsp://localhost:8554/test" name=demux demux. ! queue ! rtph264depay ! ffdec_h264 ! ffmpegcolorspace ! autovideosink sync=false demux. ! queue ! rtppcmadepay ! alawdec ! autoaudiosink 

//########################################################################### 
#include <gst/gst.h> 

#include <gst/rtsp-server/rtsp-server.h> 

/* define this if you want the resource to only be available when using 
* user/admin as the password */ 
#undef WITH_AUTH 

/* this timeout is periodically run to clean up the expired sessions from the 
* pool. This needs to be run explicitly currently but might be done 
* automatically as part of the mainloop. */ 
static gboolean 
timeout (GstRTSPServer * server, gboolean ignored) 
{ 
    GstRTSPSessionPool *pool; 

    pool = gst_rtsp_server_get_session_pool (server); 
    gst_rtsp_session_pool_cleanup (pool); 
    g_object_unref (pool); 

    return TRUE; 
} 

int 
main (int argc, char *argv[]) 
{ 
    GMainLoop *loop; 
    GstRTSPServer *server; 
    GstRTSPMediaMapping *mapping; 
    GstRTSPMediaFactory *factory; 
#ifdef WITH_AUTH 
    GstRTSPAuth *auth; 
    gchar *basic; 
#endif 

    gst_init (&argc, &argv); 

    loop = g_main_loop_new (NULL, FALSE); 

    /* create a server instance */ 
    server = gst_rtsp_server_new(); 
    gst_rtsp_server_set_service(server,"8554"); //set the port # 

    /* get the mapping for this server, every server has a default mapper object 
    * that be used to map uri mount points to media factories */ 
    mapping = gst_rtsp_server_get_media_mapping (server); 

#ifdef WITH_AUTH 
    /* make a new authentication manager. it can be added to control access to all 
    * the factories on the server or on individual factories. */ 
    auth = gst_rtsp_auth_new(); 
    basic = gst_rtsp_auth_make_basic ("user", "admin"); 
    gst_rtsp_auth_set_basic (auth, basic); 
    g_free (basic); 
    /* configure in the server */ 
    gst_rtsp_server_set_auth (server, auth); 
#endif 

    /* make a media factory for a test stream. The default media factory can use 
    * gst-launch syntax to create pipelines. 
    * any launch line works as long as it contains elements named pay%d. Each 
    * element with pay%d names will be a stream */ 
    factory = gst_rtsp_media_factory_new(); 

    gst_rtsp_media_factory_set_launch (factory, "(" 
     "videotestsrc ! video/x-raw-yuv,width=320,height=240,framerate=10/1 ! " 
     "x264enc ! queue ! rtph264pay name=pay0 pt=96 " 
     "audiotestsrc ! audio/x-raw-int,rate=8000 ! alawenc ! rtppcmapay name=pay1 pt=97 " ")"); 

    /* attach the test factory to the /test url */ 
    gst_rtsp_media_mapping_add_factory (mapping, "/test", factory); 

    /* don't need the ref to the mapper anymore */ 
    g_object_unref (mapping); 

    /* attach the server to the default maincontext */ 
    if (gst_rtsp_server_attach (server, NULL) == 0) 
      goto failed; 

    /* add a timeout for the session cleanup */ 
    g_timeout_add_seconds (2, (GSourceFunc) timeout, server); 

    /* start serving, this never stops */ 
    g_main_loop_run (loop); 

    return 0; 

    /* ERRORS */ 
failed: 
    { 
    g_print ("failed to attach the server\n"); 
    return -1; 
    } 
} 
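
For reference, the factory is not limited to the test sources: any launch line works as long as the payloaders are named pay0, pay1, and so on, as the comment in the code notes. Below is a minimal sketch of a drop-in replacement for the gst_rtsp_media_factory_set_launch call, assuming a V4L2 webcam at /dev/video0 and the same GStreamer 0.10 plugins; only the launch string changes, the rest of main() stays the same.

    /* Hypothetical alternative: serve a webcam instead of the test pattern. 
     * Assumes /dev/video0 exists; video branch only, payloader named pay0 as required. */ 
    gst_rtsp_media_factory_set_launch (factory, "(" 
     "v4l2src device=/dev/video0 ! queue ! videoscale ! ffmpegcolorspace ! " 
     "video/x-raw-yuv,width=320,height=240 ! x264enc ! queue ! " 
     "rtph264pay name=pay0 pt=96 " ")"); 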

Makefile

# Copyright (c) 2012 enthusiasticgeek 
# RTSP demo for Stack Overflow 

sample: 
    gcc -Wall -I/usr/include/gstreamer-0.10 rtsp.c -o rtsp `pkg-config --libs --cflags gstreamer-0.10 gstreamer-rtsp-0.10` -lglib-2.0 -lgstrtspserver-0.10 -lgstreamer-0.10 

Once you build the binary, simply run it with ./rtsp and open another tab in the terminal to test the server with the following pipeline.

Testing the decode pipeline. It works fine!

gst-launch -v rtspsrc location="rtsp://localhost:8554/test" name=demux demux. ! queue ! rtph264depay ! ffdec_h264 ! ffmpegcolorspace ! autovideosink sync=false demux. ! queue ! rtppcmadepay ! alawdec ! autoaudiosink 

Java client code

// Display RTSP streaming of video 
// (c) 2011 enthusiasticgeek 
// This code is distributed in the hope that it will be useful, 
// but WITHOUT ANY WARRANTY; without even the implied warranty of 
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE 
// Leave Credits intact 

package video2; //replace this with your package 
import java.awt.BorderLayout; 
import java.awt.Dimension; 

import javax.swing.JFrame; 
import javax.swing.SwingUtilities; 

//import org.gstreamer.Caps; 
import org.gstreamer.Bus; 
import org.gstreamer.Element; 
import org.gstreamer.ElementFactory; 
import org.gstreamer.Gst; 
import org.gstreamer.GstObject; 
import org.gstreamer.Pad; 
import org.gstreamer.PadDirection; 
import org.gstreamer.Pipeline; 
import org.gstreamer.State; 
import org.gstreamer.TagList; 
import org.gstreamer.swing.VideoComponent; 

/** 
* A Simple videotest example. 
*/ 
public class Main { 
    public Main() { 
    } 
    private static Pipeline pipe; 
    public static void main(String[] args) { 
    // Quartz is abysmally slow at scaling video for some reason, so turn it off. 
    System.setProperty("apple.awt.graphics.UseQuartz", "false"); 

    args = Gst.init("SwingVideoTest", args); 

    pipe = new Pipeline("pipeline"); 
    /* 
    final Element videosrc = ElementFactory.make("videotestsrc", "source"); 
    final Element videofilter = ElementFactory.make("capsfilter", "flt"); 
    videofilter.setCaps(Caps.fromString("video/x-raw-yuv, width=720, height=576" 
      + ", bpp=32, depth=32, framerate=25/1")); 
    */ 

    pipe.getBus().connect(new Bus.ERROR() { 
     public void errorMessage(GstObject source, int code, String message) { 
      System.out.println("Error occurred: " + message); 
      Gst.quit(); 
     } 
    }); 
    pipe.getBus().connect(new Bus.STATE_CHANGED() { 
     public void stateChanged(GstObject source, State old, State current, State pending) { 
      if (source == pipe) { 
       System.out.println("Pipeline state changed from " + old + " to " + current); 
      } 
     } 
    }); 
    pipe.getBus().connect(new Bus.EOS() { 
     public void endOfStream(GstObject source) { 
      System.out.println("Finished playing file"); 
      Gst.quit(); 
     } 
    });   

    pipe.getBus().connect(new Bus.TAG() { 
     public void tagsFound(GstObject source, TagList tagList) { 
      for (String tag : tagList.getTagNames()) { 
       System.out.println("Found tag " + tag + " = " 
         + tagList.getValue(tag, 0)); 
      } 
     } 
    }); 

    final Element source = ElementFactory.make("rtspsrc", "Source"); 
    final Element demux = ElementFactory.make("rtpmp4vdepay", "Depay"); 
    final Element decoder=ElementFactory.make("ffdec_mpeg4", "Decoder"); 
    final Element colorspace = ElementFactory.make("ffmpegcolorspace", "Colorspace"); 
    //final Element sink = ElementFactory.make ("autovideosink", "Output"); 

    SwingUtilities.invokeLater(new Runnable() { 

     public void run() { 
      // Create the video component and link it in 
      VideoComponent videoComponent = new VideoComponent(); 
      Element videosink = videoComponent.getElement(); 

      source.connect(new Element.PAD_ADDED() { 
       public void padAdded(Element element, Pad pad) { 
        pad.link(demux.getStaticPad("sink")); 
       } 
      }); 

      Pad p = new Pad(null, PadDirection.SRC); 
      source.addPad(p); 

      source.set("location","rtsp://<user>:<pass>@<ip>/mpeg4/1/media.amp"); //replace this with your source 

      pipe.addMany(source, demux, decoder, colorspace, videosink); 
      Element.linkMany(demux, decoder, colorspace, videosink); 

      // Now create a JFrame to display the video output 
      JFrame frame = new JFrame("Swing Video Test"); 
      frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); 
      frame.add(videoComponent, BorderLayout.CENTER); 
      videoComponent.setPreferredSize(new Dimension(720, 576)); 
      frame.pack(); 
      frame.setVisible(true); 

      // Start the pipeline processing 
      pipe.play(); 
     } 
    }); 
    } 
} 
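
The client above targets an MPEG-4 camera (rtpmp4vdepay/ffdec_mpeg4). To watch the H.264 test server from the first part of this answer instead, only the depayloader, the decoder and the source location need to change. Here is a minimal video-only sketch under that assumption (element names taken from the gst-launch test pipeline above; the class name is just an example):

// Minimal sketch: same client structure as above, pointed at the local H.264 test server. 
// Video branch only; assumes gstreamer-java 0.10 and the ffdec_h264 plugin are installed. 

package video2; //replace this with your package 
import java.awt.BorderLayout; 
import java.awt.Dimension; 

import javax.swing.JFrame; 
import javax.swing.SwingUtilities; 

import org.gstreamer.Element; 
import org.gstreamer.ElementFactory; 
import org.gstreamer.Gst; 
import org.gstreamer.Pad; 
import org.gstreamer.Pipeline; 
import org.gstreamer.swing.VideoComponent; 

public class LocalRtspClient { 
    private static Pipeline pipe; 

    public static void main(String[] args) { 
     args = Gst.init("LocalRtspClient", args); 
     pipe = new Pipeline("pipeline"); 

     final Element source = ElementFactory.make("rtspsrc", "Source"); 
     final Element depay = ElementFactory.make("rtph264depay", "Depay");   //was rtpmp4vdepay 
     final Element decoder = ElementFactory.make("ffdec_h264", "Decoder"); //was ffdec_mpeg4 
     final Element colorspace = ElementFactory.make("ffmpegcolorspace", "Colorspace"); 

     source.set("location", "rtsp://localhost:8554/test"); //the server built above 

     // rtspsrc pads only appear once the stream starts, so link them dynamically 
     source.connect(new Element.PAD_ADDED() { 
      public void padAdded(Element element, Pad pad) { 
       pad.link(depay.getStaticPad("sink")); 
      } 
     }); 

     SwingUtilities.invokeLater(new Runnable() { 
      public void run() { 
       VideoComponent videoComponent = new VideoComponent(); 
       Element videosink = videoComponent.getElement(); 

       pipe.addMany(source, depay, decoder, colorspace, videosink); 
       Element.linkMany(depay, decoder, colorspace, videosink); 

       JFrame frame = new JFrame("Local RTSP Test"); 
       frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); 
       frame.add(videoComponent, BorderLayout.CENTER); 
       videoComponent.setPreferredSize(new Dimension(320, 240)); 
       frame.pack(); 
       frame.setVisible(true); 

       pipe.play(); 
      } 
     }); 
    } 
} 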

This is a client, not a server. – user8709


@user8709 ok, I've fixed it to incorporate the server. Writing the server code should be a very easy task for him/her after following my reference link. I misread the question earlier. I hope at least I don't get downvoted now (after all the effort of writing client code from very, very limited resources) :). – enthusiasticgeek


Granted, the client code is useful in its own right. – user8709