Getting error when trying to stream live video from laptop while using libjitsi sample examples


#1

SEVERE: Failed to realize: net.sf.fmj.media.ProcessEngine@4b53f538

Oct 08, 2018 12:29:44 PM net.sf.fmj.media.Log error

SEVERE: Error: Unable to realize net.sf.fmj.media.ProcessEngine@4b53f538

Oct 08, 2018 12:29:44 PM org.jitsi.util.LoggerImpl log

Source Code :
Transmitter
/*
 * Copyright @ 2015 Atlassian Pty Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
    package org.jitsi.examples;

import java.io.*;
import java.net.*;
import java.util.*;

import org.jitsi.service.libjitsi.*;
import org.jitsi.service.neomedia.*;
import org.jitsi.service.neomedia.device.*;
import org.jitsi.service.neomedia.format.*;

/**

  • Implements an example application in the fashion of JMF’s AVTransmit2 example

  • which demonstrates the use of the libjitsi library for the purposes

  • of transmitting audio and video via RTP means.

  • @author Lyubomir Marinov
    /
    public class AVTransmit2Original
    {
    /
    *

    • The port which is the source of the transmission i.e. from which the
    • media is to be transmitted.
    • @see #LOCAL_PORT_BASE_ARG_NAME
      */
      private int localPortBase;

    /**

    • The MediaStream instances initialized by this instance indexed
    • by their respective MediaType ordinal.
      */
      private MediaStream[] mediaStreams;

    /**

    • The InetAddress of the host which is the target of the
    • transmission i.e. to which the media is to be transmitted.
    • @see #REMOTE_HOST_ARG_NAME
      */
      private InetAddress remoteAddr;

    /**

    • The port which is the target of the transmission i.e. to which the media
    • is to be transmitted.
    • @see #REMOTE_PORT_BASE_ARG_NAME
      */
      private int remotePortBase;

    /**

    • Initializes a new AVTransmit2 instance which is to transmit
    • audio and video to a specific host and a specific port.
    • @param localPortBase the port which is the source of the transmission
    • i.e. from which the media is to be transmitted
    • @param remoteHost the name of the host which is the target of the
    • transmission i.e. to which the media is to be transmitted
    • @param remotePortBase the port which is the target of the transmission
    • i.e. to which the media is to be transmitted
    • @throws Exception if any error arises during the parsing of the specified
    • localPortBase, remoteHost and remotePortBase
      */
      private AVTransmit2Original(
      String localPortBase,
      String remoteHost, String remotePortBase)
      throws Exception
      {
      this.localPortBase
      = (localPortBase == null)
      ? -1
      : Integer.parseInt(localPortBase);
      this.remoteAddr = InetAddress.getByName(remoteHost);
      this.remotePortBase = Integer.parseInt(remotePortBase);
      }

    /**

    • Starts the transmission. Returns null if transmission started ok.

    • Otherwise it returns a string with the reason why the setup failed.
      /
      private String start()
      throws Exception
      {
      /

      • Prepare for the start of the transmission i.e. initialize the
      • MediaStream instances.
        */
        //MediaType[] mediaTypes = new MediaType[] {MediaType.AUDIO};
        MediaType[] mediaTypes = new MediaType[] {MediaType.VIDEO};
        //MediaType[] mediaTypes = new MediaType[] {MediaType.AUDIO,MediaType.VIDEO};
        MediaService mediaService = LibJitsi.getMediaService();
        int localPort = localPortBase;
        int remotePort = remotePortBase;

      mediaStreams = new MediaStream[mediaTypes.length];
      for (MediaType mediaType : mediaTypes)
      {
      /*
      * The default MediaDevice (for a specific MediaType) is configured
      * (by the user of the application via some sort of UI) into the
      * ConfigurationService. If there is no ConfigurationService
      * instance known to LibJitsi, the first available MediaDevice of
      * the specified MediaType will be chosen by MediaService.
      */
      MediaDevice device
      = mediaService.getDefaultDevice(mediaType, MediaUseCase.CALL);
      MediaStream mediaStream = mediaService.createMediaStream(device);

       // direction
       /*
        * The AVTransmit2 example sends only and the AVReceive2 receives
        * only. In a call, the MediaStream's direction will most commonly
        * be set to SENDRECV.
        */
       mediaStream.setDirection(MediaDirection.SENDRECV);
      
       // format
       String encoding;
       double clockRate;
       /*
        * The AVTransmit2 and AVReceive2 examples use the H.264 video
        * codec. Its RTP transmission has no static RTP payload type number
        * assigned.
        */
       byte dynamicRTPPayloadType = -1;
       int channels = 1;
       switch (device.getMediaType())
       {
       case AUDIO:
       		
       		//PCMU
       		encoding = "PCMU";
       		clockRate = 8000;
      
       		//G723
       		//encoding = "G723";
       		//clockRate = 48000;
      
      
       		//GSM
       		//encoding = "GSM";
       		//clockRate = 8000;
      
       		//PCMA
       		//encoding = "PCMA";
       		//clockRate = 8000;
      
       		//iLBC
       		//encoding = "iLBC";
       		//clockRate = 8000;
      
       		//speex
       		//encoding = "speex";
       		//clockRate = 32000;
      
       		//G722
       		//encoding = "G722";
       		//clockRate = 8000;
      
      
       		//G729
       		//encoding = "G729";
       		//clockRate = 8000;
      
       		//telephone-event
       		//encoding = "telephone-event";
       		//clockRate = 8000;
      
       		//SILK
       		//encoding = "SILK";
       		//clockRate = 8000;
      
       		//opus
       		//encoding = "OPUS";
       		//clockRate = 48000;
      
       		//AMR-WB
       		//encoding = "AMR-WB";
       		//clockRate = 16000; 
       		
           /* PCMU has a static RTP payload type number assigned. */
           if(encoding.equalsIgnoreCase("opus")) {
       		channels = 2;
       		dynamicRTPPayloadType = 2;
       	}
           
           break;
       case VIDEO:
           encoding = "VP9";
       	//encoding = "JPEG";
       	//encoding = "VP8";
           clockRate = 90000;
           /*
            * The dymanic RTP payload type numbers are usually negotiated
            * in the signaling functionality.
            */
           dynamicRTPPayloadType = 99;
           break;
       default:
           encoding = null;
           clockRate = MediaFormatFactory.CLOCK_RATE_NOT_SPECIFIED;
           dynamicRTPPayloadType = -1;
       }
      
       if (encoding != null)
       {
           MediaFormat format
               = mediaService.getFormatFactory().createMediaFormat(
                       encoding,
                       clockRate,channels);
      
           /*
            * The MediaFormat instances which do not have a static RTP
            * payload type number association must be explicitly assigned
            * a dynamic RTP payload type number.
            */
           if (dynamicRTPPayloadType != -1)
           {
               mediaStream.addDynamicRTPPayloadType(
                       dynamicRTPPayloadType,
                       format);
           }
      
           mediaStream.setFormat(format);
       }
      
       // connector
       StreamConnector connector;
      
       if (localPortBase == -1)
       {
           connector = new DefaultStreamConnector();
       }
       else
       {
           int localRTPPort = localPort++;
           int localRTCPPort = localPort++;
      
           connector
               = new DefaultStreamConnector(
                       new DatagramSocket(localRTPPort),
                       new DatagramSocket(localRTCPPort));
       }
       mediaStream.setConnector(connector);
      
       // target
       /*
        * The AVTransmit2 and AVReceive2 examples follow the common
        * practice that the RTCP port is right after the RTP port.
        */
       int remoteRTPPort = remotePort++;
       int remoteRTCPPort = remotePort++;
      
       mediaStream.setTarget(
               new MediaStreamTarget(
                       new InetSocketAddress(remoteAddr, remoteRTPPort),
                       new InetSocketAddress(remoteAddr, remoteRTCPPort)));
      
       // name
       /*
        * The name is completely optional and it is not being used by the
        * MediaStream implementation at this time, it is just remembered so
        * that it can be retrieved via MediaStream#getName(). It may be
        * integrated with the signaling functionality if necessary.
        */
       mediaStream.setName(mediaType.toString());
      
       mediaStreams[mediaType.ordinal()] = mediaStream;
      

      }

      /*

      • Do start the transmission i.e. start the initialized MediaStream
      • instances.
        */
        for (MediaStream mediaStream : mediaStreams)
        if (mediaStream != null)
        mediaStream.start();

      return null;
      }

    /**

    • Stops the transmission if already started
      */
      private void stop()
      {
      if (mediaStreams != null)
      {
      for (int i = 0; i < mediaStreams.length; i++)
      {
      MediaStream mediaStream = mediaStreams[i];

           if (mediaStream != null)
           {
               try
               {
                   mediaStream.stop();
               }
               finally
               {
                   mediaStream.close();
                   mediaStreams[i] = null;
               }
           }
       }
      
       mediaStreams = null;
      

      }
      }

    /**

    • The name of the command-line argument which specifies the port from which
    • the media is to be transmitted. The command-line argument value will be
    • used as the port to transmit the audio RTP from, the next port after it
    • will be to transmit the audio RTCP from. Respectively, the subsequent
    • ports will be used to transmit the video RTP and RTCP from."
      */
      private static final String LOCAL_PORT_BASE_ARG_NAME
      = “–local-port-base=”;

    /**

    • The name of the command-line argument which specifies the name of the
    • host to which the media is to be transmitted.
      */
      private static final String REMOTE_HOST_ARG_NAME = “–remote-host=”;

    /**

    • The name of the command-line argument which specifies the port to which
    • the media is to be transmitted. The command-line argument value will be
    • used as the port to transmit the audio RTP to, the next port after it
    • will be to transmit the audio RTCP to. Respectively, the subsequent ports
    • will be used to transmit the video RTP and RTCP to."
      */
      private static final String REMOTE_PORT_BASE_ARG_NAME
      = “–remote-port-base=”;

    /**

    • The list of command-line arguments accepted as valid by the
    • AVTransmit2 application along with their human-readable usage
    • descriptions.
      */
      private static final String[][] ARGS
      = {
      {
      LOCAL_PORT_BASE_ARG_NAME,
      “The port which is the source of the transmission i.e. from”
      + " which the media is to be transmitted. The specified"
      + " value will be used as the port to transmit the audio"
      + " RTP from, the next port after it will be used to"
      + " transmit the audio RTCP from. Respectively, the"
      + " subsequent ports will be used to transmit the video RTP"
      + " and RTCP from."
      },
      {
      REMOTE_HOST_ARG_NAME,
      “The name of the host which is the target of the transmission”
      + " i.e. to which the media is to be transmitted"
      },
      {
      REMOTE_PORT_BASE_ARG_NAME,
      “The port which is the target of the transmission i.e. to which”
      + " the media is to be transmitted. The specified value"
      + " will be used as the port to transmit the audio RTP to"
      + " the next port after it will be used to transmit the"
      + " audio RTCP to. Respectively, the subsequent ports will"
      + " be used to transmit the video RTP and RTCP to."
      }
      };

    public static void main(String[] args)
    throws Exception
    {
    // We need two parameters to do the transmission. For example,
    // ant run-example -Drun.example.name=AVTransmit2 -Drun.example.arg.line="–remote-host=127.0.0.1 --remote-port-base=10000"
    /if (args.length < 2)
    {
    prUsage();
    }
    else
    {
    /
    Map<String, String> argMap = parseCommandLineArgs(args);
    argMap.put("–local-port-base=", “5000”);
    argMap.put("–remote-host=", “10.55.47.29”);
    argMap.put("–remote-port-base=", “9999”);
    LibJitsi.start();
    try
    {
    // Create a audio transmit object with the specified params.
    AVTransmit2Original at
    = new AVTransmit2Original(
    argMap.get(LOCAL_PORT_BASE_ARG_NAME),
    argMap.get(REMOTE_HOST_ARG_NAME),
    argMap.get(REMOTE_PORT_BASE_ARG_NAME));
    // Start the transmission
    String result = at.start();

             // result will be non-null if there was an error. The return
             // value is a String describing the possible error. Print it.
             if (result == null)
             {
                 System.err.println("Start transmission for 60 seconds...");
    
                 // Transmit for 60 seconds and then close the processor
                 // This is a safeguard when using a capture data source
                 // so that the capture device will be properly released
                 // before quitting.
                 // The right thing to do would be to have a GUI with a
                 // "Stop" button that would call stop on AVTransmit2
                 try
                 {
                     Thread.sleep(120000);
                 } catch (InterruptedException ie)
                 {
                 }
    
                 // Stop the transmission
                 at.stop();
    
                 System.err.println("...transmission ended.");
             }
             else
             {
                 System.err.println("Error : " + result);
             }
         }
         finally
         {
             LibJitsi.stop();
         }
     //}
    

    }

    /**

    • Parses the arguments specified to the AVTransmit2 application on

    • the command line.

    • @param args the arguments specified to the AVTransmit2

    • application on the command line

    • @return a Map containing the arguments specified to the

    • AVTransmit2 application on the command line in the form of

    • name-value associations
      */
      static Map<String, String> parseCommandLineArgs(String[] args)
      {
      Map<String, String> argMap = new HashMap<String, String>();

      for (String arg : args)
      {
      int keyEndIndex = arg.indexOf(’=’);
      String key;
      String value;

       if (keyEndIndex == -1)
       {
           key = arg;
           value = null;
       }
       else
       {
           key = arg.substring(0, keyEndIndex + 1);
           value = arg.substring(keyEndIndex + 1);
       }
       argMap.put(key, value);
      

      }
      return argMap;
      }

    /**

    • Outputs human-readable description about the usage of the

    • AVTransmit2 application and the command-line arguments it

    • accepts as valid.
      */
      private static void prUsage()
      {
      PrintStream err = System.err;

      err.println("Usage: " + AVTransmit2.class.getName() + " “);
      err.println(“Valid args:”);
      for (String[] arg : ARGS)
      err.println(” " + arg[0] + " " + arg[1]);
      }
      }

Another sample of a transmitter I tried:

package org.jitsi.examples;

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketException;
import java.util.List;
import java.util.Map;

import org.jitsi.impl.neomedia.MediaServiceImpl;
import org.jitsi.impl.neomedia.NeomediaServiceUtils;
import org.jitsi.service.libjitsi.LibJitsi;
import org.jitsi.service.neomedia.DefaultStreamConnector;
import org.jitsi.service.neomedia.MediaDirection;
import org.jitsi.service.neomedia.MediaService;
import org.jitsi.service.neomedia.MediaStream;
import org.jitsi.service.neomedia.MediaStreamTarget;
import org.jitsi.service.neomedia.MediaType;
import org.jitsi.service.neomedia.MediaUseCase;
import org.jitsi.service.neomedia.StreamConnector;
import org.jitsi.service.neomedia.device.MediaDevice;
import org.jitsi.service.neomedia.format.MediaFormat;

/**

  • This class streams screen recorded video. It can either send an H264 encoded

  • RTP stream or receive one depending on the value of the variable

  • isReceivingVideo_.
    */
    public class VideoStreamer {

    // Set to false if sending video, set to true if receiving video.
    private static final boolean isReceivingVideo_ = false;

    public final MediaService mediaService_;
    private final Map<MediaFormat, Byte> RTP_payload_number_map_;

    public static final int LOCAL_BASE_PORT_NUMBER = 15000;
    public static final String REMOTE_HOST_IP_ADDRESS = “10.55.46.55”;
    public static final int REMOTE_BASE_PORT_NUMBER = 10000;

    private MediaStream videoMediaStream_;
    private final int localBasePort_;
    private final InetAddress remoteAddress_;
    private final int remoteBasePort_;

    /**

    • Initializes a new VideoStreamer instance which is to send or receive
    • video from a specific host and a specific port.
    • @param isReceiver - true if this instance of VideoStreamer is receiving a
    • video stream, false if it is sending a video stream.
      */
      public VideoStreamer(boolean isReceiver) throws IOException {
      this.remoteAddress_ = InetAddress.getByName(REMOTE_HOST_IP_ADDRESS);
      mediaService_ = LibJitsi.getMediaService();
      RTP_payload_number_map_ = mediaService_.getDynamicPayloadTypePreferences();
      if (isReceiver) {
      this.localBasePort_ = LOCAL_BASE_PORT_NUMBER;
      this.remoteBasePort_ = REMOTE_BASE_PORT_NUMBER;
      startVideoStream(MediaDirection.RECVONLY);
      } else {
      // switch the local and remote ports for the transmitter so they hook up with the receiver.
      this.localBasePort_ = REMOTE_BASE_PORT_NUMBER;
      this.remoteBasePort_ = LOCAL_BASE_PORT_NUMBER;
      startVideoStream(MediaDirection.SENDONLY);
      }
      }

    /**

    • Initializes the receipt of video, starts it, and tries to record any

    • incoming packets.

    • @param intended_direction either sending or receiving an RTP video

    • stream.
      */
      public final void startVideoStream(final MediaDirection intended_direction) throws SocketException {
      final MediaType video_media_type = MediaType.VIDEO;
      final int local_video_port = localBasePort_;
      final int remote_video_port = remoteBasePort_;
      MediaDevice video_media_device = mediaService_.getDefaultDevice(video_media_type, MediaUseCase.CALL);
      final MediaStream video_media_stream = mediaService_.createMediaStream(video_media_device);
      video_media_stream.setDirection(intended_direction);
      // Obtain the list of formats that are available for a specific video_media_device and pick H264 if availible.
      MediaFormat video_format = null;

      MediaServiceImpl mediaServiceImpl
      = NeomediaServiceUtils.getMediaServiceImpl();
      MediaFormat f = mediaServiceImpl.getFormatFactory().createMediaFormat(“VP9”,90000,1);

      /* final List supported_video_formats = video_media_device.getSupportedFormats();
      for (final MediaFormat availible_video_format : supported_video_formats) {
      final String encoding = availible_video_format.getEncoding();
      final double clock_rate = availible_video_format.getClockRate();
      if (encoding.equals(“H264”) && clock_rate == 90000) {
      video_format = availible_video_format;
      }
      }/
      /if (video_format == null) {
      System.out.println(“You do not have the H264 video codec”);
      System.exit(-1);
      }
      /
      /
      final byte dynamic_RTP_payload_type_for_H264 = getRTPDynamicPayloadType(video_format);
      if (dynamic_RTP_payload_type_for_H264 < 96 || dynamic_RTP_payload_type_for_H264 > 127) {
      System.out.println(“Invalid RTP payload type number”);
      System.exit(-1);
      }*/
      //video_media_stream.addDynamicRTPPayloadType(dynamic_RTP_payload_type_for_H264, video_format);
      byte dynamic_RTP_payload_type_for_H264 = 99;
      video_media_stream.addDynamicRTPPayloadType(dynamic_RTP_payload_type_for_H264, f);
      video_media_stream.setFormat(f);

      final int local_RTP_video_port = local_video_port + 0;
      final int local_RTCP_video_port = local_video_port + 1;
      final StreamConnector video_connector = new DefaultStreamConnector(
      new DatagramSocket(local_RTP_video_port),
      new DatagramSocket(local_RTCP_video_port)
      );
      video_media_stream.setConnector(video_connector);
      final int remote_RTP_video_port = remote_video_port + 0;
      final int remote_RTCP_video_port = remote_video_port + 1;
      video_media_stream.setTarget(new MediaStreamTarget(
      new InetSocketAddress(remoteAddress_, remote_RTP_video_port),
      new InetSocketAddress(remoteAddress_, remote_RTCP_video_port))
      );
      video_media_stream.setName(video_media_type.toString());
      this.videoMediaStream_ = video_media_stream;
      videoMediaStream_.start();
      listenForVideoPackets(video_connector.getDataSocket());
      }

    public void listenForVideoPackets(final DatagramSocket videoDataSocket) {
    new Thread(new Runnable() {
    @Override
    public void run() {
    boolean socket_is_closed = false;
    while (!socket_is_closed) {
    final byte[] buffer = new byte[5000];
    final DatagramPacket packet = new DatagramPacket(buffer, buffer.length);
    try {
    videoDataSocket.receive(packet);
    final byte[] packet_data = new byte[packet.getLength()];
    System.arraycopy(packet.getData(), packet.getOffset(), packet_data, 0, packet.getLength());
    final StringBuilder string_builder = new StringBuilder();
    for (int i = 0; i < ((packet_data.length > 30) ? 30 : packet_data.length); ++i) {
    byte b = packet_data[i];
    string_builder.append(String.format("%02X ", b));
    }
    System.out.println("First thirty (or fewer) bytes of packet in hex: " + string_builder.toString());
    } catch (SocketException socket_closed) {
    System.out.println(“Socket is closed”);
    socket_is_closed = true;
    } catch (IOException exception) {
    exception.printStackTrace();
    }
    }
    }
    }).start();
    }

    /**

    • Checks if the given format exists in the list of formats with listed
    • dynamic RTP payload numbers and returns that number.
    • @param format - format to look up an RTP payload number for
    • @return - RTP payload on success or -1 either if payload number cannot be
    • found or if payload number is static.
      */
      public byte getRTPDynamicPayloadType(final MediaFormat format) {
      for (Map.Entry<MediaFormat, Byte> entry : RTP_payload_number_map_.entrySet()) {
      final MediaFormat map_format = (MediaFormat) entry.getKey();
      final Byte rtp_payload_type = (Byte) entry.getValue();
      if (map_format.getClockRate() == format.getClockRate() && map_format.getEncoding().equals(format.getEncoding())) {
      return rtp_payload_type;
      }
      }
      return -1;
      }

    /**

    • Close the MediaStream.
      */
      public void close() {
      try {
      this.videoMediaStream_.stop();
      } finally {
      this.videoMediaStream_.close();
      this.videoMediaStream_ = null;
      }
      }

    public static void main(String[] args) throws Exception {
    LibJitsi.start();
    try {
    VideoStreamer rtp_streamer
    = new VideoStreamer(isReceivingVideo_);
    try {
    /*
    * Wait for the media to be received and (hopefully) played back.
    * Transmits for 1 minute and receives for 30 seconds to allow the
    * tranmission to have a delay (if necessary).
    */
    final long then = System.currentTimeMillis();
    final long waiting_period;
    if (isReceivingVideo_) {
    waiting_period = 30000;
    } else {
    waiting_period = 60000;
    }
    try {
    while (System.currentTimeMillis() - then < waiting_period) {
    Thread.sleep(1000);
    }
    } catch (InterruptedException ie) {
    }
    } finally {
    rtp_streamer.close();
    }
    System.err.println(“Exiting VideoStreamer”);
    } finally {
    LibJitsi.stop();
    }
    }
    }