diff --git a/deps/update_pjsip_210/patches/pjsip_210.patch b/deps/update_pjsip_210/patches/pjsip_210.patch
index 3218ea12..5d925122 100644
--- a/deps/update_pjsip_210/patches/pjsip_210.patch
+++ b/deps/update_pjsip_210/patches/pjsip_210.patch
@@ -1,8053 +1,8077 @@
diff -ruN pjproject-2.10/base_rev pjsip/base_rev
--- pjproject-2.10/base_rev 1970-01-01 01:00:00.000000000 +0100
+++ pjsip/base_rev 2021-02-07 00:21:58.163902742 +0100
@@ -0,0 +1 @@
+210
diff -ruN pjproject-2.10/pjlib/src/pj/os_core_unix.c pjsip/pjlib/src/pj/os_core_unix.c
--- pjproject-2.10/pjlib/src/pj/os_core_unix.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjlib/src/pj/os_core_unix.c 2021-02-06 23:59:38.145916811 +0100
@@ -37,6 +37,11 @@
#if defined(PJ_HAS_SEMAPHORE_H) && PJ_HAS_SEMAPHORE_H != 0
# include <semaphore.h>
+# if defined(PJ_DARWINOS) && PJ_DARWINOS!=0
+# include <mach/mach.h>
+# include <mach/task.h>
+# include <mach/semaphore.h>
+# endif
#endif
#include <unistd.h> // getpid()
@@ -107,7 +112,11 @@
#if defined(PJ_HAS_SEMAPHORE) && PJ_HAS_SEMAPHORE != 0
struct pj_sem_t
{
+#if defined(PJ_DARWINOS) && PJ_DARWINOS!=0
+ semaphore_t *sem;
+#else
sem_t *sem;
+#endif
char obj_name[PJ_MAX_OBJ_NAME];
};
#endif /* PJ_HAS_SEMAPHORE */
@@ -1569,35 +1578,16 @@
PJ_ASSERT_RETURN(sem, PJ_ENOMEM);
#if defined(PJ_DARWINOS) && PJ_DARWINOS!=0
- /* MacOS X doesn't support anonymous semaphore */
{
- char sem_name[PJ_GUID_MAX_LENGTH+1];
- pj_str_t nam;
-
- /* We should use SEM_NAME_LEN, but this doesn't seem to be
- * declared anywhere? The value here is just from trial and error
- * to get the longest name supported.
- */
-# define MAX_SEM_NAME_LEN 23
-
- /* Create a unique name for the semaphore. */
- if (PJ_GUID_STRING_LENGTH <= MAX_SEM_NAME_LEN) {
- nam.ptr = sem_name;
- pj_generate_unique_string(&nam);
- sem_name[nam.slen] = '\0';
- } else {
- pj_create_random_string(sem_name, MAX_SEM_NAME_LEN);
- sem_name[MAX_SEM_NAME_LEN] = '\0';
- }
-
- /* Create semaphore */
- sem->sem = sem_open(sem_name, O_CREAT|O_EXCL, S_IRUSR|S_IWUSR,
- initial);
- if (sem->sem == SEM_FAILED)
- return PJ_RETURN_OS_ERROR(pj_get_native_os_error());
-
- /* And immediately release the name as we don't need it */
- sem_unlink(sem_name);
+ kern_return_t err;
+ sem->sem = PJ_POOL_ALLOC_T(pool, semaphore_t);
+ err = semaphore_create(mach_task_self(), sem->sem, SYNC_POLICY_FIFO, initial);
+ if (err != KERN_SUCCESS) {
+ if (err == KERN_RESOURCE_SHORTAGE)
+ return PJ_RETURN_OS_ERROR(ENOMEM);
+ else
+ return PJ_RETURN_OS_ERROR(EINVAL);
+ }
}
#else
sem->sem = PJ_POOL_ALLOC_T(pool, sem_t);
@@ -1633,6 +1623,7 @@
{
#if PJ_HAS_THREADS
int result;
+ int error;
PJ_CHECK_STACK();
PJ_ASSERT_RETURN(sem, PJ_EINVAL);
@@ -1640,6 +1631,20 @@
PJ_LOG(6, (sem->obj_name, "Semaphore: thread %s is waiting",
pj_thread_this()->obj_name));
+#if defined(PJ_DARWINOS) && PJ_DARWINOS!=0
+ {
+ do
+ result = semaphore_wait(*(sem->sem));
+ while (result == KERN_ABORTED);
+
+ if (result == KERN_SUCCESS) {
+ result = error = 0;
+ } else {
+ result = -1;
+ error = EINVAL;
+ }
+ }
+#else
result = sem_wait( sem->sem );
if (result == 0) {
@@ -1648,12 +1653,14 @@
} else {
PJ_LOG(6, (sem->obj_name, "Semaphore: thread %s FAILED to acquire",
pj_thread_this()->obj_name));
+ error = pj_get_native_os_error();
}
+#endif
if (result == 0)
return PJ_SUCCESS;
else
- return PJ_RETURN_OS_ERROR(pj_get_native_os_error());
+ return PJ_RETURN_OS_ERROR(error);
#else
pj_assert( sem == (pj_sem_t*) 1 );
return PJ_SUCCESS;
@@ -1667,20 +1674,45 @@
{
#if PJ_HAS_THREADS
int result;
+ int error;
PJ_CHECK_STACK();
PJ_ASSERT_RETURN(sem, PJ_EINVAL);
+#if defined(PJ_DARWINOS) && PJ_DARWINOS!=0
+ {
+ mach_timespec_t interval;
+ kern_return_t err;
+
+ interval.tv_sec = 0;
+ interval.tv_nsec = 0;
+
+ err = semaphore_timedwait(*(sem->sem), interval);
+ if (err == KERN_SUCCESS) {
+ result = error = 0;
+ } else if (err == KERN_OPERATION_TIMED_OUT) {
+ result = -1;
+ error = EAGAIN;
+ } else {
+ result = -1;
+ error = EINVAL;
+ }
+ }
+#else
result = sem_trywait( sem->sem );
if (result == 0) {
PJ_LOG(6, (sem->obj_name, "Semaphore acquired by thread %s",
pj_thread_this()->obj_name));
+ } else {
+ error = pj_get_native_os_error();
}
+#endif
+
if (result == 0)
return PJ_SUCCESS;
else
- return PJ_RETURN_OS_ERROR(pj_get_native_os_error());
+ return PJ_RETURN_OS_ERROR(error);
#else
pj_assert( sem == (pj_sem_t*)1 );
return PJ_SUCCESS;
@@ -1694,14 +1726,30 @@
{
#if PJ_HAS_THREADS
int result;
+ int error;
PJ_LOG(6, (sem->obj_name, "Semaphore released by thread %s",
pj_thread_this()->obj_name));
+#if defined(PJ_DARWINOS) && PJ_DARWINOS!=0
+ {
+ kern_return_t err;
+ err = semaphore_signal(*(sem->sem));
+ if (err == KERN_SUCCESS) {
+ result = error = 0;
+ } else {
+ result = -1;
+ error = EINVAL;
+ }
+ }
+#else
result = sem_post( sem->sem );
+ if (result != 0)
+ error = pj_get_native_os_error();
+#endif
if (result == 0)
return PJ_SUCCESS;
else
- return PJ_RETURN_OS_ERROR(pj_get_native_os_error());
+ return PJ_RETURN_OS_ERROR(error);
#else
pj_assert( sem == (pj_sem_t*) 1);
return PJ_SUCCESS;
@@ -1715,6 +1763,7 @@
{
#if PJ_HAS_THREADS
int result;
+ int error;
PJ_CHECK_STACK();
PJ_ASSERT_RETURN(sem, PJ_EINVAL);
@@ -1722,15 +1771,26 @@
PJ_LOG(6, (sem->obj_name, "Semaphore destroyed by thread %s",
pj_thread_this()->obj_name));
#if defined(PJ_DARWINOS) && PJ_DARWINOS!=0
- result = sem_close( sem->sem );
+ {
+ kern_return_t err;
+ err = semaphore_destroy(mach_task_self(), *(sem->sem));
+ if (err == KERN_SUCCESS) {
+	    result = error = 0;
+ } else {
+ result = -1;
+ error = EINVAL;
+ }
+ }
#else
result = sem_destroy( sem->sem );
+ if (result != 0)
+ error = pj_get_native_os_error();
#endif
if (result == 0)
return PJ_SUCCESS;
else
- return PJ_RETURN_OS_ERROR(pj_get_native_os_error());
+ return PJ_RETURN_OS_ERROR(error);
#else
pj_assert( sem == (pj_sem_t*) 1 );
return PJ_SUCCESS;
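
The hunks above replace the named-POSIX-semaphore workaround (sem_open with a generated name) with native Mach semaphores on Darwin. A minimal illustrative sketch of the Mach semaphore lifecycle those hunks rely on, assuming a Darwin host; error handling is reduced to the KERN_SUCCESS check used in the patch:

/* Illustrative sketch only, assuming <mach/*.h> is available (Darwin). */
#include <mach/mach.h>
#include <mach/task.h>
#include <mach/semaphore.h>

static int mach_sem_demo(void)
{
    semaphore_t sem;

    /* counterpart of pj_sem_create(): anonymous, FIFO wakeup order, count 0 */
    if (semaphore_create(mach_task_self(), &sem, SYNC_POLICY_FIFO, 0) != KERN_SUCCESS)
        return -1;

    /* counterpart of pj_sem_post() / pj_sem_wait() */
    semaphore_signal(sem);
    while (semaphore_wait(sem) == KERN_ABORTED)
        ;   /* retry when interrupted, as the patched pj_sem_wait() does */

    /* counterpart of pj_sem_destroy() */
    semaphore_destroy(mach_task_self(), sem);
    return 0;
}
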
--- pjproject-2.10/pjmedia/include/pjmedia/config_auto.h.in 2021-02-05 09:00:45.241108718 +0100
+++ pjsip/pjmedia/include/pjmedia/config_auto.h.in 2021-02-22 21:46:52.232215546 +0100
-@@ -36,6 +36,17 @@
+@@ -36,6 +36,22 @@
#undef PJMEDIA_HAS_G711_CODEC
#endif
+#define PJMEDIA_HAS_VIDEO 1
+#define PJMEDIA_HAS_OPUS_CODEC 1
+#define PJMEDIA_HAS_VPX_CODEC_VP9 1
+#define PJMEDIA_HAS_FFMPEG_VID_CODEC 1
+#define PJMEDIA_HAS_LIBWEBRTC 1
+#define PJMEDIA_HAS_WEBRTC_AEC 1
-+#define PJMEDIA_HAS_SILK_CODEC 1
++
++#if defined(PJMEDIA_VIDEO_DEV_HAS_DARWIN) && (PJMEDIA_VIDEO_DEV_HAS_DARWIN != 0)
++#define PJMEDIA_VIDEO_DEV_HAS_AVF 1
++#define PJMEDIA_VIDEO_HAS_VTOOLBOX 1
++#define PJMEDIA_HAS_VID_TOOLBOX_CODEC 1
++#endif
+
+#ifdef PJMEDIA_USE_OLD_FFMPEG
+#undef PJMEDIA_USE_OLD_FFMPEG
+#endif
#endif /* __PJMEDIA_CONFIG_AUTO_H_ */
-
+
diff -ruN pjproject-2.10/pjmedia/build/Makefile pjproject-2.10/pjmedia/build/Makefile
--- pjproject-2.10/pjmedia/build/Makefile 2021-02-05 09:00:45.241108718 +0100
+++ pjsip/pjmedia/build/Makefile 2021-02-22 21:46:52.232215546 +0100
@@ -72,8 +72,8 @@
sound_legacy.o sound_port.o stereo_port.o stream_common.o \
stream.o stream_info.o tonegen.o transport_adapter_sample.o \
transport_ice.o transport_loop.o transport_srtp.o transport_udp.o \
- types.o vid_codec.o vid_codec_util.o \
- vid_port.o vid_stream.o vid_stream_info.o vid_conf.o \
+ transport_zrtp.o types.o vid_codec.o vid_codec_util.o mixer_port.o \
+ vid_port.o vid_stream.o vid_tee.o vid_stream_info.o vid_conf.o \
wav_player.o wav_playlist.o wav_writer.o wave.o \
wsola.o audiodev.o videodev.o
@@ -106,7 +106,7 @@
export PJMEDIA_VIDEODEV_SRCDIR = ../src/pjmedia-videodev
export PJMEDIA_VIDEODEV_OBJS += videodev.o errno.o avi_dev.o ffmpeg_dev.o \
colorbar_dev.o v4l2_dev.o opengl_dev.o \
- util.o
+ util.o fb_dev.o null_dev.o
export PJMEDIA_VIDEODEV_CFLAGS += $(_CFLAGS)
export PJMEDIA_VIDEODEV_CXXFLAGS += $(_CXXFLAGS)
export PJMEDIA_VIDEODEV_LDFLAGS += $(PJMEDIA_LDLIB) \
diff -ruN pjproject-2.10/pjmedia/include/pjmedia/event.h pjsip/pjmedia/include/pjmedia/event.h
--- pjproject-2.10/pjmedia/include/pjmedia/event.h 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/include/pjmedia/event.h 2021-02-06 16:57:17.374166159 +0100
@@ -83,6 +83,11 @@
PJMEDIA_EVENT_KEYFRAME_MISSING = PJMEDIA_FOURCC('I', 'F', 'R', 'M'),
/**
+ * Remote video decoder asked for a keyframe.
+ */
+ PJMEDIA_EVENT_KEYFRAME_REQUESTED = PJMEDIA_FOURCC('I', 'F', 'R', 'R'),
+
+ /**
* Video orientation has been changed event.
*/
PJMEDIA_EVENT_ORIENT_CHANGED = PJMEDIA_FOURCC('O', 'R', 'N', 'T'),
diff -ruN pjproject-2.10/pjmedia/include/pjmedia/format.h pjsip/pjmedia/include/pjmedia/format.h
--- pjproject-2.10/pjmedia/include/pjmedia/format.h 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/include/pjmedia/format.h 2021-02-06 18:30:18.321176790 +0100
@@ -97,6 +97,7 @@
/**
* 32bit RGB with alpha channel
*/
+ PJMEDIA_FORMAT_ARGB = PJMEDIA_FORMAT_PACK('A', 'R', 'G', 'B'),
PJMEDIA_FORMAT_RGBA = PJMEDIA_FORMAT_PACK('R', 'G', 'B', 'A'),
PJMEDIA_FORMAT_BGRA = PJMEDIA_FORMAT_PACK('B', 'G', 'R', 'A'),
diff -ruN pjproject-2.10/pjmedia/include/pjmedia/mixer_port.h pjsip/pjmedia/include/pjmedia/mixer_port.h
--- pjproject-2.10/pjmedia/include/pjmedia/mixer_port.h 1970-01-01 01:00:00.000000000 +0100
+++ pjsip/pjmedia/include/pjmedia/mixer_port.h 2021-02-06 18:42:19.161906996 +0100
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2010 AG Projects
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+
+#ifndef __PJMEDIA_MIXER_PORT_H__
+#define __PJMEDIA_MIXER_PORT_H__
+
+/**
+ * @file mixer_port.h
+ * @brief Mixer media port.
+ */
+#include <pjmedia/port.h>
+
+
+
+/**
+ * @defgroup PJMEDIA_MIXER_PORT Mixer Port
+ * @ingroup PJMEDIA_PORT
+ * @brief The second simplest type of media port which forwards the frames it
+ * gets unchanged.
+ * @{
+ */
+
+
+PJ_BEGIN_DECL
+
+
+/**
+ * Create Mixer port.
+ *
+ * @param pool Pool to allocate memory.
+ * @param sampling_rate Sampling rate of the port.
+ * @param channel_count Number of channels.
+ * @param samples_per_frame Number of samples per frame.
+ * @param bits_per_sample Number of bits per sample.
+ * @param p_port Pointer to receive the port instance.
+ *
+ * @return PJ_SUCCESS on success.
+ */
+PJ_DECL(pj_status_t) pjmedia_mixer_port_create(pj_pool_t *pool,
+ unsigned sampling_rate,
+ unsigned channel_count,
+ unsigned samples_per_frame,
+ unsigned bits_per_sample,
+ pjmedia_port **p_port);
+
+
+PJ_END_DECL
+
+/**
+ * @}
+ */
+
+
+#endif /* __PJMEDIA_MIXER_PORT_H__ */
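
For illustration only, a short sketch of creating the mixer port declared above; the helper name and the audio parameters (16 kHz mono, 20 ms frames, 16-bit samples) are assumptions, not values taken from the patch:

/* Hypothetical helper; parameter values are illustrative. */
static pj_status_t create_mixer(pj_pool_t *pool, pjmedia_port **p_port)
{
    return pjmedia_mixer_port_create(pool,
                                     16000,  /* sampling_rate */
                                     1,      /* channel_count */
                                     320,    /* samples_per_frame: 20 ms at 16 kHz */
                                     16,     /* bits_per_sample */
                                     p_port);
}
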
diff -ruN pjproject-2.10/pjmedia/include/pjmedia/rtcp.h pjsip/pjmedia/include/pjmedia/rtcp.h
--- pjproject-2.10/pjmedia/include/pjmedia/rtcp.h 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/include/pjmedia/rtcp.h 2021-02-06 18:32:46.933482520 +0100
@@ -256,6 +256,8 @@
pjmedia_rtcp_stat stat; /**< Bidirectional stream stat. */
+    pj_bool_t		     keyframe_requested; /**< Set to PJ_TRUE when an RTCP PLI is received. */
+
#if defined(PJMEDIA_HAS_RTCP_XR) && (PJMEDIA_HAS_RTCP_XR != 0)
/**
* Specify whether RTCP XR processing is enabled on this session.
@@ -462,6 +464,23 @@
pj_size_t *length,
const pj_str_t *reason);
+/**
+ * Build an RTCP PLI packet. This packet can be appended to other RTCP
+ * packets, e.g. RTCP RR/SR, to compose a compound RTCP packet.
+ *
+ * @param session The RTCP session.
+ * @param buf The buffer to receive RTCP PLI packet.
+ * @param length On input, it will contain the buffer length.
+ * On output, it will contain the generated RTCP PLI
+ * packet length.
+ *
+ * @return PJ_SUCCESS on success.
+ */
+PJ_DECL(pj_status_t) pjmedia_rtcp_build_rtcp_pli(
+ pjmedia_rtcp_session *session,
+ void *buf,
+ pj_size_t *length);
+
/**
* Call this function if RTCP XR needs to be enabled/disabled in the
diff -ruN pjproject-2.10/pjmedia/include/pjmedia/signatures.h pjsip/pjmedia/include/pjmedia/signatures.h
--- pjproject-2.10/pjmedia/include/pjmedia/signatures.h 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/include/pjmedia/signatures.h 2021-02-06 18:33:45.139162846 +0100
@@ -153,6 +153,7 @@
#define PJMEDIA_SIG_PORT_ECHO PJMEDIA_SIG_CLASS_PORT_AUD('E','C')
#define PJMEDIA_SIG_PORT_MEM_CAPTURE PJMEDIA_SIG_CLASS_PORT_AUD('M','C')
#define PJMEDIA_SIG_PORT_MEM_PLAYER PJMEDIA_SIG_CLASS_PORT_AUD('M','P')
+#define PJMEDIA_SIG_PORT_MIXER PJMEDIA_SIG_CLASS_PORT_AUD('M','X')
#define PJMEDIA_SIG_PORT_NULL PJMEDIA_SIG_CLASS_PORT_AUD('N','U')
#define PJMEDIA_SIG_PORT_RESAMPLE PJMEDIA_SIG_CLASS_PORT_AUD('R','E')
#define PJMEDIA_SIG_PORT_SPLIT_COMB PJMEDIA_SIG_CLASS_PORT_AUD('S','C')
diff -ruN pjproject-2.10/pjmedia/include/pjmedia/sound_port.h pjsip/pjmedia/include/pjmedia/sound_port.h
--- pjproject-2.10/pjmedia/include/pjmedia/sound_port.h 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/include/pjmedia/sound_port.h 2021-02-06 18:34:38.880711750 +0100
@@ -344,6 +344,16 @@
/**
+ * Reset the EC state in the sound port.
+ *
+ * @param snd_port The sound device port.
+ *
+ * @return PJ_SUCCESS on success.
+ */
+PJ_DECL(pj_status_t) pjmedia_snd_port_reset_ec_state(pjmedia_snd_port *snd_port);
+
+
+/**
* Connect a port to the sound device port. If the sound device port has a
* sound recorder device, then this will start periodic function call to
* the port's put_frame() function. If the sound device has a sound player
diff -ruN pjproject-2.10/pjmedia/include/pjmedia/transport_ice.h pjsip/pjmedia/include/pjmedia/transport_ice.h
--- pjproject-2.10/pjmedia/include/pjmedia/transport_ice.h 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/include/pjmedia/transport_ice.h 2021-02-06 17:09:28.151078775 +0100
@@ -74,6 +74,28 @@
pj_status_t status,
void *user_data);
+ /**
+ * This callback will be called when ICE state changes.
+ *
+ * @param tp PJMEDIA ICE transport.
+ * @param prev Previous state.
+ * @param curr Current state.
+ */
+ void (*on_ice_state)(pjmedia_transport *tp,
+ pj_ice_strans_state prev,
+ pj_ice_strans_state curr);
+
+ /**
+ * This callback will be called when ICE is stopped.
+ *
+ * @param tp PJMEDIA ICE transport.
+ * @param reason Reason for stopping ICE.
+ * @param err Error code
+ */
+ void (*on_ice_stop)(pjmedia_transport *tp,
+ char *reason,
+ pj_status_t err);
+
} pjmedia_ice_cb;
@@ -237,6 +259,17 @@
pjmedia_transport **p_tp);
/**
+ * Return the ICE stream transport associated with this PJMEDIA transport
+ *
+ * @param tp Media transport instance.
+ *
+ * @return Pointer to the pj_ice_strans instance associated with this
+ * media transport.
+ */
+PJ_DECL(pj_ice_strans*) pjmedia_ice_get_strans(pjmedia_transport *tp);
+
+
+/**
* Get the group lock for the ICE media transport.
*
* @param tp The ICE media transport.
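
A hedged sketch of how an application might hook the two new ICE callbacks declared above; the handler names are hypothetical, and the structure is zeroed so the pre-existing callbacks stay unset:

static void my_on_ice_state(pjmedia_transport *tp,
                            pj_ice_strans_state prev,
                            pj_ice_strans_state curr)
{
    /* e.g. surface the ICE state change to the application layer */
    PJ_UNUSED_ARG(tp); PJ_UNUSED_ARG(prev); PJ_UNUSED_ARG(curr);
}

static void my_on_ice_stop(pjmedia_transport *tp, char *reason, pj_status_t err)
{
    /* e.g. log why ICE was stopped */
    PJ_UNUSED_ARG(tp); PJ_UNUSED_ARG(reason); PJ_UNUSED_ARG(err);
}

static void fill_ice_cb(pjmedia_ice_cb *cb)
{
    pj_bzero(cb, sizeof(*cb));           /* leave existing callbacks NULL */
    cb->on_ice_state = &my_on_ice_state; /* callback added by this patch */
    cb->on_ice_stop  = &my_on_ice_stop;  /* callback added by this patch */
}
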
diff -ruN pjproject-2.10/pjmedia/include/pjmedia/transport_zrtp.h pjsip/pjmedia/include/pjmedia/transport_zrtp.h
--- pjproject-2.10/pjmedia/include/pjmedia/transport_zrtp.h 1970-01-01 01:00:00.000000000 +0100
+++ pjsip/pjmedia/include/pjmedia/transport_zrtp.h 2021-02-06 16:42:58.084103561 +0100
@@ -0,0 +1,647 @@
+/* $Id$ */
+/*
+ Copyright (C) 2010 Werner Dittmann
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+
+#ifndef __PJMEDIA_TRANSPORT_ZRTP_H__
+#define __PJMEDIA_TRANSPORT_ZRTP_H__
+
+/**
+ * @file transport_zrtp.h
+ * @brief ZRTP Media Transport Adapter
+ */
+
+/* transport.h includes types.h -> config.h -> config_auto.h */
+#include <pjmedia/transport.h>
+
+#include "../../third_party/zsrtp/zrtp/zrtp/libzrtpcpp/ZrtpCWrapper.h"
+
+/**
+ * @defgroup PJMEDIA_TRANSPORT_ZRTP ZRTP Transport Adapter
+ * @brief This is the ZRTP transport adapter.
+ * @{
+ *
+ * PJMEDIA extension to support GNU ZRTP.
+ *
+ * ZRTP was developed by Phil Zimmermann and provides functions to
+ * negotiate keys and other necessary data (crypto data) to set-up
+ * the Secure RTP (SRTP) crypto context. Refer to Phil's ZRTP
+ * specification at his <a href="http://zfoneproject.com/">Zfone
+ * project</a> site to get more detailed information about the
+ * capabilities of ZRTP.
+ *
+ * <b>Short overview of the ZRTP implementation</b>
+ *
+ * ZRTP is a specific protocol to negotiate encryption algorithms
+ * and the required key material. ZRTP uses a RTP session to
+ * exchange its protocol messages. Thus ZRTP is independent of any
+ * signaling protocol like SIP, XMPP and alike.
+ *
+ * A complete GNU ZRTP implementation consists of two parts, the
+ * GNU ZRTP core and some specific code that binds the GNU ZRTP core to
+ * the underlying RTP/SRTP stack and the operating system:
+ * <ul>
+ * <li>
+ * The GNU ZRTP core is independent of a specific RTP/SRTP
+ * stack and the operating system and consists of the ZRTP
+ * protocol state engine, the ZRTP protocol messages, and the
+ * GNU ZRTP engine. The GNU ZRTP engine provides methods to
+ * set up ZRTP messages and to analyze received ZRTP messages,
+ * to compute the crypto data required for SRTP, and to
+ * maintain the required hashes and HMAC.
+ * </li>
+ * <li>
+ * The second part of an implementation is specific
+ * <em>glue</em> code that binds the GNU ZRTP core to the
+ * actual RTP/SRTP implementation and other operating system
+ * specific services such as timers and mutexes.
+ * </li>
+ * </ul>
+ *
+ * The GNU ZRTP core uses callback methods (refer to
+ * zrtp_Callback) to access RTP/SRTP or operating system specific methods,
+ * for example to send data via the RTP stack, to access
+ * timers, provide mutex handling, and to report events to the
+ * application.
+ *
+ * <b>The PJMEDIA ZRTP transport</b>
+ *
+ * ZRTP transport implements code that is specific to the pjmedia
+ * implementation. ZRTP transport also implements the specific code to
+ * provide the mutex and timeout handling to the GNU ZRTP
+ * core. Both the mutex and the timeout handling use the pjlib
+ * library to stay independent of the operating
+ * system.
+ *
+ * To perform its tasks ZRTP transport
+ * <ul>
+ * <li> implements the pjmedia transport functions and callbacks.
+ * </li>
+ * <li> implements the zrtp_Callbacks methods to provide
+ * access and other specific services (timer, mutex) to GNU
+ * ZRTP
+ * </li>
+ * <li> provides ZRTP specific methods that applications may use
+ * to control and setup GNU ZRTP
+ * </li>
+ * <li> can register and use an application specific callback
+ * class (refer to zrtp_UserCallbacks)
+ * </li>
+ * </ul>
+ *
+ * After instantiating a GNU ZRTP session (see below for a short
+ * example) applications may use the methods of
+ * ZRTP transport and the ZRTP engine to control and setup GNU ZRTP,
+ * for example enable or disable ZRTP processing or getting ZRTP status
+ * information.
+ *
+ * GNU ZRTP defines zrtp_UserCallback methods structure that an application
+ * may use and register with ZRTP transport. GNU ZRTP and ZRTP transport
+ * use the zrtp_UserCallback methods to report ZRTP events to the
+ * application. The application may display this information to
+ * the user or act otherwise.
+ *
+ * The following figure depicts the relationships between
+ * ZRTP transport, pjmedia RTP implementation, the GNU ZRTP core,
+ * SRTP and an application that provides zrtp_UserCallback methods.
+ *
+ @verbatim
+ +-----------+
+ | |
+ | SRTP-ZRTP |
+ | |
+ +-----------+
+ |C Wrapper |
+ +-----+-----+
+ |
+ | uses
+ |
+ +-----------------+ +-------+--------+ +-+-----------------+
+ | App (pjsua) | | | |C| |
+ | creates a | uses | transport_zrtp | uses | | GNU ZRTP |
+ | ZRTP transport +------+ implements +------+W| core |
+ | and implements | | zrtp_Callback | |r| implementation |
+ |zrtp_UserCallback| | | |a| (ZRtp et al) |
+ +-----------------+ +----------------+ |p| |
+ +-+-----------------+
+
+@endverbatim
+ *
+ * The following short code snippet shows how to use ZRTP transport
+ *
+ * @code
+ *
+ * #include <pjmedia/transport_zrtp.h>
+ * ...
+ * // Create media transport
+ * status = pjmedia_transport_udp_create(med_endpt, NULL, local_port,
+ * 0, &transport);
+ * if (status != PJ_SUCCESS)
+ * return status;
+ *
+ * status = pjmedia_transport_zrtp_create(med_endpt, NULL, transport,
+ * &zrtp_tp);
+ * app_perror(THIS_FILE, "Error creating zrtp", status);
+ * transport = zrtp_tp;
+ * if (dir == PJMEDIA_DIR_ENCODING)
+ * pjmedia_transport_zrtp_initialize(transport, "testenc.zid", 1, NULL);
+ * else
+ * pjmedia_transport_zrtp_initialize(transport, "testdec.zid", 1, NULL);
+ * ...
+ * @endcode
+ *
+ */
+
+#define PJMEDIA_TRANSPORT_TYPE_ZRTP PJMEDIA_TRANSPORT_TYPE_USER+2
+
+PJ_BEGIN_DECL
+
+/**
+ * ZRTP option.
+ */
+typedef enum pjmedia_zrtp_use
+{
+ /** When this flag is specified, ZRTP will be disabled. */
+ PJMEDIA_NO_ZRTP = 1,
+
+    /** When this flag is specified, PJSUA-LIB creates a ZRTP transport
+     * and calls back the application for further processing if a callback is
+     * set.
+ */
+ PJMEDIA_CREATE_ZRTP = 2
+
+} pjmedia_zrtp_use;
+
+/**
+ * This structure specifies ZRTP transport specific info. This will fit
+ * into \a buffer field of pjmedia_transport_specific_info.
+ */
+typedef struct pjmedia_zrtp_info
+{
+ /**
+ * Specify whether the ZRTP transport is active for this session.
+ */
+ pj_bool_t active;
+
+ /**
+ * Specify the cipher being used.
+ */
+ char cipher[128];
+
+} pjmedia_zrtp_info;
+
+/**
+ * Application callback methods.
+ *
+ * The RTP stack specific part of GNU ZRTP uses these callback methods
+ * to report ZRTP events to the application. Thus the application that
+ * instantiates the RTP stack shall implement these methods and show this
+ * information to the user.
+ *
+ * <b>CAVEAT</b><br/>
+ * All user callback methods run in the context of the RTP thread. Thus
+ * it is of paramount importance to keep the execution time of the methods
+ * as short as possible.
+ *
+ * @author Werner Dittmann <Werner.Dittmann@t-online.de>
+ */
+typedef struct pjmedia_zrtp_cb
+{
+ /**
+ * Inform user interface that security is active now.
+ *
+ * ZRTP calls this method if the sender and the receiver are
+ * in secure mode now.
+ *
+ * @param cipher
+ * Name and mode of cipher used to encrypt the SRTP stream
+ */
+ void (*secure_on)(pjmedia_transport *tp, char* cipher);
+
+ /**
+ * Inform user interface that security is not active any more.
+ *
+ * ZRTP calls this method if either the sender or the receiver
+ * left secure mode.
+ *
+ */
+ void (*secure_off)(pjmedia_transport *tp);
+
+ /**
+ * Show the Short Authentication String (SAS) on user interface.
+ *
+ * ZRTP calls this method to display the SAS and inform about the SAS
+     * verification status. The user interface shall enable a SAS verification
+ * button (or similar UI element). The user shall click on this UI
+ * element after he/she confirmed the SAS code with the partner.
+ *
+ * @param sas
+ * The string containing the SAS.
+ * @param verified
+ * If <code>verified</code> is true then SAS was verified by both
+ * parties during a previous call, otherwise it is set to false.
+ */
+ void (*show_sas)(pjmedia_transport *tp, char* sas, int32_t verified);
+
+ /**
+ * Inform the user that ZRTP received "go clear" message from its peer.
+ *
+ * On receipt of a go clear message the user is requested to confirm
+     * a switch to unsecure (clear) mode. Until the user confirms, ZRTP
+ * (and the underlying RTP) does not send any data.
+ *
+ */
+ void (*confirm_go_clear)(pjmedia_transport *tp);
+
+ /**
+ * Show some information to user.
+ *
+ * ZRTP calls this method to display some information to the user.
+ * Along with the message ZRTP provides a severity indicator that
+ * defines: Info, Warning, Error, and Alert. Refer to the <code>
+ * MessageSeverity</code> enum in <code>ZrtpCodes.h</code>. The
+ * UI may use this indicator to highlight messages or alike.
+ *
+ * @param sev
+ * Severity of the message.
+ * @param subCode
+ * The subcode identifying the reason.
+ */
+ void (*show_message)(pjmedia_transport *tp, int32_t sev, int32_t subCode);
+
+ /**
+ * ZRTP transport calls this if the negotiation failed.
+ *
+ * ZRTPQueue calls this method in case ZRTP negotiation failed. The
+ * parameters show the severity as well as some explanatory text.
+ * Refer to the <code>MessageSeverity</code> enum above.
+ *
+ * @param severity
+ * This defines the message's severity
+ * @param subCode
+ * The subcode identifying the reason.
+ */
+ void (*negotiation_failed)(pjmedia_transport *tp, int32_t severity, int32_t subCode);
+
+ /**
+ * ZRTP transport calls this method if the other side does not support ZRTP.
+ *
+ * If the other side does not answer the ZRTP <em>Hello</em> packets then
+ * ZRTP calls this method.
+ *
+ */
+ void (*not_supported_by_other)(pjmedia_transport *tp);
+
+ /**
+ * ZRTP transport calls this method to inform about a PBX enrollment request.
+ *
+ * Please refer to chapter 8.3 ff to get more details about PBX enrollment
+ * and SAS relay.
+ *
+ * @param info
+ * Give some information to the user about the PBX requesting an
+ * enrollment.
+ */
+ void (*ask_enrollment)(pjmedia_transport *tp, int32_t info);
+
+ /**
+ * ZRTP transport calls this method to inform about PBX enrollment result.
+ *
+     * Informs the user about the acceptance or denial of a PBX enrollment
+     * request.
+ *
+ * @param info
+ * Give some information to the user about the result of an
+ * enrollment.
+ */
+ void (*inform_enrollment)(pjmedia_transport *tp, int32_t info);
+
+ /**
+ * ZRTP transport calls this method to request a SAS signature.
+ *
+ * After ZRTP core was able to compute the Short Authentication String
+     * (SAS) it calls this method. The client may now use an appropriate
+     * method to sign the SAS. The client may use
+     * setSignatureData() of ZrtpQueue to store the signature
+     * data and enable signature transmission to the other peer. Refer
+     * to chapter 8.2 of the ZRTP specification.
+ *
+ * @param sas
+ * The SAS string to sign.
+ * @see ZrtpQueue#setSignatureData
+ *
+ */
+ void (*sign_sas)(pjmedia_transport *tp, uint8_t* sas);
+
+ /**
+ * ZRTP transport calls this method to request a SAS signature check.
+ *
+ * After ZRTP received a SAS signature in one of the Confirm packets it
+     * calls this method. The client may use <code>getSignatureLength()</code>
+     * and <code>getSignatureData()</code> of ZrtpQueue to get the signature
+     * data and perform the signature check. Refer to chapter 8.2 of the ZRTP
+     * specification.
+ *
+ * If the signature check fails the client may return false to ZRTP. In
+ * this case ZRTP signals an error to the other peer and terminates
+ * the ZRTP handshake.
+ *
+ * @param sas
+ * The SAS string that was signed by the other peer.
+ * @return
+ * true if the signature was ok, false otherwise.
+ *
+ */
+ int32_t (*check_sas_signature)(pjmedia_transport *tp, uint8_t* sas);
+} pjmedia_zrtp_cb;
+
+
+/**
+ * Create the transport adapter, specifying the underlying transport to be
+ * used to send and receive RTP/RTCP packets.
+ *
+ * @param endpt The media endpoint.
+ * @param timer_heap The heap where timers will be scheduled.
+ * @param transport The underlying media transport to send and receive
+ * RTP/RTCP packets.
+ * @param p_tp Pointer to receive the media transport instance.
+ *
+ * @param close_slave
+ * Close the slave transport on transport_destroy. PJSUA-LIB
+ * sets this to PJ_FALSE because it takes care of this.
+ *
+ * @return PJ_SUCCESS on success, or the appropriate error code.
+ */
+PJ_DECL(pj_status_t) pjmedia_transport_zrtp_create( pjmedia_endpt *endpt,
+ pj_timer_heap_t *timer_heap,
+ pjmedia_transport *transport,
+ pjmedia_transport **p_tp,
+ pj_bool_t close_slave);
+
+/*
+ * Implement the specific ZRTP transport functions
+ */
+
+/**
+ * Initialize the ZRTP transport.
+ *
+ * Before an application can use ZRTP it has to initialize the
+ * ZRTP implementation. This method opens a file that contains ZRTP specific
+ * information such as the applications ZID (ZRTP id) and its
+ * retained shared secrets.
+ *
+ * Before an application initializes the ZRTP it may use ZRTP functions
+ * to set specific configuration data. See the relevant documentation
+ * in @c ZrtpCWrapper.h . The application can perform this after
+ * it created transport_zrtp.
+ *
+ * If one application requires several ZRTP sessions, all
+ * sessions use the same timeout thread and use the same ZID
+ * file. Therefore an application does not need to do any
+ * synchronisation regarding ZID files or timeouts. This is
+ * managed by the ZRTP implementation.
+ *
+ * The current implementation of ZRTP transport does not support
+ * different ZID files for one application instance. This
+ * restriction may be removed in later versions.
+ *
+ * The application may specify its own ZID file name. If no
+ * ZID file name is specified it defaults to
+ * <code>$HOME/.GNUccRTP.zid</code> if the <code>HOME</code>
+ * environment variable is set. If it is not set the current
+ * directory is used.
+ *
+ * If the method could set up the timeout thread and open the ZID
+ * file then it enables ZRTP processing and returns.
+ *
+ * @param tp
+ * Pointer to the ZRTP transport data as returned by
+ * @c pjmedia_transport_zrtp_create.
+ *
+ * @param zidFilename
+ *     The name of the ZID file, can be a relative or absolute
+ * filename.
+ *
+ * @param autoEnable
+ *     If set to true, the method automatically sets enableZrtp to
+ * true. This enables the ZRTP auto-sense mode.
+ *
+ * @param zrtp_cb
+ *     Pointer to the application's ZRTP callbacks structure. Setting
+ *     it to NULL switches off the user callbacks.
+ * @return
+ * PJ_SUCCESS on success, ZRTP processing enabled, other codes
+ * leave ZRTP processing disabled.
+ *
+ */
+PJ_DECL(pj_status_t) pjmedia_transport_zrtp_initialize(pjmedia_transport *tp,
+ const char *zidFilename,
+ pj_bool_t autoEnable,
+ pjmedia_zrtp_cb *zrtp_cb);
+/**
+ * Enable or disable ZRTP processing.
+ *
+ * Call this method to enable or disable ZRTP processing after
+ * calling <code>pjmedia_transport_zrtp_initialize</code> with the
+ * parameter @c autoEnable set to false. This can be done before
+ * using a RTP session or at any time during a RTP session.
+ *
+ * Existing SRTP sessions or currently active ZRTP processing will
+ * not be stopped or disconnected.
+ *
+ * If the application enables ZRTP then:
+ * <ul>
+ * <li>ZRTP transport starts to send ZRTP Hello packets after at least
+ * one RTP packet was sent and received on the associated RTP
+ * session. Thus if an application enables ZRTP and ZRTP transport
+ * detects traffic on the RTP session then ZRTP transport automatically
+ * starts the ZRTP protocol. This automatic start is convenient
+ * for applications that negotiate RTP parameters and set up RTP
+ * sessions but the actual RTP traffic starts some time later.
+ * </li>
+ * <li>ZRTP transport analyses incoming packets to detect ZRTP
+ * messages. If ZRTP was started, either via automatic start (see
+ * above) or explicitly via @c zrtp_startZrtp, then ZrtpQueue
+ * forwards ZRTP packets to the GNU ZRTP core.
+ * </ul>
+ *
+ * @param tp
+ * Pointer to the ZRTP transport data as returned by
+ * @c pjmedia_transport_zrtp_create.
+ *
+ * @param onOff
+ * @c 1 to enable ZRTP, @c 0 to disable ZRTP
+ */
+PJ_DECL(void) pjmedia_transport_zrtp_setEnableZrtp(pjmedia_transport *tp, pj_bool_t onOff);
+
+/**
+ * Return the ZRTP enable state.
+ *
+ * @param tp
+ * Pointer to the ZRTP transport data as returned by
+ * @c pjmedia_transport_zrtp_create.
+ *
+ * @return @c true if ZRTP processing is enabled, @c false
+ * otherwise.
+ */
+PJ_DECL(pj_bool_t) pjmedia_transport_zrtp_isEnableZrtp(pjmedia_transport *tp);
+
+/**
+ * Starts the ZRTP protocol engine.
+ *
+ * Applications may call this method to immediately start the ZRTP protocol
+ * engine any time after initializing ZRTP and setting optional parameters,
+ * for example client id or multi-stream parameters.
+ *
+ * If the application does not call this method but successfully initialized
+ * the ZRTP engine using @c pjmedia_transport_zrtp_initialize then ZRTP may
+ * also start, depending on the autoEnable parameter.
+ *
+ * @param tp
+ * Pointer to the ZRTP transport data as returned by
+ * @c pjmedia_transport_zrtp_create.
+ *
+ * @see pjmedia_transport_zrtp_initialize
+ */
+PJ_DECL(void) pjmedia_transport_zrtp_startZrtp(pjmedia_transport *tp);
+
+/**
+ * Stops the ZRTP protocol engine.
+ *
+ * Applications call this method to stop the ZRTP protocol
+ * engine. The ZRTP transport can not start or process any ZRTP
+ * negotiations.
+ *
+ * This call does not deactivate SRTP processing of ZRTP transport, thus
+ * the ZRTP transport still encrypts/decrypts data via SRTP.
+ *
+ * @param tp
+ * Pointer to the ZRTP transport data as returned by
+ * @c pjmedia_transport_zrtp_create.
+ *
+ */
+PJ_DECL(void) pjmedia_transport_zrtp_stopZrtp(pjmedia_transport *tp);
+
+/**
+ * Set the local SSRC in case of receive-only sessions.
+ *
+ * Receiver-only RTP sessions never send RTP packets, thus ZRTP cannot learn
+ * the local (sender) SSRC. ZRTP requires the SSRC to bind the RTP session
+ * to the SRTP and its handshake. In this case the application shall generate
+ * an SSRC value and set it.
+ *
+ * Usually an application knows if a specific RTP session is receive-only, for
+ * example by inspecting and parsing the SDP data.
+ *
+ * If the application later decides to switch this RTP session to full-duplex
+ * mode (send and receive) it shall use the generated SSRC to initialize the
+ * RTP session. Then the outgoing packets are encrypted by SRTP.
+ *
+ * @param tp
+ * Pointer to the ZRTP transport data as returned by
+ * @c pjmedia_transport_zrtp_create.
+ *
+ * @param ssrc
+ * The local ssrc value in host order.
+ */
+PJ_DECL(void) pjmedia_transport_zrtp_setLocalSSRC(pjmedia_transport *tp, uint32_t ssrc);
+
+/**
+ * Check the state of the MitM mode flag.
+ *
+ * If true then this ZRTP session acts as MitM, usually enabled by a PBX
+ * client (user agent)
+ *
+ * @return state of mitmMode
+ */
+PJ_DECL(pj_bool_t) pjmedia_transport_zrtp_isMitmMode(pjmedia_transport *tp);
+
+/**
+ * Set the state of the MitM mode flag.
+ *
+ * If MitM mode is set to true this ZRTP session acts as MitM, usually
+ * enabled by a PBX client (user agent).
+ *
+ * @param mitmMode defines the new state of the mitmMode flag
+ */
+PJ_DECL(void) pjmedia_transport_zrtp_setMitmMode(pjmedia_transport *tp, pj_bool_t mitmMode);
+
+/**
+ * Set / reset the SAS verification flag.
+ *
+ */
+PJ_DECL(void) pjmedia_transport_zrtp_setSASVerified(pjmedia_transport *tp, pj_bool_t verified);
+
+/**
+ * Get the peer's ZID.
+ *
+ */
+PJ_DECL(int) pjmedia_transport_zrtp_getPeerZid(pjmedia_transport *tp, unsigned char* data);
+
+/**
+ * Get the peer's name.
+ *
+ */
+PJ_DECL(char*) pjmedia_transport_zrtp_getPeerName(pjmedia_transport *tp);
+
+/**
+ * Set the peer's name.
+ *
+ */
+PJ_DECL(void) pjmedia_transport_zrtp_putPeerName(pjmedia_transport *tp, const char *name);
+
+
+PJ_DECL(char*) pjmedia_transport_zrtp_getMultiStreamParameters(pjmedia_transport *tp, pj_int32_t *length);
+
+PJ_DECL(void) pjmedia_transport_zrtp_setMultiStreamParameters(pjmedia_transport *tp, const char *parameters, pj_int32_t length, pjmedia_transport *master_tp);
+
+/**
+ * Get the ZRTP context pointer.
+ *
+ * Applications need the ZRTP context pointer if they call ZRTP specific
+ * methods. The ZRTP specific include file @c ZrtpCWrapper contains the
+ * descriptions of the ZRTP methods.
+ *
+ * @return Pointer to ZRTP context
+ *
+ * @see zrtp_setAuxSecret()
+ * @see zrtp_setPbxSecret()
+ * @see zrtp_inState()
+ * @see zrtp_SASVerified()
+ * @see zrtp_resetSASVerified()
+ * @see zrtp_getHelloHash()
+ * @see zrtp_getMultiStrParams()
+ * @see zrtp_setMultiStrParams()
+ * @see zrtp_isMultiStream()
+ * @see zrtp_isMultiStreamAvailable()
+ * @see zrtp_acceptEnrollment()
+ * @see zrtp_setSignatureData()
+ * @see zrtp_getSignatureData()
+ * @see zrtp_getSignatureLength()
+ * @see zrtp_getZid();
+ */
+PJ_DECL(ZrtpContext*) pjmedia_transport_zrtp_getZrtpContext(pjmedia_transport *tp);
+
+PJ_END_DECL
+
+
+/**
+ * @}
+ */
+
+#endif	/* __PJMEDIA_TRANSPORT_ZRTP_H__ */
+
+
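
An illustrative sketch, not taken from the patch, of registering application callbacks with the ZRTP transport declared above; the handler names are hypothetical, and any pjmedia_zrtp_cb members left NULL are simply not reported:

static void my_secure_on(pjmedia_transport *tp, char *cipher)
{
    PJ_UNUSED_ARG(tp);
    PJ_LOG(3, ("app", "ZRTP secure, cipher=%s", cipher));
}

static void my_show_sas(pjmedia_transport *tp, char *sas, int32_t verified)
{
    PJ_UNUSED_ARG(tp);
    PJ_LOG(3, ("app", "ZRTP SAS: %s (verified=%d)", sas, (int)verified));
}

static pj_status_t init_zrtp(pjmedia_transport *zrtp_tp)
{
    static pjmedia_zrtp_cb zrtp_cb;

    pj_bzero(&zrtp_cb, sizeof(zrtp_cb));
    zrtp_cb.secure_on = &my_secure_on;
    zrtp_cb.show_sas  = &my_show_sas;

    /* "app.zid" is an assumed ZID file name */
    return pjmedia_transport_zrtp_initialize(zrtp_tp, "app.zid",
                                             PJ_TRUE, &zrtp_cb);
}
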
diff -ruN pjproject-2.10/pjmedia/include/pjmedia/vid_stream.h pjsip/pjmedia/include/pjmedia/vid_stream.h
--- pjproject-2.10/pjmedia/include/pjmedia/vid_stream.h 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/include/pjmedia/vid_stream.h 2021-02-06 18:35:36.234362341 +0100
@@ -471,6 +471,16 @@
pjmedia_vid_stream_get_rtp_session_info(pjmedia_vid_stream *stream,
pjmedia_stream_rtp_sess_info *session_info);
+/**
+ * Send RTCP PLI for the media stream.
+ *
+ * @param stream The media stream.
+ *
+ * @return PJ_SUCCESS on success.
+ */
+PJ_DECL(pj_status_t) pjmedia_vid_stream_send_rtcp_pli(
+ pjmedia_vid_stream *stream);
+
/**
* @}
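
A small usage sketch for the declaration above: requesting a keyframe from the remote encoder by sending an RTCP PLI on a video stream. The helper name is hypothetical and not part of the patch:

static void request_keyframe(pjmedia_vid_stream *stream)
{
    pj_status_t status = pjmedia_vid_stream_send_rtcp_pli(stream);
    if (status != PJ_SUCCESS) {
        /* e.g. log the error and retry on the next missing-keyframe event */
    }
}
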
--- pjproject-2.10/pjmedia/include/pjmedia/audiodev.h 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/include/pjmedia/audiodev.h 2021-02-06 16:53:28.395408793 +0100
@@ -29,6 +29,7 @@
* @file audiodev.h
* @brief Audio device API.
*/
+#include <pj/os.h>
#include <pjmedia-audiodev/config.h>
#include <pjmedia-audiodev/errno.h>
#include <pjmedia/format.h>
@@ -93,6 +94,38 @@
typedef pjmedia_aud_dev_factory*
(*pjmedia_aud_dev_factory_create_func_ptr)(pj_pool_factory*);
+typedef enum pjmedia_aud_dev_event {
+ PJMEDIA_AUD_DEV_DEFAULT_INPUT_CHANGED,
+ PJMEDIA_AUD_DEV_DEFAULT_OUTPUT_CHANGED,
+ PJMEDIA_AUD_DEV_LIST_WILL_REFRESH,
+ PJMEDIA_AUD_DEV_LIST_DID_REFRESH
+} pjmedia_aud_dev_event;
+
+
+typedef void (*pjmedia_aud_dev_observer_callback)(pjmedia_aud_dev_event event);
+
+/**
+ * This structure specifies the parameters to set an audio device observer
+ */
+typedef struct pjmedia_aud_dev_observer {
+ pjmedia_aud_dev_observer_callback cb;
+ pj_pool_t *pool;
+ pj_mutex_t *lock;
+ pj_thread_t *thread;
+ pj_thread_desc thread_desc;
+} pjmedia_aud_dev_observer;
+
+/**
+ * Set an audio device observer callback.
+ *
+ * @param cb		The callback that needs to be registered, or NULL
+ *			in case it needs to be unregistered. Only one callback
+ * can be registered.
+ *
+ * @return PJ_SUCCESS on successful operation or the appropriate
+ * error code.
+ */
+PJ_DECL(pj_status_t) pjmedia_aud_dev_set_observer_cb(pjmedia_aud_dev_observer_callback cb);
/* Audio driver structure */
typedef struct pjmedia_aud_driver
@@ -120,6 +150,8 @@
unsigned dev_cnt; /* Total number of devices. */
pj_uint32_t dev_list[PJMEDIA_AUD_MAX_DEVS];/* Array of device IDs. */
+ pjmedia_aud_dev_observer dev_observer;
+
} pjmedia_aud_subsys;
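
A hedged sketch of using the observer API declared above; the handler name is hypothetical and the switch only indicates where an application would react:

static void on_aud_dev_event(pjmedia_aud_dev_event event)
{
    switch (event) {
    case PJMEDIA_AUD_DEV_DEFAULT_INPUT_CHANGED:
    case PJMEDIA_AUD_DEV_DEFAULT_OUTPUT_CHANGED:
        /* e.g. re-open the sound device on the new default */
        break;
    case PJMEDIA_AUD_DEV_LIST_WILL_REFRESH:
    case PJMEDIA_AUD_DEV_LIST_DID_REFRESH:
        /* e.g. re-enumerate devices once the refresh completes */
        break;
    }
}

static pj_status_t install_audio_observer(void)
{
    return pjmedia_aud_dev_set_observer_cb(&on_aud_dev_event);
}
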
diff -ruN pjproject-2.10/pjmedia/include/pjmedia-audiodev/audiodev_imp.h pjsip/pjmedia/include/pjmedia-audiodev/audiodev_imp.h
--- pjproject-2.10/pjmedia/include/pjmedia-audiodev/audiodev_imp.h 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/include/pjmedia-audiodev/audiodev_imp.h 2021-02-06 18:37:31.685678395 +0100
@@ -29,6 +29,15 @@
* @{
*/
+typedef enum pjmedia_aud_dev_change_event {
+ DEFAULT_INPUT_CHANGED = 1,
+ DEFAULT_OUTPUT_CHANGED,
+ DEVICE_LIST_CHANGED
+} pjmedia_aud_dev_change_event;
+
+typedef void (*pjmedia_aud_dev_change_callback)(pjmedia_aud_dev_change_event event);
+
+
/**
* Sound device factory operations.
*/
@@ -99,6 +108,30 @@
*/
pj_status_t (*refresh)(pjmedia_aud_dev_factory *f);
+ /**
+ * Set audio device change callback
+ *
+ * @param f The audio device factory.
+ * @param cb The audio device change callback.
+ */
+ void (*set_dev_change_cb)(pjmedia_aud_dev_factory *f,
+ pjmedia_aud_dev_change_callback cb);
+
+ /**
+ * Get default recording device index
+ *
+ * @param f The audio device factory.
+ */
+ int (*get_default_rec_dev)(pjmedia_aud_dev_factory *f);
+
+ /**
+ * Get default playback device index
+ *
+ * @param f The audio device factory.
+ */
+ int (*get_default_play_dev)(pjmedia_aud_dev_factory *f);
+
+
} pjmedia_aud_dev_factory_op;
diff -ruN pjproject-2.10/pjmedia/include/pjmedia.h pjsip/pjmedia/include/pjmedia.h
--- pjproject-2.10/pjmedia/include/pjmedia.h 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/include/pjmedia.h 2021-02-06 18:40:23.766607985 +0100
@@ -44,6 +44,7 @@
#include <pjmedia/jbuf.h>
#include <pjmedia/master_port.h>
#include <pjmedia/mem_port.h>
+#include <pjmedia/mixer_port.h>
#include <pjmedia/null_port.h>
#include <pjmedia/plc.h>
#include <pjmedia/port.h>
@@ -67,6 +68,7 @@
#include <pjmedia/transport_ice.h>
#include <pjmedia/transport_loop.h>
#include <pjmedia/transport_srtp.h>
+#include <pjmedia/transport_zrtp.h>
#include <pjmedia/transport_udp.h>
#include <pjmedia/vid_codec.h>
#include <pjmedia/vid_conf.h>
@@ -74,7 +76,7 @@
#include <pjmedia/vid_conf.h>
#include <pjmedia/vid_port.h>
#include <pjmedia/vid_stream.h>
-//#include <pjmedia/vid_tee.h>
+#include <pjmedia/vid_tee.h>
#include <pjmedia/wav_playlist.h>
#include <pjmedia/wav_port.h>
#include <pjmedia/wave.h>
diff -ruN pjproject-2.10/pjmedia/include/pjmedia-videodev/fb_dev.h pjsip/pjmedia/include/pjmedia-videodev/fb_dev.h
--- pjproject-2.10/pjmedia/include/pjmedia-videodev/fb_dev.h 1970-01-01 01:00:00.000000000 +0100
+++ pjsip/pjmedia/include/pjmedia-videodev/fb_dev.h 2021-02-06 18:43:39.220193284 +0100
@@ -0,0 +1,32 @@
+/* $Id$ */
+/*
+ * Copyright (C) 2014-present AG Projects
+ * Copyright (C) 2013-2014 Teluu Inc. (http://www.teluu.com)
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+#ifndef PJMEDIA_VIDEODEV_FB_DEV_H__
+#define PJMEDIA_VIDEODEV_FB_DEV_H__
+
+#include <pjmedia-videodev/videodev_imp.h>
+
+typedef void (*pjmedia_vid_dev_fb_frame_cb)(const pjmedia_frame *frame, const pjmedia_rect_size size, void *user_data);
+
+pj_status_t
+pjmedia_vid_dev_fb_set_callback(pjmedia_vid_dev_stream *strm,
+ pjmedia_vid_dev_fb_frame_cb cb,
+ void *user_data);
+
+#endif /* PJMEDIA_VIDEODEV_FB_DEV_H__ */
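
Illustrative only: attaching a frame callback to a framebuffer video device stream through the API declared above; the handler and user_data are placeholders, not values from the patch:

static void on_fb_frame(const pjmedia_frame *frame,
                        const pjmedia_rect_size size,
                        void *user_data)
{
    /* e.g. hand frame->buf (size.w x size.h pixels) to the app's renderer */
    PJ_UNUSED_ARG(frame); PJ_UNUSED_ARG(size); PJ_UNUSED_ARG(user_data);
}

static pj_status_t attach_fb_callback(pjmedia_vid_dev_stream *strm, void *ctx)
{
    return pjmedia_vid_dev_fb_set_callback(strm, &on_fb_frame, ctx);
}
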
diff -ruN pjproject-2.10/pjmedia/include/pjmedia_videodev.h pjsip/pjmedia/include/pjmedia_videodev.h
--- pjproject-2.10/pjmedia/include/pjmedia_videodev.h 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/include/pjmedia_videodev.h 2021-02-06 18:39:51.429682633 +0100
@@ -27,5 +27,6 @@
#include <pjmedia-videodev/videodev.h>
#include <pjmedia-videodev/videodev_imp.h>
#include <pjmedia-videodev/avi_dev.h>
+#include <pjmedia-videodev/fb_dev.h>
#endif /* __PJMEDIA_VIDEODEV_H__ */
diff -ruN pjproject-2.10/pjmedia/src/pjmedia/converter.c pjsip/pjmedia/src/pjmedia/converter.c
--- pjproject-2.10/pjmedia/src/pjmedia/converter.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia/converter.c 2021-02-06 20:19:28.491163002 +0100
@@ -19,6 +19,7 @@
#include <pjmedia/converter.h>
#include <pj/assert.h>
#include <pj/errno.h>
+#include <pj/log.h>
#define THIS_FILE "converter.c"
@@ -174,6 +175,24 @@
if (status != PJ_SUCCESS)
return status;
+ if (param->src.type == PJMEDIA_TYPE_VIDEO) {
+ char src_fourcc_name[5];
+ char dst_fourcc_name[5];
+ PJ_LOG(4, (THIS_FILE, "Converter %p (%s) created for video: %dx%d %s -> %dx%d %s",
+ cv,
+ f->name,
+ param->src.det.vid.size.w,
+ param->src.det.vid.size.h,
+ pjmedia_fourcc_name(param->src.id, src_fourcc_name),
+ param->dst.det.vid.size.w,
+ param->dst.det.vid.size.h,
+ pjmedia_fourcc_name(param->dst.id, dst_fourcc_name)));
+ } else if (param->src.type == PJMEDIA_TYPE_AUDIO) {
+ PJ_LOG(4, (THIS_FILE, "Converter %p created for audio", cv));
+ } else {
+ PJ_LOG(4, (THIS_FILE, "Converter %p created for unknown", cv));
+ }
+
*p_cv = cv;
return PJ_SUCCESS;
@@ -188,6 +207,7 @@
PJ_DEF(void) pjmedia_converter_destroy(pjmedia_converter *cv)
{
+ PJ_LOG(4, (THIS_FILE, "Converter %p destroyed", cv));
(*cv->op->destroy)(cv);
}
diff -ruN pjproject-2.10/pjmedia/src/pjmedia/echo_common.c pjsip/pjmedia/src/pjmedia/echo_common.c
--- pjproject-2.10/pjmedia/src/pjmedia/echo_common.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia/echo_common.c 2021-02-06 20:22:22.888063103 +0100
@@ -135,7 +135,7 @@
#if defined(PJMEDIA_HAS_WEBRTC_AEC) && PJMEDIA_HAS_WEBRTC_AEC!=0
static struct ec_operations webrtc_aec_op =
{
- "WebRTC AEC",
+ "WEBRTC AEC",
&webrtc_aec_create,
&webrtc_aec_destroy,
&webrtc_aec_reset,
diff -ruN pjproject-2.10/pjmedia/src/pjmedia/echo_webrtc_aec.c pjsip/pjmedia/src/pjmedia/echo_webrtc_aec.c
--- pjproject-2.10/pjmedia/src/pjmedia/echo_webrtc_aec.c 1970-01-01 01:00:00.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia/echo_webrtc_aec.c 2021-02-06 18:45:18.907037776 +0100
@@ -0,0 +1,643 @@
+/**
+ * Copyright (C) 2011-2013 AG Projects
+ * Copyright (C) 2010 Regis Montoya (aka r3gis - www.r3gis.fr)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include <pjmedia/echo.h>
+#include <pjmedia/errno.h>
+#include <pjmedia/frame.h>
+#include <pj/assert.h>
+#include <pj/log.h>
+#include <pj/pool.h>
+
+
+#if defined(PJMEDIA_HAS_WEBRTC_AEC) && PJMEDIA_HAS_WEBRTC_AEC != 0
+
+/* 0: conservative, 1: moderate, 2: aggressive */
+#ifndef PJMEDIA_WEBRTC_AEC_AGGRESSIVENESS
+ #define PJMEDIA_WEBRTC_AEC_AGGRESSIVENESS 2
+#endif
+
+/* 0: mild, 1: medium, 2: aggressive */
+#ifndef PJMEDIA_WEBRTC_NS_POLICY
+ #define PJMEDIA_WEBRTC_NS_POLICY 0
+#endif
+
+#define THIS_FILE "echo_webrtc_aec.c"
+
+#include <third_party/webrtc/src/common_audio/signal_processing_library/main/interface/signal_processing_library.h>
+#include <third_party/webrtc/src/modules/audio_processing/aec/main/interface/echo_cancellation.h>
+#include <third_party/webrtc/src/modules/audio_processing/agc/main/interface/gain_control.h>
+#include <third_party/webrtc/src/modules/audio_processing/ns/main/interface/noise_suppression.h>
+
+#include "echo_internal.h"
+
+
+/*
+ * This file contains the implementation of an echo canceller and noise suppressor for PJSIP which uses components
+ * from the WebRTC project. Things to take into account:
+ *
+ * - The WebRTC engine works with 10ms frames, while PJSIP mostly uses 20ms frames, so all data fed to WebRTC elements
+ *   needs to be chunked into 10ms pieces.
+ * - When a 32kHz sampling rate is used, the WebRTC engine needs frames to be passed split into low and high frequencies. PJSIP
+ * will give us a frame with all frequencies, so the signal processing library in WebRTC must be used to split frames into low
+ * and high frequencies, and combine them later.
+ */
+
+
+typedef struct AudioBuffer
+{
+ int samples_per_channel;
+ pj_bool_t is_split;
+
+ WebRtc_Word16* data;
+ WebRtc_Word16 low_pass_data[160];
+ WebRtc_Word16 high_pass_data[160];
+
+ WebRtc_Word32 analysis_filter_state1[6];
+ WebRtc_Word32 analysis_filter_state2[6];
+ WebRtc_Word32 synthesis_filter_state1[6];
+ WebRtc_Word32 synthesis_filter_state2[6];
+} AudioBuffer;
+
+static WebRtc_Word16* AudioBuffer_GetData(AudioBuffer *ab);
+static WebRtc_Word16* AudioBuffer_GetLowPassData(AudioBuffer *ab);
+static WebRtc_Word16* AudioBuffer_GetHighPassData(AudioBuffer *ab);
+static void AudioBuffer_SetData(AudioBuffer *ab, WebRtc_Word16 *data);
+static void AudioBuffer_Initialize(AudioBuffer *ab, int sample_rate);
+static int AudioBuffer_SamplesPerChannel(AudioBuffer *ab);
+
+
+static WebRtc_Word16* AudioBuffer_GetData(AudioBuffer *ab)
+{
+ pj_assert(ab->data);
+
+ if (ab->is_split) {
+ WebRtcSpl_SynthesisQMF(ab->low_pass_data,
+ ab->high_pass_data,
+ ab->data,
+ ab->synthesis_filter_state1,
+ ab->synthesis_filter_state2);
+ }
+ return ab->data;
+}
+
+
+static WebRtc_Word16* AudioBuffer_GetLowPassData(AudioBuffer *ab)
+{
+ if (!ab->is_split) {
+ return ab->data;
+ } else {
+ return ab->low_pass_data;
+ }
+}
+
+
+static WebRtc_Word16* AudioBuffer_GetHighPassData(AudioBuffer *ab)
+{
+ if (!ab->is_split) {
+ return ab->data;
+ } else {
+ return ab->high_pass_data;
+ }
+}
+
+
+static void AudioBuffer_Initialize(AudioBuffer *ab, int sample_rate)
+{
+ pj_bzero(ab, sizeof(AudioBuffer));
+ if (sample_rate == 32000) {
+ ab->is_split = PJ_TRUE;
+ ab->samples_per_channel = 160;
+ } else {
+ ab->is_split = PJ_FALSE;
+ ab->samples_per_channel = sample_rate / 100;
+ }
+}
+
+
+static void AudioBuffer_SetData(AudioBuffer *ab, WebRtc_Word16 *data)
+{
+ ab->data = data;
+ if (ab->is_split) {
+ /* split data into low and high bands */
+ WebRtcSpl_AnalysisQMF(ab->data, /* input data */
+ ab->low_pass_data, /* pointer to low pass data storage*/
+ ab->high_pass_data, /* pointer to high pass data storage*/
+ ab->analysis_filter_state1,
+ ab->analysis_filter_state2);
+ }
+}
+
+
+static int AudioBuffer_SamplesPerChannel(AudioBuffer *ab)
+{
+ return ab->samples_per_channel;
+}
+
+
+const WebRtc_Word16 kFilterCoefficients8kHz[5] =
+ {3798, -7596, 3798, 7807, -3733};
+
+const WebRtc_Word16 kFilterCoefficients[5] =
+ {4012, -8024, 4012, 8002, -3913};
+
+typedef struct {
+ WebRtc_Word16 y[4];
+ WebRtc_Word16 x[2];
+ const WebRtc_Word16* ba;
+} HighPassFilterState;
+
+
+static int HighPassFilter_Initialize(HighPassFilterState* hpf, int sample_rate) {
+ assert(hpf != NULL);
+
+ if (sample_rate == 8000) {
+ hpf->ba = kFilterCoefficients8kHz;
+ } else {
+ hpf->ba = kFilterCoefficients;
+ }
+
+ WebRtcSpl_MemSetW16(hpf->x, 0, 2);
+ WebRtcSpl_MemSetW16(hpf->y, 0, 4);
+
+ return 0;
+}
+
+
+static int HighPassFilter_Process(HighPassFilterState* hpf, WebRtc_Word16* data, int length) {
+ assert(hpf != NULL);
+
+ int i;
+ WebRtc_Word32 tmp_int32 = 0;
+ WebRtc_Word16* y = hpf->y;
+ WebRtc_Word16* x = hpf->x;
+ const WebRtc_Word16* ba = hpf->ba;
+
+ for (i = 0; i < length; i++) {
+ // y[i] = b[0] * x[i] + b[1] * x[i-1] + b[2] * x[i-2]
+ // + -a[1] * y[i-1] + -a[2] * y[i-2];
+
+ tmp_int32 = WEBRTC_SPL_MUL_16_16(y[1], ba[3]); // -a[1] * y[i-1] (low part)
+ tmp_int32 += WEBRTC_SPL_MUL_16_16(y[3], ba[4]); // -a[2] * y[i-2] (low part)
+ tmp_int32 = (tmp_int32 >> 15);
+ tmp_int32 += WEBRTC_SPL_MUL_16_16(y[0], ba[3]); // -a[1] * y[i-1] (high part)
+ tmp_int32 += WEBRTC_SPL_MUL_16_16(y[2], ba[4]); // -a[2] * y[i-2] (high part)
+ tmp_int32 = (tmp_int32 << 1);
+
+ tmp_int32 += WEBRTC_SPL_MUL_16_16(data[i], ba[0]); // b[0]*x[0]
+ tmp_int32 += WEBRTC_SPL_MUL_16_16(x[0], ba[1]); // b[1]*x[i-1]
+ tmp_int32 += WEBRTC_SPL_MUL_16_16(x[1], ba[2]); // b[2]*x[i-2]
+
+ // Update state (input part)
+ x[1] = x[0];
+ x[0] = data[i];
+
+ // Update state (filtered part)
+ y[2] = y[0];
+ y[3] = y[1];
+ y[0] = (WebRtc_Word16)(tmp_int32 >> 13);
+ y[1] = (WebRtc_Word16)((tmp_int32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)(y[0]), 13)) << 2);
+
+ // Rounding in Q12, i.e. add 2^11
+ tmp_int32 += 2048;
+
+ // Saturate (to 2^27) so that the HP filtered signal does not overflow
+ tmp_int32 = WEBRTC_SPL_SAT((WebRtc_Word32)(134217727), tmp_int32, (WebRtc_Word32)(-134217728));
+
+ // Convert back to Q0 and use rounding
+ data[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp_int32, 12);
+
+ }
+
+ return 0;
+}
+
+
+typedef struct webrtc_ec
+{
+ void *AEC_inst;
+ void *AGC_inst;
+ NsHandle *NS_inst;
+
+ pj_bool_t needs_reset;
+ unsigned skip_frames;
+ unsigned silence_frames;
+
+ unsigned clock_rate;
+ unsigned echo_tail;
+ unsigned samples_per_frame;
+ unsigned samples_per_10ms_frame;
+
+ WebRtc_Word32 mic_capture_level;
+ WebRtc_Word16 has_echo;
+ WebRtc_UWord8 is_saturated;
+
+ HighPassFilterState hpf;
+ AudioBuffer capture_audio_buffer;
+ AudioBuffer playback_audio_buffer;
+
+ pj_int16_t *tmp_frame;
+ pj_int16_t *empty_frame;
+} webrtc_ec;
+
+
+#define WEBRTC_AEC_ERROR(aec_inst, tag) \
+ do { \
+ unsigned status = WebRtcAec_get_error_code(aec_inst); \
+ PJ_LOG(4, (THIS_FILE, "WebRTC AEC ERROR (%s) %d", tag, status)); \
+ } while (0) \
+
+
+#define WEBRTC_AGC_ERROR(ns_inst, text) \
+ do { \
+ PJ_LOG(4, (THIS_FILE, "WebRTC AGC ERROR (%s)", text)); \
+ } while (0) \
+
+
+#define WEBRTC_NS_ERROR(ns_inst, text) \
+ do { \
+ PJ_LOG(4, (THIS_FILE, "WebRTC NS ERROR (%s)", text)); \
+ } while (0) \
+
+
+PJ_DEF(pj_status_t) webrtc_aec_create(pj_pool_t *pool,
+ unsigned clock_rate,
+ unsigned channel_count,
+ unsigned samples_per_frame,
+ unsigned tail_ms,
+ unsigned options,
+ void **p_echo )
+{
+ webrtc_ec *echo;
+ int status;
+
+ *p_echo = NULL;
+
+ if (clock_rate != 16000 && clock_rate != 32000) {
+ PJ_LOG(4, (THIS_FILE, "Unsupported sample rate: %d", clock_rate));
+ return PJ_EINVAL;
+ }
+
+ echo = PJ_POOL_ZALLOC_T(pool, webrtc_ec);
+ PJ_ASSERT_RETURN(echo != NULL, PJ_ENOMEM);
+
+ status = WebRtcAec_Create(&echo->AEC_inst);
+ if(status != 0) {
+ PJ_LOG(4, (THIS_FILE, "Couldn't allocate memory for WebRTC AEC"));
+ goto error;
+ }
+
+ status = WebRtcAec_Init(echo->AEC_inst, clock_rate, clock_rate);
+ if(status != 0) {
+ WEBRTC_AEC_ERROR(echo->AEC_inst, "initialization");
+ goto error;
+ }
+
+ AecConfig aec_config;
+ aec_config.nlpMode = PJMEDIA_WEBRTC_AEC_AGGRESSIVENESS;
+ aec_config.skewMode = kAecFalse;
+ aec_config.metricsMode = kAecFalse;
+
+ status = WebRtcAec_set_config(echo->AEC_inst, aec_config);
+ if(status != 0) {
+ WEBRTC_AEC_ERROR(echo->AEC_inst, "config initialization");
+ goto error;
+ }
+
+ status = WebRtcAgc_Create(&echo->AGC_inst);
+ if(status != 0) {
+ PJ_LOG(4, (THIS_FILE, "Couldn't allocate memory for WebRTC AGC"));
+ goto error;
+ }
+
+ status = WebRtcAgc_Init(echo->AGC_inst, 0, 255, kAgcModeAdaptiveAnalog, clock_rate);
+ if(status != 0) {
+ WEBRTC_AGC_ERROR(echo->AGC_inst, "initialization");
+ goto error;
+ }
+
+ WebRtcAgc_config_t agc_config;
+ agc_config.targetLevelDbfs = 7;
+ agc_config.compressionGaindB = 0;
+ agc_config.limiterEnable = kAgcFalse;
+
+ status = WebRtcAgc_set_config(echo->AGC_inst, agc_config);
+ if(status != 0) {
+ WEBRTC_AGC_ERROR(echo->AGC_inst, "config initialization");
+ goto error;
+ }
+
+ status = WebRtcNs_Create(&echo->NS_inst);
+ if(status != 0) {
+ PJ_LOG(4, (THIS_FILE, "Couldn't allocate memory for WebRTC NS"));
+ goto error;
+ }
+
+ status = WebRtcNs_Init(echo->NS_inst, clock_rate);
+ if(status != 0) {
+ WEBRTC_NS_ERROR(echo->NS_inst, "initialization");
+ goto error;
+ }
+
+ status = WebRtcNs_set_policy(echo->NS_inst, PJMEDIA_WEBRTC_NS_POLICY);
+ if (status != 0) {
+ WEBRTC_NS_ERROR(echo->NS_inst, "failed to set policy");
+ }
+
+ echo->clock_rate = clock_rate;
+ echo->samples_per_frame = samples_per_frame;
+ echo->samples_per_10ms_frame = clock_rate / 100; /* the WebRTC engine works with 10ms frames */
+ echo->echo_tail = tail_ms;
+ echo->needs_reset = PJ_TRUE;
+ echo->skip_frames = 0;
+ echo->silence_frames = 0;
+ echo->mic_capture_level = 255; /* initial mic capture level, maximum */
+
+ /* Allocate temporary frames for echo cancellation */
+ echo->tmp_frame = (pj_int16_t*) pj_pool_zalloc(pool, sizeof(pj_int16_t)*samples_per_frame);
+ PJ_ASSERT_RETURN(echo->tmp_frame, PJ_ENOMEM);
+
+ echo->empty_frame = (pj_int16_t*) pj_pool_zalloc(pool, sizeof(pj_int16_t)*samples_per_frame);
+ PJ_ASSERT_RETURN(echo->empty_frame, PJ_ENOMEM);
+
+ /* Initialize audio buffers */
+ AudioBuffer_Initialize(&echo->capture_audio_buffer, clock_rate);
+ AudioBuffer_Initialize(&echo->playback_audio_buffer, clock_rate);
+
+ /* Initialize high pass filter */
+ HighPassFilter_Initialize(&echo->hpf, clock_rate);
+
+ PJ_LOG(4, (THIS_FILE, "WebRTC AEC and NS initialized"));
+ *p_echo = echo;
+ return PJ_SUCCESS;
+
+error:
+ if (echo->AEC_inst)
+ WebRtcAec_Free(echo->AEC_inst);
+ if (echo->AGC_inst)
+ WebRtcAgc_Free(echo->AGC_inst);
+ if (echo->NS_inst)
+ WebRtcNs_Free(echo->NS_inst);
+ return PJ_EBUG;
+}
+
+
+PJ_DEF(pj_status_t) webrtc_aec_destroy(void *state )
+{
+ webrtc_ec *echo = (webrtc_ec*) state;
+ PJ_ASSERT_RETURN(echo, PJ_EINVAL);
+
+ if (echo->AEC_inst) {
+ WebRtcAec_Free(echo->AEC_inst);
+ echo->AEC_inst = NULL;
+ }
+ if (echo->AGC_inst) {
+ WebRtcAgc_Free(echo->AGC_inst);
+ echo->AGC_inst = NULL;
+ }
+ if (echo->NS_inst) {
+ WebRtcNs_Free(echo->NS_inst);
+ echo->NS_inst = NULL;
+ }
+
+ return PJ_SUCCESS;
+}
+
+
+PJ_DEF(void) webrtc_aec_reset(void *state)
+{
+ /* Defer the actual reset until just before the next frame is processed, to avoid race conditions */
+ ((webrtc_ec*)state)->needs_reset = PJ_TRUE;
+}
+
+
+static void aec_reset(webrtc_ec *echo)
+{
+ PJ_ASSERT_ON_FAIL(echo && echo->AEC_inst && echo->AGC_inst && echo->NS_inst, {return;});
+
+ int status = 0;
+
+ /* re-initialize the AEC */
+ status = WebRtcAec_Init(echo->AEC_inst, echo->clock_rate, echo->clock_rate);
+ if(status != 0) {
+ WEBRTC_AEC_ERROR(echo->AEC_inst, "re-initialization");
+ return;
+ }
+
+ AecConfig aec_config;
+ aec_config.nlpMode = PJMEDIA_WEBRTC_AEC_AGGRESSIVENESS;
+ aec_config.skewMode = kAecFalse;
+ aec_config.metricsMode = kAecFalse;
+
+ status = WebRtcAec_set_config(echo->AEC_inst, aec_config);
+ if(status != 0) {
+ WEBRTC_AEC_ERROR(echo->AEC_inst, "configuration re-initialization");
+ return;
+ }
+
+ /* re-initialize the AGC */
+ status = WebRtcAgc_Init(echo->AGC_inst, 0, 255, kAgcModeAdaptiveAnalog, echo->clock_rate);
+ if(status != 0) {
+ WEBRTC_AGC_ERROR(echo->AGC_inst, "initialization");
+ return;
+ }
+
+ WebRtcAgc_config_t agc_config;
+ agc_config.targetLevelDbfs = 7;
+ agc_config.compressionGaindB = 0;
+ agc_config.limiterEnable = kAgcFalse;
+
+ status = WebRtcAgc_set_config(echo->AGC_inst, agc_config);
+ if(status != 0) {
+ WEBRTC_AGC_ERROR(echo->AGC_inst, "config initialization");
+ return;
+ }
+
+ /* re-initialize the NS */
+ status = WebRtcNs_Init(echo->NS_inst, echo->clock_rate);
+ if(status != 0) {
+ WEBRTC_NS_ERROR(echo->NS_inst, "re-initialization");
+ return;
+ }
+
+ status = WebRtcNs_set_policy(echo->NS_inst, PJMEDIA_WEBRTC_NS_POLICY);
+ if (status != 0) {
+ WEBRTC_NS_ERROR(echo->NS_inst, "configuration re-initialization");
+ return;
+ }
+
+ /* re-initialize audio buffers */
+ AudioBuffer_Initialize(&echo->capture_audio_buffer, echo->clock_rate);
+ AudioBuffer_Initialize(&echo->playback_audio_buffer, echo->clock_rate);
+
+ /* re-initialize high pass filter state */
+ HighPassFilter_Initialize(&echo->hpf, echo->clock_rate);
+
+ /* re-initialize mic level */
+ echo->mic_capture_level = 255;
+
+ PJ_LOG(4, (THIS_FILE, "WebRTC AEC reset succeeded"));
+}
+
+
+/*
+ * Perform echo cancellation.
+ */
+PJ_DEF(pj_status_t) webrtc_aec_cancel_echo(void *state,
+ pj_int16_t *rec_frm,
+ const pj_int16_t *play_frm,
+ unsigned options,
+ void *reserved)
+{
+ webrtc_ec *echo = (webrtc_ec*) state;
+ pj_int16_t *capture_frame, *result_frame;
+ int i, status;
+
+ /* Sanity checks */
+ PJ_ASSERT_RETURN(echo && echo->AEC_inst && echo->AGC_inst && echo->NS_inst, PJ_EINVAL);
+ PJ_ASSERT_RETURN(rec_frm && play_frm && options==0 && reserved==NULL, PJ_EINVAL);
+
+ /* Check if a reset is needed */
+ if (echo->needs_reset) {
+ aec_reset(echo);
+ echo->needs_reset = PJ_FALSE;
+ echo->skip_frames = 15;
+ echo->silence_frames = 10;
+ }
+
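+ /* Right after a reset, both input and output are replaced with silence for
+ * skip_frames iterations; then the input is processed but the output stays
+ * muted for silence_frames more iterations (presumably to let the freshly
+ * re-initialized AEC/AGC/NS state settle). */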
+ if (echo->skip_frames) {
+ echo->skip_frames--;
+ capture_frame = echo->empty_frame;
+ result_frame = echo->empty_frame;
+ } else if (echo->silence_frames) {
+ echo->silence_frames--;
+ capture_frame = rec_frm;
+ result_frame = echo->empty_frame;
+ } else {
+ capture_frame = rec_frm;
+ result_frame = echo->tmp_frame;
+ }
+
+ /* Copy the capture frame to a temporary buffer so that, if processing fails, the audio is returned unchanged */
+ pjmedia_copy_samples(echo->tmp_frame, capture_frame, echo->samples_per_frame);
+
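+ /* Each 10ms chunk of tmp_frame goes through the high pass filter, AGC
+ * analysis, farend buffering (AGC and AEC), noise suppression, echo
+ * cancellation and gain control. */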
+ for(i=0; i < echo->samples_per_frame; i+= echo->samples_per_10ms_frame) {
+ /* feed a 10ms frame into the audio buffers */
+ AudioBuffer_SetData(&echo->capture_audio_buffer, (WebRtc_Word16 *) (&echo->tmp_frame[i]));
+ AudioBuffer_SetData(&echo->playback_audio_buffer, (WebRtc_Word16 *) (&play_frm[i]));
+
+ /* Apply high pass filter */
+ HighPassFilter_Process(&echo->hpf,
+ AudioBuffer_GetLowPassData(&echo->capture_audio_buffer),
+ AudioBuffer_SamplesPerChannel(&echo->capture_audio_buffer));
+
+ /* Analyze capture data gain
+ * NOTE: if we used kAgcModeAdaptiveDigital we'd use WebRtcAgc_VirtualMic instead
+ */
+ status = WebRtcAgc_AddMic(echo->AGC_inst,
+ AudioBuffer_GetLowPassData(&echo->capture_audio_buffer),
+ AudioBuffer_GetHighPassData(&echo->capture_audio_buffer),
+ AudioBuffer_SamplesPerChannel(&echo->capture_audio_buffer));
+ if(status != 0) {
+ WEBRTC_AGC_ERROR(echo->AGC_inst, "gain analysis");
+ return PJ_EBUG;
+ }
+
+ /* Feed farend buffer to AGC */
+ status = WebRtcAgc_AddFarend(echo->AGC_inst,
+ AudioBuffer_GetLowPassData(&echo->playback_audio_buffer),
+ AudioBuffer_SamplesPerChannel(&echo->playback_audio_buffer));
+ if(status != 0) {
+ WEBRTC_AGC_ERROR(echo->AGC_inst, "farend buffering");
+ return PJ_EBUG;
+ }
+
+ /* Feed farend buffer to AEC */
+ status = WebRtcAec_BufferFarend(echo->AEC_inst,
+ AudioBuffer_GetLowPassData(&echo->playback_audio_buffer),
+ AudioBuffer_SamplesPerChannel(&echo->playback_audio_buffer));
+ if(status != 0) {
+ WEBRTC_AEC_ERROR(echo->AEC_inst, "farend buffering");
+ return PJ_EBUG;
+ }
+
+ /* Noise suppression */
+ status = WebRtcNs_Process(echo->NS_inst,
+ AudioBuffer_GetLowPassData(&echo->capture_audio_buffer),
+ AudioBuffer_GetHighPassData(&echo->capture_audio_buffer),
+ AudioBuffer_GetLowPassData(&echo->capture_audio_buffer),
+ AudioBuffer_GetHighPassData(&echo->capture_audio_buffer));
+ if (status != 0) {
+ WEBRTC_NS_ERROR(echo->NS_inst, "ns processing");
+ return PJ_EBUG;
+ }
+
+ /* Process echo cancellation */
+ status = WebRtcAec_Process(echo->AEC_inst,
+ AudioBuffer_GetLowPassData(&echo->capture_audio_buffer),
+ AudioBuffer_GetHighPassData(&echo->capture_audio_buffer),
+ AudioBuffer_GetLowPassData(&echo->capture_audio_buffer),
+ AudioBuffer_GetHighPassData(&echo->capture_audio_buffer),
+ AudioBuffer_SamplesPerChannel(&echo->capture_audio_buffer),
+ echo->echo_tail,
+ 0);
+ if(status != 0) {
+ WEBRTC_AEC_ERROR(echo->AEC_inst, "echo processing");
+ return PJ_EBUG;
+ }
+
+ WebRtcAec_get_echo_status(echo->AEC_inst, &echo->has_echo);
+#if 0
+ if (echo->has_echo) {
+ PJ_LOG(4, (THIS_FILE, "Sound might have echo"));
+ }
+#endif
+
+ /* Process gain control */
+ status = WebRtcAgc_Process(echo->AGC_inst,
+ AudioBuffer_GetLowPassData(&echo->capture_audio_buffer),
+ AudioBuffer_GetHighPassData(&echo->capture_audio_buffer),
+ AudioBuffer_SamplesPerChannel(&echo->capture_audio_buffer),
+ AudioBuffer_GetLowPassData(&echo->capture_audio_buffer),
+ AudioBuffer_GetHighPassData(&echo->capture_audio_buffer),
+ echo->mic_capture_level,
+ &echo->mic_capture_level,
+ echo->has_echo,
+ &echo->is_saturated);
+ if (status != 0) {
+ WEBRTC_AGC_ERROR(echo->AGC_inst, "agc processing");
+ return PJ_EBUG;
+ }
+#if 0
+ if (echo->is_saturated) {
+ PJ_LOG(4, (THIS_FILE, "Sound might be saturated"));
+ }
+#endif
+
+ /* finish frame processing; when working at 32kHz the low and high bands are recombined */
+ AudioBuffer_GetData(&echo->capture_audio_buffer);
+ }
+
+ /* Copy temporary buffer back to original rec_frm */
+ pjmedia_copy_samples(rec_frm, result_frame, echo->samples_per_frame);
+
+ return PJ_SUCCESS;
+
+}
+
+
+#endif
diff -ruN pjproject-2.10/pjmedia/src/pjmedia/endpoint.c pjsip/pjmedia/src/pjmedia/endpoint.c
--- pjproject-2.10/pjmedia/src/pjmedia/endpoint.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia/endpoint.c 2021-02-06 20:23:26.353848918 +0100
@@ -688,6 +688,7 @@
/* Put bandwidth info in media level using bandwidth modifier "TIAS"
* (RFC3890).
*/
+#if 0
if (max_bitrate && pjmedia_add_bandwidth_tias_in_sdp) {
const pj_str_t STR_BANDW_MODIFIER = { "TIAS", 4 };
pjmedia_sdp_bandw *b;
@@ -697,6 +698,7 @@
b->value = max_bitrate;
m->bandw[m->bandw_count++] = b;
}
+#endif
*p_m = m;
return PJ_SUCCESS;
--- pjproject-2.10/pjmedia/src/pjmedia/ffmpeg_util.h 2021-03-19 16:51:27.589562664 +0100
+++ pjsip/pjmedia/src/pjmedia/ffmpeg_util.h 2021-03-19 17:00:32.821251088 +0100
@@ -38,13 +38,13 @@
#include <libavcodec/avcodec.h>
-#ifdef PJMEDIA_USE_OLD_FFMPEG
-# define AVPixelFormat PixelFormat
-# define AV(str) str
-# define PIX_FMT_GBRP PIX_FMT_GBR24P
-#else
+//#ifdef PJMEDIA_USE_OLD_FFMPEG
+//# define AVPixelFormat PixelFormat
+//# define AV(str) str
+//# define PIX_FMT_GBRP PIX_FMT_GBR24P
+//#else
# define AV(str) AV_ ## str
-#endif
+//#endif
void pjmedia_ffmpeg_add_ref();
diff -ruN pjproject-2.10/pjmedia/src/pjmedia/ffmpeg_util.c pjsip/pjmedia/src/pjmedia/ffmpeg_util.c
--- pjproject-2.10/pjmedia/src/pjmedia/ffmpeg_util.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia/ffmpeg_util.c 2021-02-06 20:25:00.472499296 +0100
@@ -34,10 +34,11 @@
enum AVPixelFormat pf;
} ffmpeg_fmt_table[] =
{
+ { PJMEDIA_FORMAT_ARGB, AV(PIX_FMT_ARGB)},
{ PJMEDIA_FORMAT_RGBA, AV(PIX_FMT_RGBA)},
{ PJMEDIA_FORMAT_RGB24,AV(PIX_FMT_BGR24)},
{ PJMEDIA_FORMAT_BGRA, AV(PIX_FMT_BGRA)},
- { PJMEDIA_FORMAT_GBRP, AV(PIX_FMT_GBRP)},
+ { PJMEDIA_FORMAT_GBRP, AV(PIX_FMT_GBR24P)},
{ PJMEDIA_FORMAT_AYUV, AV(PIX_FMT_NONE)},
{ PJMEDIA_FORMAT_YUY2, AV(PIX_FMT_YUYV422)},
diff -ruN pjproject-2.10/pjmedia/src/pjmedia/format.c pjsip/pjmedia/src/pjmedia/format.c
--- pjproject-2.10/pjmedia/src/pjmedia/format.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia/format.c 2021-02-06 20:25:25.677209428 +0100
@@ -77,6 +77,7 @@
static pjmedia_video_format_info built_in_vid_fmt_info[] =
{
{PJMEDIA_FORMAT_RGB24, "RGB24", PJMEDIA_COLOR_MODEL_RGB, 24, 1, &apply_packed_fmt},
+ {PJMEDIA_FORMAT_ARGB, "ARGB", PJMEDIA_COLOR_MODEL_RGB, 32, 1, &apply_packed_fmt},
{PJMEDIA_FORMAT_RGBA, "RGBA", PJMEDIA_COLOR_MODEL_RGB, 32, 1, &apply_packed_fmt},
{PJMEDIA_FORMAT_BGRA, "BGRA", PJMEDIA_COLOR_MODEL_RGB, 32, 1, &apply_packed_fmt},
{PJMEDIA_FORMAT_DIB , "DIB ", PJMEDIA_COLOR_MODEL_RGB, 24, 1, &apply_packed_fmt},
diff -ruN pjproject-2.10/pjmedia/src/pjmedia/mixer_port.c pjsip/pjmedia/src/pjmedia/mixer_port.c
--- pjproject-2.10/pjmedia/src/pjmedia/mixer_port.c 1970-01-01 01:00:00.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia/mixer_port.c 2021-02-06 18:45:36.859549796 +0100
@@ -0,0 +1,141 @@
+/*
+ * Copyright (C) 2010 AG Projects
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+
+#include <string.h>
+
+#include <pjmedia/mixer_port.h>
+#include <pjmedia/errno.h>
+#include <pj/assert.h>
+#include <pj/pool.h>
+#include <pj/string.h>
+
+
+#define SIGNATURE PJMEDIA_SIG_PORT_MIXER
+#define MIN(a, b) ((a)>(b)?(b):(a))
+
+struct mixer_port
+{
+ pjmedia_port base;
+ pjmedia_frame_type last_frame_type;
+ pj_size_t last_frame_size;
+ pj_timestamp last_frame_timestamp;
+ pj_int16_t* buffer;
+ pj_size_t buffer_size;
+};
+
+static pj_status_t mixer_get_frame(pjmedia_port *port, pjmedia_frame *frame);
+static pj_status_t mixer_put_frame(pjmedia_port *port, pjmedia_frame *frame);
+static pj_status_t mixer_on_destroy(pjmedia_port *port);
+
+
+PJ_DEF(pj_status_t) pjmedia_mixer_port_create(pj_pool_t *pool,
+ unsigned sampling_rate,
+ unsigned channel_count,
+ unsigned samples_per_frame,
+ unsigned bits_per_sample,
+ pjmedia_port **p_port)
+{
+ struct mixer_port *port;
+ const pj_str_t name = pj_str("mixer-port");
+
+ PJ_ASSERT_RETURN(pool && p_port, PJ_EINVAL);
+
+ port = PJ_POOL_ZALLOC_T(pool, struct mixer_port);
+ PJ_ASSERT_RETURN(port != NULL, PJ_ENOMEM);
+
+ pjmedia_port_info_init(&port->base.info, &name, SIGNATURE, sampling_rate,
+ channel_count, bits_per_sample, samples_per_frame);
+
+ port->base.get_frame = &mixer_get_frame;
+ port->base.put_frame = &mixer_put_frame;
+ port->base.on_destroy = &mixer_on_destroy;
+ port->last_frame_type = PJMEDIA_FRAME_TYPE_NONE;
+ port->last_frame_size = 0;
+ port->last_frame_timestamp.u64 = 0;
+ port->buffer = (pj_int16_t*) pj_pool_calloc(pool, samples_per_frame, sizeof(pj_int16_t));
+ port->buffer_size = sizeof(pj_int16_t) * samples_per_frame;
+
+ *p_port = &port->base;
+
+ return PJ_SUCCESS;
+}
+
+
+
+/*
+ * Put frame: store the incoming audio frame in the mixer port's buffer.
+ */
+static pj_status_t mixer_put_frame(pjmedia_port *this_port, pjmedia_frame *frame)
+{
+ struct mixer_port* port = (struct mixer_port*) this_port;
+
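+ /* A non-audio or empty frame clears the stored state, so mixer_get_frame()
+ * will report PJMEDIA_FRAME_TYPE_NONE until new audio arrives. */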
+ if (!frame->size || frame->type != PJMEDIA_FRAME_TYPE_AUDIO) {
+ port->last_frame_type = PJMEDIA_FRAME_TYPE_NONE;
+ port->last_frame_size = 0;
+ port->last_frame_timestamp.u64 = 0;
+ return PJ_SUCCESS;
+ }
+
+ PJ_ASSERT_RETURN(frame->size <= port->buffer_size, PJ_EINVAL);
+
+ port->last_frame_type = frame->type;
+ pj_get_timestamp(&port->last_frame_timestamp);
+ port->last_frame_size = MIN(port->buffer_size, frame->size);
+ memcpy(port->buffer, frame->buf, port->last_frame_size);
+
+ return PJ_SUCCESS;
+}
+
+
+/*
+ * Get frame: return the most recently stored frame, or a NONE frame when it has gone stale.
+ */
+static pj_status_t mixer_get_frame(pjmedia_port *this_port, pjmedia_frame *frame)
+{
+ struct mixer_port* port = (struct mixer_port*) this_port;
+ pj_timestamp now;
+ pj_uint32_t frame_age;
+
+ pj_get_timestamp(&now);
+ frame_age = pj_elapsed_usec(&port->last_frame_timestamp, &now);
+
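+ /* Replay the buffered frame only if one was stored and it is at most
+ * 100 ms (100000 usec) old; otherwise report PJMEDIA_FRAME_TYPE_NONE. */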
+ if (port->last_frame_timestamp.u64 != 0 && frame_age <= 100000) {
+ frame->type = port->last_frame_type;
+ frame->size = port->last_frame_size;
+ frame->timestamp.u64 = 0;
+ if (port->last_frame_size > 0) {
+ memcpy(frame->buf, port->buffer, port->last_frame_size);
+ }
+ } else {
+ frame->type = PJMEDIA_FRAME_TYPE_NONE;
+ frame->size = 0;
+ frame->timestamp.u64 = 0;
+ }
+
+ return PJ_SUCCESS;
+}
+
+
+/*
+ * Destroy port.
+ */
+static pj_status_t mixer_on_destroy(pjmedia_port *this_port)
+{
+ return PJ_SUCCESS;
+}
+
diff -ruN pjproject-2.10/pjmedia/src/pjmedia/rtcp.c pjsip/pjmedia/src/pjmedia/rtcp.c
--- pjproject-2.10/pjmedia/src/pjmedia/rtcp.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia/rtcp.c 2021-02-06 20:30:29.245771690 +0100
@@ -1003,6 +1003,33 @@
sess->stat.rx.update_cnt++;
}
+PJ_DEF(pj_status_t) pjmedia_rtcp_build_rtcp_pli(pjmedia_rtcp_session *session,
+ void *buf,
+ pj_size_t *length)
+{
+ pjmedia_rtcp_common *hdr;
+ pj_uint8_t *p;
+ pj_size_t len = 12; /* pjmedia_rtcp_common + media SSRC (uint32_t) */
+
+ PJ_ASSERT_RETURN(session && buf && length, PJ_EINVAL);
+
+ /* Verify buffer length */
+ if (len > *length)
+ return PJ_ETOOSMALL;
+
+ /* Build RTCP PLI */
+ hdr = (pjmedia_rtcp_common*)buf;
+ pj_memcpy(hdr, &session->rtcp_sr_pkt.common, sizeof(*hdr));
+ hdr->pt = RTCP_PSFB;
+ hdr->count = 1; /* FMT: 1 == Picture Loss Indication (PLI) */
+ hdr->length = pj_htons((pj_uint16_t)(len/4 - 1));
+
+ p = (pj_uint8_t*)hdr + sizeof(*hdr);
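+ /* Zero the rest of the packet; the 4-byte media source SSRC that follows
+ * the common header is left as 0. */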
+ pj_memset(p, 0, (pj_uint8_t*)hdr + len - p);
+ *length = len;
+ return PJ_SUCCESS;
+}
+
PJ_DEF(pj_status_t) pjmedia_rtcp_build_rtcp_sdes(
pjmedia_rtcp_session *session,
diff -ruN pjproject-2.10/pjmedia/src/pjmedia/sound_port.c pjsip/pjmedia/src/pjmedia/sound_port.c
--- pjproject-2.10/pjmedia/src/pjmedia/sound_port.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia/sound_port.c 2021-02-06 21:42:52.101118944 +0100
@@ -98,7 +98,7 @@
if (snd_port->ec_state) {
if (snd_port->ec_suspended) {
snd_port->ec_suspended = PJ_FALSE;
- //pjmedia_echo_state_reset(snd_port->ec_state);
+ pjmedia_echo_reset(snd_port->ec_state);
PJ_LOG(4,(THIS_FILE, "EC activated"));
}
snd_port->ec_suspend_count = 0;
@@ -312,14 +312,9 @@
snd_port->aud_param.ec_tail_ms));
}
- status = pjmedia_snd_port_set_ec(snd_port, pool,
- snd_port->aud_param.ec_tail_ms,
- snd_port->prm_ec_options);
- if (status != PJ_SUCCESS) {
- pjmedia_aud_stream_destroy(snd_port->aud_stream);
- snd_port->aud_stream = NULL;
- return status;
- }
+ pjmedia_snd_port_set_ec(snd_port, pool,
+ snd_port->aud_param.ec_tail_ms,
+ snd_port->prm_ec_options);
}
/* Start sound stream. */
@@ -550,6 +545,17 @@
}
+/* Reset EC state */
+PJ_DEF(pj_status_t) pjmedia_snd_port_reset_ec_state( pjmedia_snd_port *snd_port )
+{
+ PJ_ASSERT_RETURN(snd_port, PJ_EINVAL);
+ if (snd_port->ec_state) {
+ pjmedia_echo_reset(snd_port->ec_state);
+ PJ_LOG(4,(THIS_FILE, "EC reset"));
+ }
+ return PJ_SUCCESS;
+}
+
/*
* Change EC settings.
*/
diff -ruN pjproject-2.10/pjmedia/src/pjmedia/transport_ice.c pjsip/pjmedia/src/pjmedia/transport_ice.c
--- pjproject-2.10/pjmedia/src/pjmedia/transport_ice.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia/transport_ice.c 2021-02-06 21:40:50.021656143 +0100
@@ -163,6 +163,10 @@
pj_ice_strans_op op,
pj_status_t status);
+static void ice_on_ice_state(pj_ice_strans *ice_st,
+ pj_ice_strans_state prev,
+ pj_ice_strans_state curr);
+
/*
* Clean up ICE resources.
*/
@@ -280,6 +284,7 @@
/* Configure ICE callbacks */
pj_bzero(&ice_st_cb, sizeof(ice_st_cb));
ice_st_cb.on_ice_complete = &ice_on_ice_complete;
+ ice_st_cb.on_ice_state = &ice_on_ice_state;
ice_st_cb.on_rx_data = &ice_on_rx_data;
/* Configure RTP socket buffer settings, if not set */
@@ -314,6 +319,17 @@
return PJ_SUCCESS;
}
+/*
+ * Get the ICE stream transport associated with this media transport.
+ */
+PJ_DEF(pj_ice_strans*) pjmedia_ice_get_strans(pjmedia_transport *tp)
+{
+ struct transport_ice *tp_ice;
+
+ tp_ice = (struct transport_ice*) tp;
+ return tp_ice->ice_st;
+}
+
PJ_DEF(pj_grp_lock_t *) pjmedia_ice_get_grp_lock(pjmedia_transport *tp)
{
PJ_ASSERT_RETURN(tp, NULL);
@@ -403,6 +419,10 @@
"Stopping ICE, reason=%s", reason));
}
+ /* Notify application about ICE stop */
+ if (tp_ice->cb.on_ice_stop)
+ (*tp_ice->cb.on_ice_stop)(&tp_ice->base, (char *)reason, err);
+
if (tp_ice->ice_st) {
pj_ice_strans_stop_ice(tp_ice->ice_st);
}
@@ -1745,17 +1765,17 @@
if (status != PJ_SUCCESS)
return status;
- pj_sockaddr_cp(&info->sock_info.rtp_addr_name, &cand.addr);
+ pj_sockaddr_cp(&info->sock_info.rtp_addr_name, &cand.base_addr);
/* Get RTCP default address */
if (tp_ice->use_rtcp_mux) {
- pj_sockaddr_cp(&info->sock_info.rtcp_addr_name, &cand.addr);
+ pj_sockaddr_cp(&info->sock_info.rtcp_addr_name, &cand.base_addr);
} else if (tp_ice->comp_cnt > 1) {
status = pj_ice_strans_get_def_cand(tp_ice->ice_st, 2, &cand);
if (status != PJ_SUCCESS)
return status;
- pj_sockaddr_cp(&info->sock_info.rtcp_addr_name, &cand.addr);
+ pj_sockaddr_cp(&info->sock_info.rtcp_addr_name, &cand.base_addr);
}
/* Set remote address originating RTP & RTCP if this transport has
@@ -2115,6 +2135,19 @@
}
}
+static void ice_on_ice_state(pj_ice_strans *ice_st,
+ pj_ice_strans_state prev,
+ pj_ice_strans_state curr)
+{
+ struct transport_ice *tp_ice;
+
+ tp_ice = (struct transport_ice*) pj_ice_strans_get_user_data(ice_st);
+
+ /* Notify application */
+ if (tp_ice->cb.on_ice_state)
+ (*tp_ice->cb.on_ice_state)(&tp_ice->base, prev, curr);
+}
+
/* Simulate lost */
static pj_status_t transport_simulate_lost(pjmedia_transport *tp,
diff -ruN pjproject-2.10/pjmedia/src/pjmedia/transport_zrtp.c pjsip/pjmedia/src/pjmedia/transport_zrtp.c
--- pjproject-2.10/pjmedia/src/pjmedia/transport_zrtp.c 1970-01-01 01:00:00.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia/transport_zrtp.c 2021-02-06 16:42:04.322625052 +0100
@@ -0,0 +1,1261 @@
+/* $Id$ */
+/*
+ * Copyright (C) 2010 Werner Dittmann
+ * This is the pjmedia ZRTP transport module.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+
+#include <pjmedia/transport_zrtp.h>
+#include <pjmedia/endpoint.h>
+#include <pjlib.h>
+#include <pjlib-util.h>
+
+#include "../../third_party/zsrtp/include/ZsrtpCWrapper.h"
+
+#define THIS_FILE "transport_zrtp.c"
+
+#define MAX_RTP_BUFFER_LEN PJMEDIA_MAX_MTU
+#define MAX_RTCP_BUFFER_LEN PJMEDIA_MAX_MTU
+
+
+/* Transport functions prototypes */
+static pj_status_t transport_get_info(pjmedia_transport *tp,
+ pjmedia_transport_info *info);
+static pj_status_t transport_attach(pjmedia_transport *tp,
+ void *user_data,
+ const pj_sockaddr_t *rem_addr,
+ const pj_sockaddr_t *rem_rtcp,
+ unsigned addr_len,
+ void (*rtp_cb)(void*,
+ void*,
+ pj_ssize_t),
+ void (*rtcp_cb)(void*,
+ void*,
+ pj_ssize_t));
+static void transport_detach(pjmedia_transport *tp,
+ void *strm);
+static pj_status_t transport_send_rtp(pjmedia_transport *tp,
+ const void *pkt,
+ pj_size_t size);
+static pj_status_t transport_send_rtcp(pjmedia_transport *tp,
+ const void *pkt,
+ pj_size_t size);
+static pj_status_t transport_send_rtcp2(pjmedia_transport *tp,
+ const pj_sockaddr_t *addr,
+ unsigned addr_len,
+ const void *pkt,
+ pj_size_t size);
+static pj_status_t transport_media_create(pjmedia_transport *tp,
+ pj_pool_t *sdp_pool,
+ unsigned options,
+ const pjmedia_sdp_session *rem_sdp,
+ unsigned media_index);
+static pj_status_t transport_encode_sdp(pjmedia_transport *tp,
+ pj_pool_t *sdp_pool,
+ pjmedia_sdp_session *local_sdp,
+ const pjmedia_sdp_session *rem_sdp,
+ unsigned media_index);
+static pj_status_t transport_media_start(pjmedia_transport *tp,
+ pj_pool_t *pool,
+ const pjmedia_sdp_session *local_sdp,
+ const pjmedia_sdp_session *rem_sdp,
+ unsigned media_index);
+static pj_status_t transport_media_stop(pjmedia_transport *tp);
+static pj_status_t transport_simulate_lost(pjmedia_transport *tp,
+ pjmedia_dir dir,
+ unsigned pct_lost);
+static pj_status_t transport_destroy(pjmedia_transport *tp);
+
+
+/* The transport operations */
+static struct pjmedia_transport_op tp_zrtp_op =
+{
+ &transport_get_info,
+ &transport_attach,
+ &transport_detach,
+ &transport_send_rtp,
+ &transport_send_rtcp,
+ &transport_send_rtcp2,
+ &transport_media_create,
+ &transport_encode_sdp,
+ &transport_media_start,
+ &transport_media_stop,
+ &transport_simulate_lost,
+ &transport_destroy
+};
+
+/* The transport zrtp instance */
+struct tp_zrtp
+{
+ pjmedia_transport base;
+ pj_pool_t *pool;
+
+ /* Stream information. */
+ void *stream_user_data;
+ void (*stream_rtp_cb)(void *user_data,
+ void *pkt,
+ pj_ssize_t);
+ void (*stream_rtcp_cb)(void *user_data,
+ void *pkt,
+ pj_ssize_t);
+
+ /* Add your own member here.. */
+ uint64_t protect;
+ uint64_t unprotect;
+ int32_t unprotect_err;
+ int32_t refcount;
+ pj_timer_heap_t* timer_heap;
+ pj_timer_entry timeoutEntry;
+ pj_mutex_t* zrtpMutex;
+ ZsrtpContext* srtpReceive;
+ ZsrtpContext* srtpSend;
+ ZsrtpContextCtrl* srtcpReceive;
+ ZsrtpContextCtrl* srtcpSend;
+ void* sendBuffer;
+ void* sendBufferCtrl;
+ pj_uint8_t* zrtpBuffer;
+// pj_int32_t sendBufferLen;
+ pj_uint32_t peerSSRC; /* stored in host order */
+ pj_uint32_t localSSRC; /* stored in host order */
+ char* clientIdString;
+ pjmedia_transport *slave_tp;
+ pjmedia_zrtp_cb cb;
+ ZrtpContext* zrtpCtx;
+ pj_uint16_t zrtpSeq;
+ pj_bool_t enableZrtp;
+ pj_bool_t started;
+ pj_bool_t close_slave;
+ pj_bool_t mitmMode;
+ char cipher[128];
+};
+
+/* Forward declaration of the ZRTP specific callback functions that this
+ adapter must implement */
+static int32_t zrtp_sendDataZRTP(ZrtpContext* ctx, const uint8_t* data, int32_t length) ;
+static int32_t zrtp_activateTimer(ZrtpContext* ctx, int32_t time) ;
+static int32_t zrtp_cancelTimer(ZrtpContext* ctx) ;
+static void zrtp_sendInfo(ZrtpContext* ctx, int32_t severity, int32_t subCode) ;
+static int32_t zrtp_srtpSecretsReady(ZrtpContext* ctx, C_SrtpSecret_t* secrets, int32_t part) ;
+static void zrtp_srtpSecretsOff(ZrtpContext* ctx, int32_t part) ;
+static void zrtp_srtpSecretsOn(ZrtpContext* ctx, char* c, char* s, int32_t verified) ;
+static void zrtp_handleGoClear(ZrtpContext* ctx) ;
+static void zrtp_zrtpNegotiationFailed(ZrtpContext* ctx, int32_t severity, int32_t subCode) ;
+static void zrtp_zrtpNotSuppOther(ZrtpContext* ctx) ;
+static void zrtp_synchEnter(ZrtpContext* ctx) ;
+static void zrtp_synchLeave(ZrtpContext* ctx) ;
+static void zrtp_zrtpAskEnrollment(ZrtpContext* ctx, int32_t info) ;
+static void zrtp_zrtpInformEnrollment(ZrtpContext* ctx, int32_t info) ;
+static void zrtp_signSAS(ZrtpContext* ctx, uint8_t* sasHash) ;
+static int32_t zrtp_checkSASSignature(ZrtpContext* ctx, uint8_t* sasHash) ;
+
+/* The callback function structure for ZRTP */
+static zrtp_Callbacks c_callbacks =
+{
+ &zrtp_sendDataZRTP,
+ &zrtp_activateTimer,
+ &zrtp_cancelTimer,
+ &zrtp_sendInfo,
+ &zrtp_srtpSecretsReady,
+ &zrtp_srtpSecretsOff,
+ &zrtp_srtpSecretsOn,
+ &zrtp_handleGoClear,
+ &zrtp_zrtpNegotiationFailed,
+ &zrtp_zrtpNotSuppOther,
+ &zrtp_synchEnter,
+ &zrtp_synchLeave,
+ &zrtp_zrtpAskEnrollment,
+ &zrtp_zrtpInformEnrollment,
+ &zrtp_signSAS,
+ &zrtp_checkSASSignature
+};
+
+static void timer_callback(pj_timer_heap_t *ht, pj_timer_entry *e);
+
+static char clientId[] = "SIP SIMPLE Client SDK";
+
+/*
+ * Create the ZRTP transport.
+ */
+PJ_DEF(pj_status_t) pjmedia_transport_zrtp_create(pjmedia_endpt *endpt,
+ pj_timer_heap_t *timer_heap,
+ pjmedia_transport *tp,
+ pjmedia_transport **p_tp,
+ pj_bool_t close_slave)
+{
+ pj_pool_t *pool;
+ struct tp_zrtp *zrtp;
+
+ PJ_ASSERT_RETURN(endpt && tp && p_tp, PJ_EINVAL);
+
+ /* Create the pool and initialize the adapter structure */
+ pool = pjmedia_endpt_create_pool(endpt, "zrtp%p", 5*1024, 512);
+ zrtp = PJ_POOL_ZALLOC_T(pool, struct tp_zrtp);
+ zrtp->pool = pool;
+
+ /* Initialize base pjmedia_transport */
+ pj_memcpy(zrtp->base.name, pool->obj_name, PJ_MAX_OBJ_NAME);
+ zrtp->base.type = tp->type;
+ zrtp->base.op = &tp_zrtp_op;
+
+ /* Set the timer heap to be used for timers */
+ zrtp->timer_heap = timer_heap;
+
+ /* Create the empty wrapper */
+ zrtp->zrtpCtx = zrtp_CreateWrapper();
+
+ /* Initialize standard values */
+ zrtp->clientIdString = clientId; /* Set standard name */
+ zrtp->zrtpSeq = 1; /* TODO: randomize */
+ pj_mutex_create_simple(zrtp->pool, "zrtp", &zrtp->zrtpMutex);
+ zrtp->zrtpBuffer = pj_pool_zalloc(pool, MAX_ZRTP_SIZE);
+ zrtp->sendBuffer = pj_pool_zalloc(pool, MAX_RTP_BUFFER_LEN);
+ zrtp->sendBufferCtrl = pj_pool_zalloc(pool, MAX_RTCP_BUFFER_LEN);
+
+ zrtp->slave_tp = tp;
+ zrtp->close_slave = close_slave;
+ zrtp->mitmMode = PJ_FALSE;
+
+ /* Done */
+ zrtp->refcount++;
+ *p_tp = &zrtp->base;
+ return PJ_SUCCESS;
+}
+
+PJ_DECL(pj_status_t) pjmedia_transport_zrtp_initialize(pjmedia_transport *tp,
+ const char *zidFilename,
+ pj_bool_t autoEnable,
+ pjmedia_zrtp_cb *cb)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ PJ_ASSERT_RETURN(tp, PJ_EINVAL);
+
+ zrtp_initializeZrtpEngine(zrtp->zrtpCtx, &c_callbacks, zrtp->clientIdString,
+ zidFilename, zrtp, zrtp->mitmMode);
+ zrtp->enableZrtp = autoEnable;
+ if (cb)
+ pj_memcpy(&zrtp->cb, cb, sizeof(pjmedia_zrtp_cb));
+ return PJ_SUCCESS;
+}
+
+static void timer_callback(pj_timer_heap_t *ht, pj_timer_entry *e)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)e->user_data;
+
+ zrtp_processTimeout(zrtp->zrtpCtx);
+ PJ_UNUSED_ARG(ht);
+}
+
+/*
+ * Here start with callback functions that support the ZRTP core
+ */
+static int32_t zrtp_sendDataZRTP(ZrtpContext* ctx, const uint8_t* data, int32_t length)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)ctx->userData;
+ pj_uint16_t totalLen = length + 12; /* Fixed number of bytes of ZRTP header */
+ pj_uint32_t crc;
+ pj_uint8_t* buffer = zrtp->zrtpBuffer;
+ pj_uint16_t* pus;
+ pj_uint32_t* pui;
+
+ if ((totalLen) > MAX_ZRTP_SIZE)
+ return 0;
+
+ /* Get some handy pointers */
+ pus = (pj_uint16_t*)buffer;
+ pui = (pj_uint32_t*)buffer;
+
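+ /* ZRTP packets reuse the RTP framing: a 12-byte header (invalid version
+ * nibble, sequence number, ZRTP magic cookie, local SSRC), followed by the
+ * ZRTP message and a 4-byte CRC computed over everything before it. */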
+ /* set up fixed ZRTP header */
+ *buffer = 0x10; /* invalid RTP version - refer to ZRTP spec chap 5 */
+ *(buffer + 1) = 0;
+ pus[1] = pj_htons(zrtp->zrtpSeq++);
+ pui[1] = pj_htonl(ZRTP_MAGIC);
+ pui[2] = pj_htonl(zrtp->localSSRC); /* stored in host order */
+
+ /* Copy ZRTP message data behind the header data */
+ pj_memcpy(buffer+12, data, length);
+
+ /* Setup and compute ZRTP CRC */
+ crc = zrtp_GenerateCksum(buffer, totalLen-CRC_SIZE);
+
+ /* convert and store CRC in ZRTP packet.*/
+ crc = zrtp_EndCksum(crc);
+ *(uint32_t*)(buffer+totalLen-CRC_SIZE) = pj_htonl(crc);
+
+ /* Send the ZRTP packet using the slave transport */
+ return (pjmedia_transport_send_rtp(zrtp->slave_tp, buffer, totalLen) == PJ_SUCCESS) ? 1 : 0;
+}
+
+static int32_t zrtp_activateTimer(ZrtpContext* ctx, int32_t time)
+{
+ pj_time_val timeout;
+ pj_status_t status;
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)ctx->userData;
+
+ timeout.sec = time / 1000;
+ timeout.msec = time % 1000;
+
+ pj_timer_entry_init(&zrtp->timeoutEntry, 0, zrtp, &timer_callback);
+ status = pj_timer_heap_schedule(zrtp->timer_heap, &zrtp->timeoutEntry, &timeout);
+ if (status == PJ_SUCCESS)
+ return 1;
+ else
+ return 0;
+}
+
+static int32_t zrtp_cancelTimer(ZrtpContext* ctx)
+{
+ pj_status_t status;
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)ctx->userData;
+
+ status = pj_timer_heap_cancel(zrtp->timer_heap, &zrtp->timeoutEntry);
+ if (status == PJ_SUCCESS)
+ return 1;
+ else
+ return 0;
+}
+
+static void zrtp_sendInfo(ZrtpContext* ctx, int32_t severity, int32_t subCode)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)ctx->userData;
+
+ if (zrtp->cb.show_message)
+ zrtp->cb.show_message(&zrtp->base, severity, subCode);
+
+}
+
+static int32_t zrtp_srtpSecretsReady(ZrtpContext* ctx, C_SrtpSecret_t* secrets, int32_t part)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)ctx->userData;
+
+ ZsrtpContext* recvCrypto;
+ ZsrtpContext* senderCrypto;
+ ZsrtpContextCtrl* recvCryptoCtrl;
+ ZsrtpContextCtrl* senderCryptoCtrl;
+ int cipher;
+ int authn;
+ int authKeyLen;
+ // int srtcpAuthTagLen;
+
+ if (secrets->authAlgorithm == zrtp_Sha1) {
+ authn = SrtpAuthenticationSha1Hmac;
+ authKeyLen = 20;
+ // srtcpAuthTagLen = 80; // Always 80 bit for SRTCP / SHA1
+ }
+
+ if (secrets->authAlgorithm == zrtp_Skein) {
+ authn = SrtpAuthenticationSkeinHmac;
+ authKeyLen = 32;
+ // srtcpAuthTagLen = 64; // Always 64 bit for SRTCP / Skein
+ }
+
+ if (secrets->symEncAlgorithm == zrtp_Aes)
+ cipher = SrtpEncryptionAESCM;
+
+ if (secrets->symEncAlgorithm == zrtp_TwoFish)
+ cipher = SrtpEncryptionTWOCM;
+
+ if (part == ForSender) {
+ // To encrypt packets: initiator uses initiator keys,
+ // responder uses responder keys
+ // Create a "half baked" crypto context first and store it. This is
+ // the main crypto context for the sending part of the connection.
+ if (secrets->role == Initiator) {
+ senderCrypto = zsrtp_CreateWrapper(zrtp->localSSRC,
+ 0,
+ 0L, // keyderivation << 48,
+ cipher, // encryption algo
+ authn, // authentication algo
+ (unsigned char*)secrets->keyInitiator, // Master Key
+ secrets->initKeyLen / 8, // Master Key length
+ (unsigned char*)secrets->saltInitiator, // Master Salt
+ secrets->initSaltLen / 8, // Master Salt length
+ secrets->initKeyLen / 8, // encryption key len
+ authKeyLen, // authentication key len
+ secrets->initSaltLen / 8, // session salt len
+ secrets->srtpAuthTagLen / 8); // authentication tag len
+
+ senderCryptoCtrl = zsrtp_CreateWrapperCtrl(zrtp->localSSRC,
+ cipher, // encryption algo
+ authn, // authentication algo
+ (unsigned char*)secrets->keyInitiator, // Master Key
+ secrets->initKeyLen / 8, // Master Key length
+ (unsigned char*)secrets->saltInitiator, // Master Salt
+ secrets->initSaltLen / 8, // Master Salt length
+ secrets->initKeyLen / 8, // encryption key len
+ authKeyLen, // authentication key len
+ secrets->initSaltLen / 8, // session salt len
+ secrets->srtpAuthTagLen / 8); // authentication tag len
+ // srtcpAuthTagLen / 8); // authentication tag len
+ }
+ else {
+ senderCrypto = zsrtp_CreateWrapper(zrtp->localSSRC,
+ 0,
+ 0L, // keyderivation << 48,
+ cipher, // encryption algo
+ authn, // authentication algo
+ (unsigned char*)secrets->keyResponder, // Master Key
+ secrets->respKeyLen / 8, // Master Key length
+ (unsigned char*)secrets->saltResponder, // Master Salt
+ secrets->respSaltLen / 8, // Master Salt length
+ secrets->respKeyLen / 8, // encryption key len
+ authKeyLen, // authentication key len
+ secrets->respSaltLen / 8, // session salt len
+ secrets->srtpAuthTagLen / 8); // authentication tag len
+
+ senderCryptoCtrl = zsrtp_CreateWrapperCtrl(zrtp->localSSRC,
+ cipher, // encryption algo
+ authn, // authentication algo
+ (unsigned char*)secrets->keyResponder, // Master Key
+ secrets->respKeyLen / 8, // Master Key length
+ (unsigned char*)secrets->saltResponder, // Master Salt
+ secrets->respSaltLen / 8, // Master Salt length
+ secrets->respKeyLen / 8, // encryption key len
+ authKeyLen, // authentication key len
+ secrets->respSaltLen / 8, // session salt len
+ secrets->srtpAuthTagLen / 8); // authentication tag len
+ // srtcpAuthTagLen / 8); // authentication tag len
+ }
+ if (senderCrypto == NULL) {
+ return 0;
+ }
+ // Create a SRTP crypto context for real SSRC sender stream.
+ // Note: key derivation can be done at this time only if the
+ // key derivation rate is 0 (disabled). For ZRTP this is the
+ // case: the key derivation is defined as 2^48
+ // which is effectively 0.
+ zsrtp_deriveSrtpKeys(senderCrypto, 0L);
+ zrtp->srtpSend = senderCrypto;
+
+ zsrtp_deriveSrtpKeysCtrl(senderCryptoCtrl);
+ zrtp->srtcpSend = senderCryptoCtrl;
+ }
+ if (part == ForReceiver) {
+ // To decrypt packets: initiator uses responder keys,
+ // responder uses initiator keys
+ // See comment above.
+ if (secrets->role == Initiator) {
+ recvCrypto = zsrtp_CreateWrapper(zrtp->peerSSRC,
+ 0,
+ 0L, // keyderivation << 48,
+ cipher, // encryption algo
+ authn, // authentication algo
+ (unsigned char*)secrets->keyResponder, // Master Key
+ secrets->respKeyLen / 8, // Master Key length
+ (unsigned char*)secrets->saltResponder, // Master Salt
+ secrets->respSaltLen / 8, // Master Salt length
+ secrets->respKeyLen / 8, // encryption key len
+ authKeyLen, // authentication key len
+ secrets->respSaltLen / 8, // session salt len
+ secrets->srtpAuthTagLen / 8); // authentication tag len
+
+ recvCryptoCtrl = zsrtp_CreateWrapperCtrl(zrtp->peerSSRC,
+ cipher, // encryption algo
+ authn, // authentication algo
+ (unsigned char*)secrets->keyResponder, // Master Key
+ secrets->respKeyLen / 8, // Master Key length
+ (unsigned char*)secrets->saltResponder, // Master Salt
+ secrets->respSaltLen / 8, // Master Salt length
+ secrets->respKeyLen / 8, // encryption key len
+ authKeyLen, // authentication key len
+ secrets->respSaltLen / 8, // session salt len
+ secrets->srtpAuthTagLen / 8); // authentication tag len
+ // srtcpAuthTagLen / 8); // authentication tag len
+ }
+ else {
+ recvCrypto = zsrtp_CreateWrapper(zrtp->peerSSRC,
+ 0,
+ 0L, // keyderivation << 48,
+ cipher, // encryption algo
+ authn, // authentication algo
+ (unsigned char*)secrets->keyInitiator, // Master Key
+ secrets->initKeyLen / 8, // Master Key length
+ (unsigned char*)secrets->saltInitiator, // Master Salt
+ secrets->initSaltLen / 8, // Master Salt length
+ secrets->initKeyLen / 8, // encryption key len
+ authKeyLen, // authentication key len
+ secrets->initSaltLen / 8, // session salt len
+ secrets->srtpAuthTagLen / 8); // authentication tag len
+
+ recvCryptoCtrl = zsrtp_CreateWrapperCtrl(zrtp->peerSSRC,
+ cipher, // encryption algo
+ authn, // authentication algo
+ (unsigned char*)secrets->keyInitiator, // Master Key
+ secrets->initKeyLen / 8, // Master Key length
+ (unsigned char*)secrets->saltInitiator, // Master Salt
+ secrets->initSaltLen / 8, // Master Salt length
+ secrets->initKeyLen / 8, // encryption key len
+ authKeyLen, // authentication key len
+ secrets->initSaltLen / 8, // session salt len
+ secrets->srtpAuthTagLen / 8); // authentication tag len
+ // srtcpAuthTagLen / 8); // authentication tag len
+ }
+ if (recvCrypto == NULL) {
+ return 0;
+ }
+ // Create a SRTP crypto context for real SSRC input stream.
+ // If the sender didn't provide a SSRC just insert the template
+ // into the queue. After we received the first packet the real
+ // crypto context will be created.
+ //
+ // Note: key derivation can be done at this time only if the
+ // key derivation rate is 0 (disabled). For ZRTP this is the
+ // case: the key derivation is defined as 2^48
+ // which is effectively 0.
+ zsrtp_deriveSrtpKeys(recvCrypto, 0L);
+ zrtp->srtpReceive = recvCrypto;
+
+ zsrtp_deriveSrtpKeysCtrl(recvCryptoCtrl);
+ zrtp->srtcpReceive = recvCryptoCtrl;
+ }
+ return 1;
+}
+
+static void zrtp_srtpSecretsOff(ZrtpContext* ctx, int32_t part)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)ctx->userData;
+
+ if (part == ForSender)
+ {
+ zsrtp_DestroyWrapper(zrtp->srtpSend);
+ zsrtp_DestroyWrapperCtrl(zrtp->srtcpSend);
+ zrtp->srtpSend = NULL;
+ zrtp->srtcpSend = NULL;
+ }
+ if (part == ForReceiver)
+ {
+ zsrtp_DestroyWrapper(zrtp->srtpReceive);
+ zsrtp_DestroyWrapperCtrl(zrtp->srtcpReceive);
+ zrtp->srtpReceive = NULL;
+ zrtp->srtcpReceive = NULL;
+ }
+
+ if (zrtp->cb.secure_off)
+ zrtp->cb.secure_off(&zrtp->base);
+}
+
+static void zrtp_srtpSecretsOn(ZrtpContext* ctx, char* c, char* s, int32_t verified)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)ctx->userData;
+ int len;
+
+ len = strlen(c);
+ if (len > sizeof(zrtp->cipher) - 1)
+ len = sizeof(zrtp->cipher) - 1;
+ memcpy(zrtp->cipher, c, len);
+ zrtp->cipher[len] = '\0';
+
+ if (zrtp->cb.secure_on)
+ zrtp->cb.secure_on(&zrtp->base, c);
+
+ if (s && strlen(s) > 0 && zrtp->cb.show_sas)
+ zrtp->cb.show_sas(&zrtp->base, s, verified);
+}
+
+static void zrtp_handleGoClear(ZrtpContext* ctx)
+{
+ /* TODO: implement */
+}
+
+static void zrtp_zrtpNegotiationFailed(ZrtpContext* ctx, int32_t severity, int32_t subCode)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)ctx->userData;
+
+ if (zrtp->cb.negotiation_failed)
+ zrtp->cb.negotiation_failed(&zrtp->base, severity, subCode);
+}
+
+static void zrtp_zrtpNotSuppOther(ZrtpContext* ctx)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)ctx->userData;
+
+ if (zrtp->cb.not_supported_by_other)
+ zrtp->cb.not_supported_by_other(&zrtp->base);
+}
+
+static void zrtp_synchEnter(ZrtpContext* ctx)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)ctx->userData;
+ pj_mutex_lock(zrtp->zrtpMutex);
+}
+
+static void zrtp_synchLeave(ZrtpContext* ctx)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)ctx->userData;
+ pj_mutex_unlock(zrtp->zrtpMutex);
+}
+
+static void zrtp_zrtpAskEnrollment(ZrtpContext* ctx, int32_t info)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)ctx->userData;
+
+ if (zrtp->cb.ask_enrollment)
+ zrtp->cb.ask_enrollment(&zrtp->base, info);
+}
+
+static void zrtp_zrtpInformEnrollment(ZrtpContext* ctx, int32_t info)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)ctx->userData;
+
+ if (zrtp->cb.inform_enrollment)
+ zrtp->cb.inform_enrollment(&zrtp->base, info);
+}
+
+static void zrtp_signSAS(ZrtpContext* ctx, uint8_t* sasHash)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)ctx->userData;
+
+ if (zrtp->cb.sign_sas)
+ zrtp->cb.sign_sas(&zrtp->base, sasHash);
+}
+
+static int32_t zrtp_checkSASSignature(ZrtpContext* ctx, uint8_t* sasHash)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)ctx->userData;
+
+ if (zrtp->cb.check_sas_signature)
+ return zrtp->cb.check_sas_signature(&zrtp->base, sasHash);
+ return 0;
+}
+
+/*
+ * Implement the specific ZRTP transport functions
+ */
+PJ_DEF(void) pjmedia_transport_zrtp_setEnableZrtp(pjmedia_transport *tp, pj_bool_t onOff)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ pj_assert(tp);
+
+ zrtp->enableZrtp = onOff;
+}
+
+PJ_DEF(pj_bool_t) pjmedia_transport_zrtp_isEnableZrtp(pjmedia_transport *tp)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ PJ_ASSERT_RETURN(tp, PJ_FALSE);
+
+ return zrtp->enableZrtp;
+
+}
+
+PJ_DEF(void) pjmedia_transport_zrtp_startZrtp(pjmedia_transport *tp)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ pj_assert(tp && zrtp->zrtpCtx);
+
+ zrtp_startZrtpEngine(zrtp->zrtpCtx);
+ zrtp->started = 1;
+}
+
+PJ_DEF(void) pjmedia_transport_zrtp_stopZrtp(pjmedia_transport *tp)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ pj_assert(tp && zrtp->zrtpCtx);
+
+ zrtp_stopZrtpEngine(zrtp->zrtpCtx);
+ zrtp->started = 0;
+}
+
+PJ_DEF(void) pjmedia_transport_zrtp_setLocalSSRC(pjmedia_transport *tp, uint32_t ssrc)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ pj_assert(tp);
+
+ zrtp->localSSRC = ssrc;
+}
+
+PJ_DEF(pj_bool_t) pjmedia_transport_zrtp_isMitmMode(pjmedia_transport *tp)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ pj_assert(tp);
+
+ return zrtp->mitmMode;
+}
+
+PJ_DEF(void) pjmedia_transport_zrtp_setMitmMode(pjmedia_transport *tp, pj_bool_t mitmMode)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ pj_assert(tp);
+
+ zrtp->mitmMode = mitmMode;
+}
+
+PJ_DEF(ZrtpContext*) pjmedia_transport_zrtp_getZrtpContext(pjmedia_transport *tp)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ PJ_ASSERT_RETURN(tp, NULL);
+
+ return zrtp->zrtpCtx;
+}
+
+PJ_DEF(void) pjmedia_transport_zrtp_setSASVerified(pjmedia_transport *tp, pj_bool_t verified)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ pj_assert(tp);
+
+ if (verified)
+ zrtp_SASVerified(zrtp->zrtpCtx);
+ else
+ zrtp_resetSASVerified(zrtp->zrtpCtx);
+}
+
+PJ_DEF(int) pjmedia_transport_zrtp_getPeerZid(pjmedia_transport *tp, unsigned char* data)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ pj_assert(tp);
+
+ return zrtp_getPeerZid(zrtp->zrtpCtx, data);
+}
+
+PJ_DEF(char*) pjmedia_transport_zrtp_getPeerName(pjmedia_transport *tp)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ pj_assert(tp);
+
+ return zrtp_getPeerName(zrtp->zrtpCtx);
+}
+
+PJ_DEF(void) pjmedia_transport_zrtp_putPeerName(pjmedia_transport *tp, const char *name)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ pj_assert(tp);
+
+ zrtp_putPeerName(zrtp->zrtpCtx, name);
+}
+
+PJ_DEF(char*) pjmedia_transport_zrtp_getMultiStreamParameters(pjmedia_transport *tp, pj_int32_t *length)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ pj_assert(tp);
+
+ return zrtp_getMultiStrParams(zrtp->zrtpCtx, length);
+}
+
+PJ_DEF(void) pjmedia_transport_zrtp_setMultiStreamParameters(pjmedia_transport *tp, const char *parameters, pj_int32_t length, pjmedia_transport *master_tp)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ struct tp_zrtp *master_zrtp = (struct tp_zrtp*)master_tp;
+ pj_assert(tp);
+ pj_assert(master_tp);
+
+ zrtp_setMultiStrParams(zrtp->zrtpCtx, (char*) parameters, length, master_zrtp->zrtpCtx);
+}
+
+/*
+ * get_info() is called to get the transport addresses to be put
+ * in SDP c= line and a=rtcp line.
+ */
+static pj_status_t transport_get_info(pjmedia_transport *tp,
+ pjmedia_transport_info *info)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ pjmedia_zrtp_info zrtp_info;
+ int spc_info_idx;
+
+ PJ_ASSERT_RETURN(tp && info, PJ_EINVAL);
+ PJ_ASSERT_RETURN(info->specific_info_cnt <
+ PJMEDIA_TRANSPORT_SPECIFIC_INFO_MAXCNT, PJ_ETOOMANY);
+
+ zrtp_info.active = zrtp_inState(zrtp->zrtpCtx, SecureState) ? PJ_TRUE : PJ_FALSE;
+ if (zrtp_info.active)
+ memcpy(zrtp_info.cipher, zrtp->cipher, sizeof(zrtp->cipher));
+ else
+ zrtp_info.cipher[0] = '\0';
+
+ spc_info_idx = info->specific_info_cnt++;
+ info->spc_info[spc_info_idx].type = PJMEDIA_TRANSPORT_TYPE_ZRTP;
+
+ pj_memcpy(&info->spc_info[spc_info_idx].buffer, &zrtp_info,
+ sizeof(zrtp_info));
+
+ return pjmedia_transport_get_info(zrtp->slave_tp, info);
+}
+
+/* This is our RTP callback, called by the slave transport when it
+ * receives an RTP packet.
+ */
+static void transport_rtp_cb(void *user_data, void *pkt, pj_ssize_t size)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)user_data;
+
+ pj_uint8_t* buffer = (pj_uint8_t*)pkt;
+ int32_t newLen = 0;
+ pj_status_t rc = PJ_SUCCESS;
+
+ pj_assert(zrtp && zrtp->stream_rtcp_cb && pkt);
+
+ // check if this could be a real RTP/SRTP packet.
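+ // ZRTP packets carry the invalid version nibble 0x1 in the first byte
+ // (see zrtp_sendDataZRTP above); anything else is treated as RTP/SRTP.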
+ if ((*buffer & 0xf0) != 0x10)
+ {
+ // Could be real RTP, check if we are in secure mode
+ if (zrtp->srtpReceive == NULL || size < 0)
+ {
+ zrtp->stream_rtp_cb(zrtp->stream_user_data, pkt, size);
+ }
+ else
+ {
+ rc = zsrtp_unprotect(zrtp->srtpReceive, pkt, size, &newLen);
+ if (rc == 1)
+ {
+ zrtp->unprotect++;
+ zrtp->stream_rtp_cb(zrtp->stream_user_data, pkt,
+ newLen);
+ zrtp->unprotect_err = 0;
+ }
+ else
+ {
+ if (zrtp->cb.show_message)
+ {
+ if (rc == -1)
+ zrtp->cb.show_message(&zrtp->base, zrtp_Warning, zrtp_WarningSRTPauthError);
+ else
+ zrtp->cb.show_message(&zrtp->base, zrtp_Warning, zrtp_WarningSRTPreplayError);
+ }
+ zrtp->unprotect_err = rc;
+ /* We failed to decrypt the packet, but forward it to the stream
+ * regardless, it might not have been encrypted after all */
+ zrtp->stream_rtp_cb(zrtp->stream_user_data, pkt, size);
+ }
+ }
+ if (!zrtp->started && zrtp->enableZrtp)
+ pjmedia_transport_zrtp_startZrtp((pjmedia_transport *)zrtp);
+
+ return;
+ }
+
+ // We assume all other packets are ZRTP packets. Process them only if
+ // ZRTP processing is enabled. Valid RTP packets were already handled
+ // above, so packets reaching this point are consumed and not forwarded.
+ if (zrtp->enableZrtp && zrtp->zrtpCtx != NULL)
+ {
+ // Get CRC value into crc (see above how to compute the offset)
+ pj_uint16_t temp = size - CRC_SIZE;
+ pj_uint32_t crc = *(uint32_t*)(buffer + temp);
+ crc = pj_ntohl(crc);
+
+ if (!zrtp_CheckCksum(buffer, temp, crc))
+ {
+ if (zrtp->cb.show_message)
+ zrtp->cb.show_message(&zrtp->base, zrtp_Warning, zrtp_WarningCRCmismatch);
+ return;
+ }
+
+ pj_uint32_t magic = *(pj_uint32_t*)(buffer + 4);
+ magic = pj_ntohl(magic);
+
+ // Check if it is really a ZRTP packet, return, no further processing
+ if (magic != ZRTP_MAGIC)
+ return;
+
+ // cover the case if the other party sends _only_ ZRTP packets at the
+ // beginning of a session. Start ZRTP in this case as well.
+ if (!zrtp->started)
+ {
+ pjmedia_transport_zrtp_startZrtp((pjmedia_transport *)zrtp);
+ }
+ // Skip the 12-byte ZRTP header; the engine processes the message part only
+ unsigned char* zrtpMsg = (buffer + 12);
+
+ // store peer's SSRC in host order, used when creating the CryptoContext
+ zrtp->peerSSRC = *(pj_uint32_t*)(buffer + 8);
+ zrtp->peerSSRC = pj_ntohl(zrtp->peerSSRC);
+ zrtp_processZrtpMessage(zrtp->zrtpCtx, zrtpMsg, zrtp->peerSSRC, size);
+ }
+}
+
+
+/* This is our RTCP callback, called by the slave transport when it
+ * receives an RTCP packet.
+ */
+static void transport_rtcp_cb(void *user_data, void *pkt, pj_ssize_t size)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)user_data;
+ int32_t newLen = 0;
+ pj_status_t rc = PJ_SUCCESS;
+
+ pj_assert(zrtp && zrtp->stream_rtcp_cb);
+
+ if (zrtp->srtcpReceive == NULL || size < 0)
+ {
+ zrtp->stream_rtcp_cb(zrtp->stream_user_data, pkt, size);
+ }
+ else
+ {
+ rc = zsrtp_unprotectCtrl(zrtp->srtcpReceive, pkt, size, &newLen);
+
+ if (rc == 1)
+ {
+ /* Call stream's callback */
+ zrtp->stream_rtcp_cb(zrtp->stream_user_data, pkt, newLen);
+ }
+ else
+ {
+ // Testing: print some error output
+ }
+ }
+}
+
+
+/*
+ * attach() is called by the stream to register callbacks that we should
+ * call on receipt of RTP and RTCP packets.
+ */
+static pj_status_t transport_attach(pjmedia_transport *tp,
+ void *user_data,
+ const pj_sockaddr_t *rem_addr,
+ const pj_sockaddr_t *rem_rtcp,
+ unsigned addr_len,
+ void (*rtp_cb)(void*,
+ void*,
+ pj_ssize_t),
+ void (*rtcp_cb)(void*,
+ void*,
+ pj_ssize_t))
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ pj_status_t status;
+
+ PJ_ASSERT_RETURN(tp && rem_addr && addr_len, PJ_EINVAL);
+
+ /* Save the stream information and callbacks to our structure, and
+ * register our own RTP/RTCP callbacks with the slave transport
+ * instead.
+ */
+ pj_assert(zrtp->stream_user_data == NULL);
+ zrtp->stream_user_data = user_data;
+ zrtp->stream_rtp_cb = rtp_cb;
+ zrtp->stream_rtcp_cb = rtcp_cb;
+
+ status = pjmedia_transport_attach(zrtp->slave_tp, zrtp, rem_addr,
+ rem_rtcp, addr_len, &transport_rtp_cb,
+ &transport_rtcp_cb);
+ if (status != PJ_SUCCESS)
+ {
+ zrtp->stream_user_data = NULL;
+ zrtp->stream_rtp_cb = NULL;
+ zrtp->stream_rtcp_cb = NULL;
+ return status;
+ }
+
+ return PJ_SUCCESS;
+}
+
+/*
+ * detach() is called when the media is terminated, and the stream is
+ * to be disconnected from us.
+ */
+static void transport_detach(pjmedia_transport *tp, void *strm)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+
+ PJ_UNUSED_ARG(strm);
+ PJ_ASSERT_ON_FAIL(tp, return);
+
+ if (zrtp->stream_user_data != NULL)
+ {
+ pjmedia_transport_detach(zrtp->slave_tp, zrtp);
+ zrtp->stream_user_data = NULL;
+ zrtp->stream_rtp_cb = NULL;
+ zrtp->stream_rtcp_cb = NULL;
+ }
+}
+
+
+/*
+ * send_rtp() is called to send an RTP packet. The "pkt" and "size" arguments
+ * contain both the RTP header and the payload.
+ */
+static pj_status_t transport_send_rtp(pjmedia_transport *tp,
+ const void *pkt,
+ pj_size_t size)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ pj_uint32_t* pui = (pj_uint32_t*)pkt;
+ int32_t newLen = 0;
+ pj_status_t rc = PJ_SUCCESS;
+
+ PJ_ASSERT_RETURN(tp && pkt, PJ_EINVAL);
+
+
+ if (!zrtp->started && zrtp->enableZrtp)
+ {
+ if (zrtp->localSSRC == 0)
+ zrtp->localSSRC = pj_ntohl(pui[2]); /* Learn own SSRC before starting ZRTP */
+
+ pjmedia_transport_zrtp_startZrtp((pjmedia_transport *)zrtp);
+ }
+
+ if (zrtp->srtpSend == NULL)
+ {
+ return pjmedia_transport_send_rtp(zrtp->slave_tp, pkt, size);
+ }
+ else
+ {
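+ /* The extra 80 bytes leave headroom for the expansion added by
+ * zsrtp_protect (authentication tag etc.); presumably a conservative bound. */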
+ if (size+80 > MAX_RTP_BUFFER_LEN)
+ return PJ_ETOOBIG;
+
+ pj_memcpy(zrtp->sendBuffer, pkt, size);
+ rc = zsrtp_protect(zrtp->srtpSend, zrtp->sendBuffer, size, &newLen);
+ zrtp->protect++;
+
+ if (rc == 1)
+ return pjmedia_transport_send_rtp(zrtp->slave_tp, zrtp->sendBuffer, newLen);
+ else
+ return PJ_EIGNORED;
+ }
+}
+
+
+/*
+ * send_rtcp() is called to send an RTCP packet. The "pkt" and "size" arguments
+ * contain the RTCP packet.
+ */
+static pj_status_t transport_send_rtcp(pjmedia_transport *tp,
+ const void *pkt,
+ pj_size_t size)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ pj_status_t rc = PJ_SUCCESS;
+ int32_t newLen = 0;
+ PJ_ASSERT_RETURN(tp, PJ_EINVAL);
+
+ /* You may do some processing to the RTCP packet here if you want. */
+ if (zrtp->srtcpSend == NULL)
+ {
+ return pjmedia_transport_send_rtcp(zrtp->slave_tp, pkt, size);
+ }
+ else
+ {
+ if (size+80 > MAX_RTCP_BUFFER_LEN)
+ return PJ_ETOOBIG;
+
+ pj_memcpy(zrtp->sendBufferCtrl, pkt, size);
+ rc = zsrtp_protectCtrl(zrtp->srtcpSend, zrtp->sendBufferCtrl, size, &newLen);
+
+ if (rc == 1)
+ return pjmedia_transport_send_rtcp(zrtp->slave_tp, zrtp->sendBufferCtrl, newLen);
+ else
+ return PJ_EIGNORED;
+ }
+
+ /* Send the packet using the slave transport */
+// return pjmedia_transport_send_rtcp(zrtp->slave_tp, pkt, size);
+}
+
+
+/*
+ * This is another variant of send_rtcp(), with the alternate destination
+ * address in the argument.
+ */
+static pj_status_t transport_send_rtcp2(pjmedia_transport *tp,
+ const pj_sockaddr_t *addr,
+ unsigned addr_len,
+ const void *pkt,
+ pj_size_t size)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ PJ_ASSERT_RETURN(tp, PJ_EINVAL);
+
+ return pjmedia_transport_send_rtcp2(zrtp->slave_tp, addr, addr_len,
+ pkt, size);
+}
+
+/*
+ * The media_create() is called when the transport is about to be used for
+ * a new call.
+ */
+static pj_status_t transport_media_create(pjmedia_transport *tp,
+ pj_pool_t *sdp_pool,
+ unsigned options,
+ const pjmedia_sdp_session *rem_sdp,
+ unsigned media_index)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ PJ_ASSERT_RETURN(tp, PJ_EINVAL);
+
+ /* if "rem_sdp" is not NULL, it means we are UAS. You may do some
+ * inspections on the incoming SDP to verify that the SDP is acceptable
+ * for us. If the SDP is not acceptable, we can reject the SDP by
+ * returning non-PJ_SUCCESS.
+ */
+ if (rem_sdp)
+ {
+ /* Do your stuff.. */
+ }
+
+ /* Once we're done with our initialization, pass the call to the
+ * slave transport to let it do its own initialization too.
+ */
+ return pjmedia_transport_media_create(zrtp->slave_tp, sdp_pool, options,
+ rem_sdp, media_index);
+}
+
+/*
+ * The encode_sdp() is called when we're about to send SDP to remote party,
+ * either as SDP offer or as SDP answer.
+ */
+static pj_status_t transport_encode_sdp(pjmedia_transport *tp,
+ pj_pool_t *sdp_pool,
+ pjmedia_sdp_session *local_sdp,
+ const pjmedia_sdp_session *rem_sdp,
+ unsigned media_index)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ int32_t numVersions, i;
+
+ PJ_ASSERT_RETURN(tp, PJ_EINVAL);
+
+ /* If "rem_sdp" is not NULL, it means we're encoding SDP answer. You may
+ * do some more checking on the SDP to make sure that everything is
+ * okay before we send it.
+ */
+ if (rem_sdp)
+ {
+ /* Do checking stuffs here.. */
+ }
+
+ /* Add zrtp-hash attributes to both INVITE and 200 OK. */
+ numVersions = zrtp_getNumberSupportedVersions(zrtp->zrtpCtx);
+ for (i = 0; i < numVersions; i++) {
+ char *zrtp_hello_hash = zrtp_getHelloHash(zrtp->zrtpCtx, i);
+ if (zrtp_hello_hash && *zrtp_hello_hash) {
+ int zrtp_hello_hash_len = strlen(zrtp_hello_hash);
+ pj_str_t *zrtp_hash_str = PJ_POOL_ALLOC_T(sdp_pool, pj_str_t);
+ pjmedia_sdp_attr *zrtp_hash = NULL;
+
+ zrtp_hash_str->ptr = zrtp_hello_hash;
+ zrtp_hash_str->slen = zrtp_hello_hash_len;
+
+ zrtp_hash = pjmedia_sdp_attr_create(sdp_pool, "zrtp-hash", zrtp_hash_str);
+ if (zrtp_hash &&
+ pjmedia_sdp_attr_add(&local_sdp->media[media_index]->attr_count, local_sdp->media[media_index]->attr, zrtp_hash) == PJ_SUCCESS) {
+ PJ_LOG(4, (THIS_FILE, "attribute added: a=zrtp-hash:%s", zrtp_hello_hash));
+ }
+ else {
+ PJ_LOG(4, (THIS_FILE, "error adding attribute: a=zrtp-hash:%s", zrtp_hello_hash));
+ }
+ }
+ }
+
+ /* You may do anything to the local_sdp, e.g. adding new attributes, or
+ * even modifying the SDP if you want.
+ */
+ if (0)
+ {
+ /* Say we add a proprietary attribute here.. */
+ pjmedia_sdp_attr *my_attr;
+
+ my_attr = PJ_POOL_ALLOC_T(sdp_pool, pjmedia_sdp_attr);
+ pj_strdup2(sdp_pool, &my_attr->name, "X-zrtp");
+ pj_strdup2(sdp_pool, &my_attr->value, "some value");
+
+ pjmedia_sdp_attr_add(&local_sdp->media[media_index]->attr_count,
+ local_sdp->media[media_index]->attr,
+ my_attr);
+ }
+
+ /* And then pass the call to the slave transport to let it encode its
+ * information in the SDP. You may choose to call encode_sdp() on the slave
+ * first before adding your custom attributes if you want.
+ */
+ return pjmedia_transport_encode_sdp(zrtp->slave_tp, sdp_pool, local_sdp, rem_sdp, media_index);
+}
+
+/*
+ * The media_start() is called once both local and remote SDP have been
+ * negotiated successfully, and the media is ready to start. Here we can start
+ * committing our processing.
+ */
+static pj_status_t transport_media_start(pjmedia_transport *tp,
+ pj_pool_t *pool,
+ const pjmedia_sdp_session *local_sdp,
+ const pjmedia_sdp_session *rem_sdp,
+ unsigned media_index)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ PJ_ASSERT_RETURN(tp, PJ_EINVAL);
+
+ /* Do something.. */
+
+ /* And pass the call to the slave transport */
+ return pjmedia_transport_media_start(zrtp->slave_tp, pool, local_sdp,
+ rem_sdp, media_index);
+}
+
+/*
+ * The media_stop() is called when media has been stopped.
+ */
+static pj_status_t transport_media_stop(pjmedia_transport *tp)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ PJ_ASSERT_RETURN(tp, PJ_EINVAL);
+
+ /* Do something.. */
+ PJ_LOG(4, (THIS_FILE, "Media stop - encrypted packets: %ld, decrypted packets: %ld",
+ zrtp->protect, zrtp->unprotect));
+
+ /* And pass the call to the slave transport */
+ return pjmedia_transport_media_stop(zrtp->slave_tp);
+}
+
+/*
+ * simulate_lost() is called to simulate packet loss
+ */
+static pj_status_t transport_simulate_lost(pjmedia_transport *tp,
+ pjmedia_dir dir,
+ unsigned pct_lost)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+
+ PJ_ASSERT_RETURN(tp, PJ_EINVAL);
+
+ return pjmedia_transport_simulate_lost(zrtp->slave_tp, dir, pct_lost);
+}
+
+/*
+ * destroy() is called when the transport is no longer needed.
+ */
+static pj_status_t transport_destroy(pjmedia_transport *tp)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+
+ PJ_ASSERT_RETURN(tp, PJ_EINVAL);
+
+ PJ_LOG(4, (THIS_FILE, "Destroy - encrypted packets: %ld, decrypted packets: %ld",
+ zrtp->protect, zrtp->unprotect));
+
+ /* Close the slave transport if we were asked to own it */
+ if (zrtp->close_slave && zrtp->slave_tp)
+ pjmedia_transport_close(zrtp->slave_tp);
+
+ /* Self destruct.. */
+ zrtp_stopZrtpEngine(zrtp->zrtpCtx);
+ zrtp_DestroyWrapper(zrtp->zrtpCtx);
+ zrtp->zrtpCtx = NULL;
+
+ /* In case the mutex is still held by another thread */
+ pj_mutex_lock(zrtp->zrtpMutex);
+ pj_mutex_unlock(zrtp->zrtpMutex);
+ pj_mutex_destroy(zrtp->zrtpMutex);
+
+ pj_pool_release(zrtp->pool);
+
+ return PJ_SUCCESS;
+}
+
+
+
+
diff -ruN pjproject-2.10/pjmedia/src/pjmedia/vid_stream.c pjsip/pjmedia/src/pjmedia/vid_stream.c
--- pjproject-2.10/pjmedia/src/pjmedia/vid_stream.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia/vid_stream.c 2021-02-06 21:17:10.577365678 +0100
@@ -139,9 +139,15 @@
unsigned dec_delay_cnt; /**< Decoding delay (in frames).*/
pjmedia_event fmt_event; /**< Buffered fmt_changed event
to avoid deadlock */
+ pjmedia_event found_keyframe_event;
+ /**< Buffered found keyframe
+ event for delayed republish*/
pjmedia_event miss_keyframe_event;
/**< Buffered missing keyframe
event for delayed republish*/
+ pjmedia_event keyframe_req_event;
+ /**< Buffered keyframe request
+ event for delayed republish*/
unsigned frame_size; /**< Size of encoded base frame.*/
unsigned frame_ts_len; /**< Frame length in timestamp. */
@@ -410,6 +416,11 @@
pj_memcpy(&stream->fmt_event, event, sizeof(*event));
return PJ_SUCCESS;
+ case PJMEDIA_EVENT_KEYFRAME_FOUND:
+ /* Republish this event later from get_frame(). */
+ pj_memcpy(&stream->found_keyframe_event, event, sizeof(*event));
+ return PJ_SUCCESS;
+
case PJMEDIA_EVENT_KEYFRAME_MISSING:
/* Republish this event later from get_frame(). */
pj_memcpy(&stream->miss_keyframe_event, event, sizeof(*event));
@@ -534,7 +541,7 @@
pkt_len);
/* Send RTCP */
- send_rtcp(stream, PJ_TRUE, PJ_FALSE);
+ send_rtcp(stream, PJ_TRUE, PJ_FALSE, PJ_FALSE, PJ_FALSE);
/* Update stats in case the stream is paused */
stream->rtcp.stat.rtp_tx_last_seq = pj_ntohs(stream->enc->rtp.out_hdr.seq);
@@ -1028,6 +1039,14 @@
}
pjmedia_rtcp_rx_rtcp(&stream->rtcp, pkt, bytes_read);
+
+ /* XXX: posting some event from the RTCP session might be a better option */
+ if (stream->rtcp.keyframe_requested) {
+ pjmedia_event event;
+ pjmedia_event_init(&event, PJMEDIA_EVENT_KEYFRAME_REQUESTED, NULL, stream);
+ pj_memcpy(&stream->keyframe_req_event, &event, sizeof(event));
+ }
+
}
static pj_status_t put_frame(pjmedia_port *port,
@@ -1419,8 +1438,8 @@
new_fps.denum = ts_diff;
}
- /* Only apply the new FPS when it is >0, <=100, and increasing */
- if (new_fps.num/new_fps.denum <= 100 &&
+ /* Only apply the new FPS when it is >0, <=60, and increasing */
+ if (new_fps.num/new_fps.denum <= 60 &&
new_fps.num/new_fps.denum > 0 &&
new_fps.num*1.0/new_fps.denum >
stream->dec_max_fps.num*1.0/stream->dec_max_fps.denum)
@@ -1516,8 +1535,10 @@
/* Override the framerate to be 1.5x higher in the event
* for the renderer.
*/
+#if 0
fmt_chg_data->new_fmt.det.vid.fps.num *= 3;
fmt_chg_data->new_fmt.det.vid.fps.num /= 2;
+#endif
} else {
pjmedia_format_copy(&stream->info.codec_param->enc_fmt,
&fmt_chg_data->new_fmt);
@@ -1534,12 +1555,25 @@
stream->fmt_event.type = PJMEDIA_EVENT_NONE;
}
+ if (stream->found_keyframe_event.type != PJMEDIA_EVENT_NONE) {
+ pjmedia_event_publish(NULL, port, &stream->found_keyframe_event,
+ PJMEDIA_EVENT_PUBLISH_POST_EVENT);
+ stream->found_keyframe_event.type = PJMEDIA_EVENT_NONE;
+ }
+
if (stream->miss_keyframe_event.type != PJMEDIA_EVENT_NONE) {
pjmedia_event_publish(NULL, port, &stream->miss_keyframe_event,
PJMEDIA_EVENT_PUBLISH_POST_EVENT);
stream->miss_keyframe_event.type = PJMEDIA_EVENT_NONE;
}
+ if (stream->keyframe_req_event.type != PJMEDIA_EVENT_NONE) {
+ pjmedia_event_publish(NULL, port, &stream->keyframe_req_event,
+ PJMEDIA_EVENT_PUBLISH_POST_EVENT);
+ stream->keyframe_req_event.type = PJMEDIA_EVENT_NONE;
+ }
+
+
pj_mutex_lock( stream->jb_mutex );
if (stream->dec_frame.size == 0) {
@@ -1846,7 +1880,9 @@
* local renderer clock) or video setup lag. Note that the actual framerate
* will be continuously calculated based on the incoming RTP timestamps.
*/
+#if 0
vfd_dec->fps.num = vfd_dec->fps.num * 3 / 2;
+#endif
stream->dec_max_fps = vfd_dec->fps;
/* Create decoder channel */
@@ -2362,6 +2398,46 @@
return PJ_SUCCESS;
}
+
+/*
+ * Send RTCP PLI.
+ */
+PJ_DEF(pj_status_t) pjmedia_vid_stream_send_rtcp_pli(
+ pjmedia_vid_stream *stream)
+{
+ PJ_ASSERT_RETURN(stream, PJ_EINVAL);
+
+ if (stream->enc && stream->transport) {
+ void *sr_rr_pkt;
+ pj_uint8_t *pkt;
+ int len, max_len;
+ pj_status_t status;
+ pj_size_t pli_len;
+
+ /* Build RTCP RR/SR packet */
+ pjmedia_rtcp_build_rtcp(&stream->rtcp, &sr_rr_pkt, &len);
+
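+ /* Copy the SR/RR into the output buffer; the PLI is appended after it
+ * below, so a single compound RTCP packet goes out. */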
+ pkt = (pj_uint8_t*) stream->out_rtcp_pkt;
+ pj_memcpy(pkt, sr_rr_pkt, len);
+ max_len = stream->out_rtcp_pkt_size;
+
+ /* Build RTCP PLI packet */
+ pli_len = max_len - len;
+ status = pjmedia_rtcp_build_rtcp_pli(&stream->rtcp, pkt+len, &pli_len);
+ if (status != PJ_SUCCESS) {
+ PJ_PERROR(4,(stream->name.ptr, status, "Error generating RTCP PLI"));
+ } else {
+ len += (int)pli_len;
+ }
+
+ /* Send! */
+ status = pjmedia_transport_send_rtcp(stream->transport, pkt, len);
+
+ return status;
+ }
+
+ return PJ_SUCCESS;
+}
/*
diff -ruN pjproject-2.10/pjmedia/src/pjmedia/vid_tee.c pjsip/pjmedia/src/pjmedia/vid_tee.c
--- pjproject-2.10/pjmedia/src/pjmedia/vid_tee.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia/vid_tee.c 2021-02-06 21:28:30.516675163 +0100
@@ -52,6 +52,7 @@
unsigned dst_port_cnt;
vid_tee_dst_port *dst_ports;
pj_uint8_t *put_frm_flag;
+ pj_mutex_t *lock;
struct vid_tee_conv_t {
pjmedia_converter *conv;
@@ -86,6 +87,11 @@
tee->pf = pool->factory;
tee->pool = pj_pool_create(tee->pf, "video tee", 500, 500, NULL);
+ /* Create lock */
+ status = pj_mutex_create_simple(pool, "vid-tee-mutex", &tee->lock);
+ if (status != PJ_SUCCESS)
+ return status;
+
/* Initialize video tee structure */
tee->dst_port_maxcnt = max_dst_cnt;
tee->dst_ports = (vid_tee_dst_port*)
@@ -100,14 +106,16 @@
/* Initialize video tee buffer, its size is one frame */
vfi = pjmedia_get_video_format_info(NULL, fmt->id);
- if (vfi == NULL)
- return PJMEDIA_EBADFMT;
+ if (vfi == NULL) {
+ status = PJMEDIA_EBADFMT;
+ goto on_error;
+ }
pj_bzero(&vafp, sizeof(vafp));
vafp.size = fmt->det.vid.size;
status = vfi->apply_fmt(vfi, &vafp);
if (status != PJ_SUCCESS)
- return status;
+ goto on_error;
tee->buf_size = vafp.framebytes;
@@ -118,7 +126,7 @@
PJMEDIA_DIR_ENCODING,
fmt);
if (status != PJ_SUCCESS)
- return status;
+ goto on_error;
tee->base.get_frame = &tee_get_frame;
tee->base.put_frame = &tee_put_frame;
@@ -128,6 +136,12 @@
*p_vid_tee = &tee->base;
return PJ_SUCCESS;
+
+on_error:
+ pj_mutex_destroy(tee->lock);
+ tee->lock = NULL;
+ return status;
+
}
static void realloc_buf(vid_tee_port *vid_tee,
@@ -169,21 +183,29 @@
{
vid_tee_port *tee = (vid_tee_port*)vid_tee;
pjmedia_video_format_detail *vfd;
+ pj_status_t status;
PJ_ASSERT_RETURN(vid_tee && vid_tee->info.signature==TEE_PORT_SIGN,
PJ_EINVAL);
- if (tee->dst_port_cnt >= tee->dst_port_maxcnt)
- return PJ_ETOOMANY;
-
- if (vid_tee->info.fmt.id != port->info.fmt.id)
- return PJMEDIA_EBADFMT;
+ pj_mutex_lock(tee->lock);
+
+ if (tee->dst_port_cnt >= tee->dst_port_maxcnt) {
+ status = PJ_ETOOMANY;
+ goto end;
+ }
+
+ if (vid_tee->info.fmt.id != port->info.fmt.id) {
+ status = PJMEDIA_EBADFMT;
+ goto end;
+ }
vfd = pjmedia_format_get_video_format_detail(&port->info.fmt, PJ_TRUE);
if (vfd->size.w != vid_tee->info.fmt.det.vid.size.w ||
vfd->size.h != vid_tee->info.fmt.det.vid.size.h)
{
- return PJMEDIA_EBADFMT;
+ status = PJMEDIA_EBADFMT;
+ goto end;
}
realloc_buf(tee, (option & PJMEDIA_VID_TEE_DST_DO_IN_PLACE_PROC)?
@@ -194,7 +216,12 @@
tee->dst_ports[tee->dst_port_cnt].option = option;
++tee->dst_port_cnt;
- return PJ_SUCCESS;
+ status = PJ_SUCCESS;
+
+end:
+ pj_mutex_unlock(tee->lock);
+ return status;
+
}
@@ -208,12 +235,17 @@
{
vid_tee_port *tee = (vid_tee_port*)vid_tee;
pjmedia_video_format_detail *vfd;
+ pj_status_t status;
PJ_ASSERT_RETURN(vid_tee && vid_tee->info.signature==TEE_PORT_SIGN,
PJ_EINVAL);
-
- if (tee->dst_port_cnt >= tee->dst_port_maxcnt)
- return PJ_ETOOMANY;
+
+ pj_mutex_lock(tee->lock);
+
+ if (tee->dst_port_cnt >= tee->dst_port_maxcnt) {
+ status = PJ_ETOOMANY;
+ goto end;
+ }
pj_bzero(&tee->tee_conv[tee->dst_port_cnt], sizeof(tee->tee_conv[0]));
@@ -226,17 +258,18 @@
const pjmedia_video_format_info *vfi;
pjmedia_video_apply_fmt_param vafp;
pjmedia_conversion_param conv_param;
- pj_status_t status;
vfi = pjmedia_get_video_format_info(NULL, port->info.fmt.id);
- if (vfi == NULL)
+ if (vfi == NULL) {
- return PJMEDIA_EBADFMT;
+ status = PJMEDIA_EBADFMT;
+ goto end;
+ }
pj_bzero(&vafp, sizeof(vafp));
vafp.size = port->info.fmt.det.vid.size;
status = vfi->apply_fmt(vfi, &vafp);
if (status != PJ_SUCCESS)
- return status;
+ goto end;
realloc_buf(tee, (option & PJMEDIA_VID_TEE_DST_DO_IN_PLACE_PROC)?
2: 1, vafp.framebytes);
@@ -248,7 +280,7 @@
NULL, tee->pool, &conv_param,
&tee->tee_conv[tee->dst_port_cnt].conv);
if (status != PJ_SUCCESS)
- return status;
+ goto end;
tee->tee_conv[tee->dst_port_cnt].conv_buf_size = vafp.framebytes;
} else {
@@ -259,8 +291,12 @@
tee->dst_ports[tee->dst_port_cnt].dst = port;
tee->dst_ports[tee->dst_port_cnt].option = option;
++tee->dst_port_cnt;
-
- return PJ_SUCCESS;
+
+ status = PJ_SUCCESS;
+
+end:
+ pj_mutex_unlock(tee->lock);
+ return status;
}
@@ -276,6 +312,8 @@
PJ_ASSERT_RETURN(vid_tee && vid_tee->info.signature==TEE_PORT_SIGN,
PJ_EINVAL);
+ pj_mutex_lock(tee->lock);
+
for (i = 0; i < tee->dst_port_cnt; ++i) {
if (tee->dst_ports[i].dst == port) {
if (tee->tee_conv[i].conv)
@@ -286,10 +324,13 @@
pj_array_erase(tee->tee_conv, sizeof(tee->tee_conv[0]),
tee->dst_port_cnt, i);
--tee->dst_port_cnt;
+
+ pj_mutex_unlock(tee->lock);
return PJ_SUCCESS;
}
}
+ pj_mutex_unlock(tee->lock);
return PJ_ENOTFOUND;
}
@@ -300,6 +341,12 @@
unsigned i, j;
const pj_uint8_t PUT_FRM_DONE = 1;
+ if (pj_mutex_trylock(tee->lock) != PJ_SUCCESS) {
+ /* we are busy adding / removing consumers */
+ return PJ_SUCCESS;
+ }
+
+
pj_bzero(tee->put_frm_flag, tee->dst_port_cnt *
sizeof(tee->put_frm_flag[0]));
@@ -364,6 +411,7 @@
}
}
+ pj_mutex_unlock(tee->lock);
return PJ_SUCCESS;
}
@@ -383,6 +431,11 @@
PJ_ASSERT_RETURN(port && port->info.signature==TEE_PORT_SIGN, PJ_EINVAL);
+ if (tee->lock) {
+ pj_mutex_destroy(tee->lock);
+ tee->lock = NULL;
+ }
+
pj_pool_release(tee->pool);
if (tee->buf_pool)
pj_pool_release(tee->buf_pool);
diff -ruN pjproject-2.10/pjmedia/src/pjmedia-audiodev/alsa_dev.c pjsip/pjmedia/src/pjmedia-audiodev/alsa_dev.c
--- pjproject-2.10/pjmedia/src/pjmedia-audiodev/alsa_dev.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia-audiodev/alsa_dev.c 2021-02-06 23:08:42.203153000 +0100
@@ -43,7 +43,7 @@
#define ALSASOUND_CAPTURE 2
#define MAX_SOUND_CARDS 5
#define MAX_SOUND_DEVICES_PER_CARD 5
-#define MAX_DEVICES 32
+#define MAX_DEVICES 128
#define MAX_MIX_NAME_LEN 64
/* Set to 1 to enable tracing */
@@ -74,6 +74,10 @@
pjmedia_aud_play_cb play_cb,
void *user_data,
pjmedia_aud_stream **p_strm);
+static void alsa_factory_set_observer(pjmedia_aud_dev_factory *f,
+ pjmedia_aud_dev_change_callback cb);
+static int alsa_factory_get_default_rec_dev(pjmedia_aud_dev_factory *f);
+static int alsa_factory_get_default_play_dev(pjmedia_aud_dev_factory *f);
/*
* Stream prototypes
@@ -90,6 +94,15 @@
static pj_status_t alsa_stream_stop(pjmedia_aud_stream *strm);
static pj_status_t alsa_stream_destroy(pjmedia_aud_stream *strm);
+/* alsa device info */
+struct alsa_dev_info
+{
+ pjmedia_aud_dev_info info;
+ char alsa_name[64];
+ int input_count;
+ int output_count;
+};
+
struct alsa_factory
{
@@ -99,7 +112,7 @@
pj_pool_t *base_pool;
unsigned dev_cnt;
- pjmedia_aud_dev_info devs[MAX_DEVICES];
+ struct alsa_dev_info devs[MAX_DEVICES];
char pb_mixer_name[MAX_MIX_NAME_LEN];
};
@@ -140,7 +153,10 @@
&alsa_factory_get_dev_info,
&alsa_factory_default_param,
&alsa_factory_create_stream,
- &alsa_factory_refresh
+ &alsa_factory_refresh,
+ &alsa_factory_set_observer,
+ &alsa_factory_get_default_rec_dev,
+ &alsa_factory_get_default_play_dev
};
static pjmedia_aud_stream_op alsa_stream_op =
@@ -214,9 +230,9 @@
}
-static pj_status_t add_dev (struct alsa_factory *af, const char *dev_name)
+static pj_status_t add_dev (struct alsa_factory *af, const char *dev_name, const char *dev_desc)
{
- pjmedia_aud_dev_info *adi;
+ struct alsa_dev_info *adi;
snd_pcm_t* pcm;
int pb_result, ca_result;
@@ -258,23 +274,63 @@
pj_bzero(adi, sizeof(*adi));
/* Set device name */
- strncpy(adi->name, dev_name, sizeof(adi->name));
+ strncpy(adi->alsa_name, dev_name, sizeof(adi->alsa_name));
+
+ /* Set comprehensive device name */
+ int name_size = sizeof(adi->info.name);
+ if (dev_desc) {
+ pj_bool_t name_set = PJ_FALSE;
+ if (strncmp("sysdefault", dev_name, 10) == 0) {
+ /* Only use first line for default device*/
+ char *ptr = strstr(dev_desc, "\n");
+ if (ptr) {
+ int len = ptr - dev_desc;
+ strncpy(adi->info.name, dev_desc, (len >= name_size-1)?name_size-1:len);
+ name_set = PJ_TRUE;
+ }
+ } else if (strncmp("iec958", dev_name, 6) == 0) {
+ /* Mangle name for SPDIF devices*/
+ char *ptr = strstr(dev_desc, ",");
+ if (ptr) {
+ int len = ptr - dev_desc;
+ if (len + 18 < name_size) {
+ strncpy(adi->info.name, dev_desc, len);
+ strncpy(adi->info.name+len, ", Digital (S/PDIF)", 18);
+ name_set = PJ_TRUE;
+ }
+ }
+ }
+
+ if (!name_set) {
+ /* Use the entire description for other device names */
+ int i = 0;
+ while (i < name_size-1 && dev_desc[i] != '\0') {
+ if (dev_desc[i] == '\n' || dev_desc[i] == '\r')
+ adi->info.name[i] = ' ';
+ else
+ adi->info.name[i] = dev_desc[i];
+ i++;
+ }
+ }
+ } else {
+ strncpy(adi->info.name, dev_name, name_size);
+ }
/* Check the number of playback channels */
- adi->output_count = (pb_result>=0) ? 1 : 0;
+ adi->info.output_count = (pb_result>=0) ? 1 : 0;
/* Check the number of capture channels */
- adi->input_count = (ca_result>=0) ? 1 : 0;
+ adi->info.input_count = (ca_result>=0) ? 1 : 0;
/* Set the default sample rate */
- adi->default_samples_per_sec = 8000;
+ adi->info.default_samples_per_sec = 8000;
/* Driver name */
- strcpy(adi->driver, "ALSA");
+ strcpy(adi->info.driver, "ALSA");
++af->dev_cnt;
- PJ_LOG (5,(THIS_FILE, "Added sound device %s", adi->name));
+ PJ_LOG (5,(THIS_FILE, "Added sound device %s", adi->alsa_name));
return PJ_SUCCESS;
}
@@ -399,10 +455,26 @@
n = hints;
while (*n != NULL) {
char *name = snd_device_name_get_hint(*n, "NAME");
- if (name != NULL) {
- if (0 != strcmp("null", name))
- add_dev(af, name);
+ char *desc = snd_device_name_get_hint(*n, "DESC");
+ if (name != NULL) {
+ if (strncmp("null", name, 4) == 0 ||
+ strncmp("front", name, 5) == 0 ||
+ strncmp("rear", name, 4) == 0 ||
+ strncmp("side", name, 4) == 0 ||
+ strncmp("dmix", name, 4) == 0 ||
+ strncmp("dsnoop", name, 6) == 0 ||
+ strncmp("hw", name, 2) == 0 ||
+ strncmp("plughw", name, 6) == 0 ||
+ strncmp("center_lfe", name, 10) == 0 ||
+ strncmp("surround", name, 8) == 0 ||
+ (strncmp("default", name, 7) == 0 && strstr(name, ":CARD=") != NULL)) {
+ /* skip these devices, 'sysdefault' always contains the relevant information */
+ ;
+ } else {
+ add_dev(af, name, desc);
+ }
free(name);
+ free(desc);
}
n++;
}
@@ -440,7 +512,7 @@
PJ_ASSERT_RETURN(index>=0 && index<af->dev_cnt, PJ_EINVAL);
- pj_memcpy(info, &af->devs[index], sizeof(*info));
+ pj_memcpy(info, &af->devs[index].info, sizeof(*info));
info->caps = PJMEDIA_AUD_DEV_CAP_INPUT_LATENCY |
PJMEDIA_AUD_DEV_CAP_OUTPUT_LATENCY;
return PJ_SUCCESS;
@@ -452,22 +524,22 @@
pjmedia_aud_param *param)
{
struct alsa_factory *af = (struct alsa_factory*)f;
- pjmedia_aud_dev_info *adi;
+ struct alsa_dev_info *adi;
PJ_ASSERT_RETURN(index>=0 && index<af->dev_cnt, PJ_EINVAL);
adi = &af->devs[index];
pj_bzero(param, sizeof(*param));
- if (adi->input_count && adi->output_count) {
+ if (adi->info.input_count && adi->info.output_count) {
param->dir = PJMEDIA_DIR_CAPTURE_PLAYBACK;
param->rec_id = index;
param->play_id = index;
- } else if (adi->input_count) {
+ } else if (adi->info.input_count) {
param->dir = PJMEDIA_DIR_CAPTURE;
param->rec_id = index;
param->play_id = PJMEDIA_AUD_INVALID_DEV;
- } else if (adi->output_count) {
+ } else if (adi->info.output_count) {
param->dir = PJMEDIA_DIR_PLAYBACK;
param->play_id = index;
param->rec_id = PJMEDIA_AUD_INVALID_DEV;
@@ -475,11 +547,11 @@
return PJMEDIA_EAUD_INVDEV;
}
- param->clock_rate = adi->default_samples_per_sec;
+ param->clock_rate = adi->info.default_samples_per_sec;
param->channel_count = 1;
- param->samples_per_frame = adi->default_samples_per_sec * 20 / 1000;
+ param->samples_per_frame = adi->info.default_samples_per_sec * 20 / 1000;
param->bits_per_sample = 16;
- param->flags = adi->caps;
+ param->flags = adi->info.caps;
param->input_latency_ms = PJMEDIA_SND_DEFAULT_REC_LATENCY;
param->output_latency_ms = PJMEDIA_SND_DEFAULT_PLAY_LATENCY;
@@ -626,9 +698,9 @@
/* Open PCM for playback */
PJ_LOG (5,(THIS_FILE, "open_playback: Open playback device '%s'",
- stream->af->devs[param->play_id].name));
+ stream->af->devs[param->play_id].alsa_name));
result = snd_pcm_open (&stream->pb_pcm,
- stream->af->devs[param->play_id].name,
+ stream->af->devs[param->play_id].alsa_name,
SND_PCM_STREAM_PLAYBACK,
0);
if (result < 0)
@@ -722,7 +794,7 @@
PJ_LOG (5,(THIS_FILE, "Opened device alsa(%s) for playing, sample rate=%d"
", ch=%d, bits=%d, period size=%d frames, latency=%d ms",
- stream->af->devs[param->play_id].name,
+ stream->af->devs[param->play_id].alsa_name,
rate, param->channel_count,
param->bits_per_sample, stream->pb_frames,
(int)stream->param.output_latency_ms));
@@ -746,9 +818,9 @@
/* Open PCM for capture */
PJ_LOG (5,(THIS_FILE, "open_capture: Open capture device '%s'",
- stream->af->devs[param->rec_id].name));
+ stream->af->devs[param->rec_id].alsa_name));
result = snd_pcm_open (&stream->ca_pcm,
- stream->af->devs[param->rec_id].name,
+ stream->af->devs[param->rec_id].alsa_name,
SND_PCM_STREAM_CAPTURE,
0);
if (result < 0)
@@ -842,7 +914,7 @@
PJ_LOG (5,(THIS_FILE, "Opened device alsa(%s) for capture, sample rate=%d"
", ch=%d, bits=%d, period size=%d frames, latency=%d ms",
- stream->af->devs[param->rec_id].name,
+ stream->af->devs[param->rec_id].alsa_name,
rate, param->channel_count,
param->bits_per_sample, stream->ca_frames,
(int)stream->param.input_latency_ms));
@@ -903,6 +975,27 @@
return PJ_SUCCESS;
}
+/* API: set audio device change observer */
+static void alsa_factory_set_observer(pjmedia_aud_dev_factory *f,
+ pjmedia_aud_dev_change_callback cb)
+{
+ PJ_UNUSED_ARG(f);
+ PJ_UNUSED_ARG(cb);
+}
+
+/* API: get default recording device */
+static int alsa_factory_get_default_rec_dev(pjmedia_aud_dev_factory *f)
+{
+ PJ_UNUSED_ARG(f);
+ return -1;
+}
+
+/* API: get default playback device */
+static int alsa_factory_get_default_play_dev(pjmedia_aud_dev_factory *f)
+{
+ PJ_UNUSED_ARG(f);
+ return -1;
+}
/* API: get running parameter */
static pj_status_t alsa_stream_get_param(pjmedia_aud_stream *s,
diff -ruN pjproject-2.10/pjmedia/src/pjmedia-audiodev/coreaudio_dev.m pjsip/pjmedia/src/pjmedia-audiodev/coreaudio_dev.m
--- pjproject-2.10/pjmedia/src/pjmedia-audiodev/coreaudio_dev.m 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia-audiodev/coreaudio_dev.m 2021-02-06 22:51:16.641714862 +0100
@@ -173,6 +173,11 @@
void *user_data,
pjmedia_aud_stream **p_aud_strm);
+static void ca_factory_set_observer(pjmedia_aud_dev_factory *f,
+ pjmedia_aud_dev_change_callback cb);
+static int ca_factory_get_default_rec_dev(pjmedia_aud_dev_factory *f);
+static int ca_factory_get_default_play_dev(pjmedia_aud_dev_factory *f);
+
static pj_status_t ca_stream_get_param(pjmedia_aud_stream *strm,
pjmedia_aud_param *param);
static pj_status_t ca_stream_get_cap(pjmedia_aud_stream *strm,
@@ -206,7 +211,10 @@
&ca_factory_get_dev_info,
&ca_factory_default_param,
&ca_factory_create_stream,
- &ca_factory_refresh
+ &ca_factory_refresh,
+ &ca_factory_set_observer,
+ &ca_factory_get_default_rec_dev,
+ &ca_factory_get_default_play_dev
};
static pjmedia_aud_stream_op stream_op =
@@ -717,6 +725,169 @@
return PJ_SUCCESS;
}
+static OSStatus property_listener_proc(AudioObjectID objectID,
+ UInt32 numberAddresses,
+ const AudioObjectPropertyAddress inAddresses[],
+ void *clientData)
+{
+ pjmedia_aud_dev_change_callback cb = (pjmedia_aud_dev_change_callback)clientData;
+ pjmedia_aud_dev_change_event event;
+ UInt32 i;
+
+ for(i = 0; i < numberAddresses; i++) {
+ event = 0;
+ switch (inAddresses[i].mSelector) {
+ case kAudioHardwarePropertyDefaultInputDevice:
+ event = DEFAULT_INPUT_CHANGED;
+ break;
+ case kAudioHardwarePropertyDefaultOutputDevice:
+ event = DEFAULT_OUTPUT_CHANGED;
+ break;
+ case kAudioHardwarePropertyDevices:
+ event = DEVICE_LIST_CHANGED;
+ break;
+ default:
+ break;
+ }
+ if (event > 0) {
+ (cb)(event);
+ }
+ }
+
+ return noErr;
+}
+
+/* API: set audio device change observer */
+static void ca_factory_set_observer(pjmedia_aud_dev_factory *f,
+ pjmedia_aud_dev_change_callback cb)
+{
+ AudioObjectPropertyAddress addr;
+ OSStatus ostatus;
+
+ /* observer for devices list */
+ addr.mSelector = kAudioHardwarePropertyDevices;
+ addr.mScope = kAudioObjectPropertyScopeGlobal;
+ addr.mElement = kAudioObjectPropertyElementMaster;
+
+ if (cb) {
+ ostatus = AudioObjectAddPropertyListener(kAudioObjectSystemObject,
+ &addr,
+ property_listener_proc,
+ cb);
+ } else {
+ ostatus = AudioObjectRemovePropertyListener(kAudioObjectSystemObject,
+ &addr,
+ property_listener_proc,
+ cb);
+ }
+ if (ostatus != noErr) {
+ PJ_LOG(5,(THIS_FILE, "Error %sregistering devices list observer", cb==NULL ? "un-" : ""));
+ }
+
+ /* observer for default input device */
+ addr.mSelector = kAudioHardwarePropertyDefaultInputDevice;
+
+ if (cb) {
+ ostatus = AudioObjectAddPropertyListener(kAudioObjectSystemObject,
+ &addr,
+ property_listener_proc,
+ cb);
+ } else {
+ ostatus = AudioObjectRemovePropertyListener(kAudioObjectSystemObject,
+ &addr,
+ property_listener_proc,
+ cb);
+ }
+ if (ostatus != noErr) {
+ PJ_LOG(5,(THIS_FILE, "Error %sregistering default input device observer", cb==NULL ? "un-" : ""));
+ }
+
+ /* observer for default output device */
+ addr.mSelector = kAudioHardwarePropertyDefaultOutputDevice;
+
+ if (cb) {
+ ostatus = AudioObjectAddPropertyListener(kAudioObjectSystemObject,
+ &addr,
+ property_listener_proc,
+ cb);
+ } else {
+ ostatus = AudioObjectRemovePropertyListener(kAudioObjectSystemObject,
+ &addr,
+ property_listener_proc,
+ cb);
+ }
+ if (ostatus != noErr) {
+ PJ_LOG(5,(THIS_FILE, "Error %sregistering default output device observer", cb==NULL ? "un-" : ""));
+ }
+
+}
+
+/* API: get default recording device */
+static int ca_factory_get_default_rec_dev(pjmedia_aud_dev_factory *f)
+{
+ AudioDeviceID dev_id = kAudioObjectUnknown;
+ AudioObjectPropertyAddress addr;
+ UInt32 size;
+ OSStatus ostatus;
+ int i;
+ int idx = -1;
+ struct coreaudio_factory *cf = (struct coreaudio_factory*)f;
+
+ /* Find default audio input device */
+ addr.mSelector = kAudioHardwarePropertyDefaultInputDevice;
+ addr.mScope = kAudioObjectPropertyScopeGlobal;
+ addr.mElement = kAudioObjectPropertyElementMaster;
+ size = sizeof(dev_id);
+
+ ostatus = AudioObjectGetPropertyData(kAudioObjectSystemObject,
+ &addr, 0, NULL,
+ &size, (void *)&dev_id);
+ if (ostatus == noErr) {
+ for (i = 0; i < cf->dev_count; i++) {
+ struct coreaudio_dev_info *cdi;
+ cdi = &cf->dev_info[i];
+ if (cdi->dev_id == dev_id) {
+ idx = i;
+ break;
+ }
+ }
+ }
+ return idx;
+}
+
+/* API: get default playback device */
+static int ca_factory_get_default_play_dev(pjmedia_aud_dev_factory *f)
+{
+ AudioDeviceID dev_id = kAudioObjectUnknown;
+ AudioObjectPropertyAddress addr;
+ UInt32 size;
+ OSStatus ostatus;
+ int i;
+ int idx = -1;
+ struct coreaudio_factory *cf = (struct coreaudio_factory*)f;
+
+ /* Find default audio output device */
+ addr.mSelector = kAudioHardwarePropertyDefaultOutputDevice;
+ addr.mScope = kAudioObjectPropertyScopeGlobal;
+ addr.mElement = kAudioObjectPropertyElementMaster;
+ size = sizeof(dev_id);
+
+ ostatus = AudioObjectGetPropertyData(kAudioObjectSystemObject,
+ &addr, 0, NULL,
+ &size, (void *)&dev_id);
+ if (ostatus == noErr) {
+ for (i = 0; i < cf->dev_count; i++) {
+ struct coreaudio_dev_info *cdi;
+ cdi = &cf->dev_info[i];
+ if (cdi->dev_id == dev_id) {
+ idx = i;
+ break;
+ }
+ }
+ }
+ return idx;
+}
+
OSStatus resampleProc(AudioConverterRef inAudioConverter,
UInt32 *ioNumberDataPackets,
AudioBufferList *ioData,
@@ -1862,7 +2033,6 @@
{
strm->param.input_latency_ms = (latency + latency2) * 1000 /
strm->param.clock_rate;
- strm->param.input_latency_ms++;
}
}
#else
@@ -1870,7 +2040,6 @@
strm->param.input_latency_ms =
(unsigned)(([strm->sess inputLatency] +
[strm->sess IOBufferDuration]) * 1000);
- strm->param.input_latency_ms++;
} else
return PJMEDIA_EAUD_INVCAP;
#endif
@@ -1903,7 +2072,6 @@
{
strm->param.output_latency_ms = (latency + latency2) * 1000 /
strm->param.clock_rate;
- strm->param.output_latency_ms++;
}
}
#else
@@ -1911,11 +2079,10 @@
strm->param.output_latency_ms =
(unsigned)(([strm->sess outputLatency] +
[strm->sess IOBufferDuration]) * 1000);
- strm->param.output_latency_ms++;
} else
return PJMEDIA_EAUD_INVCAP;
#endif
- *(unsigned*)pval = (++strm->param.output_latency_ms * 2);
+ *(unsigned*)pval = strm->param.output_latency_ms;
return PJ_SUCCESS;
} else if (cap==PJMEDIA_AUD_DEV_CAP_OUTPUT_VOLUME_SETTING &&
(strm->param.dir & PJMEDIA_DIR_PLAYBACK))
diff -ruN pjproject-2.10/pjmedia/src/pjmedia-audiodev/wmme_dev.c pjsip/pjmedia/src/pjmedia-audiodev/wmme_dev.c
--- pjproject-2.10/pjmedia/src/pjmedia-audiodev/wmme_dev.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia-audiodev/wmme_dev.c 2021-02-06 22:51:16.641714862 +0100
@@ -32,6 +32,7 @@
#endif
#include <windows.h>
+#include <dbt.h>
#include <mmsystem.h>
#include <mmreg.h>
@@ -69,6 +70,15 @@
#define THIS_FILE "wmme_dev.c"
+/* WMME device change observer */
+struct wmme_dev_observer
+{
+ pj_thread_t *thread;
+ pj_pool_t *pool;
+ pjmedia_aud_dev_change_callback cb;
+ HWND hWnd;
+};
+
/* WMME device info */
struct wmme_dev_info
{
@@ -87,6 +97,8 @@
unsigned dev_count;
struct wmme_dev_info *dev_info;
+
+ struct wmme_dev_observer dev_observer;
};
@@ -151,6 +163,11 @@
pjmedia_aud_play_cb play_cb,
void *user_data,
pjmedia_aud_stream **p_aud_strm);
+static void factory_set_observer(pjmedia_aud_dev_factory *f,
+ pjmedia_aud_dev_change_callback cb);
+static int factory_get_default_rec_dev(pjmedia_aud_dev_factory *f);
+static int factory_get_default_play_dev(pjmedia_aud_dev_factory *f);
+
static pj_status_t stream_get_param(pjmedia_aud_stream *strm,
pjmedia_aud_param *param);
@@ -174,7 +191,10 @@
&factory_get_dev_info,
&factory_default_param,
&factory_create_stream,
- &factory_refresh
+ &factory_refresh,
+ &factory_set_observer,
+ &factory_get_default_rec_dev,
+ &factory_get_default_play_dev
};
static pjmedia_aud_stream_op stream_op =
@@ -1336,6 +1356,201 @@
return PJ_SUCCESS;
}
+/* Processes OS messages arriving at the hWnd window */
+INT_PTR WINAPI ProcessOSMessage(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
+{
+ /* wf is used in order to query the number of audio devices currently handled */
+ static struct wmme_factory *wf = NULL;
+
+ switch( message )
+ {
+ case WM_CREATE:
+ /* Initialize wf pointer on the first run */
+ if (wf == NULL)
+ {
+ CREATESTRUCT *CrtStrPtr = (CREATESTRUCT *) lParam;
+ wf = (struct wmme_factory *)(CrtStrPtr->lpCreateParams);
+ }
+ break;
+ case WM_DEVICECHANGE:
+ /* Possible insertion or removal of a device. There are some issues:
+
+ - Some devices/drivers do not trigger arrival or
+ removecomplete events, but only devnodes_changed events.
+ Therefore, we process all of those types of events.
+
+ - Some hardware can send many devnodes_changed events at the
+ same time (up to ~15 such events). These batches are
+ detected using temporal locality (constMaxBatchPeriod_).
+ Once the device is detected, the remaining redundant events
+ are discarded. In order to know whether there is a new device,
+ the actual audio device count is compared to the stored audio
+ device count (via wf->dev_count).
+
+ - Hardware takes some time to settle and be recognized by
+ drivers. A small window of time is given in order to account
+ for this (constMaxSettleTime_).
+
+ The settle time should be slightly lower than the batch period.
+ */
+ if (wParam == DBT_DEVICEARRIVAL || wParam == DBT_DEVICEREMOVECOMPLETE || wParam == DBT_DEVNODES_CHANGED) {
+ const int constMaxBatchPeriod_ = 3; /* seconds */
+ const int constMaxSettleTime_ = (constMaxBatchPeriod_ * 1000) - 500; /* milliseconds */
+
+ /* Loop that allows hardware to settle */
+ int settleTimeLeft = constMaxSettleTime_;
+ while (settleTimeLeft > 0) {
+ /* Check whether the actual device list sizes (input/output) have
+ changed before notifying upper levels. Count input devices,
+ output devices and a WAVE MAPPER device for each direction.
+ */
+ if(waveInGetNumDevs() + waveOutGetNumDevs() + 2 != wf->dev_count) {
+ /* Hardware changed */
+ if (wf->dev_observer.cb) {
+ wf->dev_observer.cb(DEVICE_LIST_CHANGED);
+ }
+ break;
+ } else {
+ /* Hardware is settling... */
+ Sleep(250);
+ settleTimeLeft -= 250;
+ }
+ }
+ }
+ break;
+ case WM_CLOSE:
+ if (!DestroyWindow(hWnd)) {
+ PJ_LOG(4,(THIS_FILE, "Couldn't destroy message window"));
+ }
+ break;
+ case WM_DESTROY:
+ PostQuitMessage(0);
+ break;
+ default:
+ break;
+ }
+
+ return 1;
+}
+
+static pj_status_t create_os_messages_window(struct wmme_factory *wf)
+{
+ pj_status_t status = PJ_EBUG;
+ WNDCLASSEX wndClass;
+ HWND hWnd;
+
+ /* Set up and register window class */
+ ZeroMemory(&wndClass, sizeof(WNDCLASSEX));
+ wndClass.cbSize = sizeof(WNDCLASSEX);
+ wndClass.style = CS_OWNDC;
+ wndClass.lpfnWndProc = (WNDPROC)(ProcessOSMessage);
+ wndClass.hInstance = (HINSTANCE)(GetModuleHandle(0));
+ wndClass.lpszClassName = "DeviceChangeMessageWindow";
+
+ if (RegisterClassEx(&wndClass)) {
+ /* Create the window that will receive OS messages */
+ hWnd = CreateWindowEx( 0, "DeviceChangeMessageWindow", NULL, 0, 0, 0, 0, 0, NULL, NULL, NULL, (LPVOID)(wf));
+ if (hWnd != NULL) {
+ wf->dev_observer.hWnd = hWnd;
+ if (UpdateWindow(hWnd) != 0) {
+ status = PJ_SUCCESS;
+ }
+ } else {
+ PJ_LOG(4,(THIS_FILE, "Error creating window to receive device change events"));
+ }
+ }
+
+ return status;
+
+}
+
+static pj_status_t dispatch_os_messages(void)
+{
+ pj_status_t status = PJ_SUCCESS;
+ MSG msg;
+ int ret;
+
+ /* Process OS messages with low cpu-usage wait loop */
+ while((ret = GetMessage(&msg, NULL, 0, 0)) != 0) {
+ if (ret == -1) {
+ PJ_LOG(4,(THIS_FILE, "Couldn't process OS message"));
+ status = PJ_EBUG;
+ break;
+ } else {
+ TranslateMessage(&msg);
+ DispatchMessage(&msg);
+ }
+ }
+
+ return status;
+
+}
+
+/* WMME device observer thread. */
+static int PJ_THREAD_FUNC wmme_dev_observer_thread(void *arg)
+{
+ struct wmme_factory *wf = (struct wmme_factory*)arg;
+ pj_status_t status;
+
+ status = create_os_messages_window(wf);
+ if (status == PJ_SUCCESS) {
+ status = dispatch_os_messages();
+ if (status != PJ_SUCCESS) {
+ PJ_LOG(4,(THIS_FILE, "Error dispatching device detection window events"));
+ }
+ } else {
+ PJ_LOG(4,(THIS_FILE, "Failed to create window for receiving device detection events"));
+ }
+
+ return status;
+}
+
+/* API: set audio device change observer */
+static void factory_set_observer(pjmedia_aud_dev_factory *f,
+ pjmedia_aud_dev_change_callback cb)
+{
+ struct wmme_factory *wf = (struct wmme_factory*)f;
+ pj_pool_t *pool;
+ pj_status_t status;
+
+ if (cb) {
+ pool = pj_pool_create(wf->pf, "wmme-dev-observer", 1000, 1000, NULL);
+ PJ_ASSERT_ON_FAIL(pool != NULL, {return;});
+ status = pj_thread_create(pool, "wmme_observer", &wmme_dev_observer_thread, wf, 0, 0, &wf->dev_observer.thread);
+ if (status != PJ_SUCCESS) {
+ PJ_LOG(4,(THIS_FILE, "Failed to create WMME device detection thread"));
+ wf->dev_observer.thread = NULL;
+ return;
+ }
+ wf->dev_observer.cb = cb;
+ } else {
+ wf->dev_observer.cb = NULL;
+ if (wf->dev_observer.hWnd) {
+ PostMessage(wf->dev_observer.hWnd, WM_CLOSE, 0, 0); /* lets the window procedure destroy the window and end the message loop */
+ wf->dev_observer.hWnd = NULL;
+ }
+ pj_thread_join(wf->dev_observer.thread);
+ pj_thread_destroy(wf->dev_observer.thread);
+ wf->dev_observer.thread = NULL;
+ }
+}
+
+/* API: get default recording device */
+static int factory_get_default_rec_dev(pjmedia_aud_dev_factory *f)
+{
+ PJ_UNUSED_ARG(f);
+ /* Let PJMEDIA pick the first one available */
+ return -1;
+}
+
+/* API: get default playback device */
+static int factory_get_default_play_dev(pjmedia_aud_dev_factory *f)
+{
+ PJ_UNUSED_ARG(f);
+ /* Let PJMEDIA pick the first one available */
+ return -1;
+}
+
/* API: Get stream info. */
static pj_status_t stream_get_param(pjmedia_aud_stream *s,
pjmedia_aud_param *pi)
--- pjproject-2.10/pjmedia/src/pjmedia-codec/opus.c 2021-03-19 04:55:21.491267756 +0100
+++ pjsip/pjmedia/src/pjmedia-codec/opus.c 2021-03-19 05:48:19.457507442 +0100
@@ -152,7 +152,7 @@
static pjmedia_codec_opus_config opus_cfg =
{
PJMEDIA_CODEC_OPUS_DEFAULT_SAMPLE_RATE, /* Sample rate */
- 1, /* Channel count */
+ 2, /* Channel count */
PTIME, /* Frame time */
PJMEDIA_CODEC_OPUS_DEFAULT_BIT_RATE, /* Bit rate */
5, /* Expected packet loss */
diff -ruN pjproject-2.10/pjmedia/src/pjmedia-codec/ffmpeg_vid_codecs.c pjsip/pjmedia/src/pjmedia-codec/ffmpeg_vid_codecs.c
--- pjproject-2.10/pjmedia/src/pjmedia-codec/ffmpeg_vid_codecs.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia-codec/ffmpeg_vid_codecs.c 2021-02-06 22:51:16.641714862 +0100
@@ -62,16 +62,7 @@
#endif
#if LIBAVCODEC_VER_AT_LEAST(53,61)
-# if LIBAVCODEC_VER_AT_LEAST(54,59)
- /* Not sure when AVCodec::encode is obsoleted/removed. */
-# define AVCODEC_HAS_ENCODE(c) (c->encode2)
-# else
- /* Not sure when AVCodec::encode2 is introduced. It appears in
- * libavcodec 53.61 where some codecs actually still use AVCodec::encode
- * (e.g: H263, H264).
- */
-# define AVCODEC_HAS_ENCODE(c) (c->encode || c->encode2)
-# endif
+# define AVCODEC_HAS_ENCODE(c) (c->encode2)
# define AV_OPT_SET(obj,name,val,opt) (av_opt_set(obj,name,val,opt)==0)
# define AV_OPT_SET_INT(obj,name,val) (av_opt_set_int(obj,name,val,0)==0)
#else
@@ -266,6 +257,7 @@
/* H264 constants */
#define PROFILE_H264_BASELINE 66
#define PROFILE_H264_MAIN 77
+#define PROFILE_H264_HIGH 100
/* Codec specific functions */
#if PJMEDIA_HAS_FFMPEG_CODEC_H264
@@ -404,6 +396,9 @@
case PROFILE_H264_MAIN:
profile = "main";
break;
+ case PROFILE_H264_HIGH:
+ profile = "high";
+ break;
default:
break;
}
@@ -440,11 +435,11 @@
/* Misc x264 settings (performance, quality, latency, etc).
* Let's just use the x264 predefined preset & tune.
*/
- if (!AV_OPT_SET(ctx->priv_data, "preset", "veryfast", 0)) {
+ if (!AV_OPT_SET(ctx->priv_data, "preset", "ultrafast", 0)) {
PJ_LOG(3, (THIS_FILE, "Failed to set x264 preset 'veryfast'"));
}
- if (!AV_OPT_SET(ctx->priv_data, "tune", "animation+zerolatency", 0)) {
- PJ_LOG(3, (THIS_FILE, "Failed to set x264 tune 'zerolatency'"));
+ if (!AV_OPT_SET(ctx->priv_data, "tune", "fastdecode+zerolatency", 0)) {
+ PJ_LOG(3, (THIS_FILE, "Failed to set x264 tune 'fastdecode+zerolatency'"));
}
}
@@ -1425,12 +1420,12 @@
/* Check if encoder has been opened */
PJ_ASSERT_RETURN(ff->enc_ctx, PJ_EINVALIDOP);
-#ifdef PJMEDIA_USE_OLD_FFMPEG
- avcodec_get_frame_defaults(&avframe);
-#else
+//#ifdef PJMEDIA_USE_OLD_FFMPEG
+// avcodec_get_frame_defaults(&avframe);
+//#else
pj_bzero(&avframe, sizeof(avframe));
av_frame_unref(&avframe);
-#endif
+//#endif
// Let ffmpeg manage the timestamps
/*
@@ -1687,12 +1682,12 @@
* whole decoding session, and seems to be freed when the codec context
* closed).
*/
-#ifdef PJMEDIA_USE_OLD_FFMPEG
- avcodec_get_frame_defaults(&avframe);
-#else
+//#ifdef PJMEDIA_USE_OLD_FFMPEG
+// avcodec_get_frame_defaults(&avframe);
+//#else
pj_bzero(&avframe, sizeof(avframe));
av_frame_unref(&avframe);
-#endif
+//#endif
/* Init packet, the container of the encoded data */
av_init_packet(&avpacket);
@@ -1705,12 +1700,12 @@
avpacket.size = (int)input->size;
/* ffmpeg warns:
- * - input buffer padding, at least FF_INPUT_BUFFER_PADDING_SIZE
+ * - input buffer padding, at least AV_INPUT_BUFFER_PADDING_SIZE
* - null terminated
* Normally, encoded buffer is allocated more than needed, so lets just
* bzero the input buffer end/pad, hope it will be just fine.
*/
- pj_bzero(avpacket.data+avpacket.size, FF_INPUT_BUFFER_PADDING_SIZE);
+ pj_bzero(avpacket.data+avpacket.size, AV_INPUT_BUFFER_PADDING_SIZE);
output->bit_info = 0;
output->timestamp = input->timestamp;
diff -ruN pjproject-2.10/pjmedia/src/pjmedia-videodev/avf_dev.m pjsip/pjmedia/src/pjmedia-videodev/avf_dev.m
--- pjproject-2.10/pjmedia/src/pjmedia-videodev/avf_dev.m 1970-01-01 01:00:00.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia-videodev/avf_dev.m 2021-02-06 18:46:39.045322874 +0100
@@ -0,0 +1,682 @@
+/*
+ * Copyright (C) 2014-present AG Projects (http://ag-projects.com)
+ * Copyright (C) 2008-2011 Teluu Inc. (http://www.teluu.com)
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+#include <pjmedia-videodev/videodev_imp.h>
+#include <pj/assert.h>
+#include <pj/log.h>
+#include <pj/os.h>
+
+#if defined(PJMEDIA_HAS_VIDEO) && PJMEDIA_HAS_VIDEO != 0 && \
+ defined(PJMEDIA_VIDEO_DEV_HAS_AVF) && PJMEDIA_VIDEO_DEV_HAS_AVF != 0
+
+#include <Foundation/NSAutoreleasePool.h>
+#include <AVFoundation/AVFoundation.h>
+#include <QuartzCore/QuartzCore.h>
+#include <dispatch/dispatch.h>
+
+#define THIS_FILE "avf_dev.c"
+#define DEFAULT_CLOCK_RATE 90000
+#define DEFAULT_WIDTH 640
+#define DEFAULT_HEIGHT 480
+#define DEFAULT_FPS 15
+
+
+typedef struct avf_fmt_info
+{
+ pjmedia_format_id pjmedia_format;
+ unsigned avf_format;
+} avf_fmt_info;
+
+static avf_fmt_info avf_fmts[] =
+{
+ {PJMEDIA_FORMAT_BGRA, kCVPixelFormatType_32BGRA},
+ {PJMEDIA_FORMAT_YUY2, kCVPixelFormatType_422YpCbCr8_yuvs},
+ {PJMEDIA_FORMAT_UYVY, kCVPixelFormatType_422YpCbCr8},
+};
+
+/* avf device info */
+struct avf_dev_info
+{
+ pjmedia_vid_dev_info info;
+ AVCaptureDevice *dev;
+};
+
+/* avf factory */
+struct avf_factory
+{
+ pjmedia_vid_dev_factory base;
+ pj_pool_t *pool;
+ pj_pool_t *dev_pool;
+ pj_pool_factory *pf;
+
+ unsigned dev_count;
+ struct avf_dev_info *dev_info;
+};
+
+struct avf_stream; /* forward declaration */
+typedef void (*func_ptr)(struct avf_stream *strm);
+
+@interface AVFDelegate: NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
+{
+@public
+ struct avf_stream *stream;
+}
+@end
+
+
+/* Video stream. */
+struct avf_stream
+{
+ pjmedia_vid_dev_stream base; /**< Base stream */
+ pjmedia_vid_dev_param param; /**< Settings */
+ pj_pool_t *pool; /**< Memory pool. */
+
+ pj_timestamp cap_frame_ts; /**< Captured frame tstamp */
+ unsigned cap_ts_inc; /**< Increment */
+
+ pjmedia_vid_dev_cb vid_cb; /**< Stream callback. */
+ void *user_data; /**< Application data. */
+
+ pjmedia_rect_size size;
+
+ pj_bool_t cap_thread_initialized;
+ pj_thread_desc cap_thread_desc;
+ pj_thread_t *cap_thread;
+ pj_bool_t cap_exited;
+
+ struct avf_factory *af;
+ pj_status_t status;
+ pj_bool_t is_running;
+
+ dispatch_queue_t video_ops_queue;
+
+ AVCaptureSession *cap_session;
+ AVCaptureDeviceInput *dev_input;
+ AVCaptureVideoDataOutput *video_output;
+ AVFDelegate *delegate;
+};
+
+
+/* Prototypes */
+static pj_status_t avf_factory_init(pjmedia_vid_dev_factory *f);
+static pj_status_t avf_factory_destroy(pjmedia_vid_dev_factory *f);
+static pj_status_t avf_factory_refresh(pjmedia_vid_dev_factory *f);
+static unsigned avf_factory_get_dev_count(pjmedia_vid_dev_factory *f);
+static pj_status_t avf_factory_get_dev_info(pjmedia_vid_dev_factory *f,
+ unsigned index,
+ pjmedia_vid_dev_info *info);
+static pj_status_t avf_factory_default_param(pj_pool_t *pool,
+ pjmedia_vid_dev_factory *f,
+ unsigned index,
+ pjmedia_vid_dev_param *param);
+static pj_status_t avf_factory_create_stream(pjmedia_vid_dev_factory *f,
+ pjmedia_vid_dev_param *param,
+ const pjmedia_vid_dev_cb *cb,
+ void *user_data,
+ pjmedia_vid_dev_stream **p_vid_strm);
+
+static pj_status_t avf_stream_get_param(pjmedia_vid_dev_stream *strm,
+ pjmedia_vid_dev_param *param);
+static pj_status_t avf_stream_get_cap(pjmedia_vid_dev_stream *strm,
+ pjmedia_vid_dev_cap cap,
+ void *value);
+static pj_status_t avf_stream_set_cap(pjmedia_vid_dev_stream *strm,
+ pjmedia_vid_dev_cap cap,
+ const void *value);
+static pj_status_t avf_stream_start(pjmedia_vid_dev_stream *strm);
+static pj_status_t avf_stream_stop(pjmedia_vid_dev_stream *strm);
+static pj_status_t avf_stream_destroy(pjmedia_vid_dev_stream *strm);
+
+/* Operations */
+static pjmedia_vid_dev_factory_op factory_op =
+{
+ &avf_factory_init,
+ &avf_factory_destroy,
+ &avf_factory_get_dev_count,
+ &avf_factory_get_dev_info,
+ &avf_factory_default_param,
+ &avf_factory_create_stream,
+ &avf_factory_refresh
+};
+
+static pjmedia_vid_dev_stream_op stream_op =
+{
+ &avf_stream_get_param,
+ &avf_stream_get_cap,
+ &avf_stream_set_cap,
+ &avf_stream_start,
+ NULL,
+ NULL,
+ &avf_stream_stop,
+ &avf_stream_destroy
+};
+
+
+/****************************************************************************
+ * Factory operations
+ */
+/*
+ * Init avf video driver.
+ */
+pjmedia_vid_dev_factory* pjmedia_avf_factory(pj_pool_factory *pf)
+{
+ struct avf_factory *f;
+ pj_pool_t *pool;
+
+ pool = pj_pool_create(pf, "avf video", 4000, 4000, NULL);
+ f = PJ_POOL_ZALLOC_T(pool, struct avf_factory);
+ f->pf = pf;
+ f->pool = pool;
+ f->base.op = &factory_op;
+
+ return &f->base;
+}
+
+
+/* API: init factory */
+static pj_status_t avf_factory_init(pjmedia_vid_dev_factory *f)
+{
+ return avf_factory_refresh(f);
+}
+
+/* API: destroy factory */
+static pj_status_t avf_factory_destroy(pjmedia_vid_dev_factory *f)
+{
+ struct avf_factory *af = (struct avf_factory*)f;
+ pj_pool_t *pool = af->pool;
+
+ if (af->dev_pool)
+ pj_pool_release(af->dev_pool);
+ af->pool = NULL;
+ if (pool)
+ pj_pool_release(pool);
+
+ return PJ_SUCCESS;
+}
+
+/* API: refresh the list of devices */
+static pj_status_t avf_factory_refresh(pjmedia_vid_dev_factory *f)
+{
+ struct avf_factory *af = (struct avf_factory*)f;
+ struct avf_dev_info *di;
+ unsigned dev_count = 0;
+ NSAutoreleasePool *apool = [[NSAutoreleasePool alloc]init];
+ NSArray *dev_array;
+
+ if (af->dev_pool) {
+ pj_pool_release(af->dev_pool);
+ af->dev_pool = NULL;
+ }
+
+ dev_array = [AVCaptureDevice devices];
+ for (AVCaptureDevice *device in dev_array) {
+ if ([device hasMediaType:AVMediaTypeVideo] && ![device isSuspended]) {
+ dev_count++;
+ }
+ }
+
+ /* Initialize input and output devices here */
+ af->dev_count = 0;
+ af->dev_pool = pj_pool_create(af->pf, "avf video", 500, 500, NULL);
+
+ af->dev_info = (struct avf_dev_info*) pj_pool_calloc(af->dev_pool, dev_count, sizeof(struct avf_dev_info));
+ for (AVCaptureDevice *device in dev_array) {
+ if (![device hasMediaType:AVMediaTypeVideo] || [device isSuspended]) {
+ continue;
+ }
+
+ di = &af->dev_info[af->dev_count++];
+ pj_bzero(di, sizeof(*di));
+ di->dev = device;
+ pj_ansi_strncpy(di->info.name, [device.localizedName UTF8String], sizeof(di->info.name));
+ pj_ansi_strncpy(di->info.driver, "AVF", sizeof(di->info.driver));
+ di->info.dir = PJMEDIA_DIR_CAPTURE;
+ di->info.has_callback = PJ_TRUE;
+ di->info.fmt_cnt = 0;
+ di->info.caps = PJMEDIA_VID_DEV_CAP_FORMAT;
+
+ PJ_LOG(4, (THIS_FILE, " dev: %s", di->info.name));
+
+ for (AVCaptureDeviceFormat* f in [device formats]) {
+ unsigned i;
+ CMFormatDescriptionRef desc = [f formatDescription];
+ for (i = 0; i < PJ_ARRAY_SIZE(avf_fmts); i++) {
+ if (CMFormatDescriptionGetMediaSubType(desc) == avf_fmts[i].avf_format) {
+ char fmt_name[5];
+ CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(desc);
+ if (dim.width < 640)
+ continue;
+ pjmedia_fourcc_name(avf_fmts[i].pjmedia_format, fmt_name);
+ PJ_LOG(4, (THIS_FILE, " detected resolution %dx%d (%s)", dim.width, dim.height, fmt_name));
+ pjmedia_format *fmt = &di->info.fmt[di->info.fmt_cnt++];
+ pjmedia_format_init_video(fmt,
+ avf_fmts[i].pjmedia_format,
+ dim.width,
+ dim.height,
+ DEFAULT_FPS, 1);
+ }
+ }
+ }
+
+ if (di->info.fmt_cnt == 0) {
+ PJ_LOG(4, (THIS_FILE, " there are no compatible formats, using default"));
+ pjmedia_format *fmt = &di->info.fmt[di->info.fmt_cnt++];
+ pjmedia_format_init_video(fmt,
+ avf_fmts[0].pjmedia_format,
+ DEFAULT_WIDTH,
+ DEFAULT_HEIGHT,
+ DEFAULT_FPS, 1);
+ }
+ }
+
+ [apool release];
+
+ PJ_LOG(4, (THIS_FILE, "avf video has %d devices", af->dev_count));
+
+ return PJ_SUCCESS;
+}
+
+/* API: get number of devices */
+static unsigned avf_factory_get_dev_count(pjmedia_vid_dev_factory *f)
+{
+ struct avf_factory *af = (struct avf_factory*)f;
+ return af->dev_count;
+}
+
+/* API: get device info */
+static pj_status_t avf_factory_get_dev_info(pjmedia_vid_dev_factory *f,
+ unsigned index,
+ pjmedia_vid_dev_info *info)
+{
+ struct avf_factory *af = (struct avf_factory*)f;
+ PJ_ASSERT_RETURN(index < af->dev_count, PJMEDIA_EVID_INVDEV);
+
+ pj_memcpy(info, &af->dev_info[index].info, sizeof(*info));
+
+ return PJ_SUCCESS;
+}
+
+/* API: create default device parameter */
+static pj_status_t avf_factory_default_param(pj_pool_t *pool,
+ pjmedia_vid_dev_factory *f,
+ unsigned index,
+ pjmedia_vid_dev_param *param)
+{
+ struct avf_factory *af = (struct avf_factory*)f;
+ struct avf_dev_info *di = &af->dev_info[index];
+
+ PJ_ASSERT_RETURN(index < af->dev_count, PJMEDIA_EVID_INVDEV);
+ PJ_UNUSED_ARG(pool);
+
+ pj_bzero(param, sizeof(*param));
+ param->dir = PJMEDIA_DIR_CAPTURE;
+ param->cap_id = index;
+ param->rend_id = PJMEDIA_VID_INVALID_DEV;
+ param->flags = PJMEDIA_VID_DEV_CAP_FORMAT;
+ param->clock_rate = DEFAULT_CLOCK_RATE;
+ pj_memcpy(&param->fmt, &di->info.fmt[0], sizeof(param->fmt));
+
+ return PJ_SUCCESS;
+}
+
+static avf_fmt_info* get_avf_format_info(pjmedia_format_id id)
+{
+ unsigned i;
+
+ for (i = 0; i < PJ_ARRAY_SIZE(avf_fmts); i++) {
+ if (avf_fmts[i].pjmedia_format == id)
+ return &avf_fmts[i];
+ }
+
+ return NULL;
+}
+
+
+@implementation AVFDelegate
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection
+{
+ pjmedia_frame frame = {0};
+ CVImageBufferRef img;
+ CVReturn ret;
+ OSType type;
+ size_t width, height;
+
+ /* Register thread if needed */
+ if (stream->cap_thread_initialized == 0 || !pj_thread_is_registered()) {
+ pj_bzero(stream->cap_thread_desc, sizeof(pj_thread_desc));
+ pj_thread_register("avf_cap", stream->cap_thread_desc, &stream->cap_thread);
+ stream->cap_thread_initialized = 1;
+ }
+
+ if (!sampleBuffer)
+ return;
+
+ /* Get a CMSampleBuffer's Core Video image buffer for the media data */
+ img = CMSampleBufferGetImageBuffer(sampleBuffer);
+ if (!img)
+ return;
+
+ /* Check for supported formats */
+ type = CVPixelBufferGetPixelFormatType(img);
+ switch(type) {
+ case kCVPixelFormatType_32BGRA:
+ case kCVPixelFormatType_422YpCbCr8_yuvs:
+ case kCVPixelFormatType_422YpCbCr8:
+ break;
+ default:
+ PJ_LOG(4, (THIS_FILE, "Unsupported image format! %c%c%c%c", type>>24, type>>16, type>>8, type>>0));
+ return;
+ }
+
+ /* Lock the base address of the pixel buffer */
+ ret = CVPixelBufferLockBaseAddress(img, kCVPixelBufferLock_ReadOnly);
+ if (ret != kCVReturnSuccess)
+ return;
+
+ width = CVPixelBufferGetWidth(img);
+ height = CVPixelBufferGetHeight(img);
+
+ /* Prepare frame */
+ frame.type = PJMEDIA_FRAME_TYPE_VIDEO;
+ frame.timestamp.u64 = stream->cap_frame_ts.u64;
+ frame.buf = CVPixelBufferGetBaseAddress(img);
+ frame.size = CVPixelBufferGetBytesPerRow(img) * height;
+
+ if (stream->size.w != width || stream->size.h != height) {
+ PJ_LOG(4, (THIS_FILE, "AVF image size changed, before: %dx%d, after: %dx%d", stream->size.w, stream->size.h, width, height));
+ }
+
+ if (stream->vid_cb.capture_cb) {
+ (*stream->vid_cb.capture_cb)(&stream->base, stream->user_data, &frame);
+ }
+
+ stream->cap_frame_ts.u64 += stream->cap_ts_inc;
+
+ /* Unlock the pixel buffer */
+ CVPixelBufferUnlockBaseAddress(img, kCVPixelBufferLock_ReadOnly);
+}
+@end
+
+
+static void init_avf_stream(struct avf_stream *strm)
+{
+ const pjmedia_video_format_info *vfi;
+ pjmedia_video_format_detail *vfd;
+ avf_fmt_info *fi = get_avf_format_info(strm->param.fmt.id);
+ NSError *error;
+
+ if (!fi) {
+ strm->status = PJMEDIA_EVID_BADFORMAT;
+ return;
+ }
+
+ strm->cap_session = [[AVCaptureSession alloc] init];
+ if (!strm->cap_session) {
+ strm->status = PJ_ENOMEM;
+ return;
+ }
+
+ strm->cap_session.sessionPreset = AVCaptureSessionPresetHigh;
+ vfd = pjmedia_format_get_video_format_detail(&strm->param.fmt, PJ_TRUE);
+ pj_assert(vfd);
+ vfi = pjmedia_get_video_format_info(NULL, strm->param.fmt.id);
+ pj_assert(vfi);
+ vfd->size = strm->size;
+
+ PJ_LOG(4, (THIS_FILE, "Opening video device at %dx%d resolution", vfd->size.w, vfd->size.h));
+
+ /* Add the video device to the session as a device input */
+ AVCaptureDevice *videoDevice = strm->af->dev_info[strm->param.cap_id].dev;
+ strm->dev_input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error: &error];
+ if (!strm->dev_input) {
+ strm->status = PJMEDIA_EVID_SYSERR;
+ return;
+ }
+
+ [strm->cap_session addInput:strm->dev_input];
+
+ strm->video_output = [[AVCaptureVideoDataOutput alloc] init];
+ if (!strm->video_output) {
+ strm->status = PJMEDIA_EVID_SYSERR;
+ return;
+ }
+ [strm->cap_session addOutput:strm->video_output];
+
+ /* Configure the video output */
+ strm->video_output.alwaysDiscardsLateVideoFrames = YES;
+ /* The Apple provided documentation says the only supported key is kCVPixelBufferPixelFormatTypeKey,
+ * but it turns out kCVPixelBufferWidthKey and kCVPixelBufferHeightKey are also required. Thanks
+ * Chromium, for figuring it out.*/
+ strm->video_output.videoSettings =
+ [NSDictionary dictionaryWithObjectsAndKeys: @(fi->avf_format),
+ kCVPixelBufferPixelFormatTypeKey,
+ @(vfd->size.w),
+ kCVPixelBufferWidthKey,
+ @(vfd->size.h),
+ kCVPixelBufferHeightKey,
+ nil];
+ strm->delegate = [[AVFDelegate alloc] init];
+ strm->delegate->stream = strm;
+ dispatch_queue_t queue = dispatch_queue_create("AVFQueue", NULL);
+ [strm->video_output setSampleBufferDelegate:strm->delegate queue:queue];
+ dispatch_release(queue);
+}
+
+static void run_func_on_video_queue(struct avf_stream *strm, func_ptr func)
+{
+ dispatch_sync(strm->video_ops_queue, ^{
+ (*func)(strm);
+ });
+}
+
+/* API: create stream */
+static pj_status_t avf_factory_create_stream(pjmedia_vid_dev_factory *f,
+ pjmedia_vid_dev_param *param,
+ const pjmedia_vid_dev_cb *cb,
+ void *user_data,
+ pjmedia_vid_dev_stream **p_vid_strm)
+{
+ struct avf_factory *af = (struct avf_factory*)f;
+ pj_pool_t *pool;
+ struct avf_stream *strm;
+ const pjmedia_video_format_info *vfi;
+ pjmedia_video_format_detail *vfd;
+ pj_status_t status = PJ_SUCCESS;
+
+ PJ_ASSERT_RETURN(f && param && p_vid_strm, PJ_EINVAL);
+ PJ_ASSERT_RETURN(param->fmt.type == PJMEDIA_TYPE_VIDEO &&
+ param->fmt.detail_type == PJMEDIA_FORMAT_DETAIL_VIDEO &&
+ param->dir == PJMEDIA_DIR_CAPTURE,
+ PJ_EINVAL);
+
+ vfi = pjmedia_get_video_format_info(NULL, param->fmt.id);
+ if (!vfi)
+ return PJMEDIA_EVID_BADFORMAT;
+
+ /* Create and Initialize stream descriptor */
+ pool = pj_pool_create(af->pf, "avf-dev", 4000, 4000, NULL);
+ PJ_ASSERT_RETURN(pool != NULL, PJ_ENOMEM);
+
+ strm = PJ_POOL_ZALLOC_T(pool, struct avf_stream);
+ pj_memcpy(&strm->param, param, sizeof(*param));
+ strm->pool = pool;
+ pj_memcpy(&strm->vid_cb, cb, sizeof(*cb));
+ strm->user_data = user_data;
+ strm->af = af;
+
+ vfd = pjmedia_format_get_video_format_detail(&strm->param.fmt, PJ_TRUE);
+ pj_memcpy(&strm->size, &vfd->size, sizeof(vfd->size));
+ pj_assert(vfd->fps.num);
+ strm->cap_ts_inc = PJMEDIA_SPF2(strm->param.clock_rate, &vfd->fps, 1);
+
+ /* Create dispatch queue */
+ strm->video_ops_queue = dispatch_queue_create("AVF Video Ops", DISPATCH_QUEUE_SERIAL);
+
+ /* Create capture stream here */
+ strm->status = PJ_SUCCESS;
+ run_func_on_video_queue(strm, init_avf_stream);
+ status = strm->status;
+ if (status != PJ_SUCCESS) {
+ /* avf_stream_destroy() releases the dispatch queue and the pool */
+ avf_stream_destroy((pjmedia_vid_dev_stream *)strm);
+ return status;
+ }
+
+ /* Update param as output */
+ param->fmt = strm->param.fmt;
+
+ /* Done */
+ strm->base.op = &stream_op;
+ *p_vid_strm = &strm->base;
+
+ return PJ_SUCCESS;
+}
+
+/* API: Get stream info. */
+static pj_status_t avf_stream_get_param(pjmedia_vid_dev_stream *s,
+ pjmedia_vid_dev_param *pi)
+{
+ struct avf_stream *strm = (struct avf_stream*)s;
+ PJ_ASSERT_RETURN(strm && pi, PJ_EINVAL);
+
+ pj_memcpy(pi, &strm->param, sizeof(*pi));
+
+ return PJ_SUCCESS;
+}
+
+/* API: get capability */
+static pj_status_t avf_stream_get_cap(pjmedia_vid_dev_stream *s,
+ pjmedia_vid_dev_cap cap,
+ void *pval)
+{
+ struct avf_stream *strm = (struct avf_stream*)s;
+
+ PJ_UNUSED_ARG(strm);
+ PJ_UNUSED_ARG(cap);
+ PJ_UNUSED_ARG(pval);
+
+ PJ_ASSERT_RETURN(s && pval, PJ_EINVAL);
+
+ return PJMEDIA_EVID_INVCAP;
+}
+
+/* API: set capability */
+static pj_status_t avf_stream_set_cap(pjmedia_vid_dev_stream *s,
+ pjmedia_vid_dev_cap cap,
+ const void *pval)
+{
+ struct avf_stream *strm = (struct avf_stream*)s;
+
+ PJ_UNUSED_ARG(strm);
+ PJ_UNUSED_ARG(cap);
+ PJ_UNUSED_ARG(pval);
+
+ PJ_ASSERT_RETURN(s && pval, PJ_EINVAL);
+
+ return PJMEDIA_EVID_INVCAP;
+}
+
+static void start_avf(struct avf_stream *strm)
+{
+ [strm->cap_session startRunning];
+}
+
+static void stop_avf(struct avf_stream *strm)
+{
+ [strm->cap_session stopRunning];
+}
+
+/* API: Start stream. */
+static pj_status_t avf_stream_start(pjmedia_vid_dev_stream *strm)
+{
+ struct avf_stream *stream = (struct avf_stream*)strm;
+
+ PJ_LOG(4, (THIS_FILE, "Starting avf video stream"));
+
+ if (stream->cap_session) {
+ run_func_on_video_queue(stream, start_avf);
+ if (![stream->cap_session isRunning])
+ return PJMEDIA_EVID_NOTREADY;
+ stream->is_running = PJ_TRUE;
+ }
+
+ return PJ_SUCCESS;
+}
+
+/* API: Stop stream. */
+static pj_status_t avf_stream_stop(pjmedia_vid_dev_stream *strm)
+{
+ struct avf_stream *stream = (struct avf_stream*)strm;
+
+ PJ_LOG(4, (THIS_FILE, "Stopping avf video stream"));
+
+ if (stream->cap_session && [stream->cap_session isRunning]) {
+ int i;
+ stream->cap_exited = PJ_FALSE;
+ run_func_on_video_queue(stream, stop_avf);
+ stream->is_running = PJ_FALSE;
+ for (i = 50; i >= 0 && !stream->cap_exited; i--) {
+ pj_thread_sleep(10);
+ }
+ }
+
+ return PJ_SUCCESS;
+}
+
+static void destroy_avf(struct avf_stream *strm)
+{
+ if (strm->cap_session) {
+ [strm->cap_session removeInput:strm->dev_input];
+ [strm->cap_session removeOutput:strm->video_output];
+ [strm->cap_session release];
+ strm->cap_session = NULL;
+ }
+
+ if (strm->delegate) {
+ [strm->delegate release];
+ strm->delegate = NULL;
+ }
+
+ if (strm->dev_input) {
+ strm->dev_input = NULL;
+ }
+ if (strm->video_output) {
+ strm->video_output = NULL;
+ }
+}
+
+/* API: Destroy stream. */
+static pj_status_t avf_stream_destroy(pjmedia_vid_dev_stream *strm)
+{
+ struct avf_stream *stream = (struct avf_stream*)strm;
+
+ PJ_ASSERT_RETURN(stream != NULL, PJ_EINVAL);
+
+ avf_stream_stop(strm);
+ run_func_on_video_queue(stream, destroy_avf);
+
+ dispatch_release(stream->video_ops_queue);
+ pj_pool_release(stream->pool);
+
+ return PJ_SUCCESS;
+}
+
+#endif /* PJMEDIA_VIDEO_DEV_HAS_AVF */
diff -ruN pjproject-2.10/pjmedia/src/pjmedia-videodev/dshow_dev.c pjsip/pjmedia/src/pjmedia-videodev/dshow_dev.c
--- pjproject-2.10/pjmedia/src/pjmedia-videodev/dshow_dev.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia-videodev/dshow_dev.c 2021-02-06 22:51:16.641714862 +0100
@@ -20,30 +20,24 @@
#include <pj/assert.h>
#include <pj/log.h>
#include <pj/os.h>
-#include <pj/unicode.h>
#if defined(PJMEDIA_HAS_VIDEO) && PJMEDIA_HAS_VIDEO != 0 && \
defined(PJMEDIA_VIDEO_DEV_HAS_DSHOW) && PJMEDIA_VIDEO_DEV_HAS_DSHOW != 0
-
-#ifdef _MSC_VER
-# pragma warning(push, 3)
-#endif
-
#include <windows.h>
#define COBJMACROS
#include <DShow.h>
#include <wmsdkidl.h>
+#include <OleAuto.h>
-#ifdef _MSC_VER
-# pragma warning(pop)
+#ifndef DIBSIZE
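+/* Fallback definitions of the DIBSIZE helper macros, normally provided
+ * by the SDK's amvideo.h, for build setups where they are missing. */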
+# define WIDTHBYTES(BITS) ((DWORD)(((BITS)+31) & (~31)) / 8)
+# define DIBWIDTHBYTES(BI) (DWORD)WIDTHBYTES((DWORD)(BI).biWidth * (DWORD)(BI).biBitCount)
+# define _DIBSIZE(BI) (DIBWIDTHBYTES(BI) * (DWORD)(BI).biHeight)
+# define DIBSIZE(BI) ((BI).biHeight < 0 ? (-1)*(_DIBSIZE(BI)) : _DIBSIZE(BI))
#endif
-#pragma comment(lib, "Strmiids.lib")
-#pragma comment(lib, "Rpcrt4.lib")
-#pragma comment(lib, "Quartz.lib")
-
#define THIS_FILE "dshow_dev.c"
#define DEFAULT_CLOCK_RATE 90000
#define DEFAULT_WIDTH 640
@@ -57,10 +51,6 @@
typedef struct NullRenderer NullRenderer;
IBaseFilter* NullRenderer_Create(input_callback input_cb,
void *user_data);
-typedef struct SourceFilter SourceFilter;
-IBaseFilter* SourceFilter_Create(SourceFilter **pSrc);
-HRESULT SourceFilter_Deliver(SourceFilter *src, void *buf, long size);
-void SourceFilter_SetMediaType(SourceFilter *src, AM_MEDIA_TYPE *pmt);
typedef struct dshow_fmt_info
{
@@ -121,7 +111,6 @@
{
IFilterGraph *filter_graph;
IMediaFilter *media_filter;
- SourceFilter *csource_filter;
IBaseFilter *source_filter;
IBaseFilter *rend_filter;
AM_MEDIA_TYPE *mediatype;
@@ -160,8 +149,6 @@
pjmedia_vid_dev_cap cap,
const void *value);
static pj_status_t dshow_stream_start(pjmedia_vid_dev_stream *strm);
-static pj_status_t dshow_stream_put_frame(pjmedia_vid_dev_stream *strm,
- const pjmedia_frame *frame);
static pj_status_t dshow_stream_stop(pjmedia_vid_dev_stream *strm);
static pj_status_t dshow_stream_destroy(pjmedia_vid_dev_stream *strm);
@@ -184,7 +171,7 @@
&dshow_stream_set_cap,
&dshow_stream_start,
NULL,
- &dshow_stream_put_frame,
+ NULL,
&dshow_stream_stop,
&dshow_stream_destroy
};
@@ -213,19 +200,12 @@
/* API: init factory */
static pj_status_t dshow_factory_init(pjmedia_vid_dev_factory *f)
{
- HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
- if (hr == RPC_E_CHANGED_MODE) {
- /* When using apartment mode, Dshow object would not be accessible from
- * other thread. Take this into consideration when implementing native
- * renderer using Dshow.
- */
- hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
- if (FAILED(hr)) {
- PJ_LOG(4,(THIS_FILE, "Failed initializing DShow: "
- "COM library already initialized with "
- "incompatible concurrency model"));
- return PJMEDIA_EVID_INIT;
- }
+ HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
+ if (hr == RPC_E_CHANGED_MODE) {
+ PJ_LOG(4,(THIS_FILE, "Failed initializing DShow: "
+ "COM library already initialized with "
+ "incompatible concurrency model"));
+ return PJMEDIA_EVID_INIT;
}
return dshow_factory_refresh(f);
@@ -459,13 +439,21 @@
if (SUCCEEDED(hr) && var_name.bstrVal) {
WCHAR *wszDisplayName = NULL;
IBaseFilter *filter;
+ pj_ssize_t len;
ddi = &df->dev_info[df->dev_count++];
pj_bzero(ddi, sizeof(*ddi));
- pj_unicode_to_ansi(var_name.bstrVal,
- wcslen(var_name.bstrVal),
- ddi->info.name,
- sizeof(ddi->info.name));
+
+ len = wcslen(var_name.bstrVal);
+ len = WideCharToMultiByte(CP_ACP,
+ 0,
+ var_name.bstrVal,
+ (int)len,
+ ddi->info.name,
+ sizeof(ddi->info.name),
+ NULL,
+ NULL);
+ ddi->info.name[len] = '\0';
hr = IMoniker_GetDisplayName(moniker, NULL, NULL,
&wszDisplayName);
@@ -502,26 +490,6 @@
ICreateDevEnum_Release(dev_enum);
}
-#if HAS_VMR
- ddi = &df->dev_info[df->dev_count++];
- pj_bzero(ddi, sizeof(*ddi));
- pj_ansi_strncpy(ddi->info.name, "Video Mixing Renderer",
- sizeof(ddi->info.name));
- ddi->info.name[sizeof(ddi->info.name)-1] = '\0';
- pj_ansi_strncpy(ddi->info.driver, "dshow", sizeof(ddi->info.driver));
- ddi->info.driver[sizeof(ddi->info.driver)-1] = '\0';
- ddi->info.dir = PJMEDIA_DIR_RENDER;
- ddi->info.has_callback = PJ_FALSE;
- ddi->info.caps = PJMEDIA_VID_DEV_CAP_FORMAT;
-// TODO:
-// ddi->info.caps |= PJMEDIA_VID_DEV_CAP_OUTPUT_WINDOW;
-
- ddi->info.fmt_cnt = 1;
- pjmedia_format_init_video(&ddi->info.fmt[0], dshow_fmts[0].pjmedia_format,
- DEFAULT_WIDTH, DEFAULT_HEIGHT,
- DEFAULT_FPS, 1);
-#endif
-
PJ_LOG(4, (THIS_FILE, "DShow has %d devices:",
df->dev_count));
for (c = 0; c < df->dev_count; ++c) {
@@ -574,10 +542,6 @@
param->dir = PJMEDIA_DIR_CAPTURE;
param->cap_id = index;
param->rend_id = PJMEDIA_VID_INVALID_DEV;
- } else if (di->info.dir & PJMEDIA_DIR_RENDER) {
- param->dir = PJMEDIA_DIR_RENDER;
- param->rend_id = index;
- param->cap_id = PJMEDIA_VID_INVALID_DEV;
} else {
return PJMEDIA_EVID_INVDEV;
}
@@ -645,26 +609,6 @@
(*strm->vid_cb.capture_cb)(&strm->base, strm->user_data, &frame);
}
-/* API: Put frame from stream */
-static pj_status_t dshow_stream_put_frame(pjmedia_vid_dev_stream *strm,
- const pjmedia_frame *frame)
-{
- struct dshow_stream *stream = (struct dshow_stream*)strm;
- HRESULT hr;
-
- if (stream->quit_flag) {
- stream->rend_thread_exited = PJ_TRUE;
- return PJ_SUCCESS;
- }
-
- hr = SourceFilter_Deliver(stream->dgraph.csource_filter,
- frame->buf, (long)frame->size);
- if (FAILED(hr))
- return hr;
-
- return PJ_SUCCESS;
-}
-
static dshow_fmt_info* get_dshow_format_info(pjmedia_format_id id)
{
unsigned i;
@@ -689,16 +633,24 @@
IEnumPins *pEnum;
IPin *srcpin = NULL;
IPin *sinkpin = NULL;
- AM_MEDIA_TYPE *mediatype= NULL, mtype;
+ AM_MEDIA_TYPE *mediatype = NULL;
VIDEOINFOHEADER *video_info, *vi = NULL;
pjmedia_video_format_detail *vfd;
const pjmedia_video_format_info *vfi;
+ PJ_ASSERT_RETURN(dir == PJMEDIA_DIR_CAPTURE, PJ_EINVAL);
+
vfi = pjmedia_get_video_format_info(pjmedia_video_format_mgr_instance(),
strm->param.fmt.id);
if (!vfi)
return PJMEDIA_EVID_BADFORMAT;
+ hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
+ if (FAILED(hr)) {
+ PJ_LOG(4,(THIS_FILE, "Error: CoInitializeEx"));
+ goto on_error;
+ }
+
hr = CoCreateInstance(&CLSID_FilterGraph, NULL, CLSCTX_INPROC,
&IID_IFilterGraph, (LPVOID *)&graph->filter_graph);
if (FAILED(hr)) {
@@ -711,14 +663,10 @@
goto on_error;
}
- if (dir == PJMEDIA_DIR_CAPTURE) {
hr = get_cap_device(df, id, &graph->source_filter);
if (FAILED(hr)) {
goto on_error;
}
- } else {
- graph->source_filter = SourceFilter_Create(&graph->csource_filter);
- }
hr = IFilterGraph_AddFilter(graph->filter_graph, graph->source_filter,
L"capture");
@@ -726,16 +674,7 @@
goto on_error;
}
- if (dir == PJMEDIA_DIR_CAPTURE) {
- graph->rend_filter = NullRenderer_Create(input_cb, strm);
- } else {
- hr = CoCreateInstance(&CLSID_VideoMixingRenderer, NULL,
- CLSCTX_INPROC, &IID_IBaseFilter,
- (LPVOID *)&graph->rend_filter);
- if (FAILED (hr)) {
- goto on_error;
- }
- }
+ graph->rend_filter = NullRenderer_Create(input_cb, strm);
IBaseFilter_EnumPins(graph->rend_filter, &pEnum);
if (SUCCEEDED(hr)) {
@@ -763,34 +702,6 @@
(use_def_size? 0: vfd->size.h), &srcpin, NULL);
graph->mediatype = mediatype;
- if (srcpin && dir == PJMEDIA_DIR_RENDER) {
- mediatype = graph->mediatype = &mtype;
-
- memset (mediatype, 0, sizeof(AM_MEDIA_TYPE));
- mediatype->majortype = MEDIATYPE_Video;
- mediatype->subtype = *(get_dshow_format_info(strm->param.fmt.id)->
- dshow_format);
- mediatype->bFixedSizeSamples = TRUE;
- mediatype->bTemporalCompression = FALSE;
-
- vi = (VIDEOINFOHEADER *)
- CoTaskMemAlloc(sizeof(VIDEOINFOHEADER));
- memset (vi, 0, sizeof(VIDEOINFOHEADER));
- mediatype->formattype = FORMAT_VideoInfo;
- mediatype->cbFormat = sizeof(VIDEOINFOHEADER);
- mediatype->pbFormat = (BYTE *)vi;
-
- vi->rcSource.bottom = vfd->size.h;
- vi->rcSource.right = vfd->size.w;
- vi->rcTarget.bottom = vfd->size.h;
- vi->rcTarget.right = vfd->size.w;
-
- vi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
- vi->bmiHeader.biPlanes = 1;
- vi->bmiHeader.biBitCount = vfi->bpp;
- vi->bmiHeader.biCompression = strm->param.fmt.id;
- }
-
if (!srcpin || !sinkpin || !mediatype) {
hr = VFW_E_TYPE_NOT_ACCEPTED;
goto on_error;
@@ -809,9 +720,6 @@
}
video_info->bmiHeader.biSizeImage = DIBSIZE(video_info->bmiHeader);
mediatype->lSampleSize = DIBSIZE(video_info->bmiHeader);
- if (graph->csource_filter)
- SourceFilter_SetMediaType(graph->csource_filter,
- mediatype);
hr = IFilterGraph_AddFilter(graph->filter_graph,
(IBaseFilter *)graph->rend_filter,
@@ -893,9 +801,9 @@
pj_pool_t *pool;
struct dshow_stream *strm;
pj_status_t status;
+ const pjmedia_video_format_detail *vfd;
- PJ_ASSERT_RETURN(param->dir == PJMEDIA_DIR_CAPTURE ||
- param->dir == PJMEDIA_DIR_RENDER, PJ_EINVAL);
+ PJ_ASSERT_RETURN(param->dir == PJMEDIA_DIR_CAPTURE, PJ_EINVAL);
if (!get_dshow_format_info(param->fmt.id))
return PJMEDIA_EVID_BADFORMAT;
@@ -910,9 +818,6 @@
pj_memcpy(&strm->vid_cb, cb, sizeof(*cb));
strm->user_data = user_data;
- if (param->dir & PJMEDIA_DIR_CAPTURE) {
- const pjmedia_video_format_detail *vfd;
-
/* Create capture stream here */
status = create_filter_graph(PJMEDIA_DIR_CAPTURE, param->cap_id,
PJ_FALSE, PJ_FALSE, df, strm,
@@ -944,21 +849,6 @@
vfd = pjmedia_format_get_video_format_detail(&param->fmt, PJ_TRUE);
strm->cap_ts_inc = PJMEDIA_SPF2(param->clock_rate, &vfd->fps, 1);
- } else if (param->dir & PJMEDIA_DIR_RENDER) {
- /* Create render stream here */
- status = create_filter_graph(PJMEDIA_DIR_RENDER, param->rend_id,
- PJ_FALSE, PJ_FALSE, df, strm,
- &strm->dgraph);
- if (status != PJ_SUCCESS)
- goto on_error;
- }
-
- /* Apply the remaining settings */
- if (param->flags & PJMEDIA_VID_DEV_CAP_OUTPUT_WINDOW) {
- dshow_stream_set_cap(&strm->base,
- PJMEDIA_VID_DEV_CAP_OUTPUT_WINDOW,
- &param->window);
- }
/* Done */
strm->base.op = &stream_op;
diff -ruN pjproject-2.10/pjmedia/src/pjmedia-videodev/dshow_filter.cpp pjsip/pjmedia/src/pjmedia-videodev/dshow_filter.cpp
--- pjproject-2.10/pjmedia/src/pjmedia-videodev/dshow_filter.cpp 1970-01-01 01:00:00.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia-videodev/dshow_filter.cpp 2021-02-06 18:47:09.030177557 +0100
@@ -0,0 +1,83 @@
+/* $Id: dshowclasses.cpp 4062 2012-04-19 06:36:57Z ming $ */
+/*
+ * Copyright (C) 2008-2011 Teluu Inc. (http://www.teluu.com)
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+
+#include <pjmedia-videodev/config.h>
+
+
+#if defined(PJMEDIA_VIDEO_DEV_HAS_DSHOW) && PJMEDIA_VIDEO_DEV_HAS_DSHOW != 0
+
+#include <DShow.h>
+#include <assert.h>
+#include <streams.h>
+
+typedef void (*input_callback)(void *user_data, IMediaSample *pMediaSample);
+
+const GUID CLSID_NullRenderer = {0xF9168C5E, 0xCEB2, 0x4FAA, {0xB6, 0xBF,
+ 0x32, 0x9B, 0xF3, 0x9F, 0xA1, 0xE4}};
+
+class NullRenderer: public CBaseRenderer
+{
+public:
+ NullRenderer(HRESULT *pHr);
+ virtual ~NullRenderer();
+
+ virtual HRESULT CheckMediaType(const CMediaType *pmt);
+ virtual HRESULT DoRenderSample(IMediaSample *pMediaSample);
+
+ input_callback input_cb;
+ void *user_data;
+};
+
+NullRenderer::NullRenderer(HRESULT *pHr): CBaseRenderer(CLSID_NullRenderer,
+ "NullRenderer",
+ NULL, pHr)
+{
+ input_cb = NULL;
+}
+
+NullRenderer::~NullRenderer()
+{
+}
+
+HRESULT NullRenderer::CheckMediaType(const CMediaType *pmt)
+{
+ return S_OK;
+}
+
+HRESULT NullRenderer::DoRenderSample(IMediaSample *pMediaSample)
+{
+ if (input_cb)
+ input_cb(user_data, pMediaSample);
+
+ return S_OK;
+}
+
+extern "C" IBaseFilter* NullRenderer_Create(input_callback input_cb,
+ void *user_data)
+{
+ HRESULT hr;
+ NullRenderer *renderer = new NullRenderer(&hr);
+ renderer->AddRef();
+ renderer->input_cb = input_cb;
+ renderer->user_data = user_data;
+
+ return (CBaseFilter *)renderer;
+}
+
+#endif /* PJMEDIA_VIDEO_DEV_HAS_DSHOW */
diff -ruN pjproject-2.10/pjmedia/src/pjmedia-videodev/fb_dev.c pjsip/pjmedia/src/pjmedia-videodev/fb_dev.c
--- pjproject-2.10/pjmedia/src/pjmedia-videodev/fb_dev.c 1970-01-01 01:00:00.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia-videodev/fb_dev.c 2021-02-06 18:47:25.010632996 +0100
@@ -0,0 +1,459 @@
+/*
+ * Copyright (C) 2014-present AG Projects
+ * Copyright (C) 2008-2011 Teluu Inc. (http://www.teluu.com)
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+#include <pjmedia-videodev/videodev_imp.h>
+#include <pj/assert.h>
+#include <pj/log.h>
+#include <pj/os.h>
+
+#if defined(PJMEDIA_HAS_VIDEO) && PJMEDIA_HAS_VIDEO != 0 && \
+ defined(PJMEDIA_VIDEO_DEV_HAS_FB) && PJMEDIA_VIDEO_DEV_HAS_FB != 0
+
+#include <pjmedia-videodev/fb_dev.h>
+
+#define THIS_FILE "fb_dev.c"
+#define DEFAULT_CLOCK_RATE 90000
+#define DEFAULT_WIDTH 640
+#define DEFAULT_HEIGHT 480
+#define DEFAULT_FPS 25
+
+
+/* Supported formats */
+#if defined(PJ_DARWINOS) && PJ_DARWINOS!=0
+static pjmedia_format_id fb_fmts[] = {PJMEDIA_FORMAT_ARGB};
+#else
+static pjmedia_format_id fb_fmts[] = {PJMEDIA_FORMAT_BGRA};
+#endif
+
+
+/* fb device info */
+struct fb_dev_info
+{
+ pjmedia_vid_dev_info info;
+};
+
+
+/* factory */
+struct fb_factory
+{
+ pjmedia_vid_dev_factory base;
+ pj_pool_t *pool;
+ pj_pool_factory *pf;
+
+ unsigned dev_count;
+ struct fb_dev_info *dev_info;
+};
+
+
+/* Video stream. */
+struct fb_stream
+{
+ pjmedia_vid_dev_stream base; /**< Base stream */
+ pjmedia_vid_dev_param param; /**< Settings */
+ pj_pool_t *pool; /**< Memory pool. */
+
+ pjmedia_vid_dev_cb vid_cb; /**< Stream callback. */
+ void *user_data; /**< Application data. */
+
+ struct fb_factory *ff;
+ pj_bool_t is_running;
+ pjmedia_rect_size vid_size;
+
+ struct {
+ pjmedia_vid_dev_fb_frame_cb cb;
+ void *user_data;
+ } frame_handler;
+};
+
+
+/* Prototypes */
+static pj_status_t fb_factory_init(pjmedia_vid_dev_factory *f);
+static pj_status_t fb_factory_destroy(pjmedia_vid_dev_factory *f);
+static pj_status_t fb_factory_refresh(pjmedia_vid_dev_factory *f);
+static unsigned fb_factory_get_dev_count(pjmedia_vid_dev_factory *f);
+static pj_status_t fb_factory_get_dev_info(pjmedia_vid_dev_factory *f,
+ unsigned index,
+ pjmedia_vid_dev_info *info);
+static pj_status_t fb_factory_default_param(pj_pool_t *pool,
+ pjmedia_vid_dev_factory *f,
+ unsigned index,
+ pjmedia_vid_dev_param *param);
+static pj_status_t fb_factory_create_stream(pjmedia_vid_dev_factory *f,
+ pjmedia_vid_dev_param *param,
+ const pjmedia_vid_dev_cb *cb,
+ void *user_data,
+ pjmedia_vid_dev_stream **p_vid_strm);
+
+static pj_status_t fb_stream_get_param(pjmedia_vid_dev_stream *strm,
+ pjmedia_vid_dev_param *param);
+static pj_status_t fb_stream_get_cap(pjmedia_vid_dev_stream *strm,
+ pjmedia_vid_dev_cap cap,
+ void *value);
+static pj_status_t fb_stream_set_cap(pjmedia_vid_dev_stream *strm,
+ pjmedia_vid_dev_cap cap,
+ const void *value);
+static pj_status_t fb_stream_put_frame(pjmedia_vid_dev_stream *strm,
+ const pjmedia_frame *frame);
+static pj_status_t fb_stream_start(pjmedia_vid_dev_stream *strm);
+static pj_status_t fb_stream_stop(pjmedia_vid_dev_stream *strm);
+static pj_status_t fb_stream_destroy(pjmedia_vid_dev_stream *strm);
+
+
+/* Operations */
+static pjmedia_vid_dev_factory_op factory_op =
+{
+ &fb_factory_init,
+ &fb_factory_destroy,
+ &fb_factory_get_dev_count,
+ &fb_factory_get_dev_info,
+ &fb_factory_default_param,
+ &fb_factory_create_stream,
+ &fb_factory_refresh
+};
+
+static pjmedia_vid_dev_stream_op stream_op =
+{
+ &fb_stream_get_param,
+ &fb_stream_get_cap,
+ &fb_stream_set_cap,
+ &fb_stream_start,
+ NULL,
+ &fb_stream_put_frame,
+ &fb_stream_stop,
+ &fb_stream_destroy
+};
+
+
+/****************************************************************************
+ * Factory operations
+ */
+/*
+ * Init FB video driver.
+ */
+pjmedia_vid_dev_factory* pjmedia_fb_factory(pj_pool_factory *pf)
+{
+ struct fb_factory *f;
+ pj_pool_t *pool;
+
+ pool = pj_pool_create(pf, "fb video", 1000, 1000, NULL);
+ f = PJ_POOL_ZALLOC_T(pool, struct fb_factory);
+ f->pf = pf;
+ f->pool = pool;
+ f->base.op = &factory_op;
+
+ return &f->base;
+}
+
+
+/* API: init factory */
+static pj_status_t fb_factory_init(pjmedia_vid_dev_factory *f)
+{
+ struct fb_factory *ff = (struct fb_factory*)f;
+ struct fb_dev_info *di;
+ unsigned i, l;
+
+ /* Initialize input and output devices here */
+ ff->dev_info = (struct fb_dev_info*)
+ pj_pool_calloc(ff->pool, 1, sizeof(struct fb_dev_info));
+
+ ff->dev_count = 0;
+ di = &ff->dev_info[ff->dev_count++];
+ pj_bzero(di, sizeof(*di));
+ strcpy(di->info.name, "FrameBuffer renderer");
+ strcpy(di->info.driver, "FrameBuffer");
+ di->info.dir = PJMEDIA_DIR_RENDER;
+ di->info.has_callback = PJ_FALSE;
+ di->info.caps = 0;
+
+ for (i = 0; i < ff->dev_count; i++) {
+ di = &ff->dev_info[i];
+ di->info.fmt_cnt = PJ_ARRAY_SIZE(fb_fmts);
+ di->info.caps |= PJMEDIA_VID_DEV_CAP_FORMAT;
+
+ for (l = 0; l < PJ_ARRAY_SIZE(fb_fmts); l++) {
+ pjmedia_format *fmt = &di->info.fmt[l];
+ pjmedia_format_init_video(fmt,
+ fb_fmts[l],
+ DEFAULT_WIDTH,
+ DEFAULT_HEIGHT,
+ DEFAULT_FPS, 1);
+ }
+ }
+
+ PJ_LOG(4, (THIS_FILE, "FrameBuffer initialized"));
+
+ return PJ_SUCCESS;
+}
+
+
+/* API: destroy factory */
+static pj_status_t fb_factory_destroy(pjmedia_vid_dev_factory *f)
+{
+ struct fb_factory *ff = (struct fb_factory*)f;
+ pj_pool_t *pool = ff->pool;
+
+ ff->pool = NULL;
+ pj_pool_release(pool);
+
+ return PJ_SUCCESS;
+}
+
+
+/* API: refresh the list of devices */
+static pj_status_t fb_factory_refresh(pjmedia_vid_dev_factory *f)
+{
+ PJ_UNUSED_ARG(f);
+ return PJ_SUCCESS;
+}
+
+
+/* API: get number of devices */
+static unsigned fb_factory_get_dev_count(pjmedia_vid_dev_factory *f)
+{
+ struct fb_factory *ff = (struct fb_factory*)f;
+ return ff->dev_count;
+}
+
+
+/* API: get device info */
+static pj_status_t fb_factory_get_dev_info(pjmedia_vid_dev_factory *f,
+ unsigned index,
+ pjmedia_vid_dev_info *info)
+{
+ struct fb_factory *ff = (struct fb_factory*)f;
+
+ PJ_ASSERT_RETURN(index < ff->dev_count, PJMEDIA_EVID_INVDEV);
+ pj_memcpy(info, &ff->dev_info[index].info, sizeof(*info));
+
+ return PJ_SUCCESS;
+}
+
+
+/* API: create default device parameter */
+static pj_status_t fb_factory_default_param(pj_pool_t *pool,
+ pjmedia_vid_dev_factory *f,
+ unsigned index,
+ pjmedia_vid_dev_param *param)
+{
+ struct fb_factory *ff = (struct fb_factory*)f;
+ struct fb_dev_info *di = &ff->dev_info[index];
+
+ PJ_ASSERT_RETURN(index < ff->dev_count, PJMEDIA_EVID_INVDEV);
+ PJ_UNUSED_ARG(pool);
+
+ pj_bzero(param, sizeof(*param));
+ param->dir = PJMEDIA_DIR_RENDER;
+ param->rend_id = index;
+ param->cap_id = PJMEDIA_VID_INVALID_DEV;
+
+ /* Set the device capabilities here */
+ param->flags = PJMEDIA_VID_DEV_CAP_FORMAT;
+ param->clock_rate = DEFAULT_CLOCK_RATE;
+ pj_memcpy(&param->fmt, &di->info.fmt[0], sizeof(param->fmt));
+
+ return PJ_SUCCESS;
+}
+
+
+/* API: Put frame from stream */
+static pj_status_t fb_stream_put_frame(pjmedia_vid_dev_stream *strm,
+ const pjmedia_frame *frame)
+{
+ struct fb_stream *stream = (struct fb_stream*)strm;
+
+ if (!stream->is_running)
+ return PJ_EINVALIDOP;
+
+ if (frame->size==0 || frame->buf==NULL)
+ return PJ_SUCCESS;
+
+ if (stream->frame_handler.cb)
+ stream->frame_handler.cb(frame, stream->vid_size, stream->frame_handler.user_data);
+
+ return PJ_SUCCESS;
+}
+
+/* API: create stream */
+static pj_status_t fb_factory_create_stream(pjmedia_vid_dev_factory *f,
+ pjmedia_vid_dev_param *param,
+ const pjmedia_vid_dev_cb *cb,
+ void *user_data,
+ pjmedia_vid_dev_stream **p_vid_strm)
+{
+ struct fb_factory *ff = (struct fb_factory*)f;
+ pj_pool_t *pool;
+ pj_status_t status;
+ struct fb_stream *strm;
+ const pjmedia_video_format_info *vfi;
+
+ PJ_ASSERT_RETURN(f && param && p_vid_strm, PJ_EINVAL);
+ PJ_ASSERT_RETURN(param->dir == PJMEDIA_DIR_RENDER, PJ_EINVAL);
+ PJ_ASSERT_RETURN(param->fmt.type == PJMEDIA_TYPE_VIDEO &&
+ param->fmt.detail_type == PJMEDIA_FORMAT_DETAIL_VIDEO &&
+ param->dir == PJMEDIA_DIR_RENDER,
+ PJ_EINVAL);
+
+ vfi = pjmedia_get_video_format_info(NULL, param->fmt.id);
+ if (!vfi)
+ return PJMEDIA_EVID_BADFORMAT;
+
+ /* Create and Initialize stream descriptor */
+ pool = pj_pool_create(ff->pf, "fb-dev", 1000, 1000, NULL);
+ PJ_ASSERT_RETURN(pool != NULL, PJ_ENOMEM);
+
+ strm = PJ_POOL_ZALLOC_T(pool, struct fb_stream);
+ pj_memcpy(&strm->param, param, sizeof(*param));
+ strm->pool = pool;
+ strm->ff = ff;
+ pj_memcpy(&strm->vid_cb, cb, sizeof(*cb));
+ strm->user_data = user_data;
+
+ status = fb_stream_set_cap(&strm->base, PJMEDIA_VID_DEV_CAP_FORMAT, &param->fmt);
+ if (status != PJ_SUCCESS) {
+ fb_stream_destroy((pjmedia_vid_dev_stream *)strm);
+ return status;
+ }
+
+ /* Done */
+ strm->base.op = &stream_op;
+ *p_vid_strm = &strm->base;
+
+ return PJ_SUCCESS;
+}
+
+
+/* API: Get stream info. */
+static pj_status_t fb_stream_get_param(pjmedia_vid_dev_stream *s,
+ pjmedia_vid_dev_param *pi)
+{
+ struct fb_stream *strm = (struct fb_stream*)s;
+ PJ_ASSERT_RETURN(strm && pi, PJ_EINVAL);
+
+ pj_memcpy(pi, &strm->param, sizeof(*pi));
+
+ return PJ_SUCCESS;
+}
+
+
+/* API: get capability */
+static pj_status_t fb_stream_get_cap(pjmedia_vid_dev_stream *s,
+ pjmedia_vid_dev_cap cap,
+ void *pval)
+{
+ struct fb_stream *strm = (struct fb_stream*)s;
+
+ PJ_UNUSED_ARG(strm);
+ PJ_UNUSED_ARG(cap);
+ PJ_ASSERT_RETURN(s && pval, PJ_EINVAL);
+
+ return PJMEDIA_EVID_INVCAP;
+}
+
+
+/* API: set capability */
+static pj_status_t fb_stream_set_cap(pjmedia_vid_dev_stream *s,
+ pjmedia_vid_dev_cap cap,
+ const void *pval)
+{
+ struct fb_stream *strm = (struct fb_stream*)s;
+
+ PJ_UNUSED_ARG(strm);
+ PJ_ASSERT_RETURN(s && pval, PJ_EINVAL);
+
+ if (cap == PJMEDIA_VID_DEV_CAP_FORMAT) {
+ const pjmedia_video_format_info *vfi;
+ pjmedia_video_format_detail *vfd;
+ pjmedia_format *fmt = (pjmedia_format *)pval;
+
+ vfi = pjmedia_get_video_format_info(pjmedia_video_format_mgr_instance(), fmt->id);
+ if (!vfi)
+ return PJMEDIA_EVID_BADFORMAT;
+
+ pjmedia_format_copy(&strm->param.fmt, fmt);
+
+ vfd = pjmedia_format_get_video_format_detail(fmt, PJ_TRUE);
+ pj_memcpy(&strm->vid_size, &vfd->size, sizeof(vfd->size));
+ if (strm->param.disp_size.w == 0 || strm->param.disp_size.h == 0)
+ pj_memcpy(&strm->param.disp_size, &vfd->size, sizeof(vfd->size));
+
+ return PJ_SUCCESS;
+ }
+
+ return PJMEDIA_EVID_INVCAP;
+}
+
+
+/* API: Start stream. */
+static pj_status_t fb_stream_start(pjmedia_vid_dev_stream *strm)
+{
+ struct fb_stream *stream = (struct fb_stream*)strm;
+ PJ_UNUSED_ARG(strm);
+
+ PJ_LOG(4, (THIS_FILE, "Starting FB video stream"));
+ stream->is_running = PJ_TRUE;
+
+ return PJ_SUCCESS;
+}
+
+
+/* API: Stop stream. */
+static pj_status_t fb_stream_stop(pjmedia_vid_dev_stream *strm)
+{
+ struct fb_stream *stream = (struct fb_stream*)strm;
+ PJ_UNUSED_ARG(strm);
+
+ PJ_LOG(4, (THIS_FILE, "Stopping FB video stream"));
+ stream->is_running = PJ_FALSE;
+
+ return PJ_SUCCESS;
+}
+
+
+/* API: Destroy stream. */
+static pj_status_t fb_stream_destroy(pjmedia_vid_dev_stream *strm)
+{
+ struct fb_stream *stream = (struct fb_stream*)strm;
+
+ PJ_ASSERT_RETURN(stream != NULL, PJ_EINVAL);
+
+ fb_stream_stop(strm);
+ pj_pool_release(stream->pool);
+
+ return PJ_SUCCESS;
+}
+
+
+/* API: set callback for handling frames */
+pj_status_t
+pjmedia_vid_dev_fb_set_callback(pjmedia_vid_dev_stream *strm,
+ pjmedia_vid_dev_fb_frame_cb cb,
+ void *user_data)
+{
+ struct fb_stream *stream = (struct fb_stream*)strm;
+
+ PJ_ASSERT_RETURN(stream != NULL, PJ_EINVAL);
+ if (stream->is_running)
+ return PJ_EBUSY;
+
+ stream->frame_handler.cb = cb;
+ stream->frame_handler.user_data = user_data;
+
+ return PJ_SUCCESS;
+}
+
+#endif /* PJMEDIA_VIDEO_DEV_HAS_FB */
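Illustrative sketch, not part of the patch: hooking the renderer's frame callback set with pjmedia_vid_dev_fb_set_callback() above. The callback prototype is assumed from the call site in fb_stream_put_frame() (frame, frame size, user data); the real typedef lives in pjmedia-videodev/fb_dev.h.

static void on_fb_frame(const pjmedia_frame *frame,
                        pjmedia_rect_size size,
                        void *user_data)
{
    /* frame->buf holds one BGRA/ARGB image of size.w x size.h pixels;
     * hand it to the application's drawing code here. */
    PJ_UNUSED_ARG(frame);
    PJ_UNUSED_ARG(size);
    PJ_UNUSED_ARG(user_data);
}

/* Register after the render stream is created but before it is started,
 * since pjmedia_vid_dev_fb_set_callback() returns PJ_EBUSY on a running
 * stream:
 *
 *     pjmedia_vid_dev_fb_set_callback(fb_strm, &on_fb_frame, app_data);
 */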
diff -ruN pjproject-2.10/pjmedia/src/pjmedia-videodev/null_dev.c pjsip/pjmedia/src/pjmedia-videodev/null_dev.c
--- pjproject-2.10/pjmedia/src/pjmedia-videodev/null_dev.c 1970-01-01 01:00:00.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia-videodev/null_dev.c 2021-02-06 18:47:47.343269399 +0100
@@ -0,0 +1,440 @@
+/* $Id: colorbar_dev.c 4158 2012-06-06 09:56:14Z nanang $ */
+/*
+ * Copyright (C) 2008-2011 Teluu Inc. (http://www.teluu.com)
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+#include <pjmedia-videodev/videodev_imp.h>
+#include <pj/assert.h>
+#include <pj/log.h>
+#include <pj/os.h>
+
+
+#if defined(PJMEDIA_HAS_VIDEO) && PJMEDIA_HAS_VIDEO != 0 && \
+ defined(PJMEDIA_VIDEO_DEV_HAS_NULL) && \
+ PJMEDIA_VIDEO_DEV_HAS_NULL != 0
+
+
+#define THIS_FILE "null_dev.c"
+#define DEFAULT_CLOCK_RATE 90000
+#define DEFAULT_WIDTH 640
+#define DEFAULT_HEIGHT 480
+#define DEFAULT_FPS 5
+
+/* null_ device info */
+struct null_dev_info
+{
+ pjmedia_vid_dev_info info;
+};
+
+/* null_ factory */
+struct null_factory
+{
+ pjmedia_vid_dev_factory base;
+ pj_pool_t *pool;
+ pj_pool_factory *pf;
+
+ unsigned dev_count;
+ struct null_dev_info *dev_info;
+};
+
+struct null_fmt_info {
+ pjmedia_format_id fmt_id; /* Format ID */
+};
+
+/* Null video source supports */
+static struct null_fmt_info null_fmts[] =
+{
+ { PJMEDIA_FORMAT_BGRA },
+};
+
+/* Video stream. */
+struct null_stream
+{
+ pjmedia_vid_dev_stream base; /**< Base stream */
+ pjmedia_vid_dev_param param; /**< Settings */
+ pj_pool_t *pool; /**< Memory pool. */
+
+ pjmedia_vid_dev_cb vid_cb; /**< Stream callback. */
+ void *user_data; /**< Application data. */
+
+ const struct null_fmt_info *cbfi;
+ const pjmedia_video_format_info *vfi;
+ pjmedia_video_apply_fmt_param vafp;
+ pj_uint8_t *first_line[PJMEDIA_MAX_VIDEO_PLANES];
+ pj_timestamp ts;
+ unsigned ts_inc;
+};
+
+
+/* Prototypes */
+static pj_status_t null_factory_init(pjmedia_vid_dev_factory *f);
+static pj_status_t null_factory_destroy(pjmedia_vid_dev_factory *f);
+static pj_status_t null_factory_refresh(pjmedia_vid_dev_factory *f);
+static unsigned null_factory_get_dev_count(pjmedia_vid_dev_factory *f);
+static pj_status_t null_factory_get_dev_info(pjmedia_vid_dev_factory *f,
+ unsigned index,
+ pjmedia_vid_dev_info *info);
+static pj_status_t null_factory_default_param(pj_pool_t *pool,
+ pjmedia_vid_dev_factory *f,
+ unsigned index,
+ pjmedia_vid_dev_param *param);
+static pj_status_t null_factory_create_stream(
+ pjmedia_vid_dev_factory *f,
+ pjmedia_vid_dev_param *param,
+ const pjmedia_vid_dev_cb *cb,
+ void *user_data,
+ pjmedia_vid_dev_stream **p_vid_strm);
+
+static pj_status_t null_stream_get_param(pjmedia_vid_dev_stream *strm,
+ pjmedia_vid_dev_param *param);
+static pj_status_t null_stream_get_cap(pjmedia_vid_dev_stream *strm,
+ pjmedia_vid_dev_cap cap,
+ void *value);
+static pj_status_t null_stream_set_cap(pjmedia_vid_dev_stream *strm,
+ pjmedia_vid_dev_cap cap,
+ const void *value);
+static pj_status_t null_stream_get_frame(pjmedia_vid_dev_stream *strm,
+ pjmedia_frame *frame);
+static pj_status_t null_stream_start(pjmedia_vid_dev_stream *strm);
+static pj_status_t null_stream_stop(pjmedia_vid_dev_stream *strm);
+static pj_status_t null_stream_destroy(pjmedia_vid_dev_stream *strm);
+
+/* Operations */
+static pjmedia_vid_dev_factory_op factory_op =
+{
+ &null_factory_init,
+ &null_factory_destroy,
+ &null_factory_get_dev_count,
+ &null_factory_get_dev_info,
+ &null_factory_default_param,
+ &null_factory_create_stream,
+ &null_factory_refresh
+};
+
+static pjmedia_vid_dev_stream_op stream_op =
+{
+ &null_stream_get_param,
+ &null_stream_get_cap,
+ &null_stream_set_cap,
+ &null_stream_start,
+ &null_stream_get_frame,
+ NULL,
+ &null_stream_stop,
+ &null_stream_destroy
+};
+
+
+/****************************************************************************
+ * Factory operations
+ */
+/*
+ * Init null_ video driver.
+ */
+pjmedia_vid_dev_factory* pjmedia_null_factory(pj_pool_factory *pf)
+{
+ struct null_factory *f;
+ pj_pool_t *pool;
+
+ pool = pj_pool_create(pf, "null video", 512, 512, NULL);
+ f = PJ_POOL_ZALLOC_T(pool, struct null_factory);
+ f->pf = pf;
+ f->pool = pool;
+ f->base.op = &factory_op;
+
+ return &f->base;
+}
+
+
+/* API: init factory */
+static pj_status_t null_factory_init(pjmedia_vid_dev_factory *f)
+{
+ struct null_factory *cf = (struct null_factory*)f;
+ struct null_dev_info *ddi;
+ unsigned i;
+
+ cf->dev_count = 1;
+ cf->dev_info = (struct null_dev_info*)
+ pj_pool_calloc(cf->pool, cf->dev_count,
+ sizeof(struct null_dev_info));
+
+ ddi = &cf->dev_info[0];
+ pj_bzero(ddi, sizeof(*ddi));
+ pj_ansi_strncpy(ddi->info.name, "Null video device",
+ sizeof(ddi->info.name));
+ ddi->info.name[sizeof(ddi->info.name)-1] = '\0';
+ pj_ansi_strncpy(ddi->info.driver, "Null", sizeof(ddi->info.driver));
+ ddi->info.driver[sizeof(ddi->info.driver)-1] = '\0';
+ ddi->info.dir = PJMEDIA_DIR_CAPTURE;
+ ddi->info.has_callback = PJ_FALSE;
+
+ ddi->info.caps = PJMEDIA_VID_DEV_CAP_FORMAT;
+ ddi->info.fmt_cnt = sizeof(null_fmts)/sizeof(null_fmts[0]);
+ for (i = 0; i < ddi->info.fmt_cnt; i++) {
+ pjmedia_format *fmt = &ddi->info.fmt[i];
+ pjmedia_format_init_video(fmt, null_fmts[i].fmt_id,
+ DEFAULT_WIDTH, DEFAULT_HEIGHT,
+ DEFAULT_FPS, 1);
+ }
+
+ PJ_LOG(4, (THIS_FILE, "Null video src initialized with %d device(s):", cf->dev_count));
+ for (i = 0; i < cf->dev_count; i++) {
+ PJ_LOG(4, (THIS_FILE, "%2d: %s", i, cf->dev_info[i].info.name));
+ }
+
+ return PJ_SUCCESS;
+}
+
+/* API: destroy factory */
+static pj_status_t null_factory_destroy(pjmedia_vid_dev_factory *f)
+{
+ struct null_factory *cf = (struct null_factory*)f;
+ pj_pool_t *pool = cf->pool;
+
+ cf->pool = NULL;
+ pj_pool_release(pool);
+
+ return PJ_SUCCESS;
+}
+
+/* API: refresh the list of devices */
+static pj_status_t null_factory_refresh(pjmedia_vid_dev_factory *f)
+{
+ PJ_UNUSED_ARG(f);
+ return PJ_SUCCESS;
+}
+
+/* API: get number of devices */
+static unsigned null_factory_get_dev_count(pjmedia_vid_dev_factory *f)
+{
+ struct null_factory *cf = (struct null_factory*)f;
+ return cf->dev_count;
+}
+
+/* API: get device info */
+static pj_status_t null_factory_get_dev_info(pjmedia_vid_dev_factory *f,
+ unsigned index,
+ pjmedia_vid_dev_info *info)
+{
+ struct null_factory *cf = (struct null_factory*)f;
+
+ PJ_ASSERT_RETURN(index < cf->dev_count, PJMEDIA_EVID_INVDEV);
+
+ pj_memcpy(info, &cf->dev_info[index].info, sizeof(*info));
+
+ return PJ_SUCCESS;
+}
+
+/* API: create default device parameter */
+static pj_status_t null_factory_default_param(pj_pool_t *pool,
+ pjmedia_vid_dev_factory *f,
+ unsigned index,
+ pjmedia_vid_dev_param *param)
+{
+ struct null_factory *cf = (struct null_factory*)f;
+ struct null_dev_info *di = &cf->dev_info[index];
+
+ PJ_ASSERT_RETURN(index < cf->dev_count, PJMEDIA_EVID_INVDEV);
+
+ PJ_UNUSED_ARG(pool);
+
+ pj_bzero(param, sizeof(*param));
+ param->dir = PJMEDIA_DIR_CAPTURE;
+ param->cap_id = index;
+ param->rend_id = PJMEDIA_VID_INVALID_DEV;
+ param->flags = PJMEDIA_VID_DEV_CAP_FORMAT;
+ param->clock_rate = DEFAULT_CLOCK_RATE;
+ pj_memcpy(&param->fmt, &di->info.fmt[0], sizeof(param->fmt));
+
+ return PJ_SUCCESS;
+}
+
+static const struct null_fmt_info* get_null_fmt_info(pjmedia_format_id id)
+{
+ unsigned i;
+
+ for (i = 0; i < sizeof(null_fmts)/sizeof(null_fmts[0]); i++) {
+ if (null_fmts[i].fmt_id == id)
+ return &null_fmts[i];
+ }
+
+ return NULL;
+}
+
+
+/* API: create stream */
+static pj_status_t null_factory_create_stream(
+ pjmedia_vid_dev_factory *f,
+ pjmedia_vid_dev_param *param,
+ const pjmedia_vid_dev_cb *cb,
+ void *user_data,
+ pjmedia_vid_dev_stream **p_vid_strm)
+{
+ struct null_factory *cf = (struct null_factory*)f;
+ pj_pool_t *pool;
+ struct null_stream *strm;
+ const pjmedia_video_format_detail *vfd;
+ const pjmedia_video_format_info *vfi;
+ pjmedia_video_apply_fmt_param vafp;
+ const struct null_fmt_info *cbfi;
+ unsigned i;
+
+ PJ_ASSERT_RETURN(f && param && p_vid_strm, PJ_EINVAL);
+ PJ_ASSERT_RETURN(param->fmt.type == PJMEDIA_TYPE_VIDEO &&
+ param->fmt.detail_type == PJMEDIA_FORMAT_DETAIL_VIDEO &&
+ param->dir == PJMEDIA_DIR_CAPTURE,
+ PJ_EINVAL);
+
+ pj_bzero(&vafp, sizeof(vafp));
+
+ vfd = pjmedia_format_get_video_format_detail(&param->fmt, PJ_TRUE);
+ vfi = pjmedia_get_video_format_info(NULL, param->fmt.id);
+ cbfi = get_null_fmt_info(param->fmt.id);
+ if (!vfi || !cbfi)
+ return PJMEDIA_EVID_BADFORMAT;
+
+ vafp.size = param->fmt.det.vid.size;
+ if (vfi->apply_fmt(vfi, &vafp) != PJ_SUCCESS)
+ return PJMEDIA_EVID_BADFORMAT;
+
+ /* Create and Initialize stream descriptor */
+ pool = pj_pool_create(cf->pf, "null-dev", 512, 512, NULL);
+ PJ_ASSERT_RETURN(pool != NULL, PJ_ENOMEM);
+
+ strm = PJ_POOL_ZALLOC_T(pool, struct null_stream);
+ pj_memcpy(&strm->param, param, sizeof(*param));
+ strm->pool = pool;
+ pj_memcpy(&strm->vid_cb, cb, sizeof(*cb));
+ strm->user_data = user_data;
+ strm->vfi = vfi;
+ strm->cbfi = cbfi;
+ pj_memcpy(&strm->vafp, &vafp, sizeof(vafp));
+ strm->ts_inc = PJMEDIA_SPF2(param->clock_rate, &vfd->fps, 1);
+
+ for (i = 0; i < vfi->plane_cnt; ++i) {
+ strm->first_line[i] = pj_pool_alloc(pool, vafp.strides[i]);
+ pj_memset(strm->first_line[i], 0, vafp.strides[i]);
+ }
+
+ /* Done */
+ strm->base.op = &stream_op;
+ *p_vid_strm = &strm->base;
+
+ return PJ_SUCCESS;
+}
+
+/* API: Get stream info. */
+static pj_status_t null_stream_get_param(pjmedia_vid_dev_stream *s,
+ pjmedia_vid_dev_param *pi)
+{
+ struct null_stream *strm = (struct null_stream*)s;
+
+ PJ_ASSERT_RETURN(strm && pi, PJ_EINVAL);
+
+ pj_memcpy(pi, &strm->param, sizeof(*pi));
+ return PJ_SUCCESS;
+}
+
+/* API: get capability */
+static pj_status_t null_stream_get_cap(pjmedia_vid_dev_stream *s,
+ pjmedia_vid_dev_cap cap,
+ void *pval)
+{
+ struct null_stream *strm = (struct null_stream*)s;
+
+ PJ_UNUSED_ARG(strm);
+ PJ_ASSERT_RETURN(s && pval, PJ_EINVAL);
+ return PJMEDIA_EVID_INVCAP;
+}
+
+/* API: set capability */
+static pj_status_t null_stream_set_cap(pjmedia_vid_dev_stream *s,
+ pjmedia_vid_dev_cap cap,
+ const void *pval)
+{
+ struct null_stream *strm = (struct null_stream*)s;
+
+ PJ_UNUSED_ARG(strm);
+ PJ_ASSERT_RETURN(s && pval, PJ_EINVAL);
+ return PJMEDIA_EVID_INVCAP;
+}
+
+
+/* API: Get frame from stream */
+static pj_status_t null_stream_get_frame(pjmedia_vid_dev_stream *strm,
+ pjmedia_frame *frame)
+{
+ struct null_stream *stream = (struct null_stream*)strm;
+ unsigned i;
+ pj_uint8_t *ptr = frame->buf;
+
+ frame->type = PJMEDIA_FRAME_TYPE_VIDEO;
+ frame->bit_info = 0;
+ frame->timestamp = stream->ts;
+ stream->ts.u64 += stream->ts_inc;
+
+ /* paint subsequent lines */
+ for (i=0; i<stream->vfi->plane_cnt; ++i) {
+ pj_uint8_t *plane_end;
+ plane_end = ptr + stream->vafp.plane_bytes[i];
+ while (ptr < plane_end) {
+ pj_memcpy(ptr, stream->first_line[i], stream->vafp.strides[i]);
+ ptr += stream->vafp.strides[i];
+ }
+ }
+
+ return PJ_SUCCESS;
+}
+
+/* API: Start stream. */
+static pj_status_t null_stream_start(pjmedia_vid_dev_stream *strm)
+{
+ struct null_stream *stream = (struct null_stream*)strm;
+
+ PJ_UNUSED_ARG(stream);
+
+ PJ_LOG(4, (THIS_FILE, "Starting null video stream"));
+
+ return PJ_SUCCESS;
+}
+
+/* API: Stop stream. */
+static pj_status_t null_stream_stop(pjmedia_vid_dev_stream *strm)
+{
+ struct null_stream *stream = (struct null_stream*)strm;
+
+ PJ_UNUSED_ARG(stream);
+
+ PJ_LOG(4, (THIS_FILE, "Stopping null video stream"));
+
+ return PJ_SUCCESS;
+}
+
+
+/* API: Destroy stream. */
+static pj_status_t null_stream_destroy(pjmedia_vid_dev_stream *strm)
+{
+ struct null_stream *stream = (struct null_stream*)strm;
+
+ PJ_ASSERT_RETURN(stream != NULL, PJ_EINVAL);
+
+ null_stream_stop(strm);
+
+ pj_pool_release(stream->pool);
+
+ return PJ_SUCCESS;
+}
+
+#endif /* PJMEDIA_VIDEO_DEV_HAS_NULL */
diff -ruN pjproject-2.10/pjmedia/src/pjmedia-videodev/videodev.c pjsip/pjmedia/src/pjmedia-videodev/videodev.c
--- pjproject-2.10/pjmedia/src/pjmedia-videodev/videodev.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjmedia/src/pjmedia-videodev/videodev.c 2021-02-06 23:01:33.883264555 +0100
@@ -51,6 +51,10 @@
pjmedia_vid_dev_factory* pjmedia_qt_factory(pj_pool_factory *pf);
#endif
+#if PJMEDIA_VIDEO_DEV_HAS_AVF
+pjmedia_vid_dev_factory* pjmedia_avf_factory(pj_pool_factory *pf);
+#endif
+
#if PJMEDIA_VIDEO_DEV_HAS_DARWIN
pjmedia_vid_dev_factory* pjmedia_darwin_factory(pj_pool_factory *pf);
#endif
@@ -59,6 +63,14 @@
pjmedia_vid_dev_factory* pjmedia_opengl_factory(pj_pool_factory *pf);
#endif
+#if PJMEDIA_VIDEO_DEV_HAS_FB
+pjmedia_vid_dev_factory* pjmedia_fb_factory(pj_pool_factory *pf);
+#endif
+
+#if PJMEDIA_VIDEO_DEV_HAS_NULL
+pjmedia_vid_dev_factory* pjmedia_null_factory(pj_pool_factory *pf);
+#endif
+
#if PJMEDIA_VIDEO_DEV_HAS_ANDROID
pjmedia_vid_dev_factory* pjmedia_and_factory(pj_pool_factory *pf);
#endif
@@ -98,6 +110,9 @@
#if PJMEDIA_VIDEO_DEV_HAS_QT
vid_subsys->drv[vid_subsys->drv_cnt++].create = &pjmedia_qt_factory;
#endif
+#if PJMEDIA_VIDEO_DEV_HAS_AVF
+ vid_subsys->drv[vid_subsys->drv_cnt++].create = &pjmedia_avf_factory;
+#endif
#if PJMEDIA_VIDEO_DEV_HAS_OPENGL
vid_subsys->drv[vid_subsys->drv_cnt++].create = &pjmedia_opengl_factory;
#endif
@@ -122,6 +137,12 @@
*/
vid_subsys->drv[vid_subsys->drv_cnt++].create = &pjmedia_cbar_factory;
#endif
+#if PJMEDIA_VIDEO_DEV_HAS_FB
+ vid_subsys->drv[vid_subsys->drv_cnt++].create = &pjmedia_fb_factory;
+#endif
+#if PJMEDIA_VIDEO_DEV_HAS_NULL
+ vid_subsys->drv[vid_subsys->drv_cnt++].create = &pjmedia_null_factory;
+#endif
/* Initialize each factory and build the device ID list */
for (i=0; i<vid_subsys->drv_cnt; ++i) {
diff -ruN pjproject-2.10/pjnath/include/pjnath/ice_strans.h pjsip/pjnath/include/pjnath/ice_strans.h
--- pjproject-2.10/pjnath/include/pjnath/ice_strans.h 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjnath/include/pjnath/ice_strans.h 2021-02-06 17:37:27.238934521 +0100
@@ -144,6 +144,52 @@
} pj_ice_strans_op;
+
+/**
+ * ICE stream transport's state.
+ */
+typedef enum pj_ice_strans_state
+{
+ /**
+ * ICE stream transport is not created.
+ */
+ PJ_ICE_STRANS_STATE_NULL,
+
+ /**
+ * ICE candidate gathering process is in progress.
+ */
+ PJ_ICE_STRANS_STATE_INIT,
+
+ /**
+ * ICE stream transport initialization/candidate gathering process is
+ * complete, ICE session may be created on this stream transport.
+ */
+ PJ_ICE_STRANS_STATE_READY,
+
+ /**
+ * New session has been created and the session is ready.
+ */
+ PJ_ICE_STRANS_STATE_SESS_READY,
+
+ /**
+ * ICE negotiation is in progress.
+ */
+ PJ_ICE_STRANS_STATE_NEGO,
+
+ /**
+ * ICE negotiation has completed successfully and media is ready
+ * to be used.
+ */
+ PJ_ICE_STRANS_STATE_RUNNING,
+
+ /**
+ * ICE negotiation has completed with failure.
+ */
+ PJ_ICE_STRANS_STATE_FAILED
+
+} pj_ice_strans_state;
+
+
/**
* This structure contains callbacks that will be called by the
* ICE stream transport.
@@ -192,6 +238,18 @@
pj_ice_strans_op op,
pj_status_t status);
+ /**
+ * Callback to report ICE state changes.
+ *
+ * @param ice_st The ICE stream transport.
+ * @param prev Previous state.
+ * @param curr Current state.
+ */
+ void (*on_ice_state)(pj_ice_strans *ice_st,
+ pj_ice_strans_state prev,
+ pj_ice_strans_state curr);
+
+
} pj_ice_strans_cb;
@@ -522,51 +580,6 @@
} pj_ice_strans_cfg;
-/**
- * ICE stream transport's state.
- */
-typedef enum pj_ice_strans_state
-{
- /**
- * ICE stream transport is not created.
- */
- PJ_ICE_STRANS_STATE_NULL,
-
- /**
- * ICE candidate gathering process is in progress.
- */
- PJ_ICE_STRANS_STATE_INIT,
-
- /**
- * ICE stream transport initialization/candidate gathering process is
- * complete, ICE session may be created on this stream transport.
- */
- PJ_ICE_STRANS_STATE_READY,
-
- /**
- * New session has been created and the session is ready.
- */
- PJ_ICE_STRANS_STATE_SESS_READY,
-
- /**
- * ICE negotiation is in progress.
- */
- PJ_ICE_STRANS_STATE_NEGO,
-
- /**
- * ICE negotiation has completed successfully and media is ready
- * to be used.
- */
- PJ_ICE_STRANS_STATE_RUNNING,
-
- /**
- * ICE negotiation has completed with failure.
- */
- PJ_ICE_STRANS_STATE_FAILED
-
-} pj_ice_strans_state;
-
-
/**
* Initialize ICE transport configuration with default values.
*
@@ -921,6 +934,27 @@
unsigned comp_id);
/**
+ * Retrieve the ICE session associated with this transport
+ *
+ * @param ice_st The ICE stream transport.
+ *
+ * @return The ICE session associated with this transport
+ */
+PJ_DECL(pj_ice_sess*)
+pj_ice_strans_get_session(const pj_ice_strans *ice_st);
+
+/**
+ * Retrieve the ICE start time
+ *
+ * @param ice_st The ICE stream transport.
+ *
+ * @return The ICE start time
+ */
+PJ_DECL(pj_time_val)
+pj_ice_strans_get_start_time(const pj_ice_strans *ice_st);
+
+
+/**
* Stop and destroy the ICE session inside this media transport. Application
* needs to call this function once the media session is over (the call has
* been disconnected).
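Illustrative sketch, not part of the patch: an application handler for the on_ice_state callback added to pj_ice_strans_cb above.

static void my_on_ice_state(pj_ice_strans *ice_st,
                            pj_ice_strans_state prev,
                            pj_ice_strans_state curr)
{
    PJ_UNUSED_ARG(ice_st);
    PJ_UNUSED_ARG(prev);

    if (curr == PJ_ICE_STRANS_STATE_RUNNING) {
        /* Negotiation finished; pj_ice_strans_get_start_time() can be used
         * to compute how long it took. */
    } else if (curr == PJ_ICE_STRANS_STATE_FAILED) {
        /* Negotiation failed; stop or restart ICE. */
    }
}

/* Before pj_ice_strans_create():
 *
 *     pj_ice_strans_cb cb;
 *     pj_bzero(&cb, sizeof(cb));
 *     cb.on_ice_state = &my_on_ice_state;
 */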
diff -ruN pjproject-2.10/pjnath/src/pjnath/ice_strans.c pjsip/pjnath/src/pjnath/ice_strans.c
--- pjproject-2.10/pjnath/src/pjnath/ice_strans.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjnath/src/pjnath/ice_strans.c 2021-02-06 17:27:03.341194358 +0100
@@ -240,6 +240,19 @@
} sock_user_data;
+/* Set ICE state*/
+static void set_ice_state(pj_ice_strans *ice_st, pj_ice_strans_state state)
+{
+ pj_ice_strans_state prev = ice_st->state;
+
+ if (prev != state) {
+ ice_st->state = state;
+ if (ice_st->cb.on_ice_state)
+ (*ice_st->cb.on_ice_state)(ice_st, prev, state);
+ }
+}
+
+
/* Validate configuration */
static pj_status_t pj_ice_strans_cfg_check_valid(const pj_ice_strans_cfg *cfg)
{
@@ -928,7 +941,7 @@
pj_pool_calloc(pool, comp_cnt, sizeof(pj_ice_strans_comp*));
/* Move state to candidate gathering */
- ice_st->state = PJ_ICE_STRANS_STATE_INIT;
+ set_ice_state(ice_st, PJ_ICE_STRANS_STATE_INIT);
/* Acquire initialization mutex to prevent callback to be
* called before we finish initialization.
@@ -1123,7 +1136,7 @@
* candidate for a component.
*/
ice_st->cb_called = PJ_TRUE;
- ice_st->state = PJ_ICE_STRANS_STATE_READY;
+ set_ice_state(ice_st, PJ_ICE_STRANS_STATE_READY);
if (ice_st->cb.on_ice_complete)
(*ice_st->cb.on_ice_complete)(ice_st, PJ_ICE_STRANS_OP_INIT,
status);
@@ -1289,7 +1302,7 @@
}
/* ICE session is ready for negotiation */
- ice_st->state = PJ_ICE_STRANS_STATE_SESS_READY;
+ set_ice_state(ice_st, PJ_ICE_STRANS_STATE_SESS_READY);
return PJ_SUCCESS;
@@ -1518,7 +1531,7 @@
return status;
}
- ice_st->state = PJ_ICE_STRANS_STATE_NEGO;
+ set_ice_state(ice_st, PJ_ICE_STRANS_STATE_NEGO);
return status;
}
@@ -1539,6 +1552,25 @@
}
/*
+ * Get ICE session.
+ */
+PJ_DEF(pj_ice_sess*)
+pj_ice_strans_get_session(const pj_ice_strans *ice_st)
+{
+ return ice_st->ice;
+}
+
+/*
+ * Get ICE start time.
+ */
+PJ_DEF(pj_time_val)
+pj_ice_strans_get_start_time(const pj_ice_strans *ice_st)
+{
+
+ return ice_st->start_time;
+}
+
+/*
* Stop ICE!
*/
PJ_DEF(pj_status_t) pj_ice_strans_stop_ice(pj_ice_strans *ice_st)
@@ -1556,7 +1588,7 @@
ice_st->ice = NULL;
}
- ice_st->state = PJ_ICE_STRANS_STATE_INIT;
+ set_ice_state(ice_st, PJ_ICE_STRANS_STATE_INIT);
pj_grp_lock_release(ice_st->grp_lock);
@@ -1877,8 +1909,8 @@
}
}
- ice_st->state = (status==PJ_SUCCESS) ? PJ_ICE_STRANS_STATE_RUNNING :
- PJ_ICE_STRANS_STATE_FAILED;
+ set_ice_state(ice_st, (status==PJ_SUCCESS) ? PJ_ICE_STRANS_STATE_RUNNING :
+ PJ_ICE_STRANS_STATE_FAILED);
pj_log_push_indent();
(*cb.on_ice_complete)(ice_st, PJ_ICE_STRANS_OP_NEGOTIATION, status);
diff -ruN pjproject-2.10/pjsip/include/pjsip/sip_msg.h pjsip/pjsip/include/pjsip/sip_msg.h
--- pjproject-2.10/pjsip/include/pjsip/sip_msg.h 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjsip/include/pjsip/sip_msg.h 2021-02-06 19:48:20.570302003 +0100
@@ -675,6 +675,23 @@
};
/**
+ * General purpose function to print a SIP message body.
+ * Uses the appropriate internal functions to print the string representation
+ * of a SIP message body. The output buffer is set to a statically allocated
+ * buffer, so the caller is responsible for copying the result if it needs
+ * to be kept.
+ *
+ * @param msg_body The message body.
+ * @param buf Pointer to get the result buffer (statically allocated).
+ * @param len Pointer to receive the length of the printed body.
+ *
+ * @return Zero on success (with *buf and *len set), or -1 on error.
+ */
+PJ_DECL(int) pjsip_print_body( pjsip_msg_body *msg_body,
+ char **buf, int *len);
+
+
+
+/**
* General purpose function to textual data in a SIP body. Attach this function
* in a SIP message body only if the data in pjsip_msg_body is a textual
* message ready to be embedded in a SIP message. If the data in the message
@@ -894,6 +911,20 @@
PJ_DECL(void*) pjsip_msg_find_remove_hdr( pjsip_msg *msg,
pjsip_hdr_e hdr, void *start);
+/**
+ * Find and remove a header in the message.
+ *
+ * @param msg The message.
+ * @param name The header name to find.
+ * @param start The first header field where the search should begin,
+ * or NULL to search from the first header in the message.
+ *
+ * @return The header field, or NULL if not found.
+ */
+PJ_DECL(void*) pjsip_msg_find_remove_hdr_by_name( pjsip_msg *msg,
+ pj_str_t *name,
+ void *start);
+
/**
* Add a header to the message, putting it last in the header list.
*
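Illustrative sketch, not part of the patch: using the two helpers declared above from application code. The "Warning" header name is only an example.

static void dump_body_and_strip_warnings(pjsip_msg *msg)
{
    char *buf;
    int len;
    pj_str_t hname = pj_str("Warning");

    if (msg->body && pjsip_print_body(msg->body, &buf, &len) == 0) {
        /* buf points at a static buffer, so copy it before printing
         * another body if the text has to be kept. */
        PJ_LOG(4, ("app", "Message body is %d bytes:\n%.*s", len, len, buf));
    }

    /* Drop every Warning header from the message. */
    while (pjsip_msg_find_remove_hdr_by_name(msg, &hname, NULL) != NULL)
        ;
}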
diff -ruN pjproject-2.10/pjsip/include/pjsip-simple/evsub.h pjsip/pjsip/include/pjsip-simple/evsub.h
--- pjproject-2.10/pjsip/include/pjsip-simple/evsub.h 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjsip/include/pjsip-simple/evsub.h 2021-02-06 19:49:09.127691820 +0100
@@ -524,6 +524,18 @@
pj_uint32_t seconds);
+/* Update evsub's internal refresh_time with the given interval */
+PJ_DECL(void) pjsip_evsub_update_expires( pjsip_evsub *sub,
+ pj_uint32_t interval );
+
+
+/* Set the specified timer (UAC or UAS) to the specified time */
+PJ_DECL(void) pjsip_evsub_set_timer( pjsip_evsub *sub,
+ int timer_id,
+ pj_int32_t seconds );
+
+
+
PJ_END_DECL
/**
diff -ruN pjproject-2.10/pjsip/src/pjsip/sip_msg.c pjsip/pjsip/src/pjsip/sip_msg.c
--- pjproject-2.10/pjsip/src/pjsip/sip_msg.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjsip/src/pjsip/sip_msg.c 2021-02-06 19:39:21.510782607 +0100
@@ -394,6 +394,18 @@
return hdr;
}
+PJ_DEF(void*) pjsip_msg_find_remove_hdr_by_name( pjsip_msg *msg,
+ pj_str_t *name,
+ void *start)
+{
+ pjsip_hdr *hdr = (pjsip_hdr*) pjsip_msg_find_hdr_by_name(msg, name, start);
+ if (hdr) {
+ pj_list_erase(hdr);
+ }
+ return hdr;
+}
+
+
PJ_DEF(pj_ssize_t) pjsip_msg_print( const pjsip_msg *msg,
char *buf, pj_size_t size)
{
@@ -2146,6 +2158,21 @@
/*
* Message body manipulations.
*/
+
+PJ_DEF(int) pjsip_print_body(pjsip_msg_body *msg_body, char **buf, int *len)
+{
+ static char s_buf[PJSIP_MAX_PKT_LEN];
+ int res;
+
+ res = (*msg_body->print_body)(msg_body, s_buf, PJSIP_MAX_PKT_LEN);
+ if (res < 0) {
+ return -1;
+ }
+ *buf = s_buf;
+ *len = res;
+ return 0;
+}
+
PJ_DEF(int) pjsip_print_text_body(pjsip_msg_body *msg_body, char *buf, pj_size_t size)
{
if (size < msg_body->len)
diff -ruN pjproject-2.10/pjsip/src/pjsip-simple/evsub.c pjsip/pjsip/src/pjsip-simple/evsub.c
--- pjproject-2.10/pjsip/src/pjsip-simple/evsub.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjsip/src/pjsip-simple/evsub.c 2021-02-06 19:40:40.321065814 +0100
@@ -495,6 +495,12 @@
}
+PJ_DEF(void) pjsip_evsub_update_expires( pjsip_evsub *sub, pj_uint32_t interval )
+{
+ update_expires(sub, interval);
+}
+
+
/*
* Schedule timer.
*/
@@ -538,6 +544,13 @@
}
+PJ_DEF(void) pjsip_evsub_set_timer( pjsip_evsub *sub, int timer_id,
+ pj_int32_t seconds)
+{
+ set_timer(sub, timer_id, seconds);
+}
+
+
/*
* Destructor.
*/
diff -ruN pjproject-2.10/pjsip/src/pjsip-simple/evsub_msg.c pjsip/pjsip/src/pjsip-simple/evsub_msg.c
--- pjproject-2.10/pjsip/src/pjsip-simple/evsub_msg.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjsip/src/pjsip-simple/evsub_msg.c 2021-02-06 19:41:44.190911650 +0100
@@ -293,6 +293,45 @@
}
/*
+ * Parse Allow-Events header.
+ */
+static pjsip_hdr* parse_hdr_allow_events(pjsip_parse_ctx *ctx)
+{
+ pjsip_allow_events_hdr *allow_events =
+ pjsip_allow_events_hdr_create(ctx->pool);
+ const pjsip_parser_const_t *pc = pjsip_parser_const();
+ pj_scanner *scanner = ctx->scanner;
+
+ /* Some header fields allow empty elements in the value:
+ * Accept, Allow, Supported
+ */
+ if (pj_scan_is_eof(scanner) ||
+ *scanner->curptr == '\r' || *scanner->curptr == '\n')
+ {
+ goto end;
+ }
+
+ pj_scan_get( scanner, &pc->pjsip_NOT_COMMA_OR_NEWLINE,
+ &allow_events->values[0]);
+ allow_events->count++;
+
+ while (*scanner->curptr == ',') {
+ pj_scan_get_char(scanner);
+ pj_scan_get( scanner, &pc->pjsip_NOT_COMMA_OR_NEWLINE,
+ &allow_events->values[allow_events->count]);
+ allow_events->count++;
+
+ if (allow_events->count >= PJSIP_MAX_ALLOW_EVENTS)
+ break;
+ }
+
+end:
+ pjsip_parse_end_hdr_imp(scanner);
+ return (pjsip_hdr*)allow_events;
+}
+
+
+/*
* Register header parsers.
*/
PJ_DEF(void) pjsip_evsub_init_parser(void)
diff -ruN pjproject-2.10/pjsip/src/pjsip-ua/sip_inv.c pjsip/pjsip/src/pjsip-ua/sip_inv.c
--- pjproject-2.10/pjsip/src/pjsip-ua/sip_inv.c 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/pjsip/src/pjsip-ua/sip_inv.c 2021-02-06 19:46:19.602835344 +0100
@@ -2006,6 +2006,20 @@
return PJMEDIA_SDP_EINSDP;
}
+ /* Only accept SDP in INVITE, UPDATE and ACK requests, 18x (reliable) and 183 provisional responses
+ * and 200 final response.
+ */
+ if (!(msg->type == PJSIP_REQUEST_MSG && msg->line.req.method.id == PJSIP_INVITE_METHOD) &&
+ !(msg->type == PJSIP_REQUEST_MSG && msg->line.req.method.id == PJSIP_ACK_METHOD) &&
+ !(msg->type == PJSIP_REQUEST_MSG && pjsip_method_cmp(&msg->line.req.method, &pjsip_update_method)==0) &&
+ !(msg->type == PJSIP_RESPONSE_MSG && msg->line.status.code/10==18 && pjsip_100rel_is_reliable(rdata)) &&
+ !(msg->type == PJSIP_RESPONSE_MSG && msg->line.status.code == 183) &&
+ !(msg->type == PJSIP_RESPONSE_MSG && msg->line.status.code == 200)) {
+ PJ_LOG(4,(inv->obj_name, "ignored SDP body"));
+ return PJ_SUCCESS;
+ }
+
+
/* Get/attach invite session's transaction data */
tsx_inv_data = (struct tsx_inv_data*) tsx->mod_data[mod_inv.mod.id];
if (tsx_inv_data == NULL) {
@@ -2233,6 +2247,11 @@
{
status = pjmedia_sdp_neg_set_local_answer(inv->pool_prov, inv->neg,
local_sdp);
+ } else if (pjmedia_sdp_neg_get_state(inv->neg)==
+ PJMEDIA_SDP_NEG_STATE_LOCAL_OFFER)
+ {
+ /* Go forward with our local offer */
+ status = PJ_SUCCESS;
} else {
/* Can not specify local SDP at this state. */
@@ -3976,8 +3995,9 @@
if (inv->state != PJSIP_INV_STATE_DISCONNECTED &&
((tsx->status_code == PJSIP_SC_CALL_TSX_DOES_NOT_EXIST &&
tsx->method.id != PJSIP_CANCEL_METHOD) ||
- tsx->status_code == PJSIP_SC_REQUEST_TIMEOUT ||
- tsx->status_code == PJSIP_SC_TSX_TIMEOUT))
+ (inv->state != PJSIP_INV_STATE_CONFIRMED &&
+ (tsx->status_code == PJSIP_SC_TSX_TIMEOUT ||
+ tsx->status_code == PJSIP_SC_TSX_TRANSPORT_ERROR))))
{
pjsip_tx_data *bye;
pj_status_t status;
diff -ruN pjproject-2.10/third_party/build/os-auto.mak.in pjsip/third_party/build/os-auto.mak.in
--- pjproject-2.10/third_party/build/os-auto.mak.in 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/third_party/build/os-auto.mak.in 2021-02-06 16:09:49.029693848 +0100
-@@ -73,6 +73,9 @@
+@@ -73,6 +73,8 @@
endif
endif
-+export PJMEDIA_VIDEODEV_OBJS += avf_dev.o
+DIRS += zsrtp
+
ifneq (@ac_no_webrtc@,1)
ifeq (@ac_external_webrtc@,1)
# External webrtc
diff -ruN pjproject-2.10/build.mak.in pjsip/build.mak.in
--- pjproject-2.10/build.mak.in 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/build.mak.in 2021-02-06 16:09:49.029693848 +0100
@@ -148,6 +148,19 @@
endif
endif
+#ifneq (@ac_no_zsrtp@,1)
+ifeq (@ac_external_zsrtp@,1)
+APP_THIRD_PARTY_EXT += -lzsrtp
+else
+APP_THIRD_PARTY_LIB_FILES += $(PJ_DIR)/third_party/lib/libzsrtp-$(LIB_SUFFIX)
+ifeq ($(PJ_SHARED_LIBRARIES),)
+APP_THIRD_PARTY_LIBS += -lzsrtp-$(TARGET_NAME) -lsqlite3 -lstdc++
+else
+APP_THIRD_PARTY_LIBS += -lzsrtp
+APP_THIRD_PARTY_LIB_FILES += $(PJ_DIR)/third_party/lib/libzsrtp.$(SHLIB_SUFFIX).$(PJ_VERSION_MAJOR) $(PJ_DIR)/third_party/lib/libzsrtp.$(SHLIB_SUFFIX)
+endif
+endif
+#endif
# Additional flags
@ac_build_mak_vars@
diff -ruN pjproject-2.10/third_party/build/os-darwinos.mak pjsip/third_party/build/os-darwinos.mak
--- pjproject-2.10/third_party/build/os-darwinos.mak 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/third_party/build/os-darwinos.mak 2021-02-06 16:00:34.131596843 +0100
@@ -5,5 +5,6 @@
DIRS += g7221
DIRS += srtp
DIRS += resample
+DIRS += zsrtp
diff -ruN pjproject-2.10/third_party/build/os-linux.mak pjsip/third_party/build/os-linux.mak
--- pjproject-2.10/third_party/build/os-linux.mak 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/third_party/build/os-linux.mak 2021-02-06 15:59:58.326708529 +0100
@@ -5,5 +5,6 @@
DIRS += g7221
DIRS += srtp
DIRS += resample
+DIRS += zsrtp
diff -ruN pjproject-2.10/third_party/build/os-win32.mak pjsip/third_party/build/os-win32.mak
--- pjproject-2.10/third_party/build/os-win32.mak 2020-02-14 10:48:27.000000000 +0100
+++ pjsip/third_party/build/os-win32.mak 2021-02-06 16:00:20.023246385 +0100
@@ -5,5 +5,6 @@
DIRS += g7221
DIRS += srtp
DIRS += resample
+DIRS += zsrtp
--- pjproject-2.10/pjmedia/src/pjmedia-audiodev/audiodev.c 2021-02-04 11:19:25.357096871 +0100
+++ pjsip/pjmedia/src/pjmedia-audiodev/audiodev.c 2021-02-04 11:09:10.120174258 +0100
@@ -143,6 +143,18 @@
aud_subsys->drv[aud_subsys->drv_cnt++].create = &pjmedia_null_audio_factory;
#endif
+ /* Initialize audio device observer objects */
+ pj_status_t st;
+ aud_subsys->dev_observer.pool = pj_pool_create(pf, "aud_dev_observer_pool", 512, 512, NULL);
+ if (!aud_subsys->dev_observer.pool) {
+ return PJ_ENOMEM;
+ }
+ st = pj_mutex_create_simple(aud_subsys->dev_observer.pool, "aud_dev_observer_lock", &aud_subsys->dev_observer.lock);
+ if (st != PJ_SUCCESS) {
+ return st;
+ }
+ aud_subsys->dev_observer.cb = NULL;
+
/* Initialize each factory and build the device ID list */
for (i=0; i<aud_subsys->drv_cnt; ++i) {
status = pjmedia_aud_driver_init(i, PJ_FALSE);
@@ -229,6 +241,9 @@
pjmedia_aud_driver_deinit(i);
}
+ pj_mutex_destroy(aud_subsys->dev_observer.lock);
+ pj_pool_release(aud_subsys->dev_observer.pool);
+
aud_subsys->pf = NULL;
}
return PJ_SUCCESS;
--- pjproject-2.10/pjmedia/src/pjmedia/audiodev.c 2021-02-04 11:19:25.357096871 +0100
+++ pjsip/pjmedia/src/pjmedia/audiodev.c 2021-02-04 11:09:10.120174258 +0100
@@ -74,6 +74,60 @@
return &aud_subsys;
}
+/* callback for device change operations */
+static void process_aud_dev_change_event(pjmedia_aud_dev_change_event event)
+{
+ pj_status_t status;
+
+ if (!pj_thread_is_registered()) {
+ status = pj_thread_register("aud_dev_observer", aud_subsys.dev_observer.thread_desc, &aud_subsys.dev_observer.thread);
+ if (status != PJ_SUCCESS) {
+ return;
+ }
+ PJ_LOG(5, (THIS_FILE, "Audio device change thread registered"));
+ }
+
+ status = pj_mutex_lock(aud_subsys.dev_observer.lock);
+ if (status != PJ_SUCCESS) {
+ PJ_LOG(5, (THIS_FILE, "Could not acquire audio device change lock"));
+ return;
+ }
+
+ if (!aud_subsys.dev_observer.cb) {
+ /* there is no registered callback to call */
+ goto end;
+ }
+
+ switch(event) {
+ case DEFAULT_INPUT_CHANGED:
+ PJ_LOG(5, (THIS_FILE, "Default input device changed"));
+ pjmedia_aud_dev_refresh();
+ (*aud_subsys.dev_observer.cb)(PJMEDIA_AUD_DEV_DEFAULT_INPUT_CHANGED);
+ break;
+ case DEFAULT_OUTPUT_CHANGED:
+ PJ_LOG(5, (THIS_FILE, "Default output device changed"));
+ pjmedia_aud_dev_refresh();
+ (*aud_subsys.dev_observer.cb)(PJMEDIA_AUD_DEV_DEFAULT_OUTPUT_CHANGED);
+ break;
+ case DEVICE_LIST_CHANGED:
+ PJ_LOG(5, (THIS_FILE, "Device list changed"));
+ (*aud_subsys.dev_observer.cb)(PJMEDIA_AUD_DEV_LIST_WILL_REFRESH);
+ pjmedia_aud_dev_refresh();
+ (*aud_subsys.dev_observer.cb)(PJMEDIA_AUD_DEV_LIST_DID_REFRESH);
+ break;
+ default:
+ PJ_LOG(5, (THIS_FILE, "Unknown event: %d", event));
+ break;
+ }
+
+end:
+ status = pj_mutex_unlock(aud_subsys.dev_observer.lock);
+ if (status != PJ_SUCCESS) {
+ PJ_LOG(5, (THIS_FILE, "Could not release audio device change lock"));
+ }
+
+}
+
/* API: init driver */
PJ_DEF(pj_status_t) pjmedia_aud_driver_init(unsigned drv_idx,
pj_bool_t refresh)
@@ -99,6 +152,11 @@
f = drv->f;
}
+ /* Register device change observer */
+ if (!refresh) {
+ f->op->set_dev_change_cb(f, &process_aud_dev_change_event);
+ }
+
if (!f)
return PJ_EUNKNOWN;
@@ -123,8 +181,8 @@
*/
/* Fill in default devices */
- drv->play_dev_idx = drv->rec_dev_idx =
- drv->dev_idx = PJMEDIA_AUD_INVALID_DEV;
+ drv->rec_dev_idx = f->op->get_default_rec_dev(f);
+ drv->play_dev_idx = f->op->get_default_play_dev(f);
for (i=0; i<dev_cnt; ++i) {
pjmedia_aud_dev_info info;
@@ -148,15 +207,8 @@
/* Set default capture device */
drv->rec_dev_idx = i;
}
- if (drv->dev_idx < 0 && info.input_count &&
- info.output_count)
- {
- /* Set default capture and playback device */
- drv->dev_idx = i;
- }
- if (drv->play_dev_idx >= 0 && drv->rec_dev_idx >= 0 &&
- drv->dev_idx >= 0)
+ if (drv->play_dev_idx >= 0 && drv->rec_dev_idx >= 0)
{
/* Done. */
break;
@@ -183,13 +248,13 @@
pjmedia_aud_driver *drv = &aud_subsys.drv[drv_idx];
if (drv->f) {
+ drv->f->op->set_dev_change_cb(drv->f, NULL);
drv->f->op->destroy(drv->f);
drv->f = NULL;
}
pj_bzero(drv, sizeof(*drv));
- drv->play_dev_idx = drv->rec_dev_idx =
- drv->dev_idx = PJMEDIA_AUD_INVALID_DEV;
+ drv->play_dev_idx = drv->rec_dev_idx = PJMEDIA_AUD_INVALID_DEV;
}
/* API: Initialize the audio subsystem. */
@@ -374,11 +426,7 @@
for (i=0; i<aud_subsys.drv_cnt; ++i) {
pjmedia_aud_driver *drv = &aud_subsys.drv[i];
- if (drv->dev_idx >= 0) {
- id = drv->dev_idx;
- make_global_index(i, &id);
- break;
- } else if (id==PJMEDIA_AUD_DEFAULT_CAPTURE_DEV &&
+ if (id==PJMEDIA_AUD_DEFAULT_CAPTURE_DEV &&
drv->rec_dev_idx >= 0)
{
id = drv->rec_dev_idx;
@@ -390,7 +455,7 @@
id = drv->play_dev_idx;
make_global_index(i, &id);
break;
- }
+ }
}
if (id < 0) {
@@ -625,4 +590,24 @@
return strm->op->destroy(strm);
}
+/* API: Register device change observer. */
+PJ_DEF(pj_status_t) pjmedia_aud_dev_set_observer_cb(pjmedia_aud_dev_observer_callback cb)
+{
+ pj_status_t status;
+
+ status = pj_mutex_lock(aud_subsys.dev_observer.lock);
+ if (status != PJ_SUCCESS) {
+ PJ_LOG(5, (THIS_FILE, "Could not acquire audio device change lock"));
+ return status;
+ }
+
+ aud_subsys.dev_observer.cb = cb;
+
+ status = pj_mutex_unlock(aud_subsys.dev_observer.lock);
+ if (status != PJ_SUCCESS) {
+ PJ_LOG(5, (THIS_FILE, "Could not release audio device change lock"));
+ }
+
+ return status;
+}
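
For context, a minimal usage sketch of the observer API added above (not taken from the patch). The event constant names are the ones emitted by process_aud_dev_change_event(); the enum type name pjmedia_aud_dev_event and the header that declares pjmedia_aud_dev_set_observer_cb() are assumptions based on the rest of this patch and may differ.

#include <pjmedia-audiodev/audiodev.h>
#include <pjlib.h>

/* Hypothetical application handler; the parameter type name is assumed. */
static void on_aud_dev_event(pjmedia_aud_dev_event event)
{
    switch (event) {
    case PJMEDIA_AUD_DEV_DEFAULT_INPUT_CHANGED:
    case PJMEDIA_AUD_DEV_DEFAULT_OUTPUT_CHANGED:
        /* The observer has already called pjmedia_aud_dev_refresh();
         * restart any active sound device here if needed. */
        break;
    case PJMEDIA_AUD_DEV_LIST_WILL_REFRESH:
        /* Device indexes are about to become stale; stop open streams. */
        break;
    case PJMEDIA_AUD_DEV_LIST_DID_REFRESH:
        /* Re-enumerate with pjmedia_aud_dev_count() / pjmedia_aud_dev_get_info(). */
        break;
    default:
        break;
    }
}

/* Called once, after pjmedia_aud_subsys_init(pf). */
static pj_status_t register_aud_dev_observer(void)
{
    return pjmedia_aud_dev_set_observer_cb(&on_aud_dev_event);
}
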
--- pjsip/pjmedia/src/pjmedia/transport_zrtp.c 2021-02-05 12:38:15.913793992 +0100
+++ pjsip/pjmedia/src/pjmedia/transport_zrtp.c 2021-03-11 09:20:35.134754145 +0100
@@ -78,6 +78,8 @@
pjmedia_dir dir,
unsigned pct_lost);
static pj_status_t transport_destroy(pjmedia_transport *tp);
+static pj_status_t transport_attach2(pjmedia_transport *tp,
+ pjmedia_transport_attach_param *att_param);
/* The transport operations */
@@ -94,7 +96,8 @@
&transport_media_start,
&transport_media_stop,
&transport_simulate_lost,
- &transport_destroy
+ &transport_destroy,
+ &transport_attach2
};
/* The transport zrtp instance */
@@ -108,6 +111,7 @@
void (*stream_rtp_cb)(void *user_data,
void *pkt,
pj_ssize_t);
+ void (*stream_rtp_cb2)(pjmedia_tp_cb_param *param);
void (*stream_rtcp_cb)(void *user_data,
void *pkt,
pj_ssize_t);
@@ -794,7 +798,7 @@
int32_t newLen = 0;
pj_status_t rc = PJ_SUCCESS;
- pj_assert(zrtp && zrtp->stream_rtcp_cb && pkt);
+ pj_assert(zrtp && zrtp->stream_rtp_cb && pkt);
// check if this could be a real RTP/SRTP packet.
if ((*buffer & 0xf0) != 0x10)
@@ -876,6 +880,106 @@
}
}
+/* This is our RTP callback, that is called by the slave transport when it
+ * receives RTP packet.
+ */
+static void transport_rtp_cb2(pjmedia_tp_cb_param *param)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)param->user_data;
+ pj_uint8_t* buffer = (pj_uint8_t*)param->pkt;
+ int32_t newLen = 0;
+ pj_status_t rc = PJ_SUCCESS;
+ pjmedia_tp_cb_param cbparam;
+
+ pj_assert(zrtp && zrtp->stream_rtp_cb2 && param->pkt);
+
+ // check if this could be a real RTP/SRTP packet.
+ if ((*buffer & 0xf0) != 0x10)
+ {
+ // Could be real RTP, check if we are in secure mode
+ if (zrtp->srtpReceive == NULL || param->size < 0)
+ {
+ pj_memcpy(&cbparam, param, sizeof(cbparam));
+ cbparam.user_data = zrtp->stream_user_data;
+ zrtp->stream_rtp_cb2(&cbparam);
+ }
+ else
+ {
+ rc = zsrtp_unprotect(zrtp->srtpReceive, param->pkt, param->size, &newLen);
+ if (rc == 1)
+ {
+ zrtp->unprotect++;
+ param->size = newLen;
+ pj_memcpy(&cbparam, param, sizeof(cbparam));
+ cbparam.user_data = zrtp->stream_user_data;
+ zrtp->stream_rtp_cb2(&cbparam);
+ //zrtp->stream_rtp_cb(zrtp->stream_user_data, pkt,
+ // newLen);
+ zrtp->unprotect_err = 0;
+ }
+ else
+ {
+ if (zrtp->cb.show_message)
+ {
+ if (rc == -1)
+ zrtp->cb.show_message(&zrtp->base, zrtp_Warning, zrtp_WarningSRTPauthError);
+ else
+ zrtp->cb.show_message(&zrtp->base, zrtp_Warning, zrtp_WarningSRTPreplayError);
+ }
+ zrtp->unprotect_err = rc;
+ /* We failed to decrypt the packet, but forward it to the stream
+ * regardless; it might not have been encrypted after all */
+ pj_memcpy(&cbparam, param, sizeof(cbparam));
+ cbparam.user_data = zrtp->stream_user_data;
+ zrtp->stream_rtp_cb2(&cbparam);
+ }
+ }
+ if (!zrtp->started && zrtp->enableZrtp)
+ pjmedia_transport_zrtp_startZrtp((pjmedia_transport *)zrtp);
+
+ return;
+ }
+
+ // We assume all other packets are ZRTP packets here. Process
+ // if ZRTP processing is enabled. Because valid RTP packets are
+ // already handled we delete any packets here after processing.
+ if (zrtp->enableZrtp && zrtp->zrtpCtx != NULL)
+ {
+ // Get CRC value into crc (see above how to compute the offset)
+ pj_uint16_t temp = param->size - CRC_SIZE;
+ pj_uint32_t crc = *(uint32_t*)(buffer + temp);
+ crc = pj_ntohl(crc);
+
+ if (!zrtp_CheckCksum(buffer, temp, crc))
+ {
+ if (zrtp->cb.show_message)
+ zrtp->cb.show_message(&zrtp->base, zrtp_Warning, zrtp_WarningCRCmismatch);
+ return;
+ }
+
+ pj_uint32_t magic = *(pj_uint32_t*)(buffer + 4);
+ magic = pj_ntohl(magic);
+
+ // Check if it is really a ZRTP packet, return, no further processing
+ if (magic != ZRTP_MAGIC)
+ return;
+
+ // cover the case if the other party sends _only_ ZRTP packets at the
+ // beginning of a session. Start ZRTP in this case as well.
+ if (!zrtp->started)
+ {
+ pjmedia_transport_zrtp_startZrtp((pjmedia_transport *)zrtp);
+ }
+ // this now points beyond the undefined and length field.
+ // We need them, thus adjust
+ unsigned char* zrtpMsg = (buffer + 12);
+
+ // store peer's SSRC in host order, used when creating the CryptoContext
+ zrtp->peerSSRC = *(pj_uint32_t*)(buffer + 8);
+ zrtp->peerSSRC = pj_ntohl(zrtp->peerSSRC);
+ zrtp_processZrtpMessage(zrtp->zrtpCtx, zrtpMsg, zrtp->peerSSRC, param->size);
+ }
+}
/* This is our RTCP callback, that is called by the slave transport when it
* receives RTCP packet.
@@ -1256,6 +1359,45 @@
return PJ_SUCCESS;
}
+/*
+ * attach2() is called by stream to register callbacks that we should
+ * call on receipt of RTP and RTCP packets.
+ */
+static pj_status_t transport_attach2(pjmedia_transport *tp,
+ pjmedia_transport_attach_param *att_param)
+{
+ struct tp_zrtp *zrtp = (struct tp_zrtp*)tp;
+ pj_status_t status;
+
+ /* Save the stream information and callbacks to our structure, and
+ * register our own RTP/RTCP callbacks with the slave transport
+ * instead.
+ */
+ pj_assert(zrtp->stream_user_data == NULL);
+ zrtp->stream_user_data = att_param->user_data;
+ if (att_param->rtp_cb2) {
+ zrtp->stream_rtp_cb2 = att_param->rtp_cb2;
+ } else {
+ zrtp->stream_rtp_cb = att_param->rtp_cb;
+ }
+ zrtp->stream_rtcp_cb = att_param->rtcp_cb;
+
+ att_param->rtp_cb2 = &transport_rtp_cb2;
+ att_param->rtp_cb = NULL;
+ att_param->rtcp_cb = &transport_rtcp_cb;
+ att_param->user_data = zrtp;
+
+ status = pjmedia_transport_attach2(zrtp->slave_tp, att_param);
+ if (status != PJ_SUCCESS) {
+ zrtp->stream_user_data = NULL;
+ zrtp->stream_rtp_cb = NULL;
+ zrtp->stream_rtp_cb2 = NULL;
+ zrtp->stream_rtcp_cb = NULL;
+ return status;
+ }
+
+ return PJ_SUCCESS;
+}
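
For context, a sketch of the stream side of the attach2() chain above (not part of the patch): the caller fills a pjmedia_transport_attach_param with an rtp_cb2 handler and attaches to the ZRTP transport, which then substitutes transport_rtp_cb2() and hands decrypted packets back through the saved callback. my_stream_t, handle_rtp(), handle_rtcp() and the rem_addr/addr_len/media_type fields are assumptions here (the patch itself only shows user_data, rtp_cb, rtp_cb2 and rtcp_cb).

#include <pjmedia.h>
#include <pjlib.h>

typedef struct my_stream_t my_stream_t;                            /* hypothetical */
void handle_rtp(my_stream_t *strm, void *pkt, pj_ssize_t size);    /* hypothetical */
void handle_rtcp(my_stream_t *strm, void *pkt, pj_ssize_t size);   /* hypothetical */

/* RTP handler invoked by the ZRTP adapter through stream_rtp_cb2;
 * pkt/size carry the unprotected payload once the secure session is up. */
static void my_rtp_cb2(pjmedia_tp_cb_param *param)
{
    handle_rtp((my_stream_t*)param->user_data, param->pkt, param->size);
}

static void my_rtcp_cb(void *user_data, void *pkt, pj_ssize_t size)
{
    handle_rtcp((my_stream_t*)user_data, pkt, size);
}

static pj_status_t attach_to_zrtp(pjmedia_transport *zrtp_tp, my_stream_t *strm,
                                  const pj_sockaddr *rem_addr, unsigned addr_len)
{
    pjmedia_transport_attach_param ap;

    pj_bzero(&ap, sizeof(ap));
    ap.media_type = PJMEDIA_TYPE_AUDIO;     /* assumed field */
    pj_sockaddr_cp(&ap.rem_addr, rem_addr); /* assumed field */
    ap.addr_len   = addr_len;               /* assumed field */
    ap.user_data  = strm;
    ap.rtp_cb2    = &my_rtp_cb2;            /* preferred by transport_attach2() */
    ap.rtcp_cb    = &my_rtcp_cb;

    return pjmedia_transport_attach2(zrtp_tp, &ap);
}
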
--- pjsip/pjmedia/src/pjmedia-videodev/ffmpeg_dev.c 2021-03-19 17:02:04.749861806 +0100
+++ pjsip/pjmedia/src/pjmedia-videodev/ffmpeg_dev.c 2021-03-19 17:03:03.618376942 +0100
@@ -58,9 +58,9 @@
#define MAX_DEV_CNT 8
-#ifndef PJMEDIA_USE_OLD_FFMPEG
+//#ifndef PJMEDIA_USE_OLD_FFMPEG
# define av_close_input_stream(ctx) avformat_close_input(&ctx)
-#endif
+//#endif
typedef struct ffmpeg_dev_info
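
For clarity (not part of the patch): with the PJMEDIA_USE_OLD_FFMPEG guard commented out above, the compatibility macro is always defined, so the legacy av_close_input_stream() spelling used in ffmpeg_dev.c unconditionally resolves to the modern libavformat call. A minimal illustration against a current libavformat; close_capture() is a hypothetical helper:

#include <libavformat/avformat.h>

/* Same mapping as in the hunk above, now unconditional */
#define av_close_input_stream(ctx) avformat_close_input(&ctx)

static void close_capture(AVFormatContext *ctx)
{
    if (ctx)
        av_close_input_stream(ctx);  /* expands to avformat_close_input(&ctx) */
}
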
+--- pjsip/pjmedia/build/os-darwinos.mak 2021-03-22 07:33:02.058097009 +0100
++++ pjsip/pjmedia/build/os-darwinos.mak 2021-03-22 07:48:59.147298176 +0100
+@@ -139,4 +139,5 @@
+ export CFLAGS += -DPJMEDIA_SOUND_IMPLEMENTATION=PJMEDIA_SOUND_NULL_SOUND
+ endif
+
++export PJMEDIA_VIDEODEV_OBJS += avf_dev.o
+
+--- pjsip/pjmedia/build/os-auto.mak.in 2021-03-26 22:08:01.739146366 +0100
++++ pjsip/pjmedia/build/os-auto.mak.in 2021-03-26 22:07:16.701862482 +0100
+@@ -302,7 +302,8 @@
+ #
+ ifneq (,$(filter $(AC_PJMEDIA_VIDEO),darwin_os))
+ # Mac and iPhone OS specific, use obj-c
+-export PJMEDIA_VIDEODEV_OBJS += sdl_dev_m.o
++export PJMEDIA_VIDEODEV_OBJS += sdl_dev_m.o avf_dev.o
++export CFLAGS += -DPJMEDIA_VIDEO_DEV_HAS_AVF=1 -DPJMEDIA_VIDEO_HAS_VTOOLBOX=1 -DPJMEDIA_HAS_VID_TOOLBOX_CODEC=1 -DPJMEDIA_USE_OLD_FFMPEG=0
+ else
+ # Other platforms, compile .c
+ export PJMEDIA_VIDEODEV_OBJS += sdl_dev.o
