Mirror of https://github.com/asterisk/asterisk.git (synced 2025-08-22 05:36:57 +00:00)

Compare commits: 21.7.0-rc2 ... 16.0.0-rc1 (42 commits)
Commits (SHA1):

a4ffd074bb, edf2ce04dd, d3789cc420, 89b669a227, 0028db48cc, 6c5f8403ad,
24e4e45177, c384a4cdcd, 9f1041c4d0, c5761ee58e, d2054d1679, 8e65c39a9d,
8f1b1a3847, cfd61ba237, a81870110a, 4a01be5c80, e6f2bae0cc, f1156f0cfd,
7e99090c9d, 3012fe0cbc, 69b721e152, b32adca9b4, ad7944a52a, c7b53fe85f,
e22cbe7c17, 3509ada06f, 008d304be2, 5dbbc68311, 7638f895e8, 6956324f5c,
3027f4cf6f, e1ca973748, 2035092bfb, 2a13a4344e, 9742fb07c9, 2c51079d05,
3cdffa1342, 136d855f69, 0c1513d8a0, 61a974ed4e, 50a26b15a3, 958f76205b
@@ -1,5 +1,5 @@
 [gerrit]
-defaultbranch=master
+defaultbranch=16
 #
 # Intentional padding to ensure it is possible to point a commit
 # to an alternative gerrit server/repository without breaking
.lastclean (new file, 1 line)
@@ -0,0 +1 @@
+40
CHANGES (3 lines changed)
@@ -31,6 +31,9 @@ Build System
   MALLOC_DEBUG and vice versa. Third-party pre-compiled modules no longer
   need to have a special build with it enabled.
 
+* Asterisk now depends on libjansson >= 2.11. If this version is not
+  available on your distro you can use `./configure --with-jansson-bundled`.
+
 app_macro
 ------------------
 * The app_macro module is now deprecated and by default it is no longer
@@ -50,6 +50,9 @@ Build System:
   MALLOC_DEBUG and vice versa. Third-party pre-compiled modules no longer
   need to have a special build with it enabled.
 
+- Asterisk now depends on libjansson >= 2.11. If this version is not
+  available on your distro you can use `./configure --with-jansson-bundled`.
+
 chan_dahdi:
 - Timeouts for reading digits from analog phones are now configurable in
   chan_dahdi.conf: firstdigit_timeout, interdigit_timeout, matchdigit_timeout.
asterisk-16.0.0-rc1-summary.html (new file, 2867 lines; diff suppressed because one or more lines are too long)
asterisk-16.0.0-rc1-summary.txt (new file, 5984 lines; diff suppressed because it is too large)
@@ -14,7 +14,8 @@ END
 if ${GREP} "AST_DEVMODE" makeopts | ${GREP} -q "yes"
 then
 	echo "#define AST_DEVMODE 1"
-	BUILDOPTS="AST_DEVMODE"
+	# AST_DEVMODE is no longer an API/ABI affecting option so it no longer
+	# gets added to BUILDOPTS.
 fi
 
 TMP=`${GREP} -e "^MENUSELECT_CFLAGS" menuselect.makeopts | sed 's/MENUSELECT_CFLAGS\=//g' | sed 's/-D//g'`
@@ -3,7 +3,7 @@
 AWK=${AWK:-awk}
 GIT=${GIT:-git}
 GREP=${GREP:-grep}
-MAINLINE_BRANCH=master
+MAINLINE_BRANCH=16
 
 if [ -f ${1}/.version ]; then
 	cat ${1}/.version
configure (vendored, 147 lines changed)
@@ -1072,10 +1072,7 @@ PBX_URIPARSER
|
||||
URIPARSER_DIR
|
||||
URIPARSER_INCLUDE
|
||||
URIPARSER_LIB
|
||||
PBX_JANSSON
|
||||
JANSSON_DIR
|
||||
JANSSON_INCLUDE
|
||||
JANSSON_LIB
|
||||
PBX_JACK
|
||||
JACK_DIR
|
||||
JACK_INCLUDE
|
||||
@@ -1186,6 +1183,11 @@ PBX_PJPROJECT
|
||||
PJPROJECT_DIR
|
||||
PJPROJECT_BUNDLED
|
||||
PJPROJECT_CONFIGURE_OPTS
|
||||
JANSSON_INCLUDE
|
||||
JANSSON_LIB
|
||||
PBX_JANSSON
|
||||
JANSSON_BUNDLED
|
||||
JANSSON_CONFIGURE_OPTS
|
||||
AST_C_COMPILER_FAMILY
|
||||
AST_CLANG_BLOCKS
|
||||
AST_CLANG_BLOCKS_LIBS
|
||||
@@ -1352,6 +1354,7 @@ with_download_cache
|
||||
with_sounds_cache
|
||||
with_externals_cache
|
||||
enable_coverage
|
||||
with_jansson_bundled
|
||||
with_pjproject_bundled
|
||||
with_asound
|
||||
with_bfd
|
||||
@@ -1448,6 +1451,7 @@ CXX
|
||||
CXXFLAGS
|
||||
CCC
|
||||
CXXCPP
|
||||
JANSSON_CONFIGURE_OPTS
|
||||
PJPROJECT_CONFIGURE_OPTS
|
||||
PKG_CONFIG
|
||||
PKG_CONFIG_PATH
|
||||
@@ -2100,6 +2104,7 @@ Optional Packages:
|
||||
use cached sound tarfiles in PATH
|
||||
--with-externals-cache=PATH
|
||||
use cached external module tarfiles in PATH
|
||||
--with-jansson-bundled Use bundled jansson library
|
||||
--with-pjproject-bundled
|
||||
Use bundled pjproject libraries (default)
|
||||
--with-asound=PATH use Advanced Linux Sound Architecture files in PATH
|
||||
@@ -2195,6 +2200,8 @@ Some influential environment variables:
|
||||
CXX C++ compiler command
|
||||
CXXFLAGS C++ compiler flags
|
||||
CXXCPP C++ preprocessor
|
||||
JANSSON_CONFIGURE_OPTS
|
||||
Additional configure options to pass to bundled jansson
|
||||
PJPROJECT_CONFIGURE_OPTS
|
||||
Additional configure options to pass to bundled pjproject
|
||||
PKG_CONFIG path to pkg-config utility
|
||||
@@ -9145,6 +9152,17 @@ rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
|
||||
CFLAGS="$save_CFLAGS"
|
||||
|
||||
|
||||
JANSSON_BUNDLED=no
|
||||
|
||||
# Check whether --with-jansson-bundled was given.
|
||||
if test "${with_jansson_bundled+set}" = set; then :
|
||||
withval=$with_jansson_bundled; case "${withval}" in
|
||||
y|yes) JANSSON_BUNDLED=yes ;;
|
||||
*) JANSSON_BUNDLED=no ;;
|
||||
esac
|
||||
fi
|
||||
|
||||
|
||||
PJPROJECT_BUNDLED=yes
|
||||
|
||||
|
||||
@@ -9160,6 +9178,92 @@ fi
|
||||
|
||||
|
||||
|
||||
if test "$JANSSON_BUNDLED" = "yes" ; then
|
||||
|
||||
if test "${ac_mandatory_list#*JANSSON*}" != "$ac_mandatory_list" ; then
|
||||
as_fn_error $? "--with-jansson and --with-jansson-bundled can't both be specified" "$LINENO" 5
|
||||
fi
|
||||
|
||||
ac_mandatory_list="$ac_mandatory_list JANSSON"
|
||||
JANSSON_DIR="${ac_pwd}/third-party/jansson"
|
||||
|
||||
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for embedded jansson (may have to download)" >&5
|
||||
$as_echo_n "checking for embedded jansson (may have to download)... " >&6; }
|
||||
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: configuring" >&5
|
||||
$as_echo "configuring" >&6; }
|
||||
|
||||
if test "x${DOWNLOAD_TO_STDOUT}" = "x" ; then
|
||||
as_fn_error $? "A download utility (wget, curl, or fetch) is required to download bundled jansson" "$LINENO" 5
|
||||
fi
|
||||
if test "${BZIP2}" = ":" ; then
|
||||
as_fn_error $? "bzip2 is required to extract the jansson tar file" "$LINENO" 5
|
||||
fi
|
||||
if test "${TAR}" = ":" ; then
|
||||
as_fn_error $? "tar is required to extract the jansson tar file" "$LINENO" 5
|
||||
fi
|
||||
if test "${PATCH}" = ":" ; then
|
||||
as_fn_error $? "patch is required to configure bundled jansson" "$LINENO" 5
|
||||
fi
|
||||
if test "${SED}" = ":" ; then
|
||||
as_fn_error $? "sed is required to configure bundled jansson" "$LINENO" 5
|
||||
fi
|
||||
if test "${NM}" = ":" ; then
|
||||
as_fn_error $? "nm is required to build bundled jansson" "$LINENO" 5
|
||||
fi
|
||||
if test "${MD5}" = ":" ; then
|
||||
as_fn_error $? "md5sum is required to build bundled jansson" "$LINENO" 5
|
||||
fi
|
||||
if test "${CAT}" = ":" ; then
|
||||
as_fn_error $? "cat is required to build bundled jansson" "$LINENO" 5
|
||||
fi
|
||||
if test "${CUT}" = ":" ; then
|
||||
as_fn_error $? "cut is required to build bundled jansson" "$LINENO" 5
|
||||
fi
|
||||
if test "${GREP}" = ":" ; then
|
||||
as_fn_error $? "grep is required to build bundled jansson" "$LINENO" 5
|
||||
fi
|
||||
|
||||
|
||||
this_host=$(./config.sub $(./config.guess))
|
||||
if test "$build" != "$this_host" ; then
|
||||
JANSSON_CONFIGURE_OPTS+=" --build=$build"
|
||||
fi
|
||||
if test "$host" != "$this_host" ; then
|
||||
JANSSON_CONFIGURE_OPTS+=" --host=$host"
|
||||
fi
|
||||
|
||||
export TAR PATCH SED NM EXTERNALS_CACHE_DIR AST_DOWNLOAD_CACHE DOWNLOAD_TO_STDOUT DOWNLOAD_TIMEOUT DOWNLOAD MD5 CAT CUT GREP
|
||||
export NOISY_BUILD
|
||||
${GNU_MAKE} --quiet --no-print-directory -C ${JANSSON_DIR} \
|
||||
JANSSON_CONFIGURE_OPTS="$JANSSON_CONFIGURE_OPTS" \
|
||||
EXTERNALS_CACHE_DIR="${EXTERNALS_CACHE_DIR:-${AST_DOWNLOAD_CACHE}}" \
|
||||
configure
|
||||
if test $? -ne 0 ; then
|
||||
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5
|
||||
$as_echo "failed" >&6; }
|
||||
{ $as_echo "$as_me:${as_lineno-$LINENO}: Unable to configure ${JANSSON_DIR}" >&5
|
||||
$as_echo "$as_me: Unable to configure ${JANSSON_DIR}" >&6;}
|
||||
as_fn_error $? "Re-run the ./configure command with 'NOISY_BUILD=yes' appended to see error details." "$LINENO" 5
|
||||
fi
|
||||
|
||||
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for bundled jansson" >&5
|
||||
$as_echo_n "checking for bundled jansson... " >&6; }
|
||||
|
||||
JANSSON_INCLUDE=-I${JANSSON_DIR}/dest/include
|
||||
JANSSON_CFLAGS="$JANSSON_INCLUDE"
|
||||
JANSSON_LIB="-L${JANSSON_DIR}/dest/lib -ljansson"
|
||||
PBX_JANSSON=1
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
|
||||
$as_echo "yes" >&6; }
|
||||
|
||||
fi
|
||||
|
||||
|
||||
if test "$PJPROJECT_BUNDLED" = "yes" ; then
|
||||
|
||||
if test "${ac_mandatory_list#*PJPROJECT*}" != "$ac_mandatory_list" ; then
|
||||
@@ -13825,7 +13929,9 @@ fi
|
||||
|
||||
|
||||
|
||||
# Find required JSON support.
|
||||
# Find required JSON support if bundled is not enabled.
|
||||
if test "$JANSSON_BUNDLED" = "no" ; then
|
||||
# json_sprintf is available in 2.11+
|
||||
|
||||
if test "x${PBX_JANSSON}" != "x1" -a "${USE_JANSSON}" != "no"; then
|
||||
pbxlibdir=""
|
||||
@@ -13840,9 +13946,9 @@ if test "x${PBX_JANSSON}" != "x1" -a "${USE_JANSSON}" != "no"; then
|
||||
|
||||
ast_ext_lib_check_save_CFLAGS="${CFLAGS}"
|
||||
CFLAGS="${CFLAGS} "
|
||||
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for json_dumps in -ljansson" >&5
|
||||
$as_echo_n "checking for json_dumps in -ljansson... " >&6; }
|
||||
if ${ac_cv_lib_jansson_json_dumps+:} false; then :
|
||||
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for json_sprintf in -ljansson" >&5
|
||||
$as_echo_n "checking for json_sprintf in -ljansson... " >&6; }
|
||||
if ${ac_cv_lib_jansson_json_sprintf+:} false; then :
|
||||
$as_echo_n "(cached) " >&6
|
||||
else
|
||||
ac_check_lib_save_LIBS=$LIBS
|
||||
@@ -13856,27 +13962,27 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
|
||||
#ifdef __cplusplus
|
||||
extern "C"
|
||||
#endif
|
||||
char json_dumps ();
|
||||
char json_sprintf ();
|
||||
int
|
||||
main ()
|
||||
{
|
||||
return json_dumps ();
|
||||
return json_sprintf ();
|
||||
;
|
||||
return 0;
|
||||
}
|
||||
_ACEOF
|
||||
if ac_fn_c_try_link "$LINENO"; then :
|
||||
ac_cv_lib_jansson_json_dumps=yes
|
||||
ac_cv_lib_jansson_json_sprintf=yes
|
||||
else
|
||||
ac_cv_lib_jansson_json_dumps=no
|
||||
ac_cv_lib_jansson_json_sprintf=no
|
||||
fi
|
||||
rm -f core conftest.err conftest.$ac_objext \
|
||||
conftest$ac_exeext conftest.$ac_ext
|
||||
LIBS=$ac_check_lib_save_LIBS
|
||||
fi
|
||||
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_jansson_json_dumps" >&5
|
||||
$as_echo "$ac_cv_lib_jansson_json_dumps" >&6; }
|
||||
if test "x$ac_cv_lib_jansson_json_dumps" = xyes; then :
|
||||
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_jansson_json_sprintf" >&5
|
||||
$as_echo "$ac_cv_lib_jansson_json_sprintf" >&6; }
|
||||
if test "x$ac_cv_lib_jansson_json_sprintf" = xyes; then :
|
||||
AST_JANSSON_FOUND=yes
|
||||
else
|
||||
AST_JANSSON_FOUND=no
|
||||
@@ -13923,8 +14029,17 @@ fi
|
||||
|
||||
|
||||
|
||||
if test "${PBX_JANSSON}" != 1; then
|
||||
as_fn_error $? "*** JSON support not found (this typically means the libjansson development package is missing)" "$LINENO" 5
|
||||
if test "${PBX_JANSSON}" != 1; then
|
||||
{ $as_echo "$as_me:${as_lineno-$LINENO}: *** Asterisk requires libjansson >= 2.11 and no system copy was found." >&5
|
||||
$as_echo "$as_me: *** Asterisk requires libjansson >= 2.11 and no system copy was found." >&6;}
|
||||
{ $as_echo "$as_me:${as_lineno-$LINENO}: *** Please install the 'libjansson' development package or" >&5
|
||||
$as_echo "$as_me: *** Please install the 'libjansson' development package or" >&6;}
|
||||
{ $as_echo "$as_me:${as_lineno-$LINENO}: *** use './configure --with-jansson-bundled'" >&5
|
||||
$as_echo "$as_me: *** use './configure --with-jansson-bundled'" >&6;}
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
PBX_JANSSON=1
|
||||
fi
|
||||
|
||||
# See if clock_gettime is in librt
|
||||
|
configure.ac (25 lines changed)
@@ -423,6 +423,15 @@ AC_SUBST(AST_CODE_COVERAGE)
 AST_CHECK_RAII()
 AST_CHECK_STRSEP_ARRAY_BOUNDS()
 
+JANSSON_BUNDLED=no
+AC_ARG_WITH([jansson-bundled],
+	[AS_HELP_STRING([--with-jansson-bundled],
+		[Use bundled jansson library])],
+	[case "${withval}" in
+		y|yes) JANSSON_BUNDLED=yes ;;
+		*) JANSSON_BUNDLED=no ;;
+	esac])
+
 PJPROJECT_BUNDLED=yes
 AH_TEMPLATE(m4_bpatsubst([[HAVE_PJPROJECT_BUNDLED]], [(.*)]), [Define to 1 when using the bundled pjproject.])
 
@@ -652,11 +661,19 @@ fi
 AC_SUBST(UUID_INCLUDE)
 AC_SUBST(UUID_LIB)
 
-# Find required JSON support.
-AST_EXT_LIB_CHECK([JANSSON], [jansson], [json_dumps], [jansson.h])
+# Find required JSON support if bundled is not enabled.
+if test "$JANSSON_BUNDLED" = "no" ; then
+	# json_sprintf is available in 2.11+
+	AST_EXT_LIB_CHECK([JANSSON], [jansson], [json_sprintf], [jansson.h])
 
-if test "${PBX_JANSSON}" != 1; then
-	AC_MSG_ERROR([*** JSON support not found (this typically means the libjansson development package is missing)])
+	if test "${PBX_JANSSON}" != 1; then
+		AC_MSG_NOTICE(*** Asterisk requires libjansson >= 2.11 and no system copy was found.)
+		AC_MSG_NOTICE(*** Please install the 'libjansson' development package or)
+		AC_MSG_NOTICE(*** use './configure --with-jansson-bundled')
+		exit 1
+	fi
+else
+	PBX_JANSSON=1
 fi
 
 # See if clock_gettime is in librt
contrib/realtime/mssql/mssql_cdr.sql (new file, 58 lines)
@@ -0,0 +1,58 @@
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
CREATE TABLE alembic_version (
|
||||
version_num VARCHAR(32) NOT NULL
|
||||
);
|
||||
|
||||
GO
|
||||
|
||||
-- Running upgrade -> 210693f3123d
|
||||
|
||||
CREATE TABLE cdr (
|
||||
accountcode VARCHAR(20) NULL,
|
||||
src VARCHAR(80) NULL,
|
||||
dst VARCHAR(80) NULL,
|
||||
dcontext VARCHAR(80) NULL,
|
||||
clid VARCHAR(80) NULL,
|
||||
channel VARCHAR(80) NULL,
|
||||
dstchannel VARCHAR(80) NULL,
|
||||
lastapp VARCHAR(80) NULL,
|
||||
lastdata VARCHAR(80) NULL,
|
||||
start DATETIME NULL,
|
||||
answer DATETIME NULL,
|
||||
[end] DATETIME NULL,
|
||||
duration INTEGER NULL,
|
||||
billsec INTEGER NULL,
|
||||
disposition VARCHAR(45) NULL,
|
||||
amaflags VARCHAR(45) NULL,
|
||||
userfield VARCHAR(256) NULL,
|
||||
uniqueid VARCHAR(150) NULL,
|
||||
linkedid VARCHAR(150) NULL,
|
||||
peeraccount VARCHAR(20) NULL,
|
||||
sequence INTEGER NULL
|
||||
);
|
||||
|
||||
GO
|
||||
|
||||
INSERT INTO alembic_version (version_num) VALUES ('210693f3123d');
|
||||
|
||||
GO
|
||||
|
||||
-- Running upgrade 210693f3123d -> 54cde9847798
|
||||
|
||||
ALTER TABLE cdr ALTER COLUMN accountcode VARCHAR(80);
|
||||
|
||||
GO
|
||||
|
||||
ALTER TABLE cdr ALTER COLUMN peeraccount VARCHAR(80);
|
||||
|
||||
GO
|
||||
|
||||
UPDATE alembic_version SET version_num='54cde9847798' WHERE alembic_version.version_num = '210693f3123d';
|
||||
|
||||
GO
|
||||
|
||||
COMMIT;
|
||||
|
||||
GO
|
||||
|
contrib/realtime/mssql/mssql_config.sql (new file, 2027 lines; diff suppressed because it is too large)
contrib/realtime/mssql/mssql_voicemail.sql (new file, 54 lines)
@@ -0,0 +1,54 @@
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
CREATE TABLE alembic_version (
|
||||
version_num VARCHAR(32) NOT NULL
|
||||
);
|
||||
|
||||
GO
|
||||
|
||||
-- Running upgrade -> a2e9769475e
|
||||
|
||||
CREATE TABLE voicemail_messages (
|
||||
dir VARCHAR(255) NOT NULL,
|
||||
msgnum INTEGER NOT NULL,
|
||||
context VARCHAR(80) NULL,
|
||||
macrocontext VARCHAR(80) NULL,
|
||||
callerid VARCHAR(80) NULL,
|
||||
origtime INTEGER NULL,
|
||||
duration INTEGER NULL,
|
||||
recording IMAGE NULL,
|
||||
flag VARCHAR(30) NULL,
|
||||
category VARCHAR(30) NULL,
|
||||
mailboxuser VARCHAR(30) NULL,
|
||||
mailboxcontext VARCHAR(30) NULL,
|
||||
msg_id VARCHAR(40) NULL
|
||||
);
|
||||
|
||||
GO
|
||||
|
||||
ALTER TABLE voicemail_messages ADD CONSTRAINT voicemail_messages_dir_msgnum PRIMARY KEY (dir, msgnum);
|
||||
|
||||
GO
|
||||
|
||||
CREATE INDEX voicemail_messages_dir ON voicemail_messages (dir);
|
||||
|
||||
GO
|
||||
|
||||
INSERT INTO alembic_version (version_num) VALUES ('a2e9769475e');
|
||||
|
||||
GO
|
||||
|
||||
-- Running upgrade a2e9769475e -> 39428242f7f5
|
||||
|
||||
ALTER TABLE voicemail_messages ALTER COLUMN recording IMAGE;
|
||||
|
||||
GO
|
||||
|
||||
UPDATE alembic_version SET version_num='39428242f7f5' WHERE alembic_version.version_num = 'a2e9769475e';
|
||||
|
||||
GO
|
||||
|
||||
COMMIT;
|
||||
|
||||
GO
|
||||
|
contrib/realtime/mysql/mysql_cdr.sql (new file, 40 lines)
@@ -0,0 +1,40 @@
|
||||
CREATE TABLE alembic_version (
|
||||
version_num VARCHAR(32) NOT NULL
|
||||
);
|
||||
|
||||
-- Running upgrade -> 210693f3123d
|
||||
|
||||
CREATE TABLE cdr (
|
||||
accountcode VARCHAR(20),
|
||||
src VARCHAR(80),
|
||||
dst VARCHAR(80),
|
||||
dcontext VARCHAR(80),
|
||||
clid VARCHAR(80),
|
||||
channel VARCHAR(80),
|
||||
dstchannel VARCHAR(80),
|
||||
lastapp VARCHAR(80),
|
||||
lastdata VARCHAR(80),
|
||||
start DATETIME,
|
||||
answer DATETIME,
|
||||
end DATETIME,
|
||||
duration INTEGER,
|
||||
billsec INTEGER,
|
||||
disposition VARCHAR(45),
|
||||
amaflags VARCHAR(45),
|
||||
userfield VARCHAR(256),
|
||||
uniqueid VARCHAR(150),
|
||||
linkedid VARCHAR(150),
|
||||
peeraccount VARCHAR(20),
|
||||
sequence INTEGER
|
||||
);
|
||||
|
||||
INSERT INTO alembic_version (version_num) VALUES ('210693f3123d');
|
||||
|
||||
-- Running upgrade 210693f3123d -> 54cde9847798
|
||||
|
||||
ALTER TABLE cdr MODIFY accountcode VARCHAR(80) NULL;
|
||||
|
||||
ALTER TABLE cdr MODIFY peeraccount VARCHAR(80) NULL;
|
||||
|
||||
UPDATE alembic_version SET version_num='54cde9847798' WHERE alembic_version.version_num = '210693f3123d';
|
||||
|
contrib/realtime/mysql/mysql_config.sql (new file, 1178 lines; diff suppressed because it is too large)
contrib/realtime/mysql/mysql_voicemail.sql (new file, 34 lines)
@@ -0,0 +1,34 @@
|
||||
CREATE TABLE alembic_version (
|
||||
version_num VARCHAR(32) NOT NULL
|
||||
);
|
||||
|
||||
-- Running upgrade -> a2e9769475e
|
||||
|
||||
CREATE TABLE voicemail_messages (
|
||||
dir VARCHAR(255) NOT NULL,
|
||||
msgnum INTEGER NOT NULL,
|
||||
context VARCHAR(80),
|
||||
macrocontext VARCHAR(80),
|
||||
callerid VARCHAR(80),
|
||||
origtime INTEGER,
|
||||
duration INTEGER,
|
||||
recording BLOB,
|
||||
flag VARCHAR(30),
|
||||
category VARCHAR(30),
|
||||
mailboxuser VARCHAR(30),
|
||||
mailboxcontext VARCHAR(30),
|
||||
msg_id VARCHAR(40)
|
||||
);
|
||||
|
||||
ALTER TABLE voicemail_messages ADD CONSTRAINT voicemail_messages_dir_msgnum PRIMARY KEY (dir, msgnum);
|
||||
|
||||
CREATE INDEX voicemail_messages_dir ON voicemail_messages (dir);
|
||||
|
||||
INSERT INTO alembic_version (version_num) VALUES ('a2e9769475e');
|
||||
|
||||
-- Running upgrade a2e9769475e -> 39428242f7f5
|
||||
|
||||
ALTER TABLE voicemail_messages MODIFY recording BLOB(4294967295) NULL;
|
||||
|
||||
UPDATE alembic_version SET version_num='39428242f7f5' WHERE alembic_version.version_num = 'a2e9769475e';
|
||||
|
contrib/realtime/oracle/oracle_cdr.sql (new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
CREATE TABLE alembic_version (
|
||||
version_num VARCHAR2(32 CHAR) NOT NULL
|
||||
)
|
||||
|
||||
/
|
||||
|
||||
-- Running upgrade -> 210693f3123d
|
||||
|
||||
CREATE TABLE cdr (
|
||||
accountcode VARCHAR2(20 CHAR),
|
||||
src VARCHAR2(80 CHAR),
|
||||
dst VARCHAR2(80 CHAR),
|
||||
dcontext VARCHAR2(80 CHAR),
|
||||
clid VARCHAR2(80 CHAR),
|
||||
channel VARCHAR2(80 CHAR),
|
||||
dstchannel VARCHAR2(80 CHAR),
|
||||
lastapp VARCHAR2(80 CHAR),
|
||||
lastdata VARCHAR2(80 CHAR),
|
||||
"start" DATE,
|
||||
answer DATE,
|
||||
end DATE,
|
||||
duration INTEGER,
|
||||
billsec INTEGER,
|
||||
disposition VARCHAR2(45 CHAR),
|
||||
amaflags VARCHAR2(45 CHAR),
|
||||
userfield VARCHAR2(256 CHAR),
|
||||
uniqueid VARCHAR2(150 CHAR),
|
||||
linkedid VARCHAR2(150 CHAR),
|
||||
peeraccount VARCHAR2(20 CHAR),
|
||||
sequence INTEGER
|
||||
)
|
||||
|
||||
/
|
||||
|
||||
INSERT INTO alembic_version (version_num) VALUES ('210693f3123d')
|
||||
|
||||
/
|
||||
|
||||
-- Running upgrade 210693f3123d -> 54cde9847798
|
||||
|
||||
ALTER TABLE cdr MODIFY accountcode VARCHAR2(80 CHAR)
|
||||
|
||||
/
|
||||
|
||||
ALTER TABLE cdr MODIFY peeraccount VARCHAR2(80 CHAR)
|
||||
|
||||
/
|
||||
|
||||
UPDATE alembic_version SET version_num='54cde9847798' WHERE alembic_version.version_num = '210693f3123d'
|
||||
|
||||
/
|
||||
|
contrib/realtime/oracle/oracle_config.sql (new file, 1993 lines; diff suppressed because it is too large)
contrib/realtime/oracle/oracle_voicemail.sql (new file, 48 lines)
@@ -0,0 +1,48 @@
|
||||
CREATE TABLE alembic_version (
|
||||
version_num VARCHAR2(32 CHAR) NOT NULL
|
||||
)
|
||||
|
||||
/
|
||||
|
||||
-- Running upgrade -> a2e9769475e
|
||||
|
||||
CREATE TABLE voicemail_messages (
|
||||
dir VARCHAR2(255 CHAR) NOT NULL,
|
||||
msgnum INTEGER NOT NULL,
|
||||
context VARCHAR2(80 CHAR),
|
||||
macrocontext VARCHAR2(80 CHAR),
|
||||
callerid VARCHAR2(80 CHAR),
|
||||
origtime INTEGER,
|
||||
duration INTEGER,
|
||||
recording BLOB,
|
||||
flag VARCHAR2(30 CHAR),
|
||||
category VARCHAR2(30 CHAR),
|
||||
mailboxuser VARCHAR2(30 CHAR),
|
||||
mailboxcontext VARCHAR2(30 CHAR),
|
||||
msg_id VARCHAR2(40 CHAR)
|
||||
)
|
||||
|
||||
/
|
||||
|
||||
ALTER TABLE voicemail_messages ADD CONSTRAINT voicemail_messages_dir_msgnum PRIMARY KEY (dir, msgnum)
|
||||
|
||||
/
|
||||
|
||||
CREATE INDEX voicemail_messages_dir ON voicemail_messages (dir)
|
||||
|
||||
/
|
||||
|
||||
INSERT INTO alembic_version (version_num) VALUES ('a2e9769475e')
|
||||
|
||||
/
|
||||
|
||||
-- Running upgrade a2e9769475e -> 39428242f7f5
|
||||
|
||||
ALTER TABLE voicemail_messages MODIFY recording BLOB
|
||||
|
||||
/
|
||||
|
||||
UPDATE alembic_version SET version_num='39428242f7f5' WHERE alembic_version.version_num = 'a2e9769475e'
|
||||
|
||||
/
|
||||
|
contrib/realtime/postgresql/postgresql_cdr.sql (new file, 44 lines)
@@ -0,0 +1,44 @@
|
||||
BEGIN;
|
||||
|
||||
CREATE TABLE alembic_version (
|
||||
version_num VARCHAR(32) NOT NULL
|
||||
);
|
||||
|
||||
-- Running upgrade -> 210693f3123d
|
||||
|
||||
CREATE TABLE cdr (
|
||||
accountcode VARCHAR(20),
|
||||
src VARCHAR(80),
|
||||
dst VARCHAR(80),
|
||||
dcontext VARCHAR(80),
|
||||
clid VARCHAR(80),
|
||||
channel VARCHAR(80),
|
||||
dstchannel VARCHAR(80),
|
||||
lastapp VARCHAR(80),
|
||||
lastdata VARCHAR(80),
|
||||
start TIMESTAMP WITHOUT TIME ZONE,
|
||||
answer TIMESTAMP WITHOUT TIME ZONE,
|
||||
"end" TIMESTAMP WITHOUT TIME ZONE,
|
||||
duration INTEGER,
|
||||
billsec INTEGER,
|
||||
disposition VARCHAR(45),
|
||||
amaflags VARCHAR(45),
|
||||
userfield VARCHAR(256),
|
||||
uniqueid VARCHAR(150),
|
||||
linkedid VARCHAR(150),
|
||||
peeraccount VARCHAR(20),
|
||||
sequence INTEGER
|
||||
);
|
||||
|
||||
INSERT INTO alembic_version (version_num) VALUES ('210693f3123d');
|
||||
|
||||
-- Running upgrade 210693f3123d -> 54cde9847798
|
||||
|
||||
ALTER TABLE cdr ALTER COLUMN accountcode TYPE VARCHAR(80);
|
||||
|
||||
ALTER TABLE cdr ALTER COLUMN peeraccount TYPE VARCHAR(80);
|
||||
|
||||
UPDATE alembic_version SET version_num='54cde9847798' WHERE alembic_version.version_num = '210693f3123d';
|
||||
|
||||
COMMIT;
|
||||
|
contrib/realtime/postgresql/postgresql_config.sql (new file, 1270 lines; diff suppressed because it is too large)
contrib/realtime/postgresql/postgresql_voicemail.sql (new file, 38 lines)
@@ -0,0 +1,38 @@
|
||||
BEGIN;
|
||||
|
||||
CREATE TABLE alembic_version (
|
||||
version_num VARCHAR(32) NOT NULL
|
||||
);
|
||||
|
||||
-- Running upgrade -> a2e9769475e
|
||||
|
||||
CREATE TABLE voicemail_messages (
|
||||
dir VARCHAR(255) NOT NULL,
|
||||
msgnum INTEGER NOT NULL,
|
||||
context VARCHAR(80),
|
||||
macrocontext VARCHAR(80),
|
||||
callerid VARCHAR(80),
|
||||
origtime INTEGER,
|
||||
duration INTEGER,
|
||||
recording BYTEA,
|
||||
flag VARCHAR(30),
|
||||
category VARCHAR(30),
|
||||
mailboxuser VARCHAR(30),
|
||||
mailboxcontext VARCHAR(30),
|
||||
msg_id VARCHAR(40)
|
||||
);
|
||||
|
||||
ALTER TABLE voicemail_messages ADD CONSTRAINT voicemail_messages_dir_msgnum PRIMARY KEY (dir, msgnum);
|
||||
|
||||
CREATE INDEX voicemail_messages_dir ON voicemail_messages (dir);
|
||||
|
||||
INSERT INTO alembic_version (version_num) VALUES ('a2e9769475e');
|
||||
|
||||
-- Running upgrade a2e9769475e -> 39428242f7f5
|
||||
|
||||
ALTER TABLE voicemail_messages ALTER COLUMN recording TYPE BYTEA;
|
||||
|
||||
UPDATE alembic_version SET version_num='39428242f7f5' WHERE alembic_version.version_num = 'a2e9769475e';
|
||||
|
||||
COMMIT;
|
||||
|
@@ -27,7 +27,7 @@
  * \since 12.0.0
  *
  * This is a very thin wrapper around the Jansson API. For more details on it,
- * see its docs at http://www.digip.org/jansson/doc/2.4/apiref.html.
+ * see its docs at http://www.digip.org/jansson/doc/2.11/apiref.html.
  *
  * Rather than provide the multiple ways of doing things that the Jansson API
  * does, the Asterisk wrapper is always reference-stealing, and always NULL
@@ -43,35 +43,6 @@
  * wrap them with json_ref() when passing them to other \c ast_json_*()
  * functions.
  *
- * \par Thread Safety
- *
- * Jansson (as of 2.4) provides fairly weak thread safety guarantees. The
- * Asterisk wrapper improves upon that slightly. The remaining refcounting
- * problems are issues when slicing/sharing/mixing instances between JSON
- * objects and arrays, which we avoid.
- *
- * The \c ast_json_dump_* functions are thread safe for multiple concurrent
- * dumps of the same object, so long as the concurrent dumps start from the same
- * \c root object. But if an object is shared by other JSON objects/arrays, then
- * concurrent dumps of the outer objects/arrays are not thread safe. This can be
- * avoided by using ast_json_deep_copy() when sharing JSON instances between
- * objects.
- *
- * The ast_json_ref() and ast_json_unref() functions are thread safe. Since the
- * Asterisk wrapper exclusively uses the reference stealing API, Jansson won't
- * be performing many refcount modifications behind our backs. There are a few
- * exceptions.
- *
- * The first is the transitive json_decref() that occurs when \ref
- * AST_JSON_OBJECT and \ref AST_JSON_ARRAY instances are deleted. This can be
- * avoided by using ast_json_deep_copy() when sharing JSON instances between
- * objects.
- *
- * The second is when using the reference borrowing specifier in
- * ast_json_pack() (capital \c O). This can be avoided by using the reference
- * stealing specifier (lowercase \c o) and wrapping the JSON object parameter
- * with ast_json_ref() for an explicit ref-bump.
- *
  * \par Example code
  *
  * \code
@@ -907,7 +878,7 @@ struct ast_json *ast_json_load_new_file(const char *path, struct ast_json_error
  * \brief Helper for creating complex JSON values.
  * \since 12.0.0
  *
- * See original Jansson docs at http://www.digip.org/jansson/doc/2.4/apiref.html#apiref-pack
+ * See original Jansson docs at http://www.digip.org/jansson/doc/2.11/apiref.html#apiref-pack
  * for more details.
  */
 struct ast_json *ast_json_pack(char const *format, ...);
@@ -916,7 +887,7 @@ struct ast_json *ast_json_pack(char const *format, ...);
  * \brief Helper for creating complex JSON values simply.
  * \since 12.0.0
  *
- * See original Jansson docs at http://www.digip.org/jansson/doc/2.4/apiref.html#apiref-pack
+ * See original Jansson docs at http://www.digip.org/jansson/doc/2.11/apiref.html#apiref-pack
  * for more details.
  */
 struct ast_json *ast_json_vpack(char const *format, va_list ap);
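For readers tracing this API, here is a minimal illustrative sketch (not part of the diff) of the reference-stealing convention the ast_json wrapper documents above; it assumes the existing ast_json_string_create() helper.

/* Illustrative sketch only: lowercase 'o' steals the reference to 'extra'. */
#include "asterisk/json.h"

static struct ast_json *build_example_event(void)
{
	struct ast_json *extra = ast_json_string_create("detail");

	/* To keep using 'extra' afterwards, pass ast_json_ref(extra) instead,
	 * or use the borrowing 'O' specifier that the header cautions about. */
	return ast_json_pack("{s: s, s: o}", "event", "example", "extra", extra);
}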
@@ -284,12 +284,7 @@ const void *ast_sched_find_data(struct ast_sched_context *con, int id);
  *
  * \return Returns 0 on success, -1 on failure
  */
-#ifndef AST_DEVMODE
 int ast_sched_del(struct ast_sched_context *con, int id) attribute_warn_unused_result;
-#else
-int _ast_sched_del(struct ast_sched_context *con, int id, const char *file, int line, const char *function) attribute_warn_unused_result;
-#define ast_sched_del(a, b) _ast_sched_del(a, b, __FILE__, __LINE__, __PRETTY_FUNCTION__)
-#endif
 
 /*!
  * \brief Determines number of seconds until the next outstanding event to take place
|
||||
*/
|
||||
struct ast_xml_node *ast_xml_add_child(struct ast_xml_node *parent, struct ast_xml_node *child);
|
||||
|
||||
/*!
|
||||
* \brief Add a list of child nodes, to a specified parent node.
|
||||
* \param parent Where to add the child node.
|
||||
* \param child The child list to add.
|
||||
* \retval NULL on error.
|
||||
* \retval non-NULL The added child list on success.
|
||||
*/
|
||||
struct ast_xml_node *ast_xml_add_child_list(struct ast_xml_node *parent, struct ast_xml_node *child);
|
||||
|
||||
/*!
|
||||
* \brief Create a copy of a n ode list.
|
||||
* \param list The list to copy.
|
||||
* \retval NULL on error.
|
||||
* \retval non-NULL The copied list.
|
||||
*/
|
||||
struct ast_xml_node *ast_xml_copy_node_list(struct ast_xml_node *list);
|
||||
|
||||
/*!
|
||||
* \brief Close an already open document and free the used
|
||||
* structure.
|
||||
|
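A hedged usage sketch (not part of the diff) of the two helpers added above, mirroring how handle_dump_docs() in xmldoc.c uses them later in this diff; the function name and parameters are hypothetical.

/* Illustrative sketch only: graft a copy of one document's children onto
 * another node (error cleanup of 'copy' is omitted for brevity). */
static int graft_children(struct ast_xml_doc *doc, struct ast_xml_node *parent)
{
	struct ast_xml_node *kids = ast_xml_node_get_children(ast_xml_get_root(doc));
	struct ast_xml_node *copy;

	if (!kids) {
		return 0;
	}

	copy = ast_xml_copy_node_list(kids);
	if (!copy) {
		return -1;
	}

	return ast_xml_add_child_list(parent, copy) ? 0 : -1;
}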
@@ -3285,7 +3285,7 @@ static int show_version(void)
 
 static int show_cli_help(void)
 {
-	printf("Asterisk %s, Copyright (C) 1999 - 2016, Digium, Inc. and others.\n", ast_get_version());
+	printf("Asterisk %s, Copyright (C) 1999 - 2018, Digium, Inc. and others.\n", ast_get_version());
 	printf("Usage: asterisk [OPTIONS]\n");
 	printf("Valid Options:\n");
 	printf(" -V Display version number and exit\n");
@@ -4628,4 +4628,5 @@ AST_MODULE_INFO(ASTERISK_GPL_KEY, AST_MODFLAG_GLOBAL_SYMBOLS | AST_MODFLAG_LOAD_
 	.unload = unload_module,
 	.reload = reload_module,
 	.load_pri = AST_MODPRI_CORE,
+	.requires = "extconfig",
 );
@@ -1753,4 +1753,5 @@ AST_MODULE_INFO(ASTERISK_GPL_KEY, AST_MODFLAG_GLOBAL_SYMBOLS | AST_MODFLAG_LOAD_
 	.unload = unload_module,
 	.reload = reload_module,
 	.load_pri = AST_MODPRI_CORE,
+	.requires = "extconfig",
 );
@@ -523,4 +523,5 @@ AST_MODULE_INFO(ASTERISK_GPL_KEY, AST_MODFLAG_GLOBAL_SYMBOLS | AST_MODFLAG_LOAD_
 	.unload = unload_module,
 	.reload = reload_module,
 	.load_pri = AST_MODPRI_CORE,
+	.requires = "extconfig",
 );
@@ -2425,4 +2425,5 @@ AST_MODULE_INFO(ASTERISK_GPL_KEY, AST_MODFLAG_GLOBAL_SYMBOLS | AST_MODFLAG_LOAD_
 	.unload = unload_module,
 	.reload = reload_module,
 	.load_pri = AST_MODPRI_CORE,
+	.requires = "extconfig",
 );
@@ -1023,4 +1023,5 @@ AST_MODULE_INFO(ASTERISK_GPL_KEY, AST_MODFLAG_GLOBAL_SYMBOLS | AST_MODFLAG_LOAD_
 	.unload = unload_module,
 	.reload = reload_module,
 	.load_pri = AST_MODPRI_CORE,
+	.requires = "extconfig",
 );
@@ -1174,4 +1174,5 @@ AST_MODULE_INFO(ASTERISK_GPL_KEY, AST_MODFLAG_GLOBAL_SYMBOLS | AST_MODFLAG_LOAD_
 	.unload = unload_module,
 	.reload = reload_features_config,
 	.load_pri = AST_MODPRI_CORE,
+	.requires = "extconfig",
 );
@@ -2314,4 +2314,5 @@ AST_MODULE_INFO(ASTERISK_GPL_KEY, AST_MODFLAG_GLOBAL_SYMBOLS | AST_MODFLAG_LOAD_
 	.unload = unload_module,
 	.reload = reload_module,
 	.load_pri = AST_MODPRI_CORE,
+	.requires = "extconfig",
 );
@@ -1158,4 +1158,5 @@ AST_MODULE_INFO(ASTERISK_GPL_KEY, AST_MODFLAG_GLOBAL_SYMBOLS | AST_MODFLAG_LOAD_
 	.unload = unload_module,
 	.reload = reload_module,
 	.load_pri = AST_MODPRI_CORE,
+	.requires = "extconfig",
 );
main/json.c (264 lines changed)
@@ -44,7 +44,6 @@
|
||||
#include <jansson.h>
|
||||
#include <time.h>
|
||||
|
||||
#if defined(JANSSON_THREAD_SAFE_REFCOUNT)
|
||||
void *ast_json_malloc(size_t size)
|
||||
{
|
||||
return ast_malloc(size);
|
||||
@@ -55,155 +54,6 @@ void ast_json_free(void *p)
|
||||
ast_free(p);
|
||||
}
|
||||
|
||||
/* No need to lock since jansson is thread safe. */
|
||||
#define SCOPED_JSON_LOCK(json)
|
||||
|
||||
#else
|
||||
/*! \brief Magic number, for safety checks. */
|
||||
#define JSON_MAGIC 0x1541992
|
||||
|
||||
/*! \brief Internal structure for allocated memory blocks */
|
||||
struct json_mem {
|
||||
/*! Magic number, for safety checks */
|
||||
uint32_t magic;
|
||||
/*! Mutext for locking this memory block */
|
||||
ast_mutex_t mutex;
|
||||
/*! Linked list pointer for the free list */
|
||||
AST_LIST_ENTRY(json_mem) list;
|
||||
/*! Data section of the allocation; void pointer for proper alignment */
|
||||
void *data[];
|
||||
};
|
||||
|
||||
/*! \brief Free a \ref json_mem block. */
|
||||
static void json_mem_free(struct json_mem *mem)
|
||||
{
|
||||
mem->magic = 0;
|
||||
ast_mutex_destroy(&mem->mutex);
|
||||
ast_free(mem);
|
||||
}
|
||||
|
||||
/*!
|
||||
* \brief Get the \ref json_mem block for a pointer allocated via
|
||||
* ast_json_malloc().
|
||||
*
|
||||
* This function properly handles Jansson singletons (null, true, false), and
|
||||
* \c NULL.
|
||||
*
|
||||
* \param p Pointer, usually to a \c json_t or \ref ast_json.
|
||||
* \return \ref json_mem object with extra allocation info.
|
||||
*/
|
||||
static inline struct json_mem *to_json_mem(void *p)
|
||||
{
|
||||
struct json_mem *mem;
|
||||
/* Avoid ref'ing the singleton values */
|
||||
if (p == NULL || p == json_null() || p == json_true() ||
|
||||
p == json_false()) {
|
||||
return NULL;
|
||||
}
|
||||
mem = (struct json_mem *)((char *) (p) - sizeof(*mem));
|
||||
ast_assert(mem->magic == JSON_MAGIC);
|
||||
return mem;
|
||||
}
|
||||
|
||||
/*!
|
||||
* \brief Lock an \ref ast_json instance.
|
||||
*
|
||||
* If \a json is an immutable singleton (null, true, false), this function
|
||||
* safely ignores it and returns \c NULL. Otherwise, \a json must have been
|
||||
* allocates using ast_json_malloc().
|
||||
*
|
||||
* \param json JSON instance to lock.
|
||||
* \return \ref Corresponding \ref json_mem block.
|
||||
* \return \c NULL if \a json was not allocated.
|
||||
*/
|
||||
static struct json_mem *json_mem_lock(struct ast_json *json)
|
||||
{
|
||||
struct json_mem *mem = to_json_mem(json);
|
||||
if (!mem) {
|
||||
return NULL;
|
||||
}
|
||||
ast_mutex_lock(&mem->mutex);
|
||||
return mem;
|
||||
}
|
||||
|
||||
/*!
|
||||
* \brief Unlock a \ref json_mem instance.
|
||||
*
|
||||
* \param mem \ref json_mem, usually returned from json_mem_lock().
|
||||
*/
|
||||
static void json_mem_unlock(struct json_mem *mem)
|
||||
{
|
||||
if (!mem) {
|
||||
return;
|
||||
}
|
||||
ast_mutex_unlock(&mem->mutex);
|
||||
}
|
||||
|
||||
/*!
|
||||
* \brief Scoped lock for a \ref ast_json instance.
|
||||
*
|
||||
* \param json JSON instance to lock.
|
||||
*/
|
||||
#define SCOPED_JSON_LOCK(json) \
|
||||
RAII_VAR(struct json_mem *, __mem_ ## __LINE__, \
|
||||
json_mem_lock(json), json_mem_unlock)
|
||||
|
||||
void *ast_json_malloc(size_t size)
|
||||
{
|
||||
struct json_mem *mem = ast_malloc(size + sizeof(*mem));
|
||||
if (!mem) {
|
||||
return NULL;
|
||||
}
|
||||
mem->magic = JSON_MAGIC;
|
||||
ast_mutex_init(&mem->mutex);
|
||||
return mem->data;
|
||||
}
|
||||
|
||||
AST_THREADSTORAGE(json_free_list_ts);
|
||||
|
||||
/*!
|
||||
* \brief Struct for a linked list of \ref json_mem.
|
||||
*/
|
||||
AST_LIST_HEAD_NOLOCK(json_mem_list, json_mem);
|
||||
|
||||
/*!
|
||||
* \brief Thread local list of \ref json_mem blocks to free at the end of an
|
||||
* unref.
|
||||
*/
|
||||
static struct json_mem_list *json_free_list(void)
|
||||
{
|
||||
return ast_threadstorage_get(&json_free_list_ts,
|
||||
sizeof(struct json_mem_list));
|
||||
}
|
||||
|
||||
void ast_json_free(void *p)
|
||||
{
|
||||
struct json_mem *mem;
|
||||
struct json_mem_list *free_list;
|
||||
mem = to_json_mem(p);
|
||||
|
||||
if (!mem) {
|
||||
return;
|
||||
}
|
||||
|
||||
/* Since the unref is holding a lock in mem, we can't free it
|
||||
* immediately. Store it off on a thread local list to be freed by
|
||||
* ast_json_unref().
|
||||
*/
|
||||
free_list = json_free_list();
|
||||
if (!free_list) {
|
||||
ast_log(LOG_ERROR, "Error allocating free list\n");
|
||||
ast_assert(0);
|
||||
/* It's not ideal to free the memory immediately, but that's the
|
||||
* best we can do if the threadlocal allocation fails */
|
||||
json_mem_free(mem);
|
||||
return;
|
||||
}
|
||||
|
||||
AST_LIST_INSERT_HEAD(free_list, mem, list);
|
||||
}
|
||||
#endif
|
||||
|
||||
void ast_json_set_alloc_funcs(void *(*malloc_fn)(size_t), void (*free_fn)(void*))
|
||||
{
|
||||
json_set_alloc_funcs(malloc_fn, free_fn);
|
||||
@@ -216,42 +66,13 @@ void ast_json_reset_alloc_funcs(void)
|
||||
|
||||
struct ast_json *ast_json_ref(struct ast_json *json)
|
||||
{
|
||||
/* If Jansson refcounting is non-atomic; lock it. */
|
||||
SCOPED_JSON_LOCK(json);
|
||||
json_incref((json_t *)json);
|
||||
return json;
|
||||
}
|
||||
|
||||
void ast_json_unref(struct ast_json *json)
|
||||
{
|
||||
#if defined(JANSSON_THREAD_SAFE_REFCOUNT)
|
||||
json_decref((json_t *) json);
|
||||
#else
|
||||
struct json_mem_list *free_list;
|
||||
struct json_mem *mem;
|
||||
|
||||
if (!json) {
|
||||
return;
|
||||
}
|
||||
|
||||
/* Jansson refcounting is non-atomic; lock it. */
|
||||
{
|
||||
SCOPED_JSON_LOCK(json);
|
||||
|
||||
json_decref((json_t *) json);
|
||||
}
|
||||
|
||||
/* Now free any objects that were ast_json_free()'s while the lock was
|
||||
* held */
|
||||
free_list = json_free_list();
|
||||
if (!free_list) {
|
||||
return;
|
||||
}
|
||||
|
||||
while ((mem = AST_LIST_REMOVE_HEAD(free_list, list))) {
|
||||
json_mem_free(mem);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
enum ast_json_type ast_json_typeof(const struct ast_json *json)
|
||||
@@ -421,11 +242,7 @@ struct ast_json *ast_json_false(void)
|
||||
|
||||
struct ast_json *ast_json_boolean(int value)
|
||||
{
|
||||
#if JANSSON_VERSION_HEX >= 0x020400
|
||||
return (struct ast_json *)json_boolean(value);
|
||||
#else
|
||||
return value ? ast_json_true() : ast_json_false();
|
||||
#endif
|
||||
}
|
||||
|
||||
struct ast_json *ast_json_null(void)
|
||||
@@ -593,57 +410,11 @@ int ast_json_object_update(struct ast_json *object, struct ast_json *other)
|
||||
}
|
||||
int ast_json_object_update_existing(struct ast_json *object, struct ast_json *other)
|
||||
{
|
||||
#if JANSSON_VERSION_HEX >= 0x020300
|
||||
return json_object_update_existing((json_t *)object, (json_t *)other);
|
||||
#else
|
||||
struct ast_json_iter *iter = ast_json_object_iter(other);
|
||||
int ret = 0;
|
||||
|
||||
if (object == NULL || other == NULL) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
while (iter != NULL && ret == 0) {
|
||||
const char *key = ast_json_object_iter_key(iter);
|
||||
|
||||
if (ast_json_object_get(object, key) != NULL) {
|
||||
struct ast_json *value = ast_json_object_iter_value(iter);
|
||||
|
||||
if (!value || ast_json_object_set(object, key, ast_json_ref(value))) {
|
||||
ret = -1;
|
||||
}
|
||||
}
|
||||
iter = ast_json_object_iter_next(other, iter);
|
||||
}
|
||||
return ret;
|
||||
#endif
|
||||
}
|
||||
int ast_json_object_update_missing(struct ast_json *object, struct ast_json *other)
|
||||
{
|
||||
#if JANSSON_VERSION_HEX >= 0x020300
|
||||
return json_object_update_missing((json_t *)object, (json_t *)other);
|
||||
#else
|
||||
struct ast_json_iter *iter = ast_json_object_iter(other);
|
||||
int ret = 0;
|
||||
|
||||
if (object == NULL || other == NULL) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
while (iter != NULL && ret == 0) {
|
||||
const char *key = ast_json_object_iter_key(iter);
|
||||
|
||||
if (ast_json_object_get(object, key) == NULL) {
|
||||
struct ast_json *value = ast_json_object_iter_value(iter);
|
||||
|
||||
if (!value || ast_json_object_set(object, key, ast_json_ref(value))) {
|
||||
ret = -1;
|
||||
}
|
||||
}
|
||||
iter = ast_json_object_iter_next(other, iter);
|
||||
}
|
||||
return ret;
|
||||
#endif
|
||||
}
|
||||
|
||||
struct ast_json_iter *ast_json_object_iter(struct ast_json *object)
|
||||
@@ -682,14 +453,6 @@ static size_t dump_flags(enum ast_json_encoding_format format)
|
||||
|
||||
char *ast_json_dump_string_format(struct ast_json *root, enum ast_json_encoding_format format)
|
||||
{
|
||||
/* Jansson's json_dump*, even though it's a read operation, isn't
|
||||
* thread safe for concurrent reads. Locking is necessary.
|
||||
* See http://www.digip.org/jansson/doc/2.4/portability.html#thread-safety.
|
||||
*
|
||||
* This comment does not apply when JANSSON_THREAD_SAFE_REFCOUNT is defined,
|
||||
* in that case SCOPED_JSON_LOCK is a no-op.
|
||||
*/
|
||||
SCOPED_JSON_LOCK(root);
|
||||
return json_dumps((json_t *)root, dump_flags(format));
|
||||
}
|
||||
|
||||
@@ -726,28 +489,12 @@ static int write_to_ast_str(const char *buffer, size_t size, void *data)
|
||||
|
||||
int ast_json_dump_str_format(struct ast_json *root, struct ast_str **dst, enum ast_json_encoding_format format)
|
||||
{
|
||||
/* Jansson's json_dump*, even though it's a read operation, isn't
|
||||
* thread safe for concurrent reads. Locking is necessary.
|
||||
* See http://www.digip.org/jansson/doc/2.4/portability.html#thread-safety.
|
||||
*
|
||||
* This comment does not apply when JANSSON_THREAD_SAFE_REFCOUNT is defined,
|
||||
* in that case SCOPED_JSON_LOCK is a no-op.
|
||||
*/
|
||||
SCOPED_JSON_LOCK(root);
|
||||
return json_dump_callback((json_t *)root, write_to_ast_str, dst, dump_flags(format));
|
||||
}
|
||||
|
||||
|
||||
int ast_json_dump_file_format(struct ast_json *root, FILE *output, enum ast_json_encoding_format format)
|
||||
{
|
||||
/* Jansson's json_dump*, even though it's a read operation, isn't
|
||||
* thread safe for concurrent reads. Locking is necessary.
|
||||
* See http://www.digip.org/jansson/doc/2.4/portability.html#thread-safety.
|
||||
*
|
||||
* This comment does not apply when JANSSON_THREAD_SAFE_REFCOUNT is defined,
|
||||
* in that case SCOPED_JSON_LOCK is a no-op.
|
||||
*/
|
||||
SCOPED_JSON_LOCK(root);
|
||||
if (!root || !output) {
|
||||
return -1;
|
||||
}
|
||||
@@ -755,14 +502,6 @@ int ast_json_dump_file_format(struct ast_json *root, FILE *output, enum ast_json
|
||||
}
|
||||
int ast_json_dump_new_file_format(struct ast_json *root, const char *path, enum ast_json_encoding_format format)
|
||||
{
|
||||
/* Jansson's json_dump*, even though it's a read operation, isn't
|
||||
* thread safe for concurrent reads. Locking is necessary.
|
||||
* See http://www.digip.org/jansson/doc/2.4/portability.html#thread-safety.
|
||||
*
|
||||
* This comment does not apply when JANSSON_THREAD_SAFE_REFCOUNT is defined,
|
||||
* in that case SCOPED_JSON_LOCK is a no-op.
|
||||
*/
|
||||
SCOPED_JSON_LOCK(root);
|
||||
if (!root || !path) {
|
||||
return -1;
|
||||
}
|
||||
@@ -1044,6 +783,7 @@ struct ast_json *ast_json_party_id(struct ast_party_id *party)
|
||||
enum ast_json_to_ast_vars_code ast_json_to_ast_variables(struct ast_json *json_variables, struct ast_variable **variables)
|
||||
{
|
||||
struct ast_json_iter *it_json_var;
|
||||
struct ast_variable *tail = NULL;
|
||||
|
||||
*variables = NULL;
|
||||
|
||||
@@ -1080,7 +820,7 @@ enum ast_json_to_ast_vars_code ast_json_to_ast_variables(struct ast_json *json_v
|
||||
return AST_JSON_TO_AST_VARS_CODE_OOM;
|
||||
}
|
||||
|
||||
ast_variable_list_append(variables, new_var);
|
||||
tail = ast_variable_list_append_hint(variables, tail, new_var);
|
||||
}
|
||||
|
||||
return AST_JSON_TO_AST_VARS_CODE_SUCCESS;
|
||||
|
@@ -569,6 +569,18 @@ void ast_module_register(const struct ast_module_info *info)
 	*((struct ast_module **) &(info->self)) = mod;
 }
 
+static int module_post_register(struct ast_module *mod)
+{
+	int res;
+
+	/* Split lists from mod->info. */
+	res = ast_vector_string_split(&mod->requires, mod->info->requires, ",", 0, strcasecmp);
+	res |= ast_vector_string_split(&mod->optional_modules, mod->info->optional_modules, ",", 0, strcasecmp);
+	res |= ast_vector_string_split(&mod->enhances, mod->info->enhances, ",", 0, strcasecmp);
+
+	return res;
+}
+
 static void module_destroy(struct ast_module *mod)
 {
 	AST_VECTOR_CALLBACK_VOID(&mod->requires, ast_free);
@@ -1526,11 +1538,7 @@ static enum ast_module_load_result load_resource(const char *resource_name, unsi
 		return required ? AST_MODULE_LOAD_FAILURE : AST_MODULE_LOAD_DECLINE;
 	}
 
-	/* Split lists from mod->info. */
-	res = ast_vector_string_split(&mod->requires, mod->info->requires, ",", 0, strcasecmp);
-	res |= ast_vector_string_split(&mod->optional_modules, mod->info->optional_modules, ",", 0, strcasecmp);
-	res |= ast_vector_string_split(&mod->enhances, mod->info->enhances, ",", 0, strcasecmp);
-	if (res) {
+	if (module_post_register(mod)) {
 		goto prestart_error;
 	}
 	}
@@ -1846,6 +1854,11 @@ static int loader_builtin_init(struct load_order *load_order)
 			continue;
 		}
 
+		/* Parse dependendencies from mod->info. */
+		if (module_post_register(mod)) {
+			return -1;
+		}
+
 		/* Built-in modules are not preloaded, most have an early load priority. */
 		if (!add_to_load_order(mod->resource, load_order, 0, 0, 1)) {
 			return -1;
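For context, a hedged sketch (not part of the diff) of the input module_post_register() consumes: the comma-separated dependency strings declared in a module's AST_MODULE_INFO block. The module name and dependency values here are hypothetical.

/* Illustrative sketch only: these comma-separated lists are what
 * module_post_register() splits into string vectors at registration time. */
AST_MODULE_INFO(ASTERISK_GPL_KEY, AST_MODFLAG_DEFAULT, "Example Module",
	.support_level = AST_MODULE_SUPPORT_CORE,
	.load = load_module,
	.unload = unload_module,
	.load_pri = AST_MODPRI_CORE,
	.requires = "extconfig,http",
	.optional_modules = "res_statsd",
);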
@@ -2381,6 +2381,7 @@ AST_MODULE_INFO(ASTERISK_GPL_KEY, AST_MODFLAG_GLOBAL_SYMBOLS | AST_MODFLAG_LOAD_
 	.support_level = AST_MODULE_SUPPORT_CORE,
 	.load = load_module,
 	.unload = unload_module,
+	/* This reload does not support realtime so it does not require "extconfig". */
 	.reload = reload_module,
 	.load_pri = 0,
 );
@@ -9600,5 +9600,5 @@ AST_MODULE_INFO(ASTERISK_GPL_KEY, AST_MODFLAG_GLOBAL_SYMBOLS | AST_MODFLAG_LOAD_
 	.unload = unload_module,
 	.reload = reload_module,
 	.load_pri = AST_MODPRI_CORE,
-	.requires = "http",
+	.requires = "extconfig,acl,http",
 );
@@ -591,4 +591,5 @@ AST_MODULE_INFO(ASTERISK_GPL_KEY, AST_MODFLAG_GLOBAL_SYMBOLS | AST_MODFLAG_LOAD_
 	.unload = unload_module,
 	.reload = reload_module,
 	.load_pri = AST_MODPRI_CORE,
+	.requires = "extconfig",
 );
@@ -296,4 +296,5 @@ AST_MODULE_INFO(ASTERISK_GPL_KEY, AST_MODFLAG_GLOBAL_SYMBOLS | AST_MODFLAG_LOAD_
 	.unload = unload_module,
 	.reload = reload_module,
 	.load_pri = AST_MODPRI_CORE,
+	.requires = "extconfig",
 );
@@ -608,11 +608,7 @@ const void *ast_sched_find_data(struct ast_sched_context *con, int id)
  * would be two or more in the list with that
  * id.
  */
-#ifndef AST_DEVMODE
 int ast_sched_del(struct ast_sched_context *con, int id)
-#else
-int _ast_sched_del(struct ast_sched_context *con, int id, const char *file, int line, const char *function)
-#endif
 {
 	struct sched *s = NULL;
 	int *last_id = ast_threadstorage_get(&last_del_id, sizeof(int));
@@ -340,6 +340,7 @@ AST_MODULE_INFO(ASTERISK_GPL_KEY, AST_MODFLAG_GLOBAL_SYMBOLS | AST_MODFLAG_LOAD_
 	.support_level = AST_MODULE_SUPPORT_CORE,
 	.load = load_module,
 	.unload = unload_module,
+	/* This reload doesn't use config so this module doesn't require "extconfig". */
 	.reload = reload_module,
 	/* Load after the format modules to reduce processing during startup. */
 	.load_pri = AST_MODPRI_APP_DEPEND + 1,
@@ -1420,4 +1420,5 @@ AST_MODULE_INFO(ASTERISK_GPL_KEY, AST_MODFLAG_GLOBAL_SYMBOLS | AST_MODFLAG_LOAD_
 	.unload = unload_module,
 	.reload = reload_module,
 	.load_pri = AST_MODPRI_CORE,
+	.requires = "extconfig",
 );
main/xml.c (16 lines changed)
@@ -142,6 +142,22 @@ struct ast_xml_node *ast_xml_add_child(struct ast_xml_node *parent, struct ast_x
 	return (struct ast_xml_node *) xmlAddChild((xmlNode *) parent, (xmlNode *) child);
 }
 
+struct ast_xml_node *ast_xml_add_child_list(struct ast_xml_node *parent, struct ast_xml_node *child)
+{
+	if (!parent || !child) {
+		return NULL;
+	}
+	return (struct ast_xml_node *) xmlAddChildList((xmlNode *) parent, (xmlNode *) child);
+}
+
+struct ast_xml_node *ast_xml_copy_node_list(struct ast_xml_node *list)
+{
+	if (!list) {
+		return NULL;
+	}
+	return (struct ast_xml_node *) xmlCopyNodeList((xmlNode *) list);
+}
+
 struct ast_xml_doc *ast_xml_read_memory(char *buffer, size_t size)
 {
 	xmlDoc *doc;
@@ -2783,6 +2783,8 @@ static int xml_pathmatch(char *xmlpattern, int xmlpattern_maxlen, glob_t *globbu
 static char *handle_dump_docs(struct ast_cli_entry *e, int cmd, struct ast_cli_args *a)
 {
 	struct documentation_tree *doctree;
+	struct ast_xml_doc *dumpdoc;
+	struct ast_xml_node *dumproot;
 	FILE *f;
 
 	switch (cmd) {
@@ -2799,15 +2801,53 @@ static char *handle_dump_docs(struct ast_cli_entry *e, int cmd, struct ast_cli_a
 	if (a->argc != 3) {
 		return CLI_SHOWUSAGE;
 	}
+
+	dumpdoc = ast_xml_new();
+	if (!dumpdoc) {
+		ast_log(LOG_ERROR, "Could not create new XML document\n");
+		return CLI_FAILURE;
+	}
+
+	dumproot = ast_xml_new_node("docs");
+	if (!dumproot) {
+		ast_xml_close(dumpdoc);
+		ast_log(LOG_ERROR, "Could not create new XML root node\n");
+		return CLI_FAILURE;
+	}
+
+	ast_xml_set_root(dumpdoc, dumproot);
+
+	AST_RWLIST_RDLOCK(&xmldoc_tree);
+	AST_LIST_TRAVERSE(&xmldoc_tree, doctree, entry) {
+		struct ast_xml_node *root_node = ast_xml_get_root(doctree->doc);
+		struct ast_xml_node *kids = ast_xml_node_get_children(root_node);
+		struct ast_xml_node *kids_copy;
+
+		/* If there are no kids someone screwed up, but we check anyway. */
+		if (!kids) {
+			continue;
+		}
+
+		kids_copy = ast_xml_copy_node_list(kids);
+		if (!kids_copy) {
+			ast_xml_close(dumpdoc);
+			ast_log(LOG_ERROR, "Could not create copy of XML node list\n");
+			return CLI_FAILURE;
+		}
+
+		ast_xml_add_child_list(dumproot, kids_copy);
+	}
+	AST_RWLIST_UNLOCK(&xmldoc_tree);
+
 	if (!(f = fopen(a->argv[2], "w"))) {
+		ast_xml_close(dumpdoc);
 		ast_log(LOG_ERROR, "Could not open file '%s': %s\n", a->argv[2], strerror(errno));
 		return CLI_FAILURE;
 	}
-	AST_RWLIST_RDLOCK(&xmldoc_tree);
-	AST_LIST_TRAVERSE(&xmldoc_tree, doctree, entry) {
-		ast_xml_doc_dump_file(f, doctree->doc);
-	}
-	AST_RWLIST_UNLOCK(&xmldoc_tree);
+
+	ast_xml_doc_dump_file(f, dumpdoc);
+	ast_xml_close(dumpdoc);
 
 	fclose(f);
 	return CLI_SUCCESS;
 }
@@ -183,6 +183,7 @@ IODBC_LIB=@IODBC_LIB@
 JACK_INCLUDE=@JACK_INCLUDE@
 JACK_LIB=@JACK_LIB@
 
+JANSSON_BUNDLED=@JANSSON_BUNDLED@
 JANSSON_INCLUDE=@JANSSON_INCLUDE@
 JANSSON_LIB=@JANSSON_LIB@
 
@@ -1726,7 +1726,7 @@
 				<configOption name="max_forwards" default="70">
 					<synopsis>Value used in Max-Forwards header for SIP requests.</synopsis>
 				</configOption>
-				<configOption name="keep_alive_interval" default="0">
+				<configOption name="keep_alive_interval" default="90">
 					<synopsis>The interval (in seconds) to send keepalives to active connection-oriented transports.</synopsis>
 				</configOption>
 				<configOption name="contact_expiration_check_interval" default="30">
@@ -29,7 +29,7 @@
 #include "asterisk/res_pjsip_cli.h"
 
 #define DEFAULT_MAX_FORWARDS 70
-#define DEFAULT_KEEPALIVE_INTERVAL 0
+#define DEFAULT_KEEPALIVE_INTERVAL 90
 #define DEFAULT_USERAGENT_PREFIX "Asterisk PBX"
 #define DEFAULT_OUTBOUND_ENDPOINT "default_outbound_endpoint"
 #define DEFAULT_DEBUG "no"
@@ -5522,7 +5522,7 @@ static int load_module(void)
 		persistence_expires_str2struct, persistence_expires_struct2str, NULL, 0, 0);
 	ast_sorcery_object_field_register(sorcery, "subscription_persistence", "contact_uri", "", OPT_CHAR_ARRAY_T, 0,
 		CHARFLDSET(struct subscription_persistence, contact_uri));
-	ast_sorcery_object_field_register(sorcery, "subscription_persistence", "prune_on_boot", "0", OPT_UINT_T, 0,
+	ast_sorcery_object_field_register(sorcery, "subscription_persistence", "prune_on_boot", "no", OPT_YESNO_T, 1,
 		FLDSET(struct subscription_persistence, prune_on_boot));
 
 	if (apply_list_configuration(sorcery)) {
@@ -26,10 +26,10 @@ gen_mods() {
 	done
 }
 
-[ x"$OUTPUT_DIR" != x ] && mkdir -p "$OUTPUT_DIR" 2&> /dev/null
+[ x"$OUTPUT_DIR" != x ] && mkdir -p "$OUTPUT_DIR" 2> /dev/null
 
 if [ x"$CACHE_DIR" != x ] ; then
-	mkdir -p "$CACHE_DIR/sounds $CACHE_DIR/externals" 2&> /dev/null
+	mkdir -p $CACHE_DIR/sounds $CACHE_DIR/externals 2> /dev/null
 fi
 
 if [ ${CCACHE_DISABLE:-0} -ne 1 ] ; then
@@ -58,9 +58,11 @@ runner ccache -s
 runner ulimit -a
 
 MAKE=`which make`
+PKGCONFIG=`which pkg-config`
 [ -d /usr/lib64 ] && _libdir=/usr/lib64
 
 common_config_args="--prefix=/usr ${_libdir:+--libdir=${_libdir}} --sysconfdir=/etc --with-pjproject-bundled"
+$PKGCONFIG 'jansson >= 2.11' || common_config_args+=" --with-jansson-bundled"
 common_config_args+=" ${CACHE_DIR:+--with-sounds-cache=${CACHE_DIR}/sounds --with-externals-cache=${CACHE_DIR}/externals}"
 common_config_args+=" --enable-dev-mode"
 export WGET_EXTRA_ARGS="--quiet"
@@ -62,6 +62,9 @@ pipeline {
 			steps {
 				/* Here's where we switch to scripted pipeline */
 				script {
+					manager.build.displayName = "${env.GERRIT_CHANGE_NUMBER}"
+					manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Host: ${NODE_NAME}", false)
+
 					stage ("Checkout") {
 						sh "sudo chown -R jenkins:users ."
 						env.GERRIT_PROJECT_URL = env.GERRIT_CHANGE_URL.replaceAll(/\/[0-9]+$/, "/${env.GERRIT_PROJECT}")
@@ -110,11 +113,13 @@
 					def r = currentBuild.startTimeInMillis % images.length
 					def ri = images[(int)r]
 					def randomImage = env.DOCKER_REGISTRY + "/" + ri
-					def dockerOptions = "--ulimit core=0 --ulimit nofile=10240 " +
+					def dockerOptions = "--privileged --ulimit core=0 --ulimit nofile=10240 " +
 						" -v /srv/jenkins:/srv/jenkins:rw -v /srv/cache:/srv/cache:rw " +
 						" --entrypoint=''"
 					def bt = env.BUILD_TAG.replaceAll(/[^a-zA-Z0-9_.-]/, '-')
 					def outputdir = "tests/CI/output/Testsuite"
+
+					manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Image: ${randomImage}", false)
 					def img = docker.image(randomImage)
 					img.pull()
 
@@ -122,7 +127,7 @@
 					img.inside(dockerOptions + " --name ${bt}-build") {
 						echo 'Building..'
 						env.CCACHE_DIR = "/srv/cache/ccache"
-						sh "./tests/CI/buildAsterisk.sh --output-dir=${outputdir} --cache-dir=/srv/cache"
+						sh "./tests/CI/buildAsterisk.sh --branch-name=${BRANCH_NAME} --output-dir=${outputdir} --cache-dir=/srv/cache"
 
 						archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: false,
 							artifacts: "${outputdir}/*"
@@ -148,7 +153,7 @@
 					img.inside("${dockerOptions} --name ${bt}-${groupName}") {
 
 						lock("${JOB_NAME}.${NODE_NAME}.installer") {
-							sh 'sudo ./tests/CI/installAsterisk.sh --user-group=jenkins:users'
+							sh "sudo ./tests/CI/installAsterisk.sh --uninstall-all --branch-name=${BRANCH_NAME} --user-group=jenkins:users"
 						}
 
 						sh "sudo rm -rf ${groupDir} || : "
@@ -167,7 +172,7 @@
 							userRemoteConfigs: [[name: env.GERRIT_NAME, url: testsuiteUrl]]
 						]
 
-						sh "sudo tests/CI/runTestsuite.sh --testsuite-dir='${groupDir}' --test-command='${groupTestcmd}'"
+						sh "sudo tests/CI/runTestsuite.sh --testsuite-dir='${groupDir}' --testsuite-command='${groupTestcmd}'"
 
 						archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: true,
 							artifacts: "${groupDir}/asterisk-test-suite-report.xml, ${groupDir}/logs/**, ${groupDir}/core*.txt"
@@ -1,6 +1,8 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
CIDIR=$(dirname $(readlink -fn $0))
|
||||
UNINSTALL=0
|
||||
UNINSTALL_ALL=0
|
||||
source $CIDIR/ci.functions
|
||||
|
||||
MAKE=`which make`
|
||||
@@ -10,6 +12,9 @@ if [ x"$DESTDIR" != x ] ; then
|
||||
fi
|
||||
destdir=${DESTDIR:+DESTDIR=$DESTDIR}
|
||||
|
||||
[ $UNINSTALL -gt 0 ] && ${MAKE} ${destdir} uninstall
|
||||
[ $UNINSTALL_ALL -gt 0 ] && ${MAKE} ${destdir} uninstall-all
|
||||
|
||||
${MAKE} ${destdir} install || ${MAKE} ${destdir} NOISY_BUILD=yes install || exit 1
|
||||
${MAKE} ${destdir} samples
|
||||
if [ x"$DESTDIR" != x ] ; then
|
||||
|
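For reference, the new installAsterisk.sh flags would be driven by hand roughly as below; this is a sketch rather than part of the change, and the branch, user, and group values are placeholders.

    # --uninstall-all runs `make uninstall-all` before installing, wiping a
    # previous install; --branch-name and --user-group values are placeholders.
    sudo ./tests/CI/installAsterisk.sh --uninstall-all \
        --branch-name=16 --user-group=jenkins:users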
@@ -32,7 +32,7 @@
|
||||
{
|
||||
"name": "real",
|
||||
"dir": "tests/CI/output/realtime",
|
||||
"runTestsuiteOptions": "--realtime",
|
||||
"runTestsuiteOptions": "--realtime --initialize-db --cleanup-db",
|
||||
"testcmd": " -t tests/channels/pjsip -G realtime-incompatible"
|
||||
}
|
||||
]
|
||||
|
@@ -14,7 +14,7 @@ pipeline {
|
||||
triggers {
|
||||
cron 'H H(0-4) * * *'
|
||||
}
|
||||
|
||||
|
||||
agent {
|
||||
/* All of the stages need to be performed on a docker host */
|
||||
label "swdev-docker"
|
||||
@@ -25,8 +25,10 @@ pipeline {
|
||||
steps {
|
||||
/* Here's where we switch to scripted pipeline */
|
||||
script {
|
||||
manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Host: ${NODE_NAME}", false)
|
||||
|
||||
stage ("Checkout") {
|
||||
sh "sudo chown -R jenkins:users ."
|
||||
sh "sudo chown -R jenkins:users ."
|
||||
sh "printenv | sort"
|
||||
sh "sudo tests/CI/setupJenkinsEnvironment.sh"
|
||||
}
|
||||
@@ -35,23 +37,46 @@ pipeline {
|
||||
def r = currentBuild.startTimeInMillis % images.length
|
||||
def ri = images[(int)r]
|
||||
def randomImage = env.DOCKER_REGISTRY + "/" + ri
|
||||
def dockerOptions = "--ulimit core=0 --ulimit nofile=10240 " +
|
||||
def dockerOptions = "--privileged --ulimit core=0 --ulimit nofile=10240 " +
|
||||
" -v /srv/jenkins:/srv/jenkins:rw -v /srv/cache:/srv/cache:rw " +
|
||||
" --entrypoint=''"
|
||||
def bt = env.BUILD_TAG.replaceAll(/[^a-zA-Z0-9_.-]/, '-')
|
||||
def outputdir = "tests/CI/output/Testsuite"
|
||||
|
||||
manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Image: ${randomImage}", false)
|
||||
def img = docker.image(randomImage)
|
||||
img.pull()
|
||||
|
||||
stage ("Build") {
|
||||
img.inside(dockerOptions + " --name ${bt}-build") {
|
||||
img.inside(dockerOptions + " --name ${bt}-build") {
|
||||
stage ("Build") {
|
||||
echo 'Building..'
|
||||
env.CCACHE_DIR = "/srv/cache/ccache"
|
||||
sh "./tests/CI/buildAsterisk.sh --output-dir=${outputdir} --cache-dir=/srv/cache"
|
||||
sh "./tests/CI/buildAsterisk.sh --branch-name=${BRANCH_NAME} --output-dir=${outputdir} --cache-dir=/srv/cache"
|
||||
|
||||
archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: false,
|
||||
artifacts: "${outputdir}/*"
|
||||
}
|
||||
stage ("Docs") {
|
||||
|
||||
sh "sudo ./tests/CI/installAsterisk.sh --branch-name=${BRANCH_NAME} --user-group=jenkins:users"
|
||||
|
||||
def docUrl = env.GIT_URL.replaceAll(/\/[^\/]+$/, "/publish-docs")
|
||||
checkout scm: [$class: 'GitSCM',
|
||||
branches: [[name: "master"]],
|
||||
extensions: [
|
||||
[$class: 'RelativeTargetDirectory', relativeTargetDir: "tests/CI/output/publish-docs"],
|
||||
[$class: 'CloneOption',
|
||||
noTags: true,
|
||||
depth: 10,
|
||||
honorRefspec: true,
|
||||
shallow: true
|
||||
],
|
||||
],
|
||||
userRemoteConfigs: [[url: docUrl]]
|
||||
]
|
||||
|
||||
sh "./tests/CI/publishAsteriskDocs.sh --user-group=jenkins:users --branch-name=${BRANCH_NAME} --wiki-doc-branch-regex=\"${WIKI_DOC_BRANCH_REGEX}\""
|
||||
}
|
||||
}
|
||||
|
||||
def testGroups = readJSON file: "tests/CI/periodic-dailyTestGroups.json"
|
||||
@@ -74,11 +99,11 @@ pipeline {
|
||||
img.inside("${dockerOptions} --name ${bt}-${groupName}") {
|
||||
|
||||
lock("${JOB_NAME}.${NODE_NAME}.installer") {
|
||||
sh 'sudo ./tests/CI/installAsterisk.sh --user-group=jenkins:users'
|
||||
sh "sudo ./tests/CI/installAsterisk.sh --uninstall-all --branch-name=${BRANCH_NAME} --user-group=jenkins:users"
|
||||
}
|
||||
|
||||
sh "sudo rm -rf ${groupDir} || : "
|
||||
|
||||
|
||||
checkout scm: [$class: 'GitSCM',
|
||||
branches: [[name: "${BRANCH_NAME}"]],
|
||||
extensions: [
|
||||
@@ -93,7 +118,7 @@ pipeline {
|
||||
userRemoteConfigs: [[url: testsuiteUrl]]
|
||||
]
|
||||
|
||||
sh "sudo tests/CI/runTestsuite.sh ${groupRunTestsuiteOptions} --testsuite-dir='${groupDir}' --test-command='${groupTestcmd}'"
|
||||
sh "sudo tests/CI/runTestsuite.sh ${groupRunTestsuiteOptions} --testsuite-dir='${groupDir}' --testsuite-command='${groupTestcmd}'"
|
||||
|
||||
archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: true,
|
||||
artifacts: "${groupDir}/asterisk-test-suite-report.xml, ${groupDir}/logs/**, ${groupDir}/core*.txt"
|
||||
|
144
tests/CI/publishAsteriskDocs.sh
Executable file
@@ -0,0 +1,144 @@
|
||||
#
|
||||
# Publish Asterisk documentation to the wiki
|
||||
#
|
||||
#!/usr/bin/env bash
|
||||
CIDIR=$(dirname $(readlink -fn $0))
|
||||
source $CIDIR/ci.functions
|
||||
ASTETCDIR=$DESTDIR/etc/asterisk
|
||||
|
||||
ASTERISK="$DESTDIR/usr/sbin/asterisk"
|
||||
CONFFILE=$ASTETCDIR/asterisk.conf
|
||||
OUTPUTDIR=${OUTPUT_DIR:-tests/CI/output/publish-docs}
|
||||
|
||||
[ ! -d ${OUTPUTDIR} ] && mkdir -p $OUTPUTDIR
|
||||
[ x"$USER_GROUP" != x ] && sudo chown -R $USER_GROUP $OUTPUTDIR
|
||||
|
||||
rm -rf $ASTETCDIR/extensions.{ael,lua} || :
|
||||
|
||||
if test -f ~/.asterisk-wiki.conf; then
|
||||
. ~/.asterisk-wiki.conf
|
||||
fi
|
||||
|
||||
: ${AWK:=awk}
|
||||
: ${GREP:=grep}
|
||||
: ${MAKE:=make}
|
||||
: ${GIT:=git}
|
||||
|
||||
function fail()
|
||||
{
|
||||
echo "${PROGNAME}: " "$@" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
function usage()
|
||||
{
|
||||
echo "usage: ${PROGNAME} --branch-name=<branch> [ --user-group=<user>:<group> ] [ --output-dir=<output_dir> ]"
|
||||
}
|
||||
|
||||
#
|
||||
# Check settings from config file
|
||||
#
|
||||
if ! test ${CONFLUENCE_URL}; then
|
||||
fail "CONFLUENCE_URL not set in ~/.asterisk-wiki.conf"
|
||||
fi
|
||||
|
||||
if ! test ${CONFLUENCE_USER}; then
|
||||
fail "CONFLUENCE_USER not set in ~/.asterisk-wiki.conf"
|
||||
fi
|
||||
|
||||
if ! test ${CONFLUENCE_PASSWORD}; then
|
||||
fail "CONFLUENCE_PASSWORD not set in ~/.asterisk-wiki.conf"
|
||||
fi
|
||||
# needed by publishing scripts. pass via the environment so it doesn't show
|
||||
# up in the logs.
|
||||
export CONFLUENCE_PASSWORD
|
||||
|
||||
# default space to AST
|
||||
: ${CONFLUENCE_SPACE:=AST}
|
||||
|
||||
#
|
||||
# Check repository
|
||||
#
|
||||
if ! test -f main/asterisk.c; then
|
||||
fail "Must run from an Asterisk checkout"
|
||||
fi
|
||||
|
||||
#
|
||||
# Check current working copy
|
||||
#
|
||||
CHANGES=$(${GIT} status | grep 'modified:' | wc -l)
|
||||
if test ${CHANGES} -ne 0; then
|
||||
fail "Asterisk checkout must be clean"
|
||||
fi
|
||||
|
||||
# Verbose, and exit on any command failure
|
||||
set -ex
|
||||
|
||||
AST_VER=$(export GREP; export AWK; ./build_tools/make_version .)
|
||||
|
||||
# Generate latest ARI documentation
|
||||
make ari-stubs
|
||||
|
||||
# Ensure docs are consistent with the implementation
|
||||
CHANGES=$(${GIT} status | grep 'modified:' | wc -l)
|
||||
if test ${CHANGES} -ne 0; then
|
||||
fail "Asterisk code out of date compared to the model"
|
||||
fi
|
||||
|
||||
# make ari-stubs may modify the $Revision$ tags in a file; revert the
|
||||
# changes
|
||||
${GIT} reset --hard
|
||||
|
||||
#
|
||||
# Don't publish docs for non-main-release branches. We still want the above
|
||||
# validation to ensure that REST API docs are kept up to date though.
|
||||
#
|
||||
if [ -n "$WIKI_DOC_BRANCH_REGEX" ] ; then
|
||||
if [[ ! ${BRANCH_NAME} =~ $WIKI_DOC_BRANCH_REGEX ]] ; then
|
||||
exit 0;
|
||||
fi
|
||||
fi
|
||||
|
||||
#
|
||||
# Publish the REST API.
|
||||
#
|
||||
|
||||
${OUTPUTDIR}/publish-rest-api.py --username="${CONFLUENCE_USER}" \
|
||||
--verbose \
|
||||
--ast-version="${AST_VER}" \
|
||||
${CONFLUENCE_URL} \
|
||||
${CONFLUENCE_SPACE} \
|
||||
"Asterisk ${BRANCH_NAME}"
|
||||
|
||||
rm -f ${OUTPUTDIR}/full-en_US.xml
|
||||
|
||||
sudo $ASTERISK ${USER_GROUP:+-U ${USER_GROUP%%:*} -G ${USER_GROUP##*:}} -gn -C $CONFFILE
|
||||
for n in `seq 1 5` ; do
|
||||
sleep 3
|
||||
$ASTERISK -rx "core waitfullybooted" -C $CONFFILE && break
|
||||
done
|
||||
sleep 1
|
||||
$ASTERISK -rx "xmldoc dump ${OUTPUTDIR}/asterisk-docs.xml" -C $CONFFILE
|
||||
$ASTERISK -rx "core stop now" -C $CONFFILE
|
||||
|
||||
#
|
||||
# Set the prefix argument for publishing docs
|
||||
#
|
||||
PREFIX="Asterisk ${BRANCH_NAME}"
|
||||
|
||||
#
|
||||
# Publish XML documentation.
|
||||
#
|
||||
|
||||
# Script assumes that it's running from TOPDIR
|
||||
pushd ${OUTPUTDIR}
|
||||
|
||||
./astxml2wiki.py --username="${CONFLUENCE_USER}" \
|
||||
--server=${CONFLUENCE_URL} \
|
||||
--prefix="${PREFIX}" \
|
||||
--space="${CONFLUENCE_SPACE}" \
|
||||
--file=asterisk-docs.xml \
|
||||
--ast-version="${AST_VER}" \
|
||||
-v
|
||||
|
||||
popd
|
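publishAsteriskDocs.sh reads its Confluence settings from ~/.asterisk-wiki.conf. A sketch of that file follows; every value is a placeholder, and CONFLUENCE_SPACE may be omitted since the script defaults it to AST.

    # ~/.asterisk-wiki.conf -- sourced by publishAsteriskDocs.sh; placeholder values.
    CONFLUENCE_URL=https://wiki.example.org
    CONFLUENCE_USER=wikibot
    CONFLUENCE_PASSWORD=not-a-real-password
    # Optional; the script falls back to AST when unset.
    CONFLUENCE_SPACE=AST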
@@ -25,6 +25,8 @@ pipeline {
|
||||
steps {
|
||||
/* Here's where we switch to scripted pipeline */
|
||||
script {
|
||||
manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Host: ${NODE_NAME}", false)
|
||||
|
||||
stage ("Checkout") {
|
||||
sh "sudo chown -R jenkins:users ."
|
||||
sh "printenv | sort"
|
||||
@@ -35,11 +37,13 @@ pipeline {
|
||||
def r = currentBuild.startTimeInMillis % images.length
|
||||
def ri = images[(int)r]
|
||||
def randomImage = env.DOCKER_REGISTRY + "/" + ri
|
||||
def dockerOptions = "--ulimit core=0 --ulimit nofile=10240 " +
|
||||
def dockerOptions = "--privileged --ulimit core=0 --ulimit nofile=10240 " +
|
||||
" -v /srv/jenkins:/srv/jenkins:rw -v /srv/cache:/srv/cache:rw " +
|
||||
" --entrypoint=''"
|
||||
def bt = env.BUILD_TAG.replaceAll(/[^a-zA-Z0-9_.-]/, '-')
|
||||
def outputdir = "tests/CI/output/Testsuite"
|
||||
|
||||
manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Image: ${randomImage}", false)
|
||||
def img = docker.image(randomImage)
|
||||
img.pull()
|
||||
|
||||
@@ -47,7 +51,7 @@ pipeline {
|
||||
img.inside(dockerOptions + " --name ${bt}-build") {
|
||||
echo 'Building..'
|
||||
env.CCACHE_DIR = "/srv/cache/ccache"
|
||||
sh "./tests/CI/buildAsterisk.sh --ref-debug --output-dir=${outputdir} --cache-dir=/srv/cache"
|
||||
sh "./tests/CI/buildAsterisk.sh --ref-debug --branch-name=${BRANCH_NAME} --output-dir=${outputdir} --cache-dir=/srv/cache"
|
||||
|
||||
archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: false,
|
||||
artifacts: "${outputdir}/*"
|
||||
@@ -65,7 +69,7 @@ pipeline {
|
||||
def groupName = testGroup.name
|
||||
def groupDir = testGroup.dir
|
||||
def groupTestcmd = testGroup.testcmd
|
||||
def testsuiteUrl = env.GIT_URL.replaceAll(/\/[^\/]+$/, "/1testsuite")
|
||||
def testsuiteUrl = env.GIT_URL.replaceAll(/\/[^\/]+$/, "/testsuite")
|
||||
|
||||
parallelTasks[groupName] = {
|
||||
stage (groupName) {
|
||||
@@ -73,7 +77,7 @@ pipeline {
|
||||
img.inside("${dockerOptions} --name ${bt}-${groupName}") {
|
||||
|
||||
lock("${JOB_NAME}.${NODE_NAME}.installer") {
|
||||
sh 'sudo ./tests/CI/installAsterisk.sh --user-group=jenkins:users'
|
||||
sh "sudo ./tests/CI/installAsterisk.sh --uninstall-all --branch-name=${BRANCH_NAME} --user-group=jenkins:users"
|
||||
}
|
||||
|
||||
sh "sudo rm -rf ${groupDir} || : "
|
||||
@@ -92,7 +96,7 @@ pipeline {
|
||||
userRemoteConfigs: [[url: testsuiteUrl]]
|
||||
]
|
||||
|
||||
sh "sudo tests/CI/runTestsuite.sh --testsuite-dir='${groupDir}' --test-command='${groupTestcmd}'"
|
||||
sh "sudo tests/CI/runTestsuite.sh --testsuite-dir='${groupDir}' --testsuite-command='${groupTestcmd}'"
|
||||
|
||||
archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: true,
|
||||
artifacts: "${groupDir}/asterisk-test-suite-report.xml, ${groupDir}/logs/**, ${groupDir}/core*.txt"
|
||||
|
@@ -9,15 +9,15 @@ pushd $TESTSUITE_DIR
|
||||
./cleanup-test-remnants.sh
|
||||
|
||||
if [ $REALTIME -eq 1 ] ; then
|
||||
$CIDIR/setupRealtime.sh
|
||||
$CIDIR/setupRealtime.sh --initialize-db=${INITIALIZE_DB:?0}
|
||||
fi
|
||||
|
||||
export PYTHONPATH=./lib/python/
|
||||
echo "Running tests ${TEST_COMMAND}"
|
||||
./runtests.py --cleanup ${TEST_COMMAND} | contrib/scripts/pretty_print --no-color --no-timer --term-width=120 --show-errors || :
|
||||
echo "Running tests ${TESTSUITE_COMMAND}"
|
||||
./runtests.py --cleanup ${TESTSUITE_COMMAND} | contrib/scripts/pretty_print --no-color --no-timer --term-width=120 --show-errors || :
|
||||
|
||||
if [ $REALTIME -eq 1 ] ; then
|
||||
$CIDIR/teardownRealtime.sh
|
||||
$CIDIR/teardownRealtime.sh --cleanup-db=${CLEANUP_DB:?0}
|
||||
fi
|
||||
|
||||
if [ -f core* ] ; then
|
||||
@@ -26,4 +26,4 @@ if [ -f core* ] ; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
popd
|
||||
popd
|
||||
|
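With the option renamed from --test-command to --testsuite-command, a manual run of runTestsuite.sh now looks roughly like this sketch; the directory and test selection are placeholders, and the testsuite checkout is assumed to already be in --testsuite-dir.

    # Placeholder paths and test selection; --testsuite-command is passed
    # straight through to runtests.py.
    sudo ./tests/CI/runTestsuite.sh \
        --testsuite-dir=tests/CI/output/Testsuite \
        --testsuite-command='-t tests/channels/pjsip'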
@@ -3,7 +3,10 @@ CIDIR=$(dirname $(readlink -fn $0))
|
||||
source $CIDIR/ci.functions
|
||||
ASTETCDIR=$DESTDIR/etc/asterisk
|
||||
|
||||
echo "full => notice,warning,error,debug,verbose" > "$ASTETCDIR/logger.conf"
|
||||
cat <<-EOF > "$ASTETCDIR/logger.conf"
|
||||
[logfiles]
|
||||
full => notice,warning,error,debug,verbose
|
||||
EOF
|
||||
|
||||
echo "[default]" > "$ASTETCDIR/extensions.conf"
|
||||
|
||||
@@ -57,7 +60,7 @@ for n in `seq 1 5` ; do
|
||||
$ASTERISK -rx "core waitfullybooted" -C $CONFFILE && break
|
||||
done
|
||||
sleep 1
|
||||
$ASTERISK -rx "${TEST_COMMAND:-test execute all}" -C $CONFFILE
|
||||
$ASTERISK -rx "${UNITTEST_COMMAND:-test execute all}" -C $CONFFILE
|
||||
$ASTERISK -rx "test show results failed" -C $CONFFILE
|
||||
$ASTERISK -rx "test generate results xml $OUTPUTFILE" -C $CONFFILE
|
||||
$ASTERISK -rx "core stop now" -C $CONFFILE
|
||||
|
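runUnittests.sh gets the matching rename (--test-command becomes --unittest-command); a hand-driven run would look something like the sketch below, with placeholder output paths and an illustrative CLI test selection.

    # Placeholder output paths; the --unittest-command string is executed at the
    # Asterisk CLI, so any "test execute ..." variant can be used here.
    tests/CI/runUnittests.sh --user-group=jenkins:users \
        --output-dir=tests/CI/output/UnitTests \
        --output-xml=tests/CI/output/UnitTests/unittests-results.xml \
        --unittest-command='test execute category /main/json/'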
@@ -1,9 +1,114 @@
|
||||
#!/usr/bin/env bash
|
||||
CIDIR=$(dirname $(readlink -fn $0))
|
||||
INITIALIZE_DB=0
|
||||
source $CIDIR/ci.functions
|
||||
ASTTOP=$(readlink -fn $CIDIR/../../)
|
||||
|
||||
set -e
|
||||
|
||||
POSTGRES_PID=`pidof postgres || : `
|
||||
|
||||
if [ -z "$POSTGRES_PID" ] ; then
|
||||
if [ -x /usr/local/bin/postgresql-start ] ; then
|
||||
/usr/local/bin/postgresql-start
|
||||
fi
|
||||
fi
|
||||
|
||||
POSTGRES_PID=`pidof postgres || : `
|
||||
if [ -z "$POSTGRES_PID" ] ; then
|
||||
echo "Postgres isn't running. It must be started manually before this test can continue."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ $INITIALIZE_DB -gt 0 ] ; then
|
||||
echo "(re)Initializing Database"
|
||||
|
||||
sudo -u postgres dropdb -e asterisk_test >/dev/null 2>&1 || :
|
||||
sudo -u postgres dropuser -e asterisk_test >/dev/null 2>&1 || :
|
||||
sudo -u postgres createuser --username=postgres -RDIElS asterisk_test
|
||||
sudo -u postgres createdb --username=postgres -E UTF-8 -O asterisk_test asterisk_test
|
||||
|
||||
echo "Configuring ODBC"
|
||||
|
||||
sudo odbcinst -u -d -n "PostgreSQL-Asterisk-Test"
|
||||
|
||||
sudo odbcinst -i -d -n "PostgreSQL-Asterisk-Test" -f /dev/stdin <<-EOF
|
||||
[PostgreSQL-Asterisk-Test]
|
||||
Description=PostgreSQL ODBC driver (Unicode version)
|
||||
Driver=psqlodbcw.so
|
||||
Setup=libodbcpsqlS.so
|
||||
Debug=0
|
||||
CommLog=1
|
||||
UsageCount=1
|
||||
EOF
|
||||
|
||||
sudo odbcinst -u -s -l -n asterisk-connector-test
|
||||
sudo odbcinst -i -s -l -n asterisk-connector-test -f /dev/stdin <<-EOF
|
||||
[asterisk-connector-test]
|
||||
Description = PostgreSQL connection to 'asterisk' database
|
||||
Driver = PostgreSQL-Asterisk-Test
|
||||
Database = asterisk_test
|
||||
Servername = 127.0.0.1
|
||||
UserName = asterisk_test
|
||||
Port = 5432
|
||||
Protocol = 9.1
|
||||
ReadOnly = No
|
||||
RowVersioning = No
|
||||
ShowSystemTables = No
|
||||
ShowOldColumn = No
|
||||
FakeOldIndex = No
|
||||
ConnSettings =
|
||||
EOF
|
||||
fi
|
||||
|
||||
cat >/tmp/config.ini <<-EOF
|
||||
[alembic]
|
||||
script_location = config
|
||||
sqlalchemy.url = postgresql://asterisk_test@localhost/asterisk_test
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
EOF
|
||||
|
||||
pushd $ASTTOP/contrib/ast-db-manage
|
||||
|
||||
psql --username=asterisk_test --host=localhost --db=asterisk_test --command='DROP OWNED BY asterisk_test CASCADE'
|
||||
alembic -c /tmp/config.ini upgrade head
|
||||
rm -rf /tmp/config.ini || :
|
||||
|
||||
popd
|
||||
|
||||
cp test-config.yaml test-config.orig.yaml
|
||||
|
||||
cat >test-config.yaml <<-EOF
|
||||
@@ -65,59 +170,10 @@ cat >test-config.yaml <<-EOF
|
||||
config-section: realtime-config
|
||||
|
||||
realtime-config:
|
||||
username: "asterisk"
|
||||
username: "asterisk_test"
|
||||
password: "asterisk_test"
|
||||
host: "localhost"
|
||||
db: "asterisk"
|
||||
dsn: "asterisk-connector"
|
||||
db: "asterisk_test"
|
||||
dsn: "asterisk-connector-test"
|
||||
EOF
|
||||
|
||||
ASTTOP=$(readlink -fn $CIDIR/../../)
|
||||
|
||||
cat >/tmp/config.ini <<-EOF
|
||||
[alembic]
|
||||
script_location = config
|
||||
sqlalchemy.url = postgresql://asterisk@localhost/asterisk
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
EOF
|
||||
|
||||
pushd $ASTTOP/contrib/ast-db-manage
|
||||
if [ -x /usr/local/bin/postgresql-start ] ; then
|
||||
/usr/local/bin/postgresql-start
|
||||
fi
|
||||
psql --username=asterisk --host=localhost --db=asterisk --command='DROP OWNED BY asterisk CASCADE'
|
||||
alembic -c /tmp/config.ini upgrade head
|
||||
rm -rf /tmp/config.ini || :
|
||||
popd
|
||||
|
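Once setupRealtime.sh has created the asterisk_test role, database, ODBC driver, and DSN, the wiring can be spot-checked from a shell before the realtime tests run. A rough sketch, assuming unixODBC's odbcinst/isql and the psql client are available and local PostgreSQL authentication permits it:

    # Spot-check what setupRealtime.sh configured (assumes unixODBC + psql).
    odbcinst -q -d | grep PostgreSQL-Asterisk-Test   # ODBC driver registered?
    odbcinst -q -s | grep asterisk-connector-test    # system DSN registered?
    echo 'SELECT 1;' | isql asterisk-connector-test asterisk_test -v
    psql --username=asterisk_test --host=localhost --dbname=asterisk_test \
        --command='SELECT current_database();'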
@@ -1,6 +1,12 @@
|
||||
#!/usr/bin/env bash
|
||||
CIDIR=$(dirname $(readlink -fn $0))
|
||||
CLEANUP_DB=0
|
||||
source $CIDIR/ci.functions
|
||||
|
||||
cp test-config.orig.yaml test-config.yaml
|
||||
psql --username=asterisk --host=localhost --db=asterisk --command='DROP OWNED BY asterisk CASCADE'
|
||||
if [ $CLEANUP_DB -gt 0 ] ; then
|
||||
sudo -u postgres dropdb -e asterisk_test >/dev/null 2>&1 || :
|
||||
sudo -u postgres dropuser -e asterisk_test >/dev/null 2>&1 || :
|
||||
sudo odbcinst -u -d -n "PostgreSQL-Asterisk-Test"
|
||||
sudo odbcinst -u -s -l -n "asterisk-connector-test"
|
||||
fi
|
||||
|
@@ -63,6 +63,9 @@ pipeline {
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
manager.build.displayName = "${env.GERRIT_CHANGE_NUMBER}"
|
||||
manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Host: ${NODE_NAME}", false)
|
||||
|
||||
stage ("Checkout") {
|
||||
sh "sudo chown -R jenkins:users ."
|
||||
env.GERRIT_PROJECT_URL = env.GERRIT_CHANGE_URL.replaceAll(/\/[0-9]+$/, "/${env.GERRIT_PROJECT}")
|
||||
@@ -112,11 +115,12 @@ pipeline {
|
||||
def ri = images[(int)r]
|
||||
def randomImage = env.DOCKER_REGISTRY + "/" + ri;
|
||||
def bt = env.BUILD_TAG.replaceAll(/[^a-zA-Z0-9_.-]/, '-')
|
||||
def dockerOptions = "--ulimit core=0 --ulimit nofile=10240 " +
|
||||
def dockerOptions = "--privileged --ulimit core=0 --ulimit nofile=10240 " +
|
||||
" -v /srv/jenkins:/srv/jenkins:rw -v /srv/cache:/srv/cache:rw " +
|
||||
" --entrypoint='' --name ${bt}-build"
|
||||
def outputdir = "tests/CI/output/UnitTests"
|
||||
|
||||
manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Image: ${randomImage}", false)
|
||||
def img = docker.image(randomImage)
|
||||
img.pull()
|
||||
img.inside(dockerOptions) {
|
||||
@@ -124,7 +128,7 @@ pipeline {
|
||||
stage ('Build') {
|
||||
echo 'Building..'
|
||||
|
||||
sh "./tests/CI/buildAsterisk.sh --output-dir=${outputdir} --cache-dir=/srv/cache"
|
||||
sh "./tests/CI/buildAsterisk.sh --branch-name=${BRANCH_NAME} --output-dir=${outputdir} --cache-dir=/srv/cache"
|
||||
|
||||
archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: false,
|
||||
artifacts: "${outputdir}/*"
|
||||
@@ -134,9 +138,9 @@ pipeline {
|
||||
def outputfile = "${outputdir}/unittests-results.xml"
|
||||
def testcmd = "test execute all"
|
||||
|
||||
sh 'sudo ./tests/CI/installAsterisk.sh --user-group=jenkins:users'
|
||||
sh "sudo ./tests/CI/installAsterisk.sh --uninstall-all --branch-name=${BRANCH_NAME} --user-group=jenkins:users"
|
||||
|
||||
sh "tests/CI/runUnittests.sh --user-group=jenkins:users --output-dir='${outputdir}' --output-xml='${outputfile}' --test-command='${testcmd}'"
|
||||
sh "tests/CI/runUnittests.sh --user-group=jenkins:users --output-dir='${outputdir}' --output-xml='${outputfile}' --unittest-command='${testcmd}'"
|
||||
|
||||
archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: true,
|
||||
artifacts: "${outputdir}/**"
|
||||
|
4
third-party/Makefile
vendored
@@ -1,10 +1,10 @@
|
||||
|
||||
include Makefile.rules
|
||||
|
||||
TP_SUBDIRS := pjproject
|
||||
TP_SUBDIRS := pjproject jansson
|
||||
# Sub directories that contain special install/uninstall targets must be explicitly listed
|
||||
# to prevent accidentally running the package's default install target.
|
||||
TP_INSTALL_SUBDIRS := pjproject
|
||||
TP_INSTALL_SUBDIRS := pjproject jansson
|
||||
|
||||
.PHONY: all dist-clean distclean install clean moduleinfo makeopts uninstall $(TP_SUBDIRS)
|
||||
|
||||
|
22
third-party/Makefile.rules
vendored
@@ -33,3 +33,25 @@ export GREP
|
||||
export DOWNLOAD
|
||||
export DOWNLOAD_TO_STDOUT
|
||||
export DOWNLOAD_TIMEOUT
|
||||
|
||||
DOWNLOAD_DIR := $(or $(EXTERNALS_CACHE_DIR),$(TMPDIR),$(wildcard /tmp),.)
|
||||
|
||||
# These depend on the subpackage defining TARBALL_FILE.
|
||||
TARBALL_EXISTS = test -f $(DOWNLOAD_DIR)/$(TARBALL_FILE) -a -f $(TARBALL_MD5)
|
||||
|
||||
define TARBALL_VERIFY
|
||||
($(SHELL_ECHO_PREFIX) Verifying $(DOWNLOAD_DIR)/$(TARBALL_FILE) &&\
|
||||
tarball_sum=$$($(CAT) $(DOWNLOAD_DIR)/$(TARBALL_FILE) | $(MD5) | $(CUT) -d' ' -f1) ;\
|
||||
required_sum=$$($(GREP) -e $(TARBALL_FILE) $(TARBALL_MD5) | $(CUT) -d' ' -f1) ;\
|
||||
if [ -z "$$required_sum" -o "$$tarball_sum" != "$$required_sum" ] ; then $(SHELL_ECHO_PREFIX) Verify failed ; exit 1 ;\
|
||||
else $(SHELL_ECHO_PREFIX) Verify successful ; exit 0 ; fi; )
|
||||
endef
|
||||
|
||||
define TARBALL_DOWNLOAD
|
||||
($(SHELL_ECHO_PREFIX) Downloading $(TARBALL_URL) to $(DOWNLOAD_DIR)/$(TARBALL_FILE) ;\
|
||||
$(DOWNLOAD_TO_STDOUT) $(call DOWNLOAD_TIMEOUT,5,60) $(TARBALL_URL) > $(DOWNLOAD_DIR)/$(TARBALL_FILE) &&\
|
||||
$(TARBALL_VERIFY))
|
||||
endef
|
||||
|
||||
TARBALL_URL = $(PACKAGE_URL)/$(TARBALL_FILE)
|
||||
TARBALL_MD5 = $(TARBALL_FILE).md5
|
||||
|
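TARBALL_VERIFY above is nothing more than an md5 comparison against the checked-in .md5 file, which is handy to reproduce by hand when a bundled download keeps failing. A sketch, not part of the change (the download location is a placeholder; the file name follows versions.mak):

    # Manual equivalent of the TARBALL_VERIFY recipe for the bundled jansson.
    cd third-party/jansson
    tarball=/tmp/jansson-2.11.tar.bz2   # placeholder download location
    tarball_sum=$(cat "$tarball" | md5sum | cut -d' ' -f1)
    required_sum=$(grep -e jansson-2.11.tar.bz2 jansson-2.11.tar.bz2.md5 | cut -d' ' -f1)
    if [ -n "$required_sum" ] && [ "$tarball_sum" = "$required_sum" ]; then
        echo "Verify successful"
    else
        echo "Verify failed"
    fi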
1
third-party/configure.m4
vendored
@@ -5,5 +5,6 @@
|
||||
|
||||
AC_DEFUN([THIRD_PARTY_CONFIGURE],
|
||||
[
|
||||
JANSSON_CONFIGURE()
|
||||
PJPROJECT_CONFIGURE()
|
||||
])
|
||||
|
4
third-party/jansson/.gitignore
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
source/
|
||||
dest/
|
||||
**.bz2
|
||||
.rebuild_needed
|
97
third-party/jansson/Makefile
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
.PHONY: all install clean distclean configure
|
||||
|
||||
.NOTPARALLEL:
|
||||
|
||||
include ../versions.mak
|
||||
export JANSSON_DIR := $(shell pwd -P)
|
||||
|
||||
SPECIAL_TARGETS :=
|
||||
|
||||
ifneq ($(findstring configure,$(MAKECMDGOALS)),)
|
||||
# Run from $(ASTTOPDIR)/configure
|
||||
SPECIAL_TARGETS += configure
|
||||
endif
|
||||
|
||||
ifeq ($(findstring clean,$(MAKECMDGOALS)),clean)
|
||||
# clean or distclean
|
||||
SPECIAL_TARGETS += clean
|
||||
endif
|
||||
|
||||
ifeq ($(findstring uninstall,$(MAKECMDGOALS)),uninstall)
|
||||
SPECIAL_TARGETS += uninstall
|
||||
endif
|
||||
|
||||
|
||||
ifneq ($(wildcard ../../makeopts),)
|
||||
include ../../makeopts
|
||||
endif
|
||||
|
||||
ifeq ($(SPECIAL_TARGETS),)
|
||||
# Run locally or from $(ASTTOPDIR)/Makefile. All include files should be present
|
||||
ifeq ($(wildcard ../../makeopts),)
|
||||
$(error ASTTOPDIR/configure hasn't been run)
|
||||
endif
|
||||
|
||||
ifeq ($(JANSSON_BUNDLED),yes)
|
||||
ifneq ($(wildcard ../../menuselect.makeopts),)
|
||||
include ../../menuselect.makeopts
|
||||
else
|
||||
$(warning ASTTOPDIR/menuselect hasn't been run yet. Can't find debug options.)
|
||||
endif
|
||||
|
||||
all: dest/include/jansson.h
|
||||
else
|
||||
all:
|
||||
endif
|
||||
endif
|
||||
|
||||
include ../../Makefile.rules
|
||||
include ../Makefile.rules
|
||||
include Makefile.rules
|
||||
|
||||
ECHO_PREFIX := $(ECHO_PREFIX) echo '[jansson] '
|
||||
SHELL_ECHO_PREFIX := echo '[jansson] '
|
||||
|
||||
dest/include/jansson.h: source/config.status
|
||||
$(ECHO_PREFIX) Building bundled jansson.
|
||||
$(CMD_PREFIX) (cd source; make $(REALLY_QUIET))
|
||||
$(CMD_PREFIX) (cd source; make install $(REALLY_QUIET))
|
||||
|
||||
.DELETE_ON_ERROR:
|
||||
|
||||
$(DOWNLOAD_DIR)/$(TARBALL_FILE): ../versions.mak
|
||||
$(CMD_PREFIX) ($(TARBALL_EXISTS) && $(TARBALL_VERIFY) && touch $@) || (rm -rf $@ ;\
|
||||
$(TARBALL_DOWNLOAD)) || (rm -rf $@ ;\
|
||||
$(SHELL_ECHO_PREFIX) Retrying download ; $(TARBALL_DOWNLOAD))
|
||||
|
||||
source/.unpacked: $(DOWNLOAD_DIR)/$(TARBALL_FILE)
|
||||
$(CMD_PREFIX) $(TARBALL_VERIFY) || (rm -rf $@ ;\
|
||||
$(SHELL_ECHO_PREFIX) Retrying download ; $(TARBALL_DOWNLOAD))
|
||||
$(ECHO_PREFIX) Unpacking $<
|
||||
-@rm -rf source jansson-*/ >/dev/null 2>&1
|
||||
$(CMD_PREFIX) $(TAR) -xjf $<
|
||||
@mv jansson-$(JANSSON_VERSION) source
|
||||
$(ECHO_PREFIX) Applying patches "$(realpath patches)" "$(realpath .)/source"
|
||||
$(CMD_PREFIX) ../apply_patches $(QUIET_CONFIGURE) "$(realpath patches)" "$(realpath .)/source"
|
||||
-@touch source/.unpacked
|
||||
|
||||
.rebuild_needed: $(wildcard ../../.lastclean)
|
||||
$(ECHO_PREFIX) Rebuilding
|
||||
$(CMD_PREFIX) $(MAKE) clean $(REALLY_QUIET)
|
||||
|
||||
source/config.status: source/.unpacked Makefile.rules .rebuild_needed
|
||||
$(ECHO_PREFIX) Configuring
|
||||
$(CMD_PREFIX) (cd source ; ./configure $(QUIET_CONFIGURE) $(JANSSON_CONFIG_OPTS) --disable-shared --enable-static --prefix=$(JANSSON_DIR)/dest)
|
||||
|
||||
configure: source/config.status
|
||||
|
||||
install:
|
||||
uninstall:
|
||||
|
||||
clean:
|
||||
$(ECHO_PREFIX) Cleaning
|
||||
+-$(CMD_PREFIX) test -d source dest && $(SUBMAKE) -C source clean || :
|
||||
|
||||
distclean:
|
||||
$(ECHO_PREFIX) Distcleaning
|
||||
-$(CMD_PREFIX) rm -rf source jansson-*.tar.bz2 .rebuild_needed
|
16
third-party/jansson/Makefile.rules
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
# We switched download locations so Asterisk users don't bombard the Digip
|
||||
# site with download requests.
|
||||
#
|
||||
# For future reference when upgrading bundled JANSSON the next time
|
||||
# JANSSON is released.
|
||||
# Digip's download URL.
|
||||
# PACKAGE_URL ?= http://www.digip.org/jansson/releases/
|
||||
|
||||
PACKAGE_URL ?= https://raw.githubusercontent.com/asterisk/third-party/master/jansson/$(JANSSON_VERSION)
|
||||
TARBALL_FILE = jansson-$(JANSSON_VERSION).tar.bz2
|
||||
|
||||
# JANSSON_CONFIGURE_OPTS could come from the command line or could be
|
||||
# set/modified by configure.m4 if the build or host tuples aren't the same
|
||||
# as the current build environment (cross-compile).
|
||||
|
||||
JANSSON_CONFIG_OPTS = $(JANSSON_CONFIGURE_OPTS)
|
89
third-party/jansson/configure.m4
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
#
|
||||
# If this file is changed, be sure to run ASTTOPDIR/bootstrap.sh
|
||||
# before committing.
|
||||
#
|
||||
|
||||
AC_DEFUN([_JANSSON_CONFIGURE],
|
||||
[
|
||||
if test "${ac_mandatory_list#*JANSSON*}" != "$ac_mandatory_list" ; then
|
||||
AC_MSG_ERROR(--with-jansson and --with-jansson-bundled can't both be specified)
|
||||
fi
|
||||
|
||||
ac_mandatory_list="$ac_mandatory_list JANSSON"
|
||||
JANSSON_DIR="${ac_pwd}/third-party/jansson"
|
||||
|
||||
AC_MSG_CHECKING(for embedded jansson (may have to download))
|
||||
AC_MSG_RESULT(configuring)
|
||||
|
||||
if test "x${DOWNLOAD_TO_STDOUT}" = "x" ; then
|
||||
AC_MSG_ERROR(A download utility (wget, curl, or fetch) is required to download bundled jansson)
|
||||
fi
|
||||
if test "${BZIP2}" = ":" ; then
|
||||
AC_MSG_ERROR(bzip2 is required to extract the jansson tar file)
|
||||
fi
|
||||
if test "${TAR}" = ":" ; then
|
||||
AC_MSG_ERROR(tar is required to extract the jansson tar file)
|
||||
fi
|
||||
if test "${PATCH}" = ":" ; then
|
||||
AC_MSG_ERROR(patch is required to configure bundled jansson)
|
||||
fi
|
||||
if test "${SED}" = ":" ; then
|
||||
AC_MSG_ERROR(sed is required to configure bundled jansson)
|
||||
fi
|
||||
if test "${NM}" = ":" ; then
|
||||
AC_MSG_ERROR(nm is required to build bundled jansson)
|
||||
fi
|
||||
if test "${MD5}" = ":" ; then
|
||||
AC_MSG_ERROR(md5sum is required to build bundled jansson)
|
||||
fi
|
||||
if test "${CAT}" = ":" ; then
|
||||
AC_MSG_ERROR(cat is required to build bundled jansson)
|
||||
fi
|
||||
if test "${CUT}" = ":" ; then
|
||||
AC_MSG_ERROR(cut is required to build bundled jansson)
|
||||
fi
|
||||
if test "${GREP}" = ":" ; then
|
||||
AC_MSG_ERROR(grep is required to build bundled jansson)
|
||||
fi
|
||||
|
||||
AC_ARG_VAR([JANSSON_CONFIGURE_OPTS],[Additional configure options to pass to bundled jansson])
|
||||
this_host=$(./config.sub $(./config.guess))
|
||||
if test "$build" != "$this_host" ; then
|
||||
JANSSON_CONFIGURE_OPTS+=" --build=$build"
|
||||
fi
|
||||
if test "$host" != "$this_host" ; then
|
||||
JANSSON_CONFIGURE_OPTS+=" --host=$host"
|
||||
fi
|
||||
|
||||
export TAR PATCH SED NM EXTERNALS_CACHE_DIR AST_DOWNLOAD_CACHE DOWNLOAD_TO_STDOUT DOWNLOAD_TIMEOUT DOWNLOAD MD5 CAT CUT GREP
|
||||
export NOISY_BUILD
|
||||
${GNU_MAKE} --quiet --no-print-directory -C ${JANSSON_DIR} \
|
||||
JANSSON_CONFIGURE_OPTS="$JANSSON_CONFIGURE_OPTS" \
|
||||
EXTERNALS_CACHE_DIR="${EXTERNALS_CACHE_DIR:-${AST_DOWNLOAD_CACHE}}" \
|
||||
configure
|
||||
if test $? -ne 0 ; then
|
||||
AC_MSG_RESULT(failed)
|
||||
AC_MSG_NOTICE(Unable to configure ${JANSSON_DIR})
|
||||
AC_MSG_ERROR(Re-run the ./configure command with 'NOISY_BUILD=yes' appended to see error details.)
|
||||
fi
|
||||
|
||||
AC_MSG_CHECKING(for bundled jansson)
|
||||
|
||||
JANSSON_INCLUDE=-I${JANSSON_DIR}/dest/include
|
||||
JANSSON_CFLAGS="$JANSSON_INCLUDE"
|
||||
JANSSON_LIB="-L${JANSSON_DIR}/dest/lib -ljansson"
|
||||
PBX_JANSSON=1
|
||||
|
||||
AC_SUBST([JANSSON_BUNDLED])
|
||||
AC_SUBST([PBX_JANSSON])
|
||||
AC_SUBST([JANSSON_LIB])
|
||||
AC_SUBST([JANSSON_INCLUDE])
|
||||
AC_MSG_RESULT(yes)
|
||||
])
|
||||
|
||||
AC_DEFUN([JANSSON_CONFIGURE],
|
||||
[
|
||||
if test "$JANSSON_BUNDLED" = "yes" ; then
|
||||
_JANSSON_CONFIGURE()
|
||||
fi
|
||||
])
|
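Putting the new configure.m4 and Makefile together: when --with-jansson-bundled is given, the tarball is downloaded (or taken from the externals cache), patched, and built as a static library under third-party/jansson/dest, and JANSSON_INCLUDE/JANSSON_LIB point at that tree. A usage sketch, with a placeholder cache path:

    # Build with the bundled jansson; the externals cache is optional and the
    # path shown here is only a placeholder.
    ./configure --with-jansson-bundled --with-externals-cache=/srv/cache/externals
    make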
1
third-party/jansson/jansson-2.11.tar.bz2.md5
vendored
Normal file
@@ -0,0 +1 @@
|
||||
289ca8cbd2df31de9bda7e5220754d25 jansson-2.11.tar.bz2
|
128
third-party/jansson/patches/0001-Improve-test-coverage.patch
vendored
Normal file
@@ -0,0 +1,128 @@
|
||||
From 73c22de51672cb40fdc29c95331923d4dcebb6fa Mon Sep 17 00:00:00 2001
|
||||
From: Corey Farrell <git@cfware.com>
|
||||
Date: Tue, 13 Feb 2018 04:35:37 -0500
|
||||
Subject: [PATCH 01/22] Improve test coverage.
|
||||
|
||||
Changes to test/ removed for bundled use in Asterisk.
|
||||
|
||||
* Test equality of different length strings.
|
||||
* Add tab to json_pack whitespace test.
|
||||
* Test json_sprintf with empty result and invalid UTF.
|
||||
* Test json_get_alloc_funcs with NULL arguments.
|
||||
* Test invalid arguments.
|
||||
* Add test_chaos to test allocation failure code paths.
|
||||
* Remove redundant json_is_string checks from json_string_equal and
|
||||
json_string_copy. Both functions are static and can only be called
|
||||
with a json string.
|
||||
|
||||
Fixes to issues found by test_chaos:
|
||||
* Fix crash on OOM in pack_unpack.c:read_string().
|
||||
* Unconditionally free string in string_create upon allocation failure.
|
||||
Update load.c:parse_value() to reflect this. This resolves a leak on
|
||||
allocation failure for pack_unpack.c:pack_string() and
|
||||
value.c:json_sprintf().
|
||||
|
||||
Although not visible from CodeCoverage these changes significantly
|
||||
increase branch coverage. Especially in src/value.c where we previously
|
||||
covered 67.4% of branches and now cover 96.3% of branches.
|
||||
---
|
||||
CMakeLists.txt | 1 +
|
||||
src/load.c | 6 +-
|
||||
src/pack_unpack.c | 5 +-
|
||||
src/value.c | 9 +-
|
||||
test/.gitignore | 1 +
|
||||
test/suites/api/Makefile.am | 2 +
|
||||
test/suites/api/test_array.c | 73 +++++++++++++++++
|
||||
test/suites/api/test_chaos.c | 115 ++++++++++++++++++++++++++
|
||||
test/suites/api/test_equal.c | 7 ++
|
||||
test/suites/api/test_memory_funcs.c | 7 ++
|
||||
test/suites/api/test_number.c | 36 ++++++++
|
||||
test/suites/api/test_object.c | 122 ++++++++++++++++++++++++++++
|
||||
test/suites/api/test_pack.c | 10 ++-
|
||||
test/suites/api/test_simple.c | 52 ++++++++++++
|
||||
test/suites/api/test_sprintf.c | 12 +++
|
||||
15 files changed, 444 insertions(+), 14 deletions(-)
|
||||
create mode 100644 test/suites/api/test_chaos.c
|
||||
|
||||
diff --git a/CMakeLists.txt b/CMakeLists.txt
|
||||
index 16cf552..2f6cfec 100644
|
||||
--- a/CMakeLists.txt
|
||||
+++ b/CMakeLists.txt
|
||||
@@ -487,6 +487,7 @@ if (NOT JANSSON_WITHOUT_TESTS)
|
||||
set(api_tests
|
||||
test_array
|
||||
test_copy
|
||||
+ test_chaos
|
||||
test_dump
|
||||
test_dump_callback
|
||||
test_equal
|
||||
diff --git a/src/load.c b/src/load.c
|
||||
index deb36f3..25efe2e 100644
|
||||
--- a/src/load.c
|
||||
+++ b/src/load.c
|
||||
@@ -829,10 +829,8 @@ static json_t *parse_value(lex_t *lex, size_t flags, json_error_t *error)
|
||||
}
|
||||
|
||||
json = jsonp_stringn_nocheck_own(value, len);
|
||||
- if(json) {
|
||||
- lex->value.string.val = NULL;
|
||||
- lex->value.string.len = 0;
|
||||
- }
|
||||
+ lex->value.string.val = NULL;
|
||||
+ lex->value.string.len = 0;
|
||||
break;
|
||||
}
|
||||
|
||||
diff --git a/src/pack_unpack.c b/src/pack_unpack.c
|
||||
index 153f64d..19dbf93 100644
|
||||
--- a/src/pack_unpack.c
|
||||
+++ b/src/pack_unpack.c
|
||||
@@ -159,7 +159,10 @@ static char *read_string(scanner_t *s, va_list *ap,
|
||||
return (char *)str;
|
||||
}
|
||||
|
||||
- strbuffer_init(&strbuff);
|
||||
+ if(strbuffer_init(&strbuff)) {
|
||||
+ set_error(s, "<internal>", json_error_out_of_memory, "Out of memory");
|
||||
+ s->has_error = 1;
|
||||
+ }
|
||||
|
||||
while(1) {
|
||||
str = va_arg(*ap, const char *);
|
||||
diff --git a/src/value.c b/src/value.c
|
||||
index b3b3141..29a978c 100644
|
||||
--- a/src/value.c
|
||||
+++ b/src/value.c
|
||||
@@ -652,8 +652,7 @@ static json_t *string_create(const char *value, size_t len, int own)
|
||||
|
||||
string = jsonp_malloc(sizeof(json_string_t));
|
||||
if(!string) {
|
||||
- if(!own)
|
||||
- jsonp_free(v);
|
||||
+ jsonp_free(v);
|
||||
return NULL;
|
||||
}
|
||||
json_init(&string->json, JSON_STRING);
|
||||
@@ -768,9 +767,6 @@ static int json_string_equal(const json_t *string1, const json_t *string2)
|
||||
{
|
||||
json_string_t *s1, *s2;
|
||||
|
||||
- if(!json_is_string(string1) || !json_is_string(string2))
|
||||
- return 0;
|
||||
-
|
||||
s1 = json_to_string(string1);
|
||||
s2 = json_to_string(string2);
|
||||
return s1->length == s2->length && !memcmp(s1->value, s2->value, s1->length);
|
||||
@@ -780,9 +776,6 @@ static json_t *json_string_copy(const json_t *string)
|
||||
{
|
||||
json_string_t *s;
|
||||
|
||||
- if(!json_is_string(string))
|
||||
- return NULL;
|
||||
-
|
||||
s = json_to_string(string);
|
||||
return json_stringn_nocheck(s->value, s->length);
|
||||
}
|
||||
--
|
||||
2.17.1
|
||||
|
103
third-party/jansson/patches/0017-Fix-error-handling-in-json_pack.patch
vendored
Normal file
@@ -0,0 +1,103 @@
|
||||
From 15105b66b4df387037b670ac713584194ea10c2f Mon Sep 17 00:00:00 2001
|
||||
From: Maxim Zhukov <mussitantesmortem@gmail.com>
|
||||
Date: Mon, 12 Mar 2018 17:39:04 +0300
|
||||
Subject: [PATCH 17/22] Fix error handling in json_pack
|
||||
|
||||
Changes to test/ removed.
|
||||
|
||||
Fixed a bug where the error message was not filled if an empty object
|
||||
was passed to the json_pack.
|
||||
|
||||
Fixes #271
|
||||
---
|
||||
src/pack_unpack.c | 64 ++++++++++++++++++-------------------
|
||||
test/suites/api/test_pack.c | 8 +++++
|
||||
2 files changed, 40 insertions(+), 32 deletions(-)
|
||||
|
||||
diff --git a/src/pack_unpack.c b/src/pack_unpack.c
|
||||
index 4026fd9..6461c06 100644
|
||||
--- a/src/pack_unpack.c
|
||||
+++ b/src/pack_unpack.c
|
||||
@@ -348,6 +348,36 @@ static json_t *pack_string(scanner_t *s, va_list *ap)
|
||||
}
|
||||
}
|
||||
|
||||
+static json_t *pack_object_inter(scanner_t *s, va_list *ap, int need_incref)
|
||||
+{
|
||||
+ json_t *json;
|
||||
+ char ntoken;
|
||||
+
|
||||
+ next_token(s);
|
||||
+ ntoken = token(s);
|
||||
+
|
||||
+ if (ntoken != '?')
|
||||
+ prev_token(s);
|
||||
+
|
||||
+ json = va_arg(*ap, json_t *);
|
||||
+
|
||||
+ if (json)
|
||||
+ return need_incref ? json_incref(json) : json;
|
||||
+
|
||||
+ switch (ntoken) {
|
||||
+ case '?':
|
||||
+ return json_null();
|
||||
+ case '*':
|
||||
+ return NULL;
|
||||
+ default:
|
||||
+ break;
|
||||
+ }
|
||||
+
|
||||
+ set_error(s, "<args>", json_error_null_value, "NULL object key");
|
||||
+ s->has_error = 1;
|
||||
+ return NULL;
|
||||
+}
|
||||
+
|
||||
static json_t *pack(scanner_t *s, va_list *ap)
|
||||
{
|
||||
switch(token(s)) {
|
||||
@@ -376,40 +406,10 @@ static json_t *pack(scanner_t *s, va_list *ap)
|
||||
return json_real(va_arg(*ap, double));
|
||||
|
||||
case 'O': /* a json_t object; increments refcount */
|
||||
- {
|
||||
- int nullable;
|
||||
- json_t *json;
|
||||
-
|
||||
- next_token(s);
|
||||
- nullable = token(s) == '?';
|
||||
- if (!nullable)
|
||||
- prev_token(s);
|
||||
-
|
||||
- json = va_arg(*ap, json_t *);
|
||||
- if (!json && nullable) {
|
||||
- return json_null();
|
||||
- } else {
|
||||
- return json_incref(json);
|
||||
- }
|
||||
- }
|
||||
+ return pack_object_inter(s, ap, 1);
|
||||
|
||||
case 'o': /* a json_t object; doesn't increment refcount */
|
||||
- {
|
||||
- int nullable;
|
||||
- json_t *json;
|
||||
-
|
||||
- next_token(s);
|
||||
- nullable = token(s) == '?';
|
||||
- if (!nullable)
|
||||
- prev_token(s);
|
||||
-
|
||||
- json = va_arg(*ap, json_t *);
|
||||
- if (!json && nullable) {
|
||||
- return json_null();
|
||||
- } else {
|
||||
- return json;
|
||||
- }
|
||||
- }
|
||||
+ return pack_object_inter(s, ap, 0);
|
||||
|
||||
default:
|
||||
set_error(s, "<format>", json_error_invalid_format, "Unexpected format character '%c'",
|
||||
--
|
||||
2.17.1
|
||||
|
45
third-party/pjproject/Makefile
vendored
@@ -89,49 +89,22 @@ SHELL_ECHO_PREFIX := echo '[pjproject] '
|
||||
|
||||
_all: $(TARGETS)
|
||||
|
||||
define tarball_exists
|
||||
(if [ -f $(TARBALL) -a -f $(PJMD5SUM) ] ; then exit 0 ;\
|
||||
else exit 1; fi; )
|
||||
endef
|
||||
|
||||
define verify_tarball
|
||||
($(SHELL_ECHO_PREFIX) Verifying $(TARBALL) &&\
|
||||
tarball_sum=$$($(CAT) $(TARBALL) | $(MD5) | $(CUT) -d' ' -f1) ;\
|
||||
required_sum=$$($(GREP) -e $(TARBALL_FILE) $(PJMD5SUM) | $(CUT) -d' ' -f1) ;\
|
||||
if [ -z "$$required_sum" -o "$$tarball_sum" != "$$required_sum" ] ; then $(SHELL_ECHO_PREFIX) Verify failed ; exit 1 ;\
|
||||
else $(SHELL_ECHO_PREFIX) Verify successful ; exit 0 ; fi; )
|
||||
endef
|
||||
|
||||
define download_from_pjproject
|
||||
($(SHELL_ECHO_PREFIX) Downloading $(TARBALL_URL) to $(TARBALL) ;\
|
||||
$(DOWNLOAD_TO_STDOUT) $(call DOWNLOAD_TIMEOUT,5,60) $(TARBALL_URL) > $(TARBALL) &&\
|
||||
$(SHELL_ECHO_PREFIX) Downloading $(PJPROJECT_URL)/MD5SUM.TXT to $(PJMD5SUM) &&\
|
||||
$(DOWNLOAD_TO_STDOUT) $(call DOWNLOAD_TIMEOUT,5,60) $(PJPROJECT_URL)/MD5SUM.TXT > $(PJMD5SUM) &&\
|
||||
$(verify_tarball))
|
||||
endef
|
||||
|
||||
.DELETE_ON_ERROR:
|
||||
|
||||
DOWNLOAD_DIR := $(or $(EXTERNALS_CACHE_DIR),$(TMPDIR),$(wildcard /tmp),.)
|
||||
TARBALL_FILE = pjproject-$(PJPROJECT_VERSION).tar.bz2
|
||||
TARBALL = $(DOWNLOAD_DIR)/$(TARBALL_FILE)
|
||||
TARBALL_URL = $(PJPROJECT_URL)/$(TARBALL_FILE)
|
||||
PJMD5SUM = $(patsubst %.tar.bz2,%.md5,$(TARBALL))
|
||||
$(DOWNLOAD_DIR)/$(TARBALL_FILE): ../versions.mak
|
||||
$(CMD_PREFIX) ($(TARBALL_EXISTS) && $(TARBALL_VERIFY) && touch $@) || (rm -rf $@ ;\
|
||||
$(TARBALL_DOWNLOAD)) || (rm -rf $@ ;\
|
||||
$(SHELL_ECHO_PREFIX) Retrying download ; $(TARBALL_DOWNLOAD))
|
||||
|
||||
$(TARBALL): ../versions.mak
|
||||
$(CMD_PREFIX) ($(tarball_exists) && $(verify_tarball) && touch $@) || (rm -rf $@ ;\
|
||||
$(download_from_pjproject)) || (rm -rf $@ ;\
|
||||
$(SHELL_ECHO_PREFIX) Retrying download ; $(download_from_pjproject))
|
||||
|
||||
source/.unpacked: $(DOWNLOAD_DIR)/pjproject-$(PJPROJECT_VERSION).tar.bz2
|
||||
$(CMD_PREFIX) $(verify_tarball) || (rm -rf $@ ;\
|
||||
$(SHELL_ECHO_PREFIX) Retrying download ; $(download_from_pjproject))
|
||||
source/.unpacked: $(DOWNLOAD_DIR)/$(TARBALL_FILE)
|
||||
$(CMD_PREFIX) $(TARBALL_VERIFY) || (rm -rf $@ ;\
|
||||
$(SHELL_ECHO_PREFIX) Retrying download ; $(TARBALL_DOWNLOAD))
|
||||
$(ECHO_PREFIX) Unpacking $<
|
||||
-@rm -rf source pjproject-* >/dev/null 2>&1
|
||||
-@rm -rf source pjproject-*/ >/dev/null 2>&1
|
||||
$(CMD_PREFIX) $(TAR) -xjf $<
|
||||
@mv pjproject-$(PJPROJECT_VERSION) source
|
||||
$(ECHO_PREFIX) Applying patches "$(realpath patches)" "$(realpath .)/source"
|
||||
$(CMD_PREFIX) ./apply_patches $(QUIET_CONFIGURE) "$(realpath patches)" "$(realpath .)/source"
|
||||
$(CMD_PREFIX) ../apply_patches $(QUIET_CONFIGURE) "$(realpath patches)" "$(realpath .)/source"
|
||||
-@touch source/.unpacked
|
||||
|
||||
source/version.mak: source/.unpacked
|
||||
|
5
third-party/pjproject/Makefile.rules
vendored
@@ -4,9 +4,10 @@
|
||||
# For future reference when upgrading bundled PJPROJECT the next time
|
||||
# PJPROJECT is released.
|
||||
# Teluu's download URL.
|
||||
# PJPROJECT_URL ?= http://www.pjsip.org/release/$(PJPROJECT_VERSION)
|
||||
# PACKAGE_URL ?= http://www.pjsip.org/release/$(PJPROJECT_VERSION)
|
||||
|
||||
PJPROJECT_URL ?= https://raw.githubusercontent.com/asterisk/third-party/master/pjproject/$(PJPROJECT_VERSION)
|
||||
PACKAGE_URL ?= https://raw.githubusercontent.com/asterisk/third-party/master/pjproject/$(PJPROJECT_VERSION)
|
||||
TARBALL_FILE = pjproject-$(PJPROJECT_VERSION).tar.bz2
|
||||
|
||||
# PJPROJECT_CONFIGURE_OPTS could come from the command line or could be
|
||||
# set/modified by configure.m4 if the build or host tuples aren't the same
|
||||
|
2
third-party/pjproject/pjproject-2.7.2.tar.bz2.md5
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
8119f0d91a00b6f553099e6ee5358ade *pjproject-2.7.2.zip
|
||||
fa3f0bc098c4bff48ddd92db1c016a7a pjproject-2.7.2.tar.bz2
|
2
third-party/versions.mak
vendored
@@ -1,2 +1,2 @@
|
||||
|
||||
JANSSON_VERSION = 2.11
|
||||
PJPROJECT_VERSION = 2.7.2
|
||||
|
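versions.mak is the single place that pins the bundled package versions, and the .md5 file next to each package is what TARBALL_VERIFY checks. A purely hypothetical sketch of bumping the bundled jansson later on (the 2.12 version, URL layout, and paths are assumptions, not something this change introduces):

    # Hypothetical version bump (version number and URL are assumptions).
    sed -i 's/^JANSSON_VERSION = .*/JANSSON_VERSION = 2.12/' third-party/versions.mak
    curl -L -o /tmp/jansson-2.12.tar.bz2 \
        https://raw.githubusercontent.com/asterisk/third-party/master/jansson/2.12/jansson-2.12.tar.bz2
    (cd /tmp && md5sum jansson-2.12.tar.bz2) > third-party/jansson/jansson-2.12.tar.bz2.md5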