Merge branch 'openwrt:master' into master

Hayzam Sherif 2023-12-22 23:23:45 +05:30 committed by GitHub
commit 1ae934d62f
138 changed files with 1664 additions and 1344 deletions

@@ -2,12 +2,12 @@ include $(TOPDIR)/rules.mk
 include $(INCLUDE_DIR)/kernel.mk
 PKG_NAME:=debian-archive-keyring
-PKG_VERSION:=2021.1.1
+PKG_VERSION:=2023.4
+PKG_HASH:=6e93a87b9e50bd81518880ec07a62f95d7d8452f4aa703f5b0a3076439f1022c
 PKG_RELEASE:=1
-PKG_SOURCE:=debian-archive-keyring_2021.1.1_all.deb
+PKG_SOURCE:=$(PKG_NAME)_$(PKG_VERSION)_all.deb
-PKG_SOURCE_URL:=http://ftp.debian.org/debian/pool/main/d/debian-archive-keyring/
+PKG_SOURCE_URL:=http://ftp.debian.org/debian/pool/main/d/$(PKG_NAME)/
-PKG_HASH:=56beca470dcd9b6d7e6c3c9e9d702101e01e9467e62810a8c357bd7b9c26251d
 PKG_BUILD_DIR:=$(BUILD_DIR)/$(PKG_NAME)-$(PKG_VERSION)
 PKG_MAINTAINER:=Daniel Golle <daniel@makrotopia.org>

@@ -9,12 +9,12 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=debootstrap
-PKG_VERSION:=1.0.126
+PKG_VERSION:=1.0.128+nmu2+deb12u1
 PKG_RELEASE:=1
 PKG_SOURCE:=$(PKG_NAME)-udeb_$(PKG_VERSION)_all.udeb
 PKG_SOURCE_URL:=@DEBIAN/pool/main/d/debootstrap
-PKG_HASH:=ca8233789167fd7ddf50aab8d4cb5085436832efdf54423fa3e446832d625a92
+PKG_HASH:=4fa4ec7c144ed047c47d0d8eb9b91b56eaa9b2db2b52510777abbabf5965d268
 PKG_MAINTAINER:=Daniel Golle <daniel@makrotopia.org>
 PKG_LICENSE:=Unique

@@ -8,12 +8,12 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=sudo
-PKG_VERSION:=1.9.14p3
+PKG_VERSION:=1.9.15p4
 PKG_RELEASE:=1
 PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz
 PKG_SOURCE_URL:=https://www.sudo.ws/dist
-PKG_HASH:=a08318b1c4bc8582c004d4cd9ae2903abc549e7e46ba815e41fe81d1c0782b62
+PKG_HASH:=2e20ec9865eeeea1316c6f49ec6ac4678869b689d4d90b44243bf4887d6dd532
 PKG_MAINTAINER:=Alexandru Ardelean <ardeleanalex@gmail.com>

@@ -1,6 +1,6 @@
 --- a/Makefile.in
 +++ b/Makefile.in
-@@ -74,7 +74,7 @@ EGREP = @EGREP@
+@@ -75,7 +75,7 @@ EGREP = @EGREP@
 SED = @SED@
 INSTALL = $(SHELL) $(scriptdir)/install-sh -c

admin/sudo/test.sh (new file)

@@ -0,0 +1,7 @@
+#!/bin/sh
+case "$1" in
+sudo)
+sudo --version | grep "$2"
+;;
+esac

@@ -6,8 +6,8 @@ PKG_RELEASE:=1
 PKG_SOURCE_URL:=https://github.com/wkz/mdio-tools
 PKG_SOURCE_PROTO:=git
-PKG_SOURCE_VERSION:=1.3.0
+PKG_SOURCE_VERSION:=1.3.1
-PKG_MIRROR_HASH:=668a1dcb06da1c0a26e3aac86487ca83cff548c8c6e3763fde905a888fea5f5e
+PKG_MIRROR_HASH:=97dfd25d8cdf5994eeb8cb0a5862c993b8aef373b280bca567d41d4113f494a9
 PKG_LICENSE:=GPL-2.0-only
 PKG_LICENSE_FILES:=COPYING

@@ -8,7 +8,7 @@
 include $(TOPDIR)/rules.mk
 GO_VERSION_MAJOR_MINOR:=1.21
-GO_VERSION_PATCH:=4
+GO_VERSION_PATCH:=5
 PKG_NAME:=golang
 PKG_VERSION:=$(GO_VERSION_MAJOR_MINOR)$(if $(GO_VERSION_PATCH),.$(GO_VERSION_PATCH))
@@ -20,7 +20,7 @@ GO_SOURCE_URLS:=https://dl.google.com/go/ \
 PKG_SOURCE:=go$(PKG_VERSION).src.tar.gz
 PKG_SOURCE_URL:=$(GO_SOURCE_URLS)
-PKG_HASH:=47b26a83d2b65a3c1c1bcace273b69bee49a7a7b5168a7604ded3d26a37bd787
+PKG_HASH:=285cbbdf4b6e6e62ed58f370f3f6d8c30825d6e56c5853c66d3c23bcdb09db19
 PKG_MAINTAINER:=Jeffery To <jeffery.to@gmail.com>
 PKG_LICENSE:=BSD-3-Clause

@@ -1,12 +1,12 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=lua-eco
-PKG_VERSION:=3.1.0
+PKG_VERSION:=3.1.2
 PKG_RELEASE:=1
 PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz
 PKG_SOURCE_URL=https://github.com/zhaojh329/lua-eco/releases/download/v$(PKG_VERSION)
-PKG_HASH:=bb48af3f65a2c5d69b06b32ec2734bcb77cc6315b208be4fe3b0ae5fc0a82a33
+PKG_HASH:=eff99419d14d3cb13e2513bcf38bac643560e905461043492bb9daa282b34e7f
 PKG_MAINTAINER:=Jianhui Zhao <zhaojh329@gmail.com>
 PKG_LICENSE:=MIT

@@ -1,13 +1,13 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=perl-net-dns
-PKG_VERSION:=1.35
+PKG_VERSION:=1.41
 PKG_RELEASE:=1
 PKG_SOURCE_NAME:=Net-DNS
 PKG_SOURCE:=$(PKG_SOURCE_NAME)-$(PKG_VERSION).tar.gz
 PKG_SOURCE_URL:=https://www.net-dns.org/download
-PKG_HASH:=f1a1478e4acbdb6b96de63070b35050dec9b9fce6c95bb2215bfc64a2d98e167
+PKG_HASH:=3e053cb0756a2d9c83f11e2bcf271e39a5279d8cad5645c86c6020c23f765061
 PKG_BUILD_DIR:=$(BUILD_DIR)/perl/$(PKG_SOURCE_NAME)-$(PKG_VERSION)
 HOST_BUILD_DIR:=$(BUILD_DIR_HOST)/perl/$(PKG_SOURCE_NAME)-$(PKG_VERSION)

@@ -8,9 +8,9 @@ include $(TOPDIR)/rules.mk
 PECL_NAME:=xdebug
 PECL_LONGNAME:=Xdebug extension
-PKG_VERSION:=3.2.2
+PKG_VERSION:=3.3.1
 PKG_RELEASE:=1
-PKG_HASH:=f48777371f90cbb315ea4ea082a1ede6765bcfb35d7d6356ab8f71fd6dfcc157
+PKG_HASH:=4eb4ee270bbcc5f14195c38f6ee58580e007cf4886ce32e11430318ab5bc2315
 PKG_NAME:=php8-pecl-xdebug
 PKG_SOURCE:=$(PECL_NAME)-$(PKG_VERSION).tgz

@@ -6,7 +6,7 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=php
-PKG_VERSION:=8.2.12
+PKG_VERSION:=8.3.0
 PKG_RELEASE:=1
 PKG_MAINTAINER:=Michael Heimpold <mhei@heimpold.de>
@@ -16,7 +16,7 @@ PKG_CPE_ID:=cpe:/a:php:php
 PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.xz
 PKG_SOURCE_URL:=http://www.php.net/distributions/
-PKG_HASH:=e1526e400bce9f9f9f774603cfac6b72b5e8f89fa66971ebc3cc4e5964083132
+PKG_HASH:=1db84fec57125aa93638b51bb2b15103e12ac196e2f960f0d124275b2687ea54
 PKG_BUILD_PARALLEL:=1
 PKG_BUILD_FLAGS:=no-mips16

@@ -15,7 +15,7 @@ To be used in tandem with use_embedded_timezonedb.patch and use_embedded_timezon
 --- a/ext/date/php_date.c
 +++ b/ext/date/php_date.c
-@@ -549,6 +549,23 @@ static char* guess_timezone(const timeli
+@@ -567,6 +567,23 @@ static const char* guess_timezone(const
 } else if (*DATEG(default_timezone)) {
 return DATEG(default_timezone);
 }

@@ -9,7 +9,7 @@ Make generated php_config.h constant across rebuilds.
 --- a/configure.ac
 +++ b/configure.ac
-@@ -1451,7 +1451,7 @@ PHP_REMOVE_USR_LIB(LDFLAGS)
+@@ -1490,7 +1490,7 @@ PHP_REMOVE_USR_LIB(LDFLAGS)
 EXTRA_LDFLAGS="$EXTRA_LDFLAGS $PHP_LDFLAGS"
 EXTRA_LDFLAGS_PROGRAM="$EXTRA_LDFLAGS_PROGRAM $PHP_LDFLAGS"

@@ -11,7 +11,7 @@
 --- a/configure.ac
 +++ b/configure.ac
-@@ -1634,13 +1634,13 @@ CFLAGS_CLEAN="$CFLAGS \$(PROF_FLAGS)"
+@@ -1690,13 +1690,13 @@ CFLAGS_CLEAN="$CFLAGS \$(PROF_FLAGS)"
 CFLAGS="\$(CFLAGS_CLEAN) $standard_libtool_flag"
 CXXFLAGS="$CXXFLAGS $standard_libtool_flag \$(PROF_FLAGS)"

@@ -8,11 +8,11 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=pipx
-PKG_VERSION:=1.3.2
+PKG_VERSION:=1.3.3
 PKG_RELEASE:=1
 PYPI_NAME:=pipx
-PKG_HASH:=704d01d04c67c2dd0c776c5bf5ed35c7b249055b0174568b8507f07d72ed7a7f
+PKG_HASH:=6d5474e71e78c28d83570443e5418c56599aa8319a950ccf5984c5cb0a35f0a7
 PKG_LICENSE:=MIT
 PKG_LICENSE_FILES:=LICENSE
@@ -29,7 +29,7 @@ define Package/pipx
 CATEGORY:=Languages
 SUBMENU:=Python
 TITLE:=Install/Run Python Applications in Isolated Environments
-URL:=https://github.com/pypa/pipx
+URL:=https://pipx.pypa.io/
 DEPENDS:= \
 +python3-light \
 +python3-logging \

@@ -8,11 +8,11 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=python-argcomplete
-PKG_VERSION:=3.1.6
+PKG_VERSION:=3.2.1
 PKG_RELEASE:=1
 PYPI_NAME:=argcomplete
-PKG_HASH:=3b1f07d133332547a53c79437527c00be48cca3807b1d4ca5cab1b26313386a6
+PKG_HASH:=437f67fb9b058da5a090df505ef9be0297c4883993f3f56cb186ff087778cfb4
 PKG_LICENSE:=Apache-2.0
 PKG_LICENSE_FILES:=LICENSE.rst

@@ -7,11 +7,11 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=python-ble2mqtt
-PKG_VERSION:=0.2.0
+PKG_VERSION:=0.2.1
 PKG_RELEASE:=1
 PYPI_NAME:=ble2mqtt
-PKG_HASH:=cd45b5fb382a71d441470899e01be50482d26e2d86e3012e1f5a9ab633248383
+PKG_HASH:=b02a2cb61ae7a2ba6e4d9d5c2bd95bba80cd6ee8b7d8704a6a55ee56d5ecb196
 PKG_MAINTAINER:=Quintin Hill <stuff@quintin.me.uk>
 PKG_LICENSE:=MIT

@@ -7,11 +7,11 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=python-bleak
-PKG_VERSION:=0.20.2
+PKG_VERSION:=0.21.1
 PKG_RELEASE:=1
 PYPI_NAME:=bleak
-PKG_HASH:=6c92a47abe34e6dea8ffc5cea9457cbff6e1be966854839dbc25cddb36b79ee4
+PKG_HASH:=ec4a1a2772fb315b992cbaa1153070c7e26968a52b0e2727035f443a1af5c18f
 PKG_MAINTAINER:=Quintin Hill <stuff@quintin.me.uk>
 PKG_LICENSE:=MIT

@@ -7,12 +7,12 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=python-dbus-fast
-PKG_VERSION:=1.94.1
+PKG_VERSION:=2.20.0
 PKG_RELEASE:=1
 PYPI_NAME:=dbus-fast
 PYPI_SOURCE_NAME:=dbus_fast
-PKG_HASH:=9514e4abf586c656fb70cf9dab323a019131a032765997972045059717b7537c
+PKG_HASH:=a38e837c5a8d0a1745ec8390f68ff57986ed2167b0aa2e4a79738a51dd6dfcc3
 PKG_MAINTAINER:=Quintin Hill <stuff@quintin.me.uk>
 PKG_LICENSE:=MIT

@@ -8,11 +8,11 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=python-pathspec
-PKG_VERSION:=0.11.2
+PKG_VERSION:=0.12.1
 PKG_RELEASE:=1
 PYPI_NAME:=pathspec
-PKG_HASH:=e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3
+PKG_HASH:=a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712
 PKG_LICENSE:=MPL-2.0
 PKG_LICENSE_FILES:=LICENSE

@@ -8,11 +8,11 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=python-platformdirs
-PKG_VERSION:=4.0.0
+PKG_VERSION:=4.1.0
 PKG_RELEASE:=1
 PYPI_NAME:=platformdirs
-PKG_HASH:=cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731
+PKG_HASH:=906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420
 PKG_LICENSE:=MIT
 PKG_LICENSE_FILES:=LICENSE

@@ -8,11 +8,11 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=python-referencing
-PKG_VERSION:=0.31.1
+PKG_VERSION:=0.32.0
 PKG_RELEASE:=1
 PYPI_NAME:=referencing
-PKG_HASH:=81a1471c68c9d5e3831c30ad1dd9815c45b558e596653db751a2bfdd17b3b9ec
+PKG_HASH:=689e64fe121843dcfd57b71933318ef1f91188ffb45367332700a86ac8fd6161
 PKG_LICENSE:=MIT
 PKG_LICENSE_FILES:=COPYING

@@ -8,12 +8,12 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=python-typing-extensions
-PKG_VERSION:=4.8.0
+PKG_VERSION:=4.9.0
 PKG_RELEASE:=1
 PYPI_NAME:=typing-extensions
 PYPI_SOURCE_NAME:=typing_extensions
-PKG_HASH:=df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef
+PKG_HASH:=23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783
 PKG_MAINTAINER:=Jan Pavlinec <jan.pavlinec1@gmail.com>, Jeffery To <jeffery.to@gmail.com>
 PKG_LICENSE:=Python-2.0.1 0BSD

@@ -8,7 +8,7 @@
 # Note: keep in sync with setuptools & pip
 PYTHON3_VERSION_MAJOR:=3
 PYTHON3_VERSION_MINOR:=11
-PYTHON3_VERSION_MICRO:=6
+PYTHON3_VERSION_MICRO:=7
 PYTHON3_VERSION:=$(PYTHON3_VERSION_MAJOR).$(PYTHON3_VERSION_MINOR)

@@ -16,7 +16,7 @@ PKG_VERSION:=$(PYTHON3_VERSION).$(PYTHON3_VERSION_MICRO)
 PKG_SOURCE:=Python-$(PKG_VERSION).tar.xz
 PKG_SOURCE_URL:=https://www.python.org/ftp/python/$(PKG_VERSION)
-PKG_HASH:=0fab78fa7f133f4f38210c6260d90d7c0d5c7198446419ce057ec7ac2e6f5f38
+PKG_HASH:=18e1aa7e66ff3a58423d59ed22815a6954e53342122c45df20c96877c062b9b7
 PKG_MAINTAINER:=Jeffery To <jeffery.to@gmail.com>
 PKG_LICENSE:=Python-2.0.1 0BSD

@@ -1,6 +1,6 @@
 --- a/Makefile.pre.in
 +++ b/Makefile.pre.in
-@@ -2128,6 +2128,7 @@ libinstall: all $(srcdir)/Modules/xxmodu
+@@ -2133,6 +2133,7 @@ libinstall: all $(srcdir)/Modules/xxmodu
 $(INSTALL_DATA) $(srcdir)/Modules/xxmodule.c \
 $(DESTDIR)$(LIBDEST)/distutils/tests ; \
 fi
@@ -8,7 +8,7 @@
 -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
 $(PYTHON_FOR_BUILD) -Wi $(DESTDIR)$(LIBDEST)/compileall.py \
 -j0 -d $(LIBDEST) -f \
-@@ -2155,6 +2156,7 @@ libinstall: all $(srcdir)/Modules/xxmodu
+@@ -2160,6 +2161,7 @@ libinstall: all $(srcdir)/Modules/xxmodu
 $(PYTHON_FOR_BUILD) -Wi -OO $(DESTDIR)$(LIBDEST)/compileall.py \
 -j0 -d $(LIBDEST)/site-packages -f \
 -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages

@@ -1,6 +1,6 @@
 --- a/Makefile.pre.in
 +++ b/Makefile.pre.in
-@@ -2173,7 +2173,7 @@ python-config: $(srcdir)/Misc/python-con
+@@ -2178,7 +2178,7 @@ python-config: $(srcdir)/Misc/python-con
 @ # On Darwin, always use the python version of the script, the shell
 @ # version doesn't use the compiler customizations that are provided
 @ # in python (_osx_support.py).

@@ -289,7 +289,7 @@ Signed-off-by: Jeffery To <jeffery.to@gmail.com>
 +#endif
 --- a/configure.ac
 +++ b/configure.ac
-@@ -917,180 +917,14 @@ fi
+@@ -925,180 +925,14 @@ fi
 AC_MSG_CHECKING([for the platform triplet based on compiler characteristics])

@@ -9,7 +9,7 @@ include $(TOPDIR)/rules.mk
 PKG_NAME:=cyrus-sasl
 PKG_VERSION:=2.1.28
-PKG_RELEASE:=1
+PKG_RELEASE:=2
 PKG_MAINTAINER:=W. Michael Petullo <mike@flyn.org>
@@ -47,6 +47,12 @@ define Package/libsasl2-sasldb
 TITLE+= (sasldb libraries)
 endef
+define Package/libsasl2-utils
+$(call Package/libsasl2/Default)
+DEPENDS:=+libsasl2 +libdb47
+TITLE+= (sasldb utilities)
+endef
 TARGET_CFLAGS += $(FPIC)
 CONFIGURE_ARGS += \
 --enable-shared \
@@ -132,5 +138,11 @@ define Package/libsasl2-sasldb/install
 $(CP) $(PKG_INSTALL_DIR)/usr/lib/sasl2/libsasldb.so* $(1)/usr/lib/sasl2/
 endef
+define Package/libsasl2-utils/install
+$(INSTALL_DIR) $(1)/usr/sbin/
+$(CP) $(PKG_INSTALL_DIR)/usr/sbin/{pluginviewer,sasldblistusers2,saslpasswd2} $(1)/usr/sbin/
+endef
 $(eval $(call BuildPackage,libsasl2))
 $(eval $(call BuildPackage,libsasl2-sasldb))
+$(eval $(call BuildPackage,libsasl2-utils))

libs/davici/Makefile (new file)

@@ -0,0 +1,53 @@
+#
+# Copyright (C) 2023 TDT AG <development@tdt.de>
+#
+# This is free software, licensed under the GNU General Public License v2.
+# See https://www.gnu.org/licenses/gpl-2.0.txt for more information.
+#
+include $(TOPDIR)/rules.mk
+PKG_NAME:=davici
+PKG_VERSION:=1.4
+PKG_RELEASE=1
+PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz
+PKG_SOURCE_URL:=https://codeload.github.com/strongswan/davici/tar.gz/v$(PKG_VERSION)?
+PKG_HASH:=b03c5a1aad905e962271d70246d6af6c337ffd00449d990082ea02161327bde8
+PKG_MAINTAINER:=Lukas Voegl <lvoegl@tdt.de>
+PKG_LICENSE:=LGPL-2.1-or-later
+PKG_LICENSE_FILES:=COPYING
+PKG_FIXUP:=autoreconf
+PKG_BUILD_PARALLEL:=1
+PKG_INSTALL:=1
+include $(INCLUDE_DIR)/package.mk
+define Package/davici
+SECTION:=libs
+CATEGORY:=Libraries
+TITLE:=Decoupled Asynchronous VICI
+URL:=https://github.com/strongswan/davici
+endef
+define Package/davici/description
+The davici library provides a client implementation of the
+strongSwan VICI protocol for integration into external applications.
+endef
+define Build/InstallDev
+$(INSTALL_DIR) $(1)/usr/include
+$(CP) $(PKG_INSTALL_DIR)/usr/include/*.h $(1)/usr/include/
+$(INSTALL_DIR) $(1)/usr/lib
+$(CP) $(PKG_INSTALL_DIR)/usr/lib/libdavici.so* $(1)/usr/lib/
+endef
+define Package/davici/install
+$(INSTALL_DIR) $(1)/usr/lib
+$(CP) $(PKG_INSTALL_DIR)/usr/lib/libdavici.so* $(1)/usr/lib/
+endef
+$(eval $(call BuildPackage,davici))

@@ -9,7 +9,7 @@ PKG_SOURCE_URL:=https://gnupg.org/ftp/gcrypt/$(PKG_NAME)
 PKG_HASH:=416e174e165734d84806253f8c96bda2993fd07f258c3aad5f053a6efd463e88
 PKG_MAINTAINER:=Daniel Golle <daniel@makrotopia.org>
-PKG_LICENSE:=GPL-3.0-or-later
+PKG_LICENSE:=LGPL-2.1-or-later
 PKG_LICENSE_FILES:=COPYING
 PKG_FIXUP:=autoreconf

@@ -8,12 +8,12 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=hiredis
-PKG_VERSION:=1.1.0
+PKG_VERSION:=1.2.0
 PKG_RELEASE:=1
 PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz
 PKG_SOURCE_URL:=https://codeload.github.com/redis/hiredis/tar.gz/v$(PKG_VERSION)?
-PKG_HASH:=fe6d21741ec7f3fc9df409d921f47dfc73a4d8ff64f4ac6f1d95f951bf7f53d6
+PKG_HASH:=82ad632d31ee05da13b537c124f819eb88e18851d9cb0c30ae0552084811588c
 PKG_LICENSE:=BSD-3-Clause
 PKG_LICENSE_FILES:=COPYING

libs/hyperscan/Makefile (new file)

@@ -0,0 +1,82 @@
+#
+# This is free software, licensed under the GNU General Public License v2.
+# See /LICENSE for more information.
+#
+include $(TOPDIR)/rules.mk
+PKG_NAME:=hyperscan
+PKG_VERSION:=5.4.2
+PKG_RELEASE:=1
+PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz
+PKG_SOURCE_URL:=https://codeload.github.com/intel/hyperscan/tar.gz/v$(PKG_VERSION)?
+PKG_HASH:=32b0f24b3113bbc46b6bfaa05cf7cf45840b6b59333d078cc1f624e4c40b2b99
+PKG_MAINTAINER:=John Audia <therealgraysky@proton.me>
+PKG_LICENSE:=BSD-3-Clause BSD-2-Clause BSL-1.0
+PKG_LICENSE_FILES:=LICENSE
+PKG_BUILD_DEPENDS:=ragel/host python3/host boost/host
+include $(INCLUDE_DIR)/package.mk
+include $(INCLUDE_DIR)/cmake.mk
+# A minimum of SSSE3 support is required to use hyperscan
+# We need to define the C(XX)FLAGS to avoid -march=native being
+# used as this is hardcoded by upstream, see:
+# https://github.com/intel/hyperscan/blob/master/doc/dev-reference/getting_started.rst
+CMAKE_OPTIONS += \
+-DCMAKE_INSTALL_PREFIX=/usr \
+-DCMAKE_INSTALL_LIBDIR=lib \
+-DBUILD_SHARED_LIBS=ON \
+-DCMAKE_C_FLAGS="-march=x86-64-v2" \
+-DCMAKE_CXX_FLAGS="-march=x86-64-v2" \
+-Wno-dev
+define Package/hyperscan-headers
+CATEGORY:=Libraries
+SECTION:=libs
+TITLE:=Hyperscan Headers
+URL:=https://github.com/intel/hyperscan
+DEPENDS:=@TARGET_x86_64
+endef
+define Package/hyperscan-runtime
+CATEGORY:=Libraries
+SECTION:=libs
+TITLE:=Hyperscan Runtime
+URL:=https://github.com/intel/hyperscan
+DEPENDS:=@TARGET_x86_64 +libstdcpp
+endef
+define Package/hyperscan-headers/description
+This package contains the headers for Hyperscan.
+endef
+define Package/hyperscan-runtime/description
+This package contains the shared objects for Hyperscan.
+endef
+# This installs files into ./staging_dir/. so that you can cross compile from the host
+define Build/InstallDev
+$(INSTALL_DIR) $(1)/usr/include/hs
+$(INSTALL_DATA) $(PKG_INSTALL_DIR)/usr/include/hs/* $(1)/usr/include/hs/
+$(INSTALL_DIR) $(1)/usr/lib
+$(INSTALL_DATA) $(PKG_INSTALL_DIR)/usr/lib/libhs* $(1)/usr/lib/
+$(INSTALL_DIR) $(1)/usr/lib/pkgconfig
+$(INSTALL_DATA) $(PKG_BUILD_DIR)/libhs.pc $(1)/usr/lib/pkgconfig/libhs.pc
+endef
+# These install files on the target. Compare with Build/InstallDev
+define Package/hyperscan-headers/install
+$(INSTALL_DIR) $(1)/usr/include/hs
+$(INSTALL_DATA) $(PKG_INSTALL_DIR)/usr/include/hs/*.h $(1)/usr/include/hs/
+endef
+define Package/hyperscan-runtime/install
+$(INSTALL_DIR) $(1)/usr/lib
+$(INSTALL_DATA) $(PKG_INSTALL_DIR)/usr/lib/libhs* $(1)/usr/lib/
+endef
+$(eval $(call BuildPackage,hyperscan-headers))
+$(eval $(call BuildPackage,hyperscan-runtime))

@@ -9,13 +9,13 @@ include $(TOPDIR)/rules.mk
 PKG_NAME:=icu4c
 MAJOR_VERSION:=74
-MINOR_VERSION:=1
+MINOR_VERSION:=2
 PKG_VERSION:=$(MAJOR_VERSION).$(MINOR_VERSION)
 PKG_RELEASE:=1
 PKG_SOURCE:=$(PKG_NAME)-$(MAJOR_VERSION)_$(MINOR_VERSION)-src.tgz
 PKG_SOURCE_URL:=https://github.com/unicode-org/icu/releases/download/release-$(MAJOR_VERSION)-$(MINOR_VERSION)
-PKG_HASH:=86ce8e60681972e60e4dcb2490c697463fcec60dd400a5f9bffba26d0b52b8d0
+PKG_HASH:=5e4fb11d6a3e6b85afb55de8da8a71538f1d8fd64fce893986b37d60e5bb0091
 PKG_LICENSE:=ICU
 PKG_LICENSE_FILES:=LICENSE

@@ -9,8 +9,8 @@ PKG_SOURCE_URL:=https://gnupg.org/ftp/gcrypt/$(PKG_NAME)
 PKG_HASH:=e9fd27218d5394904e4e39788f9b1742711c3e6b41689a31aa3380bd5aa4f426
 PKG_MAINTAINER:=Daniel Golle <daniel@makrotopia.org>
-PKG_LICENSE:=GPL-3.0-or-later
+PKG_LICENSE:=LGPL-2.1-or-later
-PKG_LICENSE_FILES:=COPYING
+PKG_LICENSE_FILES:=COPYING COPYING.LIB
 PKG_FIXUP:=autoreconf
 PKG_INSTALL:=1

@@ -8,12 +8,12 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=libiio
-PKG_VERSION:=0.21
+PKG_VERSION:=0.25
-PKG_RELEASE:=4
+PKG_RELEASE:=1
 PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz
 PKG_SOURCE_URL:=https://codeload.github.com/analogdevicesinc/libiio/tar.gz/v$(PKG_VERSION)?
-PKG_HASH:=03d13165cbeb83b036743cbd9a10e336c728da162714f39d13250a3d94305cac
+PKG_HASH:=21972599a3c143ab1f98002ad2b3f28f4aff927fde5f677478311cd4e517730c
 PKG_LICENSE:=LGPL-2.1
 PKG_LICENSE_FILES:=COPYING.txt
@@ -35,7 +35,7 @@ include $(INCLUDE_DIR)/cmake.mk
 CMAKE_OPTIONS += -DWITH_DOC=OFF
 CMAKE_OPTIONS += -DENABLE_IPV6=$(if $(CONFIG_IPV6),ON,OFF)
-CMAKE_OPTIONS += -DENABLE_AIO=OFF
+CMAKE_OPTIONS += -DWITH_AIO=OFF
 CMAKE_OPTIONS += -DWITH_LOCAL_BACKEND=$(if $(CONFIG_LIBIIO_LOCAL_BACKEND),ON,OFF)
 CMAKE_OPTIONS += -DWITH_LOCAL_CONFIG=OFF
 CMAKE_OPTIONS += -DWITH_NETWORK_BACKEND=$(if $(CONFIG_LIBIIO_NETWORK_BACKEND),ON,OFF)

@@ -0,0 +1,27 @@
+From bb688d04294dda45e68dfaf13e3bc1187841e52a Mon Sep 17 00:00:00 2001
+From: Jan Tojnar <jtojnar@gmail.com>
+Date: Sun, 10 Dec 2023 21:52:05 +0100
+Subject: [PATCH] xml: Fix compatibility with libxml 2.12
+libxml 2.12.0 reorganized includes, resulting in the following no longer being in scope:
+- XML_PARSE_DTDVALID
+- xmlReadMemory
+- xmlReadFile
+- xmlCleanupParser
+Signed-off-by: Jan Tojnar <jtojnar@gmail.com>
+---
+ xml.c | 1 +
+ 1 file changed, 1 insertion(+)
+--- a/xml.c
++++ b/xml.c
+@@ -10,6 +10,7 @@
+ #include "iio-private.h"
+ #include <errno.h>
++#include <libxml/parser.h>
+ #include <libxml/tree.h>
+ #include <string.h>

@@ -7,7 +7,7 @@ include $(TOPDIR)/rules.mk
 PKG_NAME:=libsoup
 PKG_VERSION:=2.74.3
-PKG_RELEASE:=1
+PKG_RELEASE:=2
 PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.xz
 PKG_SOURCE_URL:=@GNOME/libsoup/$(basename $(PKG_VERSION))

@@ -0,0 +1,22 @@
+--- a/libsoup/soup-xmlrpc-old.c
++++ b/libsoup/soup-xmlrpc-old.c
+@@ -11,7 +11,7 @@
+ #include <string.h>
+-#include <libxml/tree.h>
++#include <libxml/parser.h>
+ #include "soup-xmlrpc-old.h"
+ #include "soup.h"
+--- a/libsoup/soup-xmlrpc.c
++++ b/libsoup/soup-xmlrpc.c
+@@ -17,7 +17,7 @@
+ #include <string.h>
+ #include <errno.h>
+-#include <libxml/tree.h>
++#include <libxml/parser.h>
+ #include "soup-xmlrpc.h"
+ #include "soup.h"

@@ -1,12 +1,12 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=libtorrent-rasterbar
-PKG_VERSION:=2.0.8
+PKG_VERSION:=2.0.9
 PKG_RELEASE:=1
 PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz
-PKG_SOURCE_URL:=https://codeload.github.com/arvidn/libtorrent/tar.gz/v$(PKG_VERSION)?
+PKG_SOURCE_URL:=https://github.com/arvidn/libtorrent/releases/download/v$(PKG_VERSION)/
-PKG_HASH:=29e5c5395de8126ed1b24d0540a9477fbb158b536021cd65aaf9de34d0aadb46
+PKG_HASH:=90cd92b6061c5b664840c3d5e151d43fedb24f5b2b24e14425ffbb884ef1798e
 PKG_MAINTAINER:=David Yang <mmyangfl@gmail.com>
 PKG_LICENSE:=BSD-3-Clause
@@ -46,24 +46,6 @@ endef
 # This package contains Python 3 bindings for the libtorrent-rasterbar library.
 #endef
-define Download/try_signal
-VERSION:=105cce59972f925a33aa6b1c3109e4cd3caf583d
-SUBDIR:=deps/try_signal
-FILE:=$(PKG_NAME)-try_signal-$$(VERSION).tar.xz
-URL:=https://github.com/arvidn/try_signal.git
-MIRROR_HASH:=da81da67d52b7a731c21148573b68bf8dc7863616d6ae1f81845b7afb29e8f00
-PROTO:=git
-endef
-$(eval $(call Download,try_signal))
-PKG_UNPACK:=$(HOST_TAR) -C $(PKG_BUILD_DIR) --strip-components=1 -xzf $(DL_DIR)/$(PKG_SOURCE)
-define Build/Prepare
-$(Build/Prepare/Default)
-$(eval $(Download/try_signal))
-xzcat $(DL_DIR)/$(FILE) | tar -C $(PKG_BUILD_DIR) $(TAR_OPTIONS)
-endef
 #CMAKE_OPTIONS += \
 # -Dpython-bindings=ON \
 # -Dpython-egg-info=ON

@@ -5,12 +5,12 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=OpenBLAS
-PKG_VERSION:=0.3.24
+PKG_VERSION:=0.3.25
 PKG_RELEASE:=1
 PKG_SOURCE:=OpenBLAS-$(PKG_VERSION).tar.gz
 PKG_SOURCE_URL:=https://github.com/xianyi/OpenBLAS/releases/download/v$(PKG_VERSION)/
-PKG_HASH:=ceadc5065da97bd92404cac7254da66cc6eb192679cf1002098688978d4d5132
+PKG_HASH:=4c25cb30c4bb23eddca05d7d0a85997b8db6144f5464ba7f8c09ce91e2f35543
 PKG_LICENSE:=BSD 3-Clause
 PKG_MAINTAINER:=Alexandru Ardelean <ardeleanalex@gmail.com>

@@ -8,12 +8,12 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=gst1-libav
-PKG_VERSION:=1.22.6
+PKG_VERSION:=1.22.8
 PKG_RELEASE:=1
 PKG_SOURCE:=gst-libav-$(PKG_VERSION).tar.xz
 PKG_SOURCE_URL:=https://gstreamer.freedesktop.org/src/gst-libav
-PKG_HASH:=7789e6408388a25f23cbf948cfc5c6230d735bbcd8b7f37f4a01c9e348a1e3a7
+PKG_HASH:=be39349bc07ab4cdbd9a5fd6ea9848c601c7560ba5a0577ad5200b83bd424981
 PKG_BUILD_DIR:=$(BUILD_DIR)/gst-libav-$(PKG_VERSION)
 PKG_MAINTAINER:=W. Michael Petullo <mike@flyn.org> \

@@ -8,12 +8,12 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=gst1-plugins-bad
-PKG_VERSION:=1.22.6
+PKG_VERSION:=1.22.8
 PKG_RELEASE:=1
 PKG_SOURCE:=gst-plugins-bad-$(PKG_VERSION).tar.xz
 PKG_SOURCE_URL:=http://gstreamer.freedesktop.org/src/gst-plugins-bad/
-PKG_HASH:=b4029cd2908a089c55f1d902a565d007495c95b1442d838485dc47fb12df7137
+PKG_HASH:=458783f8236068991e3e296edd671c8eddb8be6fac933c1c2e1503462864ea0f
 PKG_BUILD_DIR:=$(BUILD_DIR)/gst-plugins-bad-$(PKG_VERSION)
 PKG_MAINTAINER:=W. Michael Petullo <mike@flyn.org> \

@@ -1,11 +0,0 @@
---- a/meson.build
-+++ b/meson.build
-@@ -508,7 +508,7 @@ gst_plugins_bad_args = ['-DHAVE_CONFIG_H
- configinc = include_directories('.')
- libsinc = include_directories('gst-libs')
--python3 = import('python').find_installation()
-+python3 = 'python3'
- gir = find_program('g-ir-scanner', required : get_option('introspection'))
- gnome = import('gnome')

@@ -8,12 +8,12 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=gst1-plugins-base
-PKG_VERSION:=1.22.6
+PKG_VERSION:=1.22.8
 PKG_RELEASE:=1
 PKG_SOURCE:=gst-plugins-base-$(PKG_VERSION).tar.xz
 PKG_SOURCE_URL:=https://gstreamer.freedesktop.org/src/gst-plugins-base
-PKG_HASH:=50f2b4d17c02eefe430bbefa8c5cd134b1be78a53c0f60e951136d96cf49fd4b
+PKG_HASH:=eb6792e5c73c6defb9159c36ea6e4b78a2f8af6512678b4bd3b02c8d2d492acf
 PKG_BUILD_DIR:=$(BUILD_DIR)/gst-plugins-base-$(PKG_VERSION)
 PKG_MAINTAINER:=W. Michael Petullo <mike@flyn.org> \

@@ -1,11 +0,0 @@
---- a/meson.build
-+++ b/meson.build
-@@ -464,7 +464,7 @@ pkgconfig_subdirs = ['gstreamer-1.0']
- meson_pkg_config_file_fixup_script = find_program('scripts/meson-pkg-config-file-fixup.py')
--python3 = import('python').find_installation()
-+python3 = 'python3'
- subdir('gst-libs')
- subdir('gst')
- subdir('ext')

@@ -8,12 +8,12 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=gst1-plugins-good
-PKG_VERSION:=1.22.6
+PKG_VERSION:=1.22.8
 PKG_RELEASE:=1
 PKG_SOURCE:=gst-plugins-good-$(PKG_VERSION).tar.xz
 PKG_SOURCE_URL:=https://gstreamer.freedesktop.org/src/gst-plugins-good/
-PKG_HASH:=b3b07fe3f1ce7fe93aa9be7217866044548f35c4a7792280eec7e108a32f9817
+PKG_HASH:=e305b9f07f52743ca481da0a4e0c76c35efd60adaf1b0694eb3bb021e2137e39
 PKG_BUILD_DIR:=$(BUILD_DIR)/gst-plugins-good-$(PKG_VERSION)
 PKG_MAINTAINER:=W. Michael Petullo <mike@flyn.org> \

@@ -1,11 +0,0 @@
---- a/meson.build
-+++ b/meson.build
-@@ -469,7 +469,7 @@ endif
- presetdir = join_paths(get_option('datadir'), 'gstreamer-' + api_version, 'presets')
--python3 = import('python').find_installation()
-+python3 = 'python3'
- pkgconfig = import('pkgconfig')
- plugins_pkgconfig_install_dir = join_paths(plugins_install_dir, 'pkgconfig')
- if get_option('default_library') == 'shared'

@@ -8,12 +8,12 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=gst1-plugins-ugly
-PKG_VERSION:=1.22.6
+PKG_VERSION:=1.22.8
 PKG_RELEASE:=1
 PKG_SOURCE:=gst-plugins-ugly-$(PKG_VERSION).tar.xz
 PKG_SOURCE_URL:=https://gstreamer.freedesktop.org/src/gst-plugins-ugly
-PKG_HASH:=3e31454c98cb2f7f6d2d355eceb933a892fa0f1dc09bc36c9abc930d8e29ca48
+PKG_HASH:=0761d96ba508e01c0271881b26828c2bffd7d8afd50872219f088f755b252ca7
 PKG_BUILD_DIR:=$(BUILD_DIR)/gst-plugins-ugly-$(PKG_VERSION)
 PKG_MAINTAINER:=W. Michael Petullo <mike@flyn.org> \

@@ -1,11 +0,0 @@
---- a/meson.build
-+++ b/meson.build
-@@ -305,7 +305,7 @@ endif
- gpl_allowed = get_option('gpl').allowed()
--python3 = import('python').find_installation()
-+python3 = 'python3'
- subdir('gst')
- subdir('ext')

@@ -8,12 +8,12 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=gstreamer1
-PKG_VERSION:=1.22.6
+PKG_VERSION:=1.22.8
 PKG_RELEASE:=1
 PKG_SOURCE:=gstreamer-$(PKG_VERSION).tar.xz
 PKG_SOURCE_URL:=https://gstreamer.freedesktop.org/src/gstreamer
-PKG_HASH:=f500e6cfddff55908f937711fc26a0840de28a1e9ec49621c0b6f1adbd8f818e
+PKG_HASH:=ad4e3db1771139b1db17b1afa7c05db083ae0100bd4da244b71f162dcce41bfc
 PKG_BUILD_DIR:=$(BUILD_DIR)/gstreamer-$(PKG_VERSION)
 PKG_MAINTAINER:=W. Michael Petullo <mike@flyn.org> \

@@ -1,11 +0,0 @@
---- a/meson.build
-+++ b/meson.build
-@@ -572,7 +572,7 @@ if get_option('default_library') == 'sta
- endif
- # Used in gst/parse/meson.build and below
--python3 = import('python').find_installation()
-+python3 = 'python3'
- bashcomp_option = get_option('bash-completion')
- bashcomp_dep = dependency('bash-completion', version : '>= 2.0', required : bashcomp_option)

@@ -5,8 +5,8 @@
 include $(TOPDIR)/rules.mk
 PKG_NAME:=adblock-fast
-PKG_VERSION:=1.1.0
+PKG_VERSION:=1.1.1
-PKG_RELEASE:=4
+PKG_RELEASE:=1
 PKG_MAINTAINER:=Stan Grishin <stangri@melmac.ca>
 PKG_LICENSE:=GPL-3.0-or-later

@@ -53,10 +53,12 @@ readonly unboundFile="/var/lib/unbound/adb_list.${packageName}"
 readonly unboundCache="/var/run/${packageName}/unbound.cache"
 readonly unboundGzip="${packageName}.unbound.gz"
 readonly unboundFilter='s|^|local-zone: "|;s|$|" static|'
-readonly A_TMP="/var/${packageName}.hosts.a.tmp"
+readonly A_TMP="/var/${packageName}.a.tmp"
-readonly B_TMP="/var/${packageName}.hosts.b.tmp"
+readonly B_TMP="/var/${packageName}.b.tmp"
-readonly jsonFile="/dev/shm/$packageName-status.json"
+readonly SED_TMP="/var/${packageName}.sed.tmp"
-readonly sharedMemoryError="/dev/shm/$packageName-error"
+readonly runningConfigFile="/dev/shm/${packageName}.config"
+readonly runningErrorFile="/dev/shm/${packageName}.error"
+readonly runningStatusFile="/dev/shm/${packageName}.status"
 readonly hostsFilter='/localhost/d;/^#/d;/^[^0-9]/d;s/^0\.0\.0\.0.//;s/^127\.0\.0\.1.//;s/[[:space:]]*#.*$//;s/[[:cntrl:]]$//;s/[[:space:]]//g;/[`~!@#\$%\^&\*()=+;:"'\'',<>?/\|[{}]/d;/]/d;/\./!d;/^$/d;/[^[:alnum:]_.-]/d;'
 readonly domainsFilter='/^#/d;s/[[:space:]]*#.*$//;s/[[:space:]]*$//;s/[[:cntrl:]]$//;/[[:space:]]/d;/[`~!@#\$%\^&\*()=+;:"'\'',<>?/\|[{}]/d;/]/d;/\./!d;/^$/d;/[^[:alnum:]_.-]/d;'
 readonly adBlockPlusFilter='/^#/d;/^!/d;s/[[:space:]]*#.*$//;s/^||//;s/\^$//;s/[[:space:]]*$//;s/[[:cntrl:]]$//;/[[:space:]]/d;/[`~!@#\$%\^&\*()=+;:"'\'',<>?/\|[{}]/d;/]/d;/\./!d;/^$/d;/[^[:alnum:]_.-]/d;'
@@ -75,6 +77,8 @@ readonly ipset="$(command -v ipset)"
 readonly nft="$(command -v nft)"
 readonly canaryDomainsMozilla='use-application-dns.net'
 readonly canaryDomainsiCloud='mask.icloud.com mask-h2.icloud.com'
+readonly triggersReload='parallel_downloads debug download_timeout allowed_domain blocked_domain allowed_url blocked_url dns config_update_enabled config_update_url dnsmasq_config_file_url curl_additional_param curl_max_file_size curl_retry'
+readonly triggersRestart='compressed_cache compressed_cache_dir force_dns led force_dns_port'
 dl_command=
 dl_flag=
@@ -88,6 +92,7 @@ awk='awk'
 load_environment_flag=
 allowed_url=
 blocked_url=
+fw4_restart_flag=
 # shellcheck disable=SC1091
 . /lib/functions.sh
@@ -116,12 +121,159 @@ check_smartdns() { command -v smartdns >/dev/null 2>&1; }
 check_smartdns_ipset() { check_smartdns && check_ipset; }
 check_smartdns_nftset() { check_smartdns && check_nft; }
 check_unbound() { command -v unbound >/dev/null 2>&1; }
+config_cache() {
+local param="$1" var="$2"
+local _reload="$triggersReload"
+local _restart="$triggersRestart"
+local i ret
+case "$param" in
+create|set)
+cp -f "/etc/config/${packageName}" "$runningConfigFile"
+;;
+get)
+case "$var" in
+trigger_fw4)
+ret='false'
+if [ -s "$runningConfigFile" ]; then
+local UCI_CONFIG_DIR="${runningConfigFile%/*}"
+is_fw4_restart_needed && ret='true'
+fi
+printf "%b" "$ret"
+return
+;;
+trigger_service)
+local old_allowed_url old_blocked_url
+if [ ! -s "$runningConfigFile" ]; then
+ret='on_boot'
+else
+for i in $_reload; do
+local val_current val_old UCI_CONFIG_DIR
+case "$i" in
+allowed_url)
+val_current="$allowed_url"
+config_load "$runningConfigFile"
+config_foreach append_url 'file_url' old_allowed_url old_blocked_url
+val_old="$old_allowed_url"
+;;
+blocked_url)
+val_current="$blocked_url"
+config_load "$runningConfigFile"
+config_foreach append_url 'file_url' old_allowed_url old_blocked_url
+val_old="$old_blocked_url"
+;;
+*)
+UCI_CONFIG_DIR=
+val_current="$(uci_get "$packageName" 'config' "$i")"
+UCI_CONFIG_DIR="${runningConfigFile%/*}"
+val_old="$(uci_get "$packageName" 'config' "$i")"
+;;
+esac
+if [ "$val_current" != "$val_old" ]; then
+ret='download'
+unset _restart
+break
+fi
+done
+for i in $_restart; do
+local val_current val_old UCI_CONFIG_DIR
+UCI_CONFIG_DIR=
+val_current="$(uci_get "$packageName" 'config' "$i")"
+UCI_CONFIG_DIR="${runningConfigFile%/*}"
+val_old="$(uci_get "$packageName" 'config' "$i")"
+if [ "$val_current" != "$val_old" ]; then
+ret='restart'
+break
+fi
+done
+fi
+printf "%b" "$ret"
+return
+;;
+*)
+local UCI_CONFIG_DIR="${runningConfigFile%/*}"
+ret="$(uci_get "$packageName" 'config' "$var")"
+printf "%b" "$ret"
+return
+;;
+esac
+;;
+esac
+}
 debug() { local i j; for i in "$@"; do eval "j=\$$i"; echo "${i}: ${j} "; done; }
+dns_set_output_values() {
+case "$1" in
+dnsmasq.addnhosts)
+outputFilter="$dnsmasqAddnhostsFilter"
+outputFile="$dnsmasqAddnhostsFile"
+outputCache="$dnsmasqAddnhostsCache"
+outputGzip="${compressed_cache_dir}/${dnsmasqAddnhostsGzip}"
+if [ "$ipv6_enabled" -ne '0' ]; then
+outputFilterIPv6="$dnsmasqAddnhostsFilterIPv6"
+fi
+;;
+dnsmasq.conf)
+outputFilter="$dnsmasqConfFilter"
+outputFile="$dnsmasqConfFile"
+outputCache="$dnsmasqConfCache"
+outputGzip="${compressed_cache_dir}/${dnsmasqConfGzip}"
+;;
+dnsmasq.ipset)
+outputFilter="$dnsmasqIpsetFilter"
+outputFile="$dnsmasqIpsetFile"
+outputCache="$dnsmasqIpsetCache"
+outputGzip="${compressed_cache_dir}/${dnsmasqIpsetGzip}"
+;;
+dnsmasq.nftset)
+if [ "$ipv6_enabled" -ne '0' ]; then
+outputFilter="$dnsmasqNftsetFilterIPv6"
+else
+outputFilter="$dnsmasqNftsetFilter"
+fi
+outputFile="$dnsmasqNftsetFile"
+outputCache="$dnsmasqNftsetCache"
+outputGzip="${compressed_cache_dir}/${dnsmasqNftsetGzip}"
+;;
+dnsmasq.servers)
+outputFilter="$dnsmasqServersFilter"
+outputFile="$dnsmasqServersFile"
+outputCache="$dnsmasqServersCache"
+outputGzip="${compressed_cache_dir}/${dnsmasqServersGzip}"
+;;
+smartdns.domainset)
+outputFilter="$smartdnsDomainSetFilter"
+outputFile="$smartdnsDomainSetFile"
+outputCache="$smartdnsDomainSetCache"
+outputGzip="${compressed_cache_dir}/${smartdnsDomainSetGzip}"
+outputConfig="$smartdnsDomainSetConfig"
+;;
+smartdns.ipset)
+outputFilter="$smartdnsIpsetFilter"
+outputFile="$smartdnsIpsetFile"
+outputCache="$smartdnsIpsetCache"
+outputGzip="${compressed_cache_dir}/${smartdnsIpsetGzip}"
+outputConfig="$smartdnsIpsetConfig"
+;;
+smartdns.nftset)
+outputFilter="$smartdnsNftsetFilter"
+outputFile="$smartdnsNftsetFile"
+outputCache="$smartdnsNftsetCache"
+outputGzip="${compressed_cache_dir}/${smartdnsNftsetGzip}"
+outputConfig="$smartdnsNftsetConfig"
+;;
+unbound.adb_list)
+outputFilter="$unboundFilter"
+outputFile="$unboundFile"
+outputCache="$unboundCache"
+outputGzip="$unboundGzip"
+;;
+esac
+}
 dnsmasq_hup() { killall -q -s HUP dnsmasq; }
 dnsmasq_kill() { killall -q -s KILL dnsmasq; }
 dnsmasq_restart() { /etc/init.d/dnsmasq restart >/dev/null 2>&1; }
 is_enabled() { uci_get "$1" 'config' 'enabled' '0'; }
 is_fw4_restart_needed() {
+[ "$fw4_restart_flag" = 'true' ] && return 0
 local dns force_dns
 dns="$(uci_get "$packageName" 'config' 'dns' 'dnsmasq.servers')"
 force_dns="$(uci_get "$packageName" 'config' 'force_dns' '1')"
@@ -198,39 +350,18 @@ json() {
 # shellcheck disable=SC2124
 local extras="$@" line
 local status message error stats
-local reload restart curReload curRestart
 local ret i
-if [ -s "$jsonFile" ]; then
+if [ -s "$runningStatusFile" ]; then
-json_load_file "$jsonFile" 2>/dev/null
+json_load_file "$runningStatusFile" 2>/dev/null
 json_select 'data' 2>/dev/null
-for i in status message error stats reload restart; do
+for i in status message error stats; do
 json_get_var "$i" "$i" 2>/dev/null
 done
 fi
 case "$action" in
 get)
-case "$param" in
+printf "%b" "$(eval echo "\$$param")"
-triggers)
+return
-# shellcheck disable=SC2154
-curReload="$parallel_downloads $debug $download_timeout \
-$allowed_domain $blocked_domain $allowed_url $blocked_url $dns \
-$config_update_enabled $config_update_url $dnsmasq_config_file_url \
-$curl_additional_param $curl_max_file_size $curl_retry"
-# shellcheck disable=SC2154
-curRestart="$compressed_cache $compressed_cache_dir $force_dns $led \
-$force_dns_port"
-if [ ! -s "$jsonFile" ]; then
-ret='on_boot'
-elif [ "$curReload" != "$reload" ]; then
-ret='download'
-elif [ "$curRestart" != "$restart" ]; then
-ret='restart'
-fi
-printf "%b" "$ret"
-return;;
-*)
-printf "%b" "$(eval echo "\$$param")"; return;;
-esac
 ;;
 add)
 line="$(eval echo "\$$param")"
@@ -240,25 +371,12 @@ json() {
 case "$param" in
 all)
 unset status message error stats;;
-triggers)
-unset reload restart;;
 *)
 unset "$param";;
 esac
 ;;
 set)
-case "$param" in
+eval "$param"='${value}${extras:+|$extras}'
-triggers)
-reload="$parallel_downloads $debug $download_timeout \
-$allowed_domain $blocked_domain $allowed_url $blocked_url $dns \
-$config_update_enabled $config_update_url $dnsmasq_config_file_url \
-$curl_additional_param $curl_max_file_size $curl_retry"
-restart="$compressed_cache $compressed_cache_dir $force_dns $led \
-$force_dns_port"
-;;
-*)
-eval "$param"='${value}${extras:+|$extras}';;
-esac
 ;;
 esac
 json_init
@@ -268,11 +386,9 @@ json() {
 json_add_string message "$message"
 json_add_string error "$error"
 json_add_string stats "$stats"
-json_add_string reload "$reload"
-json_add_string restart "$restart"
 json_close_object
-mkdir -p "$(dirname "$jsonFile")"
+mkdir -p "${runningStatusFile%/*}"
-json_dump > "$jsonFile"
+json_dump > "$runningStatusFile"
 sync
 }
@@ -474,16 +590,21 @@ load_network() {
 }
 append_url() {
-local cfg="$1" var="$2"
+local cfg="$1" allow_var="${2:-allowed_url}" block_var="${3:-blocked_url}"
+local old_value
 local en action url
 config_get_bool en "$cfg" enabled '1'
 config_get action "$cfg" action 'block'
 config_get url "$cfg" url
 if [ "$en" = '1' ]; then
 if [ "$action" = 'allow' ]; then
-allowed_url="${allowed_url:+$allowed_url }${url}"
+old_value=$(eval echo "\$$allow_var")
+old_value="${old_value:+$old_value }${url}"
+eval "$allow_var"="\$old_value"
 else
-blocked_url="${blocked_url:+$blocked_url }${url}"
+old_value=$(eval echo "\$$block_var")
+old_value="${old_value:+$old_value }${url}"
+eval "$block_var"="\$old_value"
 fi
 fi
 }
@@ -624,72 +745,7 @@ load_environment() {
 compressed_cache_dir="/etc"
 fi
-case "$dns" in
+dns_set_output_values "$dns"
-dnsmasq.addnhosts)
-outputFilter="$dnsmasqAddnhostsFilter"
-outputFile="$dnsmasqAddnhostsFile"
-outputCache="$dnsmasqAddnhostsCache"
-outputGzip="${compressed_cache_dir}/${dnsmasqAddnhostsGzip}"
-if [ "$ipv6_enabled" -ne '0' ]; then
-outputFilterIPv6="$dnsmasqAddnhostsFilterIPv6"
-fi
-;;
-dnsmasq.conf)
-outputFilter="$dnsmasqConfFilter"
-outputFile="$dnsmasqConfFile"
-outputCache="$dnsmasqConfCache"
-outputGzip="${compressed_cache_dir}/${dnsmasqConfGzip}"
-;;
-dnsmasq.ipset)
-outputFilter="$dnsmasqIpsetFilter"
-outputFile="$dnsmasqIpsetFile"
-outputCache="$dnsmasqIpsetCache"
-outputGzip="${compressed_cache_dir}/${dnsmasqIpsetGzip}"
-;;
-dnsmasq.nftset)
-if [ "$ipv6_enabled" -ne '0' ]; then
-outputFilter="$dnsmasqNftsetFilterIPv6"
-else
-outputFilter="$dnsmasqNftsetFilter"
-fi
-outputFile="$dnsmasqNftsetFile"
-outputCache="$dnsmasqNftsetCache"
-outputGzip="${compressed_cache_dir}/${dnsmasqNftsetGzip}"
-;;
-dnsmasq.servers)
-outputFilter="$dnsmasqServersFilter"
-outputFile="$dnsmasqServersFile"
-outputCache="$dnsmasqServersCache"
-outputGzip="${compressed_cache_dir}/${dnsmasqServersGzip}"
-;;
-smartdns.domainset)
-outputFilter="$smartdnsDomainSetFilter"
-outputFile="$smartdnsDomainSetFile"
-outputCache="$smartdnsDomainSetCache"
-outputGzip="${compressed_cache_dir}/${smartdnsDomainSetGzip}"
-outputConfig="$smartdnsDomainSetConfig"
-;;
-smartdns.ipset)
-outputFilter="$smartdnsIpsetFilter"
-outputFile="$smartdnsIpsetFile"
-outputCache="$smartdnsIpsetCache"
-outputGzip="${compressed_cache_dir}/${smartdnsIpsetGzip}"
-outputConfig="$smartdnsIpsetConfig"
-;;
-smartdns.nftset)
-outputFilter="$smartdnsNftsetFilter"
-outputFile="$smartdnsNftsetFile"
-outputCache="$smartdnsNftsetCache"
-outputGzip="${compressed_cache_dir}/${smartdnsNftsetGzip}"
-outputConfig="$smartdnsNftsetConfig"
-;;
-unbound.adb_list)
-outputFilter="$unboundFilter"
-outputFile="$unboundFile"
-outputCache="$unboundCache"
-outputGzip="$unboundGzip"
-;;
-esac
 [ "$dns" = 'dnsmasq.addnhosts' ] || rm -f "$dnsmasqAddnhostsFile" "$dnsmasqAddnhostsCache" "${compressed_cache_dir}/${dnsmasqAddnhostsGzip}"
 [ "$dns" = 'dnsmasq.conf' ] || rm -f "$dnsmasqConfFile" "$dnsmasqConfCache" "${compressed_cache_dir}/${dnsmasqConfGzip}"
@@ -701,9 +757,9 @@ load_environment() {
 [ "$dns" = 'smartdns.nftset' ] || rm -f "$smartdnsNftsetFile" "$smartdnsNftsetCache" "${compressed_cache_dir}/${smartdnsNftsetGzip}" "$smartdnsNftsetConfig"
 [ "$dns" = 'unbound.adb_list' ] || rm -f "$unboundFile" "$unboundCache" "$unboundGzip"
-for i in "$jsonFile" "$outputFile" "$outputCache" "$outputGzip" "$outputConfig"; do
+for i in "$runningConfigFile" "$runningErrorFile" "$runningStatusFile" "$outputFile" "$outputCache" "$outputGzip" "$outputConfig"; do
 [ -n "$i" ] || continue
-if ! mkdir -p "$(dirname "$i")"; then
+if ! mkdir -p "${i%/*}"; then
 if [ "$param" != 'quiet' ]; then
 json add error 'errorOutputDirCreate' "$i"
 output "${_ERROR_}: $(get_text 'errorOutputDirCreate' "$i")!\\n"
@@ -883,7 +939,7 @@ resolver() {
 ;;
 smartdns.*)
 chmod 660 "$outputFile" "$outputConfig"
-chown root:smartdns "$outputFile" "$outputConfig"
+chown root:root "$outputFile" "$outputConfig"
 param='smartdns_restart'
 output_text='Restarting SmartDNS'
 ;;
@@ -1044,7 +1100,7 @@ process_file_url() {
 if is_https_url "$url" && [ -z "$isSSLSupported" ]; then
 output 1 "$_FAIL_"
 output 2 "[DL] $type $label $__FAIL__\\n"
-echo "errorNoSSLSupport|${1}" >> "$sharedMemoryError"
+echo "errorNoSSLSupport|${1}" >> "$runningErrorFile"
 return 0
 fi
 while [ -z "$R_TMP" ] || [ -e "$R_TMP" ]; do
@@ -1054,7 +1110,7 @@ process_file_url() {
 [ ! -s "$R_TMP" ]; then
 output 1 "$_FAIL_"
 output 2 "[DL] $type $label $__FAIL__\\n"
-echo "errorDownloadingList|${url}" >> "$sharedMemoryError"
+echo "errorDownloadingList|${url}" >> "$runningErrorFile"
 else
 append_newline "$R_TMP"
 [ -n "$cfg" ] && new_size="$(get_local_filesize "$R_TMP")"
@@ -1072,7 +1128,7 @@ process_file_url() {
 *)
 output 1 "$_FAIL_"
 output 2 "[DL] $type $label $__FAIL__\\n"
-echo "errorDetectingFileType|${url}" >> "$sharedMemoryError"
+echo "errorDetectingFileType|${url}" >> "$runningErrorFile"
 rm -f "$R_TMP"
 return 0
 ;;
@@ -1083,7 +1139,7 @@ process_file_url() {
 if [ ! -s "$R_TMP" ]; then
 output 1 "$_FAIL_"
 output 2 "[DL] $type $label ($format) $__FAIL__\\n"
-echo "errorParsingList|${url}" >> "$sharedMemoryError"
+echo "errorParsingList|${url}" >> "$runningErrorFile"
 else
 append_newline "$R_TMP"
 cat "${R_TMP}" >> "$D_TMP"
@@ -1096,12 +1152,10 @@ process_file_url() {
 }
 download_dnsmasq_file() {
-local hf allow_filter j=0 R_TMP
 json set message "$(get_text 'statusDownloading')..."
 json set status 'statusDownloading'
-rm -f "$A_TMP" "$B_TMP" "$outputFile" "$outputCache" "$outputGzip"
+rm -f "$A_TMP" "$B_TMP" "$SED_TMP" "$outputFile" "$outputCache" "$outputGzip"
 if [ "$(get_ram_free)" -lt 32 ]; then
 output 3 'Low free memory, restarting resolver '
 if resolver 'quiet_restart'; then
@@ -1110,15 +1164,15 @@ download_dnsmasq_file() {
 output_failn
 fi
 fi
-touch $A_TMP; touch $B_TMP;
+touch "$A_TMP" "$B_TMP" "$SED_TMP"
 output 1 'Downloading dnsmasq file '
-rm -f "$sharedMemoryError"
+rm -f "$runningErrorFile"
 process_file_url '' "$dnsmasq_config_file_url" 'file'
-if [ -s "$sharedMemoryError" ]; then
+if [ -s "$runningErrorFile" ]; then
 while IFS= read -r line; do
 json add error "$line"
-done < "$sharedMemoryError"
+done < "$runningErrorFile"
-rm -f "$sharedMemoryError"
+rm -f "$runningErrorFile"
 fi
 output 2 'Moving dnsmasq file '
 if mv "$B_TMP" "$outputFile"; then
@@ -1159,14 +1213,14 @@ download_lists() {
 return 0
 fi
 }
-local hf allow_filter j=0 R_TMP
+local hf j=0 R_TMP
 _ram_check || return 1
 json set message "$(get_text 'statusDownloading')..."
 json set status 'statusDownloading'
-rm -f "$A_TMP" "$B_TMP" "$outputFile" "$outputCache" "$outputGzip"
+rm -f "$A_TMP" "$B_TMP" "$SED_TMP" "$outputFile" "$outputCache" "$outputGzip"
 if [ "$(get_ram_total)" -lt 33554432 ]; then
 output 3 'Low free memory, restarting resolver '
 if resolver 'quiet_restart'; then
@@ -1175,9 +1229,9 @@ download_lists() {
 output_failn
 fi
 fi
-touch $A_TMP; touch $B_TMP;
+touch "$A_TMP" "$B_TMP" "$SED_TMP"
 output 1 'Downloading lists '
-rm -f "$sharedMemoryError"
+rm -f "$runningErrorFile"
 config_load "$packageName"
 config_foreach load_validate_file_url_section 'file_url' process_file_url_wrapper
 wait
@@ -1187,11 +1241,11 @@ download_lists() {
 fi
 output 1 '\n'
-if [ -s "$sharedMemoryError" ]; then
+if [ -s "$runningErrorFile" ]; then
 while IFS= read -r line; do
 json add error "$line"
-done < "$sharedMemoryError"
+done < "$runningErrorFile"
-rm -f "$sharedMemoryError"
+rm -f "$runningErrorFile"
 fi
 if [ "$canary_domains_icloud" -ne '0' ]; then
@@ -1206,9 +1260,13 @@ download_lists() {
 printf "%s\n" "$(echo "$hf" | sed "$domainsFilter")" >> "$B_TMP"
 done
 allowed_domain="${allowed_domain}
-$(cat $A_TMP)"
+$(sed '/^[[:space:]]*$/d' "$A_TMP")"
-for hf in ${allowed_domain}; do hf="$(echo "$hf" | sed 's/\./\\./g')"; allow_filter="$allow_filter/(^|\.)${hf}$/d;"; done
+for hf in ${allowed_domain}; do
+hf="$(echo "$hf" | sed 's/\./\\./g')"
+echo "/(^|\.)${hf}$/d;" >> "$SED_TMP"
+done
+sed -i '/^[[:space:]]*$/d' "$B_TMP"
 [ ! -s "$B_TMP" ] && return 1
 output 1 'Processing downloads '
@@ -1277,10 +1335,10 @@ $(cat $A_TMP)"
 mv "$A_TMP" "$B_TMP"
 fi
-if [ -n "$allow_filter" ]; then
+if [ -s "$SED_TMP" ]; then
 output 2 'Allowing domains '
 json set message "$(get_text 'statusProcessing'): allowing domains"
-if sed -i -E "$allow_filter" "$B_TMP"; then
+if sed -i -E -f "$SED_TMP" "$B_TMP"; then
 output_ok
 else
 output_failn
@@ -1369,7 +1427,7 @@ $(cat $A_TMP)"
 fi
 output 2 'Removing temporary files '
 json set message "$(get_text 'statusProcessing'): removing temporary files"
-rm -f "/tmp/${packageName}_tmp.*" "$A_TMP" "$B_TMP" "$outputCache" || j=1
+rm -f "/tmp/${packageName}_tmp.*" "$A_TMP" "$B_TMP" "$SED_TMP" "$outputCache" || j=1
 if [ $j -eq 0 ]; then
 output_ok
 else
@@ -1418,7 +1476,7 @@ adb_allow() {
 output 2 "Committing changes to config "
 if uci_commit "$packageName"; then
 allowed_domain="$(uci_get "$packageName" 'config' 'allowed_domain')"
-json set triggers
+config_cache 'create'
 json set stats "$serviceName is blocking $(wc -l < "$outputFile") domains (with ${dns})"
 output_ok;
 if [ "$dns" = 'dnsmasq.ipset' ]; then
@@ -1460,7 +1518,7 @@ adb_allow() {
 output 2 "Committing changes to config "
 if uci_commit "$packageName"; then
 allowed_domain="$(uci_get "$packageName" 'config' 'allowed_domain')"
json set triggers config_cache 'create'
json set stats "$serviceName is blocking $(wc -l < "$outputFile") domains (with ${dns})" json set stats "$serviceName is blocking $(wc -l < "$outputFile") domains (with ${dns})"
output_ok; output_ok;
output 2 "Restarting Unbound " output 2 "Restarting Unbound "
@ -1493,7 +1551,7 @@ adb_allow() {
output 2 "Committing changes to config " output 2 "Committing changes to config "
if uci_commit "$packageName"; then if uci_commit "$packageName"; then
allowed_domain="$(uci_get "$packageName" 'config' 'allowed_domain')" allowed_domain="$(uci_get "$packageName" 'config' 'allowed_domain')"
json set triggers config_cache 'create'
json set stats "$serviceName is blocking $(wc -l < "$outputFile") domains (with ${dns})" json set stats "$serviceName is blocking $(wc -l < "$outputFile") domains (with ${dns})"
output_ok; output_ok;
output 2 "Restarting Unbound " output 2 "Restarting Unbound "
@ -1663,11 +1721,12 @@ adb_start() {
load_environment "$validation_result" "$param" || return 1 load_environment "$validation_result" "$param" || return 1
status="$(json get status)" status="$(json get 'status')"
error="$(json get error)" error="$(json get 'error')"
message="$(json get message)" message="$(json get 'message')"
stats="$(json get stats)" stats="$(json get 'stats')"
action="$(json get triggers)" action="$(config_cache get 'trigger_service')"
fw4_restart_flag="$(config_cache get 'trigger_fw4')"
if [ "$action" = 'on_boot' ] || [ "$param" = 'on_boot' ]; then if [ "$action" = 'on_boot' ] || [ "$param" = 'on_boot' ]; then
if cache 'test_gzip' || cache 'test'; then if cache 'test_gzip' || cache 'test'; then
@ -1693,7 +1752,7 @@ adb_start() {
fi fi
json del all json del all
json set triggers config_cache 'create'
if [ "$action" = 'restore' ]; then if [ "$action" = 'restore' ]; then
output 0 "Starting $serviceName... " output 0 "Starting $serviceName... "

85
net/alist/Makefile Normal file
View file

@ -0,0 +1,85 @@
# SPDX-License-Identifier: GPL-2.0-only
#
# Copyright (C) 2023 ImmortalWrt.org
include $(TOPDIR)/rules.mk
PKG_NAME:=alist
PKG_VERSION:=3.29.1
PKG_RELEASE:=1
PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz
PKG_SOURCE_URL:=https://codeload.github.com/alist-org/alist/tar.gz/v$(PKG_VERSION)?
PKG_HASH:=b7d1929d9aef511b263673dba8e5b787f695e1b4fa4555fe562f8060ee0bdea4
PKG_LICENSE:=AGPL-3.0-only
PKG_LICENSE_FILES:=LICENSE
PKG_MAINTAINER:=Tianling Shen <cnsztl@immortalwrt.org>
PKG_BUILD_DEPENDS:=golang/host
PKG_BUILD_PARALLEL:=1
PKG_BUILD_FLAGS:=no-mips16
GO_PKG:=github.com/alist-org/alist/v3
GO_PKG_LDFLAGS_X:= \
$(GO_PKG)/internal/conf.Version=$(PKG_VERSION) \
$(GO_PKG)/internal/conf.WebVersion=$(PKG_VERSION)
include $(INCLUDE_DIR)/package.mk
include ../../lang/golang/golang-package.mk
define Package/alist
SECTION:=net
CATEGORY:=Network
TITLE:=A file list program that supports multiple storage
URL:=https://alist.nn.ci
DEPENDS:=$(GO_ARCH_DEPENDS) +ca-bundle +fuse-utils
endef
define Package/alist/description
A file list program that supports multiple storage providers, and supports
web browsing and WebDAV, powered by Gin and SolidJS.
endef
define Package/alist/conffiles
/etc/alist/
/etc/config/alist
endef
WEB_VERSION:=3.29.0
WEB_FILE:=$(PKG_NAME)-web-$(WEB_VERSION).tar.gz
define Download/alist-web
URL:=https://github.com/alist-org/alist-web/releases/download/$(WEB_VERSION)/
URL_FILE:=dist.tar.gz
FILE:=$(WEB_FILE)
HASH:=ece9d3fd45f18eaa7376e5fc077ebeae8f67b7bcf6004a29ae58392bfc2a3d13
endef
define Build/Prepare
$(call Build/Prepare/Default)
( \
mkdir -p $(PKG_BUILD_DIR)/public ; \
gzip -dc $(DL_DIR)/$(WEB_FILE) | $(HOST_TAR) -C $(PKG_BUILD_DIR)/public $(TAR_OPTIONS) ; \
)
endef
ifneq ($(CONFIG_USE_MUSL),)
TARGET_CFLAGS += -D_LARGEFILE64_SOURCE
endif
define Package/alist/install
$(call GoPackage/Package/Install/Bin,$(PKG_INSTALL_DIR))
$(INSTALL_DIR) $(1)/usr/bin/
$(INSTALL_BIN) $(PKG_INSTALL_DIR)/usr/bin/alist $(1)/usr/bin/
$(INSTALL_DIR) $(1)/etc/config
$(INSTALL_CONF) $(CURDIR)/files/alist.config $(1)/etc/config/alist
$(INSTALL_DIR) $(1)/etc/init.d
$(INSTALL_BIN) $(CURDIR)/files/alist.init $(1)/etc/init.d/alist
endef
$(eval $(call Download,alist-web))
$(eval $(call GoBinPackage,alist))
$(eval $(call BuildPackage,alist))
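If you want to try the new package locally, a hedged build sketch (assumes a configured OpenWrt buildroot with the packages feed installed; the exact invocation may differ per setup):

```
# Illustrative only: fetch and build the alist package from a buildroot checkout.
./scripts/feeds update packages
./scripts/feeds install alist
make package/alist/{download,compile} V=s
```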

View file

@ -0,0 +1,38 @@
config alist 'config'
option enabled '0'
option debug '0'
# listen
option listen_addr '0.0.0.0'
option listen_http_port '5244'
option listen_https_port '-1'
option listen_force_https '0'
option listen_cert_file ''
option listen_key_file ''
option listen_unix_file ''
option listen_unix_file_perm ''
# site
option site_url ''
option site_cdn ''
option site_login_expire '48'
option site_max_connections '0'
option site_tls_insecure '0'
# database
option db_type 'sqlite3'
option db_host ''
option db_port '0'
option db_user ''
option db_pass ''
option db_name ''
option db_table_prefix 'x_'
option db_ssl_mode ''
# log
option log_enable '1'
option log_max_size '5'
option log_max_backups '1'
option log_max_age '15'

113
net/alist/files/alist.init Normal file
View file

@ -0,0 +1,113 @@
#!/bin/sh /etc/rc.common
USE_PROCD=1
START=99
CONF="alist"
PROG="/usr/bin/alist"
CONF_DIR="/etc/$CONF"
RUN_DIR="/var/run/$CONF"
uci_json_add_boolean() {
local enabled
config_get_bool enabled "${4:-config}" "$2" "${3:-0}"
json_add_boolean "$1" "$enabled"
}
uci_json_add_int() {
local value
config_get value "${4:-config}" "$2" "${3:-0}"
json_add_int "$1" "$value"
}
uci_json_add_string() {
local value
config_get value "${4:-config}" "$2" $3
json_add_string "$1" "$value"
}
start_service() {
config_load "$CONF"
local enabled
config_get_bool enabled "config" "enabled" "0"
[ "$enabled" -eq "1" ] || return 1
local jwt_secret
jwt_secret="$(jsonfilter -qi "$CONF_DIR/config.json" -e "@.jwt_secret")"
[ -n "$jwt_secret" ] || jwt_secret="$(tr -cd "a-zA-Z0-9" < "/dev/urandom" | head -c16)"
mkdir -p "$CONF_DIR"
mkdir -p "$RUN_DIR"
json_init
json_add_boolean "force" "1"
uci_json_add_string "site_url" "site_url"
uci_json_add_string "cdn" "site_cdn"
json_add_string "jwt_secret" "$jwt_secret"
uci_json_add_int "token_expires_in" "site_login_expire" "48"
json_add_object "database"
uci_json_add_string "type" "db_type" "sqlite3"
uci_json_add_string "host" "db_host"
uci_json_add_int "port" "db_port"
uci_json_add_string "user" "db_user"
uci_json_add_string "password" "db_pass"
uci_json_add_string "name" "db_name"
json_add_string "db_file" "$CONF_DIR/data.db"
uci_json_add_string "table_prefix" "db_table_prefix" "x_"
uci_json_add_string "ssl_mode" "db_ssl_mode"
json_close_object
json_add_object "scheme"
uci_json_add_string "address" "listen_addr" "0.0.0.0"
uci_json_add_int "http_port" "listen_http_port" "5244"
uci_json_add_int "https_port" "listen_https_port" "-1"
uci_json_add_boolean "force_https" "listen_force_https"
uci_json_add_string "cert_file" "listen_cert_file"
uci_json_add_string "key_file" "listen_key_file"
uci_json_add_string "unix_file" "listen_unix_file"
uci_json_add_string "unix_file_perm" "listen_unix_file_perm"
json_close_object
json_add_string "temp_dir" "$RUN_DIR/temp"
json_add_string "bleve_dir" "$CONF_DIR/bleve"
json_add_object "log"
uci_json_add_boolean "enable" "log_enable" "1"
json_add_string "name" "$RUN_DIR/log/alist.log"
uci_json_add_int "max_size" "log_max_size" "5"
uci_json_add_int "max_backups" "log_max_backups" "1"
uci_json_add_int "max_age" "log_max_age" "15"
json_add_boolean "compress" "0"
json_close_object
json_add_int "delayed_start" "0"
uci_json_add_int "max_connections" "site_max_connections"
uci_json_add_boolean "tls_insecure_skip_verify" "site_tls_insecure"
json_dump > "$CONF_DIR/config.json"
local db_type
config_get db_type "config" "db_type" "sqlite3"
[ "$db_type" != "sqlite3" -o -e "$CONF_DIR/data.db" ] || "$PROG" --data "$CONF_DIR" admin set "password" 2>"/dev/null"
procd_open_instance
procd_set_param command "$PROG"
procd_append_param command server
procd_append_param command --data "$CONF_DIR"
procd_append_param command --no-prefix
local debug
config_get_bool debug "config" "debug" "0"
[ "$debug" -eq "0" ] || procd_append_param command --debug
procd_set_param limits core="unlimited"
procd_set_param limits nofile="1000000 1000000"
procd_set_param respawn
[ "$debug" -eq "0" ] || procd_set_param stderr 1
procd_close_instance
}
stop_service() {
rm -rf "$RUN_DIR"
}
service_triggers() {
procd_add_reload_trigger "$CONF"
}
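The init script only generates `/etc/alist/config.json` and launches the daemon when the UCI `enabled` flag is set. A quick-start sketch (section and option names are the ones from the shipped `/etc/config/alist` above; the port shown is just the default):

```
# Illustrative first run: enable the service and start it through procd.
uci set alist.config.enabled='1'
uci set alist.config.listen_http_port='5244'
uci commit alist
/etc/init.d/alist enable
/etc/init.d/alist start
# With the default sqlite3 backend the script seeds the admin password on the
# first start (see the 'admin set' call above); the web UI then listens on
# http://<router-ip>:5244/.
```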

3
net/alist/test.sh Normal file
View file

@ -0,0 +1,3 @@
#!/bin/sh
alist version | grep "$PKG_VERSION"

View file

@ -8,13 +8,13 @@
include $(TOPDIR)/rules.mk include $(TOPDIR)/rules.mk
PKG_NAME:=apache PKG_NAME:=apache
PKG_VERSION:=2.4.57 PKG_VERSION:=2.4.58
PKG_RELEASE:=1 PKG_RELEASE:=1
PKG_SOURCE_NAME:=httpd PKG_SOURCE_NAME:=httpd
PKG_SOURCE:=$(PKG_SOURCE_NAME)-$(PKG_VERSION).tar.bz2 PKG_SOURCE:=$(PKG_SOURCE_NAME)-$(PKG_VERSION).tar.bz2
PKG_SOURCE_URL:=@APACHE/httpd/ PKG_SOURCE_URL:=@APACHE/httpd/
PKG_HASH:=dbccb84aee95e095edfbb81e5eb926ccd24e6ada55dcd83caecb262e5cf94d2a PKG_HASH:=fa16d72a078210a54c47dd5bef2f8b9b8a01d94909a51453956b3ec6442ea4c5
PKG_BUILD_DIR:=$(BUILD_DIR)/$(PKG_SOURCE_NAME)-$(PKG_VERSION) PKG_BUILD_DIR:=$(BUILD_DIR)/$(PKG_SOURCE_NAME)-$(PKG_VERSION)

View file

@ -0,0 +1,10 @@
--- a/modules/filters/mod_xml2enc.c
+++ b/modules/filters/mod_xml2enc.c
@@ -35,6 +35,7 @@
#endif
/* libxml2 */
+#include <libxml/xmlstring.h>
#include <libxml/encoding.h>
#if defined(__clang__)

View file

@ -5,8 +5,8 @@
include $(TOPDIR)/rules.mk include $(TOPDIR)/rules.mk
PKG_NAME:=banip PKG_NAME:=banip
PKG_VERSION:=0.9.2 PKG_VERSION:=0.9.3
PKG_RELEASE:=4 PKG_RELEASE:=1
PKG_LICENSE:=GPL-3.0-or-later PKG_LICENSE:=GPL-3.0-or-later
PKG_MAINTAINER:=Dirk Brenken <dev@brenken.org> PKG_MAINTAINER:=Dirk Brenken <dev@brenken.org>
@ -63,6 +63,9 @@ define Package/banip/install
$(INSTALL_CONF) ./files/banip.countries $(1)/etc/banip $(INSTALL_CONF) ./files/banip.countries $(1)/etc/banip
$(INSTALL_CONF) ./files/banip.feeds $(1)/etc/banip $(INSTALL_CONF) ./files/banip.feeds $(1)/etc/banip
$(INSTALL_CONF) ./files/banip.custom.feeds $(1)/etc/banip $(INSTALL_CONF) ./files/banip.custom.feeds $(1)/etc/banip
$(INSTALL_DIR) $(1)/www/cgi-bin
$(INSTALL_BIN) ./files/banip.cgi $(1)/www/cgi-bin/banip
endef endef
$(eval $(call BuildPackage,banip)) $(eval $(call BuildPackage,banip))

View file

@ -89,6 +89,7 @@ IP address blocking is commonly used to protect against brute force attacks, pre
* Add new or edit existing banIP feeds on your own with the LuCI integrated custom feed editor * Add new or edit existing banIP feeds on your own with the LuCI integrated custom feed editor
* Supports external allowlist URLs to reference additional IPv4/IPv6 feeds * Supports external allowlist URLs to reference additional IPv4/IPv6 feeds
* Supports allowing / blocking of certain VLAN forwards * Supports allowing / blocking of certain VLAN forwards
* Provides an option to receive logging events from remote servers via the cgi interface
## Prerequisites ## Prerequisites
* **[OpenWrt](https://openwrt.org)**, latest stable release or a snapshot with nft/firewall 4 support * **[OpenWrt](https://openwrt.org)**, latest stable release or a snapshot with nft/firewall 4 support
@ -141,7 +142,7 @@ Available commands:
| ban_filelimit | option | 1024 | ulimit max open/number of files (range 1024-4096) | | ban_filelimit | option | 1024 | ulimit max open/number of files (range 1024-4096) |
| ban_loglimit | option | 100 | scan only the last n log entries permanently. A value of '0' disables the monitor | | ban_loglimit | option | 100 | scan only the last n log entries permanently. A value of '0' disables the monitor |
| ban_logcount | option | 1 | how many times the IP must appear in the log to be considered as suspicious | | ban_logcount | option | 1 | how many times the IP must appear in the log to be considered as suspicious |
| ban_logterm | list | regex | various regex for logfile parsing (default: dropbear, sshd, luci, nginx, asterisk) | | ban_logterm | list | regex | various regex for logfile parsing (default: dropbear, sshd, luci, nginx, asterisk and cgi-remote events) |
| ban_logreadfile | option | /var/log/messages | alternative location for parsing the log file, e.g. via syslog-ng, to deactivate the standard parsing via logread | | ban_logreadfile | option | /var/log/messages | alternative location for parsing the log file, e.g. via syslog-ng, to deactivate the standard parsing via logread |
| ban_autodetect | option | 1 | auto-detect wan interfaces, devices and subnets | | ban_autodetect | option | 1 | auto-detect wan interfaces, devices and subnets |
| ban_debug | option | 0 | enable banIP related debug logging | | ban_debug | option | 0 | enable banIP related debug logging |
@ -191,6 +192,8 @@ Available commands:
| ban_mailnotification | option | 0 | receive E-Mail notifications with every banIP run | | ban_mailnotification | option | 0 | receive E-Mail notifications with every banIP run |
| ban_reportelements | option | 1 | count Set elements in the report, disable this option to speed up the report significantly | | ban_reportelements | option | 1 | count Set elements in the report, disable this option to speed up the report significantly |
| ban_resolver | option | - | external resolver used for DNS lookups | | ban_resolver | option | - | external resolver used for DNS lookups |
| ban_remotelog | option | 0 | enable the cgi interface to receive remote logging events |
| ban_remotetoken | option | - | unique token to communicate with the cgi interface |
## Examples ## Examples
**banIP report information** **banIP report information**
@ -292,6 +295,7 @@ list ban_logterm 'luci: failed login'
list ban_logterm 'error: maximum authentication attempts exceeded' list ban_logterm 'error: maximum authentication attempts exceeded'
list ban_logterm 'sshd.*Connection closed by.*\[preauth\]' list ban_logterm 'sshd.*Connection closed by.*\[preauth\]'
list ban_logterm 'SecurityEvent=\"InvalidAccountID\".*RemoteAddress=' list ban_logterm 'SecurityEvent=\"InvalidAccountID\".*RemoteAddress='
list ban_logterm 'received a suspicious remote IP '\''.*'\'''
``` ```
**allow-/blocklist handling** **allow-/blocklist handling**
@ -324,6 +328,18 @@ MAC-address with IPv4 and IPv6 wildcard concatenation:
C8:C2:9B:F7:80:12 192.168.1.10 => this will be populated to v4MAC-Set with the certain IP C8:C2:9B:F7:80:12 192.168.1.10 => this will be populated to v4MAC-Set with the certain IP
C8:C2:9B:F7:80:12 => this will be populated to v6MAC-Set with the IP-wildcard ::/0 C8:C2:9B:F7:80:12 => this will be populated to v6MAC-Set with the IP-wildcard ::/0
``` ```
**enable the cgi interface to receive remote logging events**
banIP ships a basic cgi interface in '/www/cgi-bin/banip' to receive remote logging events (disabled by default). The cgi interface evaluates logging events via GET or POST requests (see examples below). To enable the cgi interface, set the following options:
* set 'ban_remotelog' to '1' to enable the cgi interface
* set 'ban_remotetoken' to a secret transfer token; allowed token characters are '[A-Za-z]', '[0-9]', '.' and ':'
Examples of transferring remote logging events from an internal server to banIP via the cgi interface:
* POST request: curl --insecure --data "<ban_remotetoken>=<suspicious IP>" https://192.168.1.1/cgi-bin/banip
* GET request: wget --no-check-certificate https://192.168.1.1/cgi-bin/banip?<ban_remotetoken>=<suspicious IP>
Please note: for security reasons, use this cgi interface only internally and only over an encrypted https connection.
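For the sending side, a minimal sketch of a forwarder an internal server could use to report offenders to this cgi endpoint (router address, token value and the reported IP are placeholders, not part of banIP itself):

```
#!/bin/sh
# Hypothetical forwarder: reports a suspicious IP to the banIP cgi interface.
ROUTER="https://192.168.1.1/cgi-bin/banip"
TOKEN="mySecretToken123"          # must match banip.global.ban_remotetoken

report_ip() {
    # POST variant from the examples above; --insecure only because LAN-side
    # routers usually serve a self-signed certificate.
    curl --silent --insecure --data "${TOKEN}=${1}" "$ROUTER" >/dev/null
}

report_ip "203.0.113.42"
```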
**redirect Asterisk security logs to logd/logread** **redirect Asterisk security logs to logd/logread**
banIP only supports logfile scanning via logread, so to monitor attacks on Asterisk, its security log must be available via logread. To do this, edit '/etc/asterisk/logger.conf' and add the line 'syslog.local0 = security', then run 'asterisk -rx reload logger' to update the running Asterisk configuration. banIP only supports logfile scanning via logread, so to monitor attacks on Asterisk, its security log must be available via logread. To do this, edit '/etc/asterisk/logger.conf' and add the line 'syslog.local0 = security', then run 'asterisk -rx reload logger' to update the running Asterisk configuration.

View file

@ -43,6 +43,8 @@ ban_mailtopic="banIP notification"
ban_mailprofile="ban_notify" ban_mailprofile="ban_notify"
ban_mailnotification="0" ban_mailnotification="0"
ban_reportelements="1" ban_reportelements="1"
ban_remotelog="0"
ban_remotetoken=""
ban_nftloglevel="warn" ban_nftloglevel="warn"
ban_nftpriority="-200" ban_nftpriority="-200"
ban_nftpolicy="memory" ban_nftpolicy="memory"
@ -1526,7 +1528,7 @@ f_monitor() {
ip="${ip##* }" ip="${ip##* }"
[ -n "${ip}" ] && proto="v6" [ -n "${ip}" ] && proto="v6"
fi fi
if [ -n "${proto}" ] && ! "${ban_nftcmd}" get element inet banIP blocklist"${proto}" "{ ${ip} }" >/dev/null 2>&1 && ! "${ban_grepcmd}" -q "^${ip}" "${ban_allowlist}"; then if [ -n "${proto}" ] && ! "${ban_nftcmd}" get element inet banIP allowlist"${proto}" "{ ${ip} }" >/dev/null 2>&1 && ! "${ban_nftcmd}" get element inet banIP blocklist"${proto}" "{ ${ip} }" >/dev/null 2>&1; then
f_log "info" "suspicious IP '${ip}'" f_log "info" "suspicious IP '${ip}'"
log_raw="$(eval ${loglimit_cmd})" log_raw="$(eval ${loglimit_cmd})"
log_count="$(printf "%s\n" "${log_raw}" | "${ban_grepcmd}" -c "suspicious IP '${ip}'")" log_count="$(printf "%s\n" "${log_raw}" | "${ban_grepcmd}" -c "suspicious IP '${ip}'")"

36
net/banip/files/banip.cgi Normal file
View file

@ -0,0 +1,36 @@
#!/bin/sh
# banIP cgi remote logging script - ban incoming and outgoing IPs via named nftables Sets
# Copyright (c) 2018-2023 Dirk Brenken (dev@brenken.org)
# This is free software, licensed under the GNU General Public License v3.
# (s)hellcheck exceptions
# shellcheck disable=all
# handle post/get requests
#
post_string="$(cat)"
request="${post_string//[^[:alnum:]=\.\:]/}"
[ -z "${request}" ] && request="${QUERY_STRING//[^[:alnum:]=\.\:]/}"
request_decode() {
local key value token
key="${request%=*}"
value="${request#*=}"
token="$(uci -q get banip.global.ban_remotetoken)"
if [ -n "${key}" ] && [ -n "${value}" ] && [ "${key}" = "${token}" ] && /etc/init.d/banip running; then
[ -r "/usr/lib/banip-functions.sh" ] && { . "/usr/lib/banip-functions.sh"; f_conf; }
if [ "${ban_remotelog}" = "1" ] && [ -x "${ban_logreadcmd}" ] && [ -n "${ban_logterm%%??}" ] && [ "${ban_loglimit}" != "0" ]; then
f_log "info" "received a suspicious remote IP '${value}'"
fi
fi
}
cat <<EOF
Status: 202 Accepted
Content-Type: text/plain; charset=UTF-8
EOF
request_decode

View file

@ -7,3 +7,4 @@ config banip 'global'
list ban_logterm 'error: maximum authentication attempts exceeded' list ban_logterm 'error: maximum authentication attempts exceeded'
list ban_logterm 'sshd.*Connection closed by.*\[preauth\]' list ban_logterm 'sshd.*Connection closed by.*\[preauth\]'
list ban_logterm 'SecurityEvent=\"InvalidAccountID\".*RemoteAddress=' list ban_logterm 'SecurityEvent=\"InvalidAccountID\".*RemoteAddress='
list ban_logterm 'received a suspicious remote IP '\''.*'\'''

View file

@ -8,12 +8,12 @@
include $(TOPDIR)/rules.mk include $(TOPDIR)/rules.mk
PKG_NAME:=chrony PKG_NAME:=chrony
PKG_VERSION:=4.4 PKG_VERSION:=4.5
PKG_RELEASE:=1 PKG_RELEASE:=1
PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz
PKG_SOURCE_URL:=https://chrony-project.org/releases/ PKG_SOURCE_URL:=https://chrony-project.org/releases/
PKG_HASH:=eafb07e6daf92b142200f478856dfed6efc9ea2d146eeded5edcb09b93127088 PKG_HASH:=19fe1d9f4664d445a69a96c71e8fdb60bcd8df24c73d1386e02287f7366ad422
PKG_MAINTAINER:=Miroslav Lichvar <mlichvar0@gmail.com> PKG_MAINTAINER:=Miroslav Lichvar <mlichvar0@gmail.com>
PKG_LICENSE:=GPL-2.0 PKG_LICENSE:=GPL-2.0
@ -85,6 +85,7 @@ define Package/chrony/install
$(INSTALL_DIR) $(1)/usr/sbin/ $(INSTALL_DIR) $(1)/usr/sbin/
$(INSTALL_BIN) $(PKG_BUILD_DIR)/chronyd $(1)/usr/sbin $(INSTALL_BIN) $(PKG_BUILD_DIR)/chronyd $(1)/usr/sbin
$(INSTALL_BIN) $(PKG_BUILD_DIR)/chronyc $(1)/usr/bin $(INSTALL_BIN) $(PKG_BUILD_DIR)/chronyc $(1)/usr/bin
$(INSTALL_BIN) ./files/chrony.ntp-hotplug $(1)/usr/sbin/chrony-hotplug
$(INSTALL_DIR) $(1)/etc/init.d $(INSTALL_DIR) $(1)/etc/init.d
$(INSTALL_DIR) $(1)/etc/config $(INSTALL_DIR) $(1)/etc/config
$(INSTALL_DIR) $(1)/etc/chrony $(INSTALL_DIR) $(1)/etc/chrony

View file

@ -0,0 +1,6 @@
#!/bin/sh
# Wait for sync for up to 5 minutes and notify other services
/usr/bin/chronyc waitsync 300 1 0.0 1 || exit 0
ubus call hotplug.ntp call '{ "env": [ "ACTION=stratum" ] }'
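The last line feeds the `stratum` event into procd's hotplug machinery, so other packages can react once the clock is synchronized. A hedged example of a consumer (the path, file name and restarted service are placeholders; it assumes the usual dispatch of the `hotplug.ntp` env to scripts in `/etc/hotplug.d/ntp/`):

```
#!/bin/sh
# Hypothetical /etc/hotplug.d/ntp/50-example: runs with $ACTION taken from the
# env passed via 'ubus call hotplug.ntp call' above.
[ "$ACTION" = "stratum" ] || exit 0
logger -t ntp-hotplug "clock synchronized by chrony"
/etc/init.d/some-time-sensitive-service restart
```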

View file

@ -4,6 +4,7 @@
START=15 START=15
USE_PROCD=1 USE_PROCD=1
PROG=/usr/sbin/chronyd PROG=/usr/sbin/chronyd
HOTPLUG=/usr/sbin/chrony-hotplug
CONFIGFILE=/etc/chrony/chrony.conf CONFIGFILE=/etc/chrony/chrony.conf
INCLUDEFILE=/var/etc/chrony.d/10-uci.conf INCLUDEFILE=/var/etc/chrony.d/10-uci.conf
RTCDEVICE=/dev/rtc0 RTCDEVICE=/dev/rtc0
@ -79,6 +80,10 @@ start_service() {
procd_set_param file $INCLUDEFILE procd_set_param file $INCLUDEFILE
procd_close_instance procd_close_instance
procd_open_instance
procd_set_param command $HOTPLUG
procd_close_instance
config_load chrony config_load chrony
mkdir -p $(dirname $INCLUDEFILE) mkdir -p $(dirname $INCLUDEFILE)

View file

@ -9,15 +9,15 @@ include $(TOPDIR)/rules.mk
include $(INCLUDE_DIR)/nls.mk include $(INCLUDE_DIR)/nls.mk
PKG_NAME:=curl PKG_NAME:=curl
PKG_VERSION:=8.4.0 PKG_VERSION:=8.5.0
PKG_RELEASE:=2 PKG_RELEASE:=1
PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.bz2 PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.bz2
PKG_SOURCE_URL:=https://github.com/curl/curl/releases/download/curl-$(subst .,_,$(PKG_VERSION))/ \ PKG_SOURCE_URL:=https://github.com/curl/curl/releases/download/curl-$(subst .,_,$(PKG_VERSION))/ \
https://dl.uxnr.de/mirror/curl/ \ https://dl.uxnr.de/mirror/curl/ \
https://curl.askapache.com/download/ \ https://curl.askapache.com/download/ \
https://curl.se/download/ https://curl.se/download/
PKG_HASH:=e5250581a9c032b1b6ed3cf2f9c114c811fc41881069e9892d115cc73f9e88c6 PKG_HASH:=ce4b6a6655431147624aaf582632a36fe1ade262d5fab385c60f78942dd8d87b
PKG_LICENSE:=MIT PKG_LICENSE:=MIT
PKG_LICENSE_FILES:=COPYING PKG_LICENSE_FILES:=COPYING

View file

@ -1,6 +1,6 @@
--- a/Makefile.am --- a/Makefile.am
+++ b/Makefile.am +++ b/Makefile.am
@@ -159,7 +159,7 @@ CLEANFILES = $(VC10_LIBVCXPROJ) $(VC10_S @@ -134,7 +134,7 @@ CLEANFILES = $(VC14_LIBVCXPROJ) \
bin_SCRIPTS = curl-config bin_SCRIPTS = curl-config
SUBDIRS = lib src SUBDIRS = lib src
@ -9,7 +9,7 @@
pkgconfigdir = $(libdir)/pkgconfig pkgconfigdir = $(libdir)/pkgconfig
pkgconfig_DATA = libcurl.pc pkgconfig_DATA = libcurl.pc
@@ -273,8 +273,6 @@ cygwinbin: @@ -248,8 +248,6 @@ cygwinbin:
# We extend the standard install with a custom hook: # We extend the standard install with a custom hook:
install-data-hook: install-data-hook:
(cd include && $(MAKE) install) (cd include && $(MAKE) install)

View file

@ -1,12 +1,12 @@
include $(TOPDIR)/rules.mk include $(TOPDIR)/rules.mk
PKG_NAME:=dnsdist PKG_NAME:=dnsdist
PKG_VERSION:=1.8.2 PKG_VERSION:=1.8.3
PKG_RELEASE:=1 PKG_RELEASE:=1
PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.bz2 PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.bz2
PKG_SOURCE_URL:=https://downloads.powerdns.com/releases/ PKG_SOURCE_URL:=https://downloads.powerdns.com/releases/
PKG_HASH:=6688f09b2c52f9bf935f0769f4ee28dd0760e5622dade7b3f4e6fa3776f07ab8 PKG_HASH:=858323f2ed5181488bb7558fbf4f84ec7198600b070b2c5375d15d40695727f4
PKG_MAINTAINER:=Peter van Dijk <peter.van.dijk@powerdns.com> PKG_MAINTAINER:=Peter van Dijk <peter.van.dijk@powerdns.com>
PKG_LICENSE:=GPL-2.0-only PKG_LICENSE:=GPL-2.0-only

View file

@ -5,12 +5,12 @@
include $(TOPDIR)/rules.mk include $(TOPDIR)/rules.mk
PKG_NAME:=dnsproxy PKG_NAME:=dnsproxy
PKG_VERSION:=0.60.0 PKG_VERSION:=0.61.0
PKG_RELEASE:=1 PKG_RELEASE:=1
PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz
PKG_SOURCE_URL:=https://codeload.github.com/AdguardTeam/dnsproxy/tar.gz/v$(PKG_VERSION)? PKG_SOURCE_URL:=https://codeload.github.com/AdguardTeam/dnsproxy/tar.gz/v$(PKG_VERSION)?
PKG_HASH:=4985b65cc4055f2defda5910005a998077726f5118ce55830b05d42dd73affd6 PKG_HASH:=0b75b8d8139992e77df39492d41ca7060553f80a13d7e0436f3d8e1616146b92
PKG_MAINTAINER:=Tianling Shen <cnsztl@immortalwrt.org> PKG_MAINTAINER:=Tianling Shen <cnsztl@immortalwrt.org>
PKG_LICENSE:=Apache-2.0 PKG_LICENSE:=Apache-2.0

View file

@ -2,11 +2,11 @@ include $(TOPDIR)/rules.mk
PKG_NAME:=gnunet PKG_NAME:=gnunet
PKG_VERSION:=0.19.4 PKG_VERSION:=0.20.0
PKG_RELEASE:=1 PKG_RELEASE:=1
PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz
PKG_SOURCE_URL:=@GNU/gnunet PKG_SOURCE_URL:=@GNU/gnunet
PKG_HASH:=00a63df408d5987f5ba9a50441f2a77182bd9fb32f1e302ae563ac94e7ac009b PKG_HASH:=56029e78a99c04d52b1358094ae5074e4cd8ea9b98cf6855f57ad9af27ac9518
PKG_LICENSE:=AGPL-3.0 PKG_LICENSE:=AGPL-3.0
PKG_LICENSE_FILES:=COPYING PKG_LICENSE_FILES:=COPYING
@ -268,9 +268,9 @@ CONF_fs:=fs
DEPENDS_gns:=+gnunet-vpn +iptables-mod-extra DEPENDS_gns:=+gnunet-vpn +iptables-mod-extra
USERID_gns:=:gnunetdns=452 USERID_gns:=:gnunetdns=452
BIN_gns:=gns namecache namestore resolver zoneimport BIN_gns:=gns namecache namestore namestore-dbtool namestore-zonefile resolver zoneimport
LIB_gns:=gns gnsrecord namecache namestore LIB_gns:=gns gnsrecord namecache namestore
PLUGIN_gns:=block_dns block_gns gnsrecord_conversation gnsrecord_dns gnsrecord_gns PLUGIN_gns:=block_dns block_gns gnsrecord_conversation gnsrecord_dns gnsrecord_gns namecache_flat
LIBEXEC_gns:=dns2gns helper-dns service-dns service-gns service-namecache service-namestore service-resolver service-zonemaster LIBEXEC_gns:=dns2gns helper-dns service-dns service-gns service-namecache service-namestore service-resolver service-zonemaster
CONF_gns:=dns gns namecache namestore resolver zonemaster CONF_gns:=dns gns namecache namestore resolver zonemaster
FILE_MODES_gns:=/usr/lib/gnunet/libexec/gnunet-helper-dns:root:gnunetdns:4750 /usr/lib/gnunet/libexec/gnunet-service-dns:gnunet:gnunetdns:2750 FILE_MODES_gns:=/usr/lib/gnunet/libexec/gnunet-helper-dns:root:gnunetdns:4750 /usr/lib/gnunet/libexec/gnunet-service-dns:gnunet:gnunetdns:2750

View file

@ -10,12 +10,12 @@
include $(TOPDIR)/rules.mk include $(TOPDIR)/rules.mk
PKG_NAME:=haproxy PKG_NAME:=haproxy
PKG_VERSION:=2.8.4 PKG_VERSION:=2.8.5
PKG_RELEASE:=1 PKG_RELEASE:=1
PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz
PKG_SOURCE_URL:=https://www.haproxy.org/download/2.8/src PKG_SOURCE_URL:=https://www.haproxy.org/download/2.8/src
PKG_HASH:=81bacbf50ec6d0f7ecaaad7c03e59978b00322fbdad6ed4a989dd31754b6f25d PKG_HASH:=3f5459c5a58e0b343a32eaef7ed5bed9d3fc29d8aa9e14b36c92c969fc2a60d9
PKG_MAINTAINER:=Thomas Heil <heil@terminal-consulting.de>, \ PKG_MAINTAINER:=Thomas Heil <heil@terminal-consulting.de>, \
Christian Lachner <gladiac@gmail.com> Christian Lachner <gladiac@gmail.com>

View file

@ -1,7 +1,7 @@
#!/bin/sh #!/bin/sh
CLONEURL=https://git.haproxy.org/git/haproxy-2.8.git CLONEURL=https://git.haproxy.org/git/haproxy-2.8.git
BASE_TAG=v2.8.4 BASE_TAG=v2.8.5
TMP_REPODIR=tmprepo TMP_REPODIR=tmprepo
PATCHESDIR=patches PATCHESDIR=patches

View file

@ -2,7 +2,7 @@ include $(TOPDIR)/rules.mk
PKG_NAME:=https-dns-proxy PKG_NAME:=https-dns-proxy
PKG_VERSION:=2023-10-25 PKG_VERSION:=2023-10-25
PKG_RELEASE:=4 PKG_RELEASE:=5
PKG_SOURCE_PROTO:=git PKG_SOURCE_PROTO:=git
PKG_SOURCE_URL:=https://github.com/aarond10/https_dns_proxy/ PKG_SOURCE_URL:=https://github.com/aarond10/https_dns_proxy/

View file

@ -31,6 +31,7 @@ readonly canaryDomainsiCloud='mask.icloud.com mask-h2.icloud.com'
on_boot_trigger= on_boot_trigger=
dnsmasq_restart() { [ -x /etc/init.d/dnsmasq ] || return 1; /etc/init.d/dnsmasq restart >/dev/null 2>&1; } dnsmasq_restart() { [ -x /etc/init.d/dnsmasq ] || return 1; /etc/init.d/dnsmasq restart >/dev/null 2>&1; }
is_fw4_restart_needed() { [ "$(uci_get "$packageName" 'config' 'force_dns' '1')" = '1' ]; }
is_mac_address() { expr "$1" : '[0-9A-F][0-9A-F]:[0-9A-F][0-9A-F]:[0-9A-F][0-9A-F]:[0-9A-F][0-9A-F]:[0-9A-F][0-9A-F]:[0-9A-F][0-9A-F]$' >/dev/null; } is_mac_address() { expr "$1" : '[0-9A-F][0-9A-F]:[0-9A-F][0-9A-F]:[0-9A-F][0-9A-F]:[0-9A-F][0-9A-F]:[0-9A-F][0-9A-F]:[0-9A-F][0-9A-F]$' >/dev/null; }
is_ipv4() { expr "$1" : '[0-9][0-9]*\.[0-9][0-9]*\.[0-9][0-9]*\.[0-9][0-9]*$' >/dev/null; } is_ipv4() { expr "$1" : '[0-9][0-9]*\.[0-9][0-9]*\.[0-9][0-9]*\.[0-9][0-9]*$' >/dev/null; }
is_ipv6() { ! is_mac_address "$1" && str_contains "$1" ":"; } is_ipv6() { ! is_mac_address "$1" && str_contains "$1" ":"; }
@ -168,7 +169,7 @@ start_instance() {
json_add_object mdns json_add_object mdns
procd_add_mdns_service "$packageName" 'udp' "$port" "DNS over HTTPS proxy" procd_add_mdns_service "$packageName" 'udp' "$port" "DNS over HTTPS proxy"
json_close_object json_close_object
if [ "$force_dns" -ne 0 ]; then if [ "$force_dns" -ne '0' ]; then
json_add_array firewall json_add_array firewall
for iface in $procd_fw_src_interfaces; do for iface in $procd_fw_src_interfaces; do
for p in $force_dns_port; do for p in $force_dns_port; do
@ -218,7 +219,7 @@ start_instance() {
fi fi
output_ok output_ok
port="$((port+1))" port="$((port+1))"
force_dns=0 force_dns='0'
else else
output_fail output_fail
fi fi
@ -318,8 +319,8 @@ service_triggers() {
procd_add_config_trigger "config.change" "$packageName" "/etc/init.d/${packageName}" reload 'on_config_change' procd_add_config_trigger "config.change" "$packageName" "/etc/init.d/${packageName}" reload 'on_config_change'
} }
service_started() { procd_set_config_changed firewall; } service_started() { is_fw4_restart_needed && procd_set_config_changed firewall; }
service_stopped() { procd_set_config_changed firewall; } service_stopped() { is_fw4_restart_needed && procd_set_config_changed firewall; }
restart() { procd_send_signal "$packageName"; rc_procd start_service "$*"; } restart() { procd_send_signal "$packageName"; rc_procd start_service "$*"; }
dnsmasq_doh_server() { dnsmasq_doh_server() {
@ -339,7 +340,7 @@ dnsmasq_doh_server() {
uci_add_list_if_new 'dhcp' "$cfg" 'doh_server' "${address}#${port}" uci_add_list_if_new 'dhcp' "$cfg" 'doh_server' "${address}#${port}"
;; ;;
remove) remove)
for i in $(uci -q get "dhcp.$cfg.doh_server"); do for i in $(uci_get 'dhcp' "$cfg" 'doh_server'); do
uci_remove_list 'dhcp' "$cfg" 'server' "$i" uci_remove_list 'dhcp' "$cfg" 'server' "$i"
uci_remove_list 'dhcp' "$cfg" 'doh_server' "$i" uci_remove_list 'dhcp' "$cfg" 'doh_server' "$i"
done done
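With this change the firewall is only marked for reload when DNS hijacking is actually configured, i.e. when `force_dns` is enabled. An illustrative way to opt out of the fw4 reload entirely (uses the same named `config` section the init script reads; adjust if your config layout differs):

```
# Illustrative: disable DNS hijacking so service start/stop no longer triggers
# a firewall reload (see is_fw4_restart_needed above).
uci set https-dns-proxy.config.force_dns='0'
uci commit https-dns-proxy
/etc/init.d/https-dns-proxy restart
```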

View file

@ -5,7 +5,7 @@
return SW_VERSION; return SW_VERSION;
#else #else
- return "2023.10.10-atLeast"; // update date sometimes, like 1-2 times a year - return "2023.10.10-atLeast"; // update date sometimes, like 1-2 times a year
+ return "2023-10-25-4"; // update date sometimes, like 1-2 times a year + return "2023-10-25-5"; // update date sometimes, like 1-2 times a year
#endif #endif
} }

View file

@ -8,12 +8,12 @@
include $(TOPDIR)/rules.mk include $(TOPDIR)/rules.mk
PKG_NAME:=knot PKG_NAME:=knot
PKG_VERSION:=3.3.2 PKG_VERSION:=3.3.3
PKG_RELEASE:=1 PKG_RELEASE:=1
PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.xz PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.xz
PKG_SOURCE_URL:=https://secure.nic.cz/files/knot-dns/ PKG_SOURCE_URL:=https://secure.nic.cz/files/knot-dns/
PKG_HASH:=0d65d4b59f5df69b78c6295ade0a2ea7931831de7ef5eeee3e00f8a20af679e4 PKG_HASH:=aab40aab2acd735c500f296bacaa5c84ff0488221a4068ce9946e973beacc5ae
PKG_MAINTAINER:=Daniel Salzman <daniel.salzman@nic.cz> PKG_MAINTAINER:=Daniel Salzman <daniel.salzman@nic.cz>
PKG_LICENSE:=GPL-3.0 LGPL-2.0 0BSD BSD-3-Clause OLDAP-2.8 PKG_LICENSE:=GPL-3.0 LGPL-2.0 0BSD BSD-3-Clause OLDAP-2.8

View file

@ -1,17 +0,0 @@
--- a/src/libdnssec/key/key.c
+++ b/src/libdnssec/key/key.c
@@ -146,10 +146,14 @@ dnssec_key_t *dnssec_key_dup(const dnsse
gnutls_privkey_type_t type = gnutls_privkey_get_type(key->private_key);
if (type == GNUTLS_PRIVKEY_PKCS11) {
+#ifdef ENABLE_PKCS11
gnutls_pkcs11_privkey_t tmp;
gnutls_privkey_export_pkcs11(key->private_key, &tmp);
gnutls_privkey_import_pkcs11(dup->private_key, tmp,
GNUTLS_PRIVKEY_IMPORT_AUTO_RELEASE);
+#else
+ assert(0);
+#endif
} else {
assert(type == GNUTLS_PRIVKEY_X509);
gnutls_x509_privkey_t tmp;

View file

@ -10,14 +10,14 @@ include $(TOPDIR)/rules.mk
PKG_NAME:=libcurl-gnutls PKG_NAME:=libcurl-gnutls
PKG_SOURCE_NAME:=curl PKG_SOURCE_NAME:=curl
PKG_VERSION:=8.2.1 PKG_VERSION:=8.5.0
PKG_RELEASE:=1 PKG_RELEASE:=1
PKG_SOURCE:=$(PKG_SOURCE_NAME)-$(PKG_VERSION).tar.xz PKG_SOURCE:=$(PKG_SOURCE_NAME)-$(PKG_VERSION).tar.bz2
PKG_SOURCE_URL:=https://github.com/curl/curl/releases/download/curl-$(subst .,_,$(PKG_VERSION))/ \ PKG_SOURCE_URL:=https://github.com/curl/curl/releases/download/curl-$(subst .,_,$(PKG_VERSION))/ \
https://dl.uxnr.de/mirror/curl/ \ https://dl.uxnr.de/mirror/curl/ \
https://curl.askapache.com/download/ \ https://curl.askapache.com/download/ \
https://curl.se/download/ https://curl.se/download/
PKG_HASH:=dd322f6bd0a20e6cebdfd388f69e98c3d183bed792cf4713c8a7ef498cba4894 PKG_HASH:=ce4b6a6655431147624aaf582632a36fe1ade262d5fab385c60f78942dd8d87b
PKG_MAINTAINER:=Daniel Golle <daniel@makrotopia.org> PKG_MAINTAINER:=Daniel Golle <daniel@makrotopia.org>
PKG_LICENSE:=MIT PKG_LICENSE:=MIT

View file

@ -0,0 +1,54 @@
From af520ac9fec7d88e942f05fdcd90704adb9fa566 Mon Sep 17 00:00:00 2001
From: Baruch Siach <baruch@tkos.co.il>
Date: Mon, 11 Dec 2023 20:45:01 +0200
Subject: [PATCH] gnutls: fix build with --disable-verbose
infof() parameters must be defined event with --disable-verbose since
commit dac293cfb702 ("lib: apache style infof and trace
macros/functions").
Move also 'ptr' definition under !CURL_DISABLE_VERBOSE_STRINGS.
Fixes the following build failure:
In file included from ../lib/sendf.h:29,
from vtls/gtls.c:44:
vtls/gtls.c: In function 'Curl_gtls_verifyserver':
vtls/gtls.c:841:34: error: 'version' undeclared (first use in this function); did you mean 'session'?
841 | gnutls_protocol_get_name(version), ptr);
| ^~~~~~~
Closes #12505
---
lib/vtls/gtls.c | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
--- a/lib/vtls/gtls.c
+++ b/lib/vtls/gtls.c
@@ -822,16 +822,17 @@ Curl_gtls_verifyserver(struct Curl_easy
char certname[65] = ""; /* limited to 64 chars by ASN.1 */
size_t size;
time_t certclock;
- const char *ptr;
int rc;
CURLcode result = CURLE_OK;
#ifndef CURL_DISABLE_VERBOSE_STRINGS
+ const char *ptr;
unsigned int algo;
unsigned int bits;
gnutls_protocol_t version = gnutls_protocol_get_version(session);
#endif
long * const certverifyresult = &ssl_config->certverifyresult;
+#ifndef CURL_DISABLE_VERBOSE_STRINGS
/* the name of the cipher suite used, e.g. ECDHE_RSA_AES_256_GCM_SHA384. */
ptr = gnutls_cipher_suite_get_name(gnutls_kx_get(session),
gnutls_cipher_get(session),
@@ -839,6 +840,7 @@ Curl_gtls_verifyserver(struct Curl_easy
infof(data, "SSL connection using %s / %s",
gnutls_protocol_get_name(version), ptr);
+#endif
/* This function will return the peer's raw certificate (chain) as sent by
the peer. These certificates are in raw format (DER encoded for

View file

@ -5,8 +5,8 @@ PKG_RELEASE:=1
PKG_SOURCE_URL:=https://github.com/wkz/mdio-tools PKG_SOURCE_URL:=https://github.com/wkz/mdio-tools
PKG_SOURCE_PROTO:=git PKG_SOURCE_PROTO:=git
PKG_SOURCE_VERSION:=1.3.0 PKG_SOURCE_VERSION:=1.3.1
PKG_MIRROR_HASH:=409ac6aa6a141c0c6691522b27b90cb04d8323d3a9c3b5587f7cbae3f5c24536 PKG_MIRROR_HASH:=b7973284dc3dffef4bd2a904e3f7aa7fd3caab4aecf85ac57488f5acbf341aba
PKG_FIXUP:=autoreconf PKG_FIXUP:=autoreconf

View file

@ -27,8 +27,9 @@ PKG_BUILD_FLAGS:=gc-sections lto
include $(INCLUDE_DIR)/package.mk include $(INCLUDE_DIR)/package.mk
define Package/mosh/Default define Package/mosh/Default
SECTION:=utils SECTION:=net
CATEGORY:=Utilities CATEGORY:=Network
SUBMENU:=SSH
TITLE:=Mosh mobile shell TITLE:=Mosh mobile shell
DEPENDS:=+libncursesw +libopenssl +protobuf DEPENDS:=+libncursesw +libopenssl +protobuf
URL:=https://mosh.org/ URL:=https://mosh.org/

View file

@ -4,12 +4,12 @@
include $(TOPDIR)/rules.mk include $(TOPDIR)/rules.mk
PKG_NAME:=nebula PKG_NAME:=nebula
PKG_VERSION:=1.7.2 PKG_VERSION:=1.8.0
PKG_RELEASE:=1 PKG_RELEASE:=1
PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz
PKG_SOURCE_URL:=https://codeload.github.com/slackhq/nebula/tar.gz/v$(PKG_VERSION)? PKG_SOURCE_URL:=https://codeload.github.com/slackhq/nebula/tar.gz/v$(PKG_VERSION)?
PKG_HASH:=c4771ce6eb3e142f88f5f4c12443cfca140bf96b2746c74f9536bd1a362f3f88 PKG_HASH:=678ad2bda47258cce8c2d14b3fa56d17c0ba4f894d75b75afab8937d64e12da7
PKG_MAINTAINER:=Stan Grishin <stangri@melmac.ca> PKG_MAINTAINER:=Stan Grishin <stangri@melmac.ca>
PKG_LICENSE:=MIT PKG_LICENSE:=MIT
@ -51,6 +51,9 @@ define Package/nebula-proto
TITLE:=nebula-proto TITLE:=nebula-proto
URL:=https://docs.openwrt.melmac.net/nebula/ URL:=https://docs.openwrt.melmac.net/nebula/
DEPENDS:=nebula DEPENDS:=nebula
DEPENDS+=+!BUSYBOX_DEFAULT_AWK:gawk
DEPENDS+=+!BUSYBOX_DEFAULT_GREP:grep
DEPENDS+=+!BUSYBOX_DEFAULT_SED:sed
PKGARCH:=all PKGARCH:=all
endef endef
@ -60,6 +63,8 @@ define Package/nebula-service
TITLE:=nebula-service TITLE:=nebula-service
URL:=https://docs.openwrt.melmac.net/nebula/ URL:=https://docs.openwrt.melmac.net/nebula/
DEPENDS:=nebula DEPENDS:=nebula
DEPENDS+=+!BUSYBOX_DEFAULT_AWK:gawk
DEPENDS+=+!BUSYBOX_DEFAULT_SED:sed
CONFLICTS:=nebula-proto CONFLICTS:=nebula-proto
PKGARCH:=all PKGARCH:=all
endef endef
@ -69,28 +74,28 @@ define Build/Compile
endef endef
define Package/nebula/description define Package/nebula/description
Nebula is a scalable overlay networking tool with a focus on performance, simplicity Nebula is a scalable overlay networking tool with a focus on performance, simplicity
and security. It lets you seamlessly connect computers anywhere in the world. and security. It lets you seamlessly connect computers anywhere in the world.
This package contains only nebula binary. Unless you want to start nebula manually, This package contains only nebula binary. Unless you want to start nebula manually,
you may want to also install *either* 'nebula-service' *or* 'nebula-proto' package. you may want to also install *either* 'nebula-service' *or* 'nebula-proto' package.
endef endef
define Package/nebula-cert/description define Package/nebula-cert/description
Nebula is a scalable overlay networking tool with a focus on performance, simplicity Nebula is a scalable overlay networking tool with a focus on performance, simplicity
and security. It lets you seamlessly connect computers anywhere in the world. and security. It lets you seamlessly connect computers anywhere in the world.
This package contains only nebula-cert binary. This package contains only nebula-cert binary.
endef endef
define Package/nebula-proto/description define Package/nebula-proto/description
Nebula is a scalable overlay networking tool with a focus on performance, simplicity Nebula is a scalable overlay networking tool with a focus on performance, simplicity
and security. It lets you seamlessly connect computers anywhere in the world. and security. It lets you seamlessly connect computers anywhere in the world.
This package contains only OpenWrt protocol/interface support for nebula. This package contains only OpenWrt protocol/interface support for nebula.
endef endef
define Package/nebula-service/description define Package/nebula-service/description
Nebula is a scalable overlay networking tool with a focus on performance, simplicity Nebula is a scalable overlay networking tool with a focus on performance, simplicity
and security. It lets you seamlessly connect computers anywhere in the world. and security. It lets you seamlessly connect computers anywhere in the world.
This package contains only OpenWrt-specific init.d script for nebula. This package contains only OpenWrt-specific init.d script for nebula.
endef endef
define Package/nebula/install define Package/nebula/install

View file

@ -49,6 +49,7 @@ proto_nebula_setup() {
[ -s "$config_file" ] || { log "Config file not found or empty!"; return 1; } [ -s "$config_file" ] || { log "Config file not found or empty!"; return 1; }
eval "$(yaml_parse "$config_file" "yaml_")" eval "$(yaml_parse "$config_file" "yaml_")"
yaml_tun_dev="${yaml_tun_dev%"${yaml_tun_dev##*[![:space:]]}"}"
[ "$yaml_tun_dev" = "$interface" ] || { log "Tunnel device in config file (${yaml_tun_dev}) doesn't match interface name (${interface})!"; return 1; } [ "$yaml_tun_dev" = "$interface" ] || { log "Tunnel device in config file (${yaml_tun_dev}) doesn't match interface name (${interface})!"; return 1; }
log "Setting up ${interface} from $(basename "$config_file")." log "Setting up ${interface} from $(basename "$config_file")."
@ -71,7 +72,7 @@ proto_nebula_setup() {
json_close_array json_close_array
proto_close_data proto_close_data
addresses="$(ip -4 a list dev "$interface" 2>/dev/null | grep inet | awk '{print $2}' | awk -F "/" '{print $1}')" addresses="$(ip -4 a list dev "$interface" 2>/dev/null | grep inet | awk '{print $2}' | awk -F "/" '{print $1}')"
log "Running ${interface} from $(basename "$config_file") with addresses: ${addresses}." log "Running ${interface} from $(basename "$config_file")${addresses+: with addresses: $addresses}."
for address in ${addresses}; do for address in ${addresses}; do
case "${address}" in case "${address}" in
*:*/*) *:*/*)

View file

@ -8,12 +8,12 @@
include $(TOPDIR)/rules.mk include $(TOPDIR)/rules.mk
PKG_NAME:=nut PKG_NAME:=nut
PKG_VERSION:=2.8.0 PKG_VERSION:=2.8.1
PKG_RELEASE:=3 PKG_RELEASE:=1
PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz
PKG_SOURCE_URL:=http://www.networkupstools.org/source/2.8/ PKG_SOURCE_URL:=http://www.networkupstools.org/source/2.8/
PKG_HASH:=c3e5a708da797b7c70b653d37b1206a000fcb503b85519fe4cdf6353f792bfe5 PKG_HASH:=7da48ee23b1f0d8d72560bb0af84f5c5ae4dbe35452b84cb49840132e47f099c
PKG_LICENSE:=GPL-2.0-or-later GPL-3.0-or-later GPL-1.0-or-later Artistic-1.0-Perl PKG_LICENSE:=GPL-2.0-or-later GPL-3.0-or-later GPL-1.0-or-later Artistic-1.0-Perl
PKG_LICENSE_FILES:=LICENSE-GPL2 LICENSE-GPL3 COPYING PKG_LICENSE_FILES:=LICENSE-GPL2 LICENSE-GPL3 COPYING
PKG_FIXUP:=autoreconf PKG_FIXUP:=autoreconf

View file

@ -1,27 +0,0 @@
From cafd77993ec5e16634b774b65bf6da9b34a21fc5 Mon Sep 17 00:00:00 2001
From: Jim Klimov <jimklimov+nut@gmail.com>
Date: Wed, 31 Aug 2022 11:24:19 +0200
Subject: [PATCH] clients/upsclient.h: ensure time_t is defined
--- a/clients/upsclient.h
+++ b/clients/upsclient.h
@@ -41,6 +41,18 @@
#include <limits.h>
#endif
+/* Not including NUT timehead.h because this is part of end-user API */
+#ifdef TIME_WITH_SYS_TIME
+# include <sys/time.h>
+# include <time.h>
+#else
+# ifdef HAVE_SYS_TIME_H
+# include <sys/time.h>
+# else
+# include <time.h>
+# endif
+#endif
+
#ifdef __cplusplus
/* *INDENT-OFF* */
extern "C" {

View file

@ -10,7 +10,7 @@ include $(TOPDIR)/rules.mk
PKG_NAME:=openvpn PKG_NAME:=openvpn
PKG_VERSION:=2.6.8 PKG_VERSION:=2.6.8
PKG_RELEASE:=1 PKG_RELEASE:=2
PKG_SOURCE_URL:=\ PKG_SOURCE_URL:=\
https://build.openvpn.net/downloads/releases/ \ https://build.openvpn.net/downloads/releases/ \

View file

@ -10,6 +10,7 @@ STOP=10
USE_PROCD=1 USE_PROCD=1
PROG=/usr/sbin/openvpn PROG=/usr/sbin/openvpn
PATH_INSTANCE_DIR="/etc/openvpn"
LIST_SEP=" LIST_SEP="
" "
@ -171,7 +172,7 @@ openvpn_add_instance() {
procd_close_instance procd_close_instance
} }
start_instance() { start_uci_instance() {
local s="$1" local s="$1"
config_get config "$s" config config_get config "$s" config
@ -206,6 +207,43 @@ start_instance() {
openvpn_add_instance "$s" "/var/etc" "openvpn-$s.conf" "$script_security" "$up" "$down" openvpn_add_instance "$s" "/var/etc" "openvpn-$s.conf" "$script_security" "$up" "$down"
} }
start_path_instances() {
local path name
for path in ${PATH_INSTANCE_DIR}/*.conf; do
[ -f "$path" ] && {
name="${path##*/}"
name="${name%.conf}"
start_path_instance "$name"
}
done
}
start_path_instance() {
local name="$1"
local path up down
path="${PATH_INSTANCE_DIR}/${name}.conf"
# don't start configs again that are already started by uci
if echo "$UCI_STARTED" | grep -qxF "$path"; then
logger -t openvpn "$name.conf already started"
return
fi
# don't start configs which are set to disabled in uci
if echo "$UCI_DISABLED" | grep -qxF "$path"; then
logger -t openvpn "$name.conf is disabled in /etc/config/openvpn"
return
fi
get_openvpn_option "$path" up up || up=""
get_openvpn_option "$path" down down || down=""
openvpn_add_instance "$name" "${path%/*}" "$path" "" "$up" "$down"
}
start_service() { start_service() {
local instance="$1" local instance="$1"
local instance_found=0 local instance_found=0
@ -225,31 +263,20 @@ start_service() {
config_load 'openvpn' config_load 'openvpn'
if [ -n "$instance" ]; then if [ -n "$instance" ]; then
[ "$instance_found" -gt 0 ] || return if [ "$instance_found" -gt 0 ]; then
start_instance "$instance" start_uci_instance "$instance"
else else
config_foreach start_instance 'openvpn' start_path_instance "$instance"
local path name up down
for path in /etc/openvpn/*.conf; do
if [ -f "$path" ]; then
name="${path##*/}"; name="${name%.conf}"
# don't start configs again that are already started by uci
if echo "$UCI_STARTED" | grep -qxF "$path"; then
continue
# don't start configs which are set to disabled in uci
elif echo "$UCI_DISABLED" | grep -qxF "$path"; then
logger -t openvpn "$name.conf is disabled in /etc/config/openvpn"
continue
fi fi
else
config_foreach start_uci_instance 'openvpn'
get_openvpn_option "$path" up up || up="" auto="$(uci_get openvpn globals autostart 1)"
get_openvpn_option "$path" down down || down="" if [ "$auto" = "1" ]; then
openvpn_add_instance "$name" "${path%/*}" "$path" "" "$up" "$down" start_path_instances
else
logger -t openvpn "Autostart for configs in '$PATH_INSTANCE_DIR/*.conf' disabled"
fi fi
done
fi fi
} }
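The rework keeps uci-defined instances as before, but config files dropped into `/etc/openvpn/*.conf` are now handled by `start_path_instances()` and can be switched off globally. An illustrative sketch of both knobs (the `globals` section may need to be created first, as shown; the instance name is hypothetical):

```
# Illustrative: stop auto-starting raw /etc/openvpn/*.conf files while keeping
# uci-defined instances (reads the same 'globals/autostart' option as above).
uci -q get openvpn.globals >/dev/null || uci set openvpn.globals='globals'
uci set openvpn.globals.autostart='0'
uci commit openvpn
/etc/init.d/openvpn restart

# A single path-based config can still be started on demand by name:
/etc/init.d/openvpn start clientA     # resolves to /etc/openvpn/clientA.conf
```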

View file

@ -1,12 +1,12 @@
include $(TOPDIR)/rules.mk include $(TOPDIR)/rules.mk
PKG_NAME:=pdns PKG_NAME:=pdns
PKG_VERSION:=4.8.3 PKG_VERSION:=4.8.4
PKG_RELEASE:=1 PKG_RELEASE:=1
PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.bz2 PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.bz2
PKG_SOURCE_URL:=https://downloads.powerdns.com/releases/ PKG_SOURCE_URL:=https://downloads.powerdns.com/releases/
PKG_HASH:=77b91199bdf71874334501c67e26469c2667a373d8423803fe657417295c77ba PKG_HASH:=7f40c8cbc4650d06fe49abba79902ebabb384363dabbd5cef271964a07c3645c
PKG_MAINTAINER:=Peter van Dijk <peter.van.dijk@powerdns.com> PKG_MAINTAINER:=Peter van Dijk <peter.van.dijk@powerdns.com>
PKG_LICENCE:=GPL-2.0-only PKG_LICENCE:=GPL-2.0-only

View file

@ -7,13 +7,13 @@ include $(TOPDIR)/rules.mk
PKG_NAME:=snort3 PKG_NAME:=snort3
PKG_VERSION:=3.1.76.0 PKG_VERSION:=3.1.76.0
PKG_RELEASE:=1 PKG_RELEASE:=2
PKG_SOURCE:=$(PKG_VERSION).tar.gz PKG_SOURCE:=$(PKG_VERSION).tar.gz
PKG_SOURCE_URL:=https://github.com/snort3/snort3/archive/refs/tags/ PKG_SOURCE_URL:=https://github.com/snort3/snort3/archive/refs/tags/
PKG_HASH:=5586199be8b7a7c6a1b73e0af2e2e004db8417b8282668b10583071e35c9c7a9 PKG_HASH:=5586199be8b7a7c6a1b73e0af2e2e004db8417b8282668b10583071e35c9c7a9
PKG_MAINTAINER:=W. Michael Petullo <mike@flyn.org> PKG_MAINTAINER:=W. Michael Petullo <mike@flyn.org>, John Audia <therealgraysky@proton.me>
PKG_LICENSE:=GPL-2.0-only PKG_LICENSE:=GPL-2.0-only
PKG_LICENSE_FILES:=COPYING PKG_LICENSE_FILES:=COPYING
PKG_CPE_ID:=cpe:/a:snort:snort PKG_CPE_ID:=cpe:/a:snort:snort
@ -125,15 +125,12 @@ define Package/snort3/install
$(INSTALL_CONF) \ $(INSTALL_CONF) \
./files/snort.config \ ./files/snort.config \
$(1)/etc/config/snort $(1)/etc/config/snort
$(INSTALL_CONF) \
./files/local.lua \
$(1)/etc/snort
$(INSTALL_CONF) \
./files/homenet.lua \
$(1)/etc/snort
sed \ sed \
-i -e "/^EXTERNAL_NET\\s\\+=/ a include 'homenet.lua'" \ -i \
-e "/^HOME_NET\\s\\+=/ i -- we set HOME_NET and EXTERNAL_NET here or via an included file" \ -e "/^-- HOME_NET and EXTERNAL_NET/ i -- The values for the two variables HOME_NET and EXTERNAL_NET have been" \
-e "/^-- HOME_NET and EXTERNAL_NET/ i -- moved to /etc/config/snort, so do not modify them here without good" \
-e "/^-- HOME_NET and EXTERNAL_NET/ i -- reason.\n" \
-e 's/^\(HOME_NET\s\+=\)/--\1/g' \ -e 's/^\(HOME_NET\s\+=\)/--\1/g' \
-e 's/^\(EXTERNAL_NET\s\+=\)/--\1/g' \ -e 's/^\(EXTERNAL_NET\s\+=\)/--\1/g' \
$(1)/etc/snort/snort.lua $(1)/etc/snort/snort.lua

View file

@ -1,4 +0,0 @@
-- Unused when using 'snort-mgr', do not modify without deep understanding.
-- setup HOME_NET below with your IP range/ranges to protect
--HOME_NET = [[ 192.168.1.0/24 10.1.0.0/24 ]]
--EXTERNAL_NET = "!$HOME_NET"

View file

@ -1,62 +0,0 @@
-- This file is no longer used if you are using 'snort-mgr' to create the
-- configuration. It is left as a sample.
--
-- use ths file to customize any functions defined in /etc/snort/snort.lua
-- switch tap to inline in ips and uncomment the below to run snort in inline mode
--snort = {}
--snort["-Q"] = true
ips = {
mode = tap,
-- mode = inline,
variables = default_variables,
-- uncomment and change the below to reflect rules or symlinks to rules on your filesystem
-- include = RULE_PATH .. '/snort.rules',
}
daq = {
module_dirs = {
'/usr/lib/daq',
},
modules = {
{
name = 'afpacket',
mode = 'inline',
}
}
}
alert_syslog = {
level = 'info',
}
-- To log to a file, uncomment the below and manually create the dir defined in output.logdir
--output.logdir = '/var/log/snort'
--alert_fast = {
-- file = true,
-- packet = false,
--}
normalizer = {
tcp = {
ips = true,
}
}
file_policy = {
enable_type = true,
enable_signature = true,
rules = {
use = {
verdict = 'log', enable_file_type = true, enable_file_signature = true
}
}
}
-- To use openappid with snort, install the openappid package and uncomment the below
--appid = {
-- app_detector_dir = '/usr/lib/openappid',
-- log_stats = true,
-- app_stats_period = 60,
--}

View file

@ -93,6 +93,8 @@ const snort_config = {
action: config_item("enum", [ "alert", "block", "drop", "reject" ]), action: config_item("enum", [ "alert", "block", "drop", "reject" ]),
interface: config_item("str", [ uci.get("network", "wan", "device") ]), interface: config_item("str", [ uci.get("network", "wan", "device") ]),
snaplen: config_item("range", [ 1518, 65535 ]), // int daq.snaplen = 1518: set snap length (same as -s) { 0:65535 } snaplen: config_item("range", [ 1518, 65535 ]), // int daq.snaplen = 1518: set snap length (same as -s) { 0:65535 }
include: config_item("path", [ "" ]), // User-defined snort configuration, applied at end of snort.lua.
}; };
const nfq_config = { const nfq_config = {
@ -123,7 +125,7 @@ snort
your lan range, default is '192.168.1.0/24' your lan range, default is '192.168.1.0/24'
external_net - IP range external to home. Usually 'any', but if you only external_net - IP range external to home. Usually 'any', but if you only
care about true external hosts (trusting all lan devices), care about true external hosts (trusting all lan devices),
then '!$HOMENET' or some specific range then '!$HOME_NET' or some specific range
mode - 'ids' or 'ips', for detection-only or prevention, respectively mode - 'ids' or 'ips', for detection-only or prevention, respectively
oinkcode - https://www.snort.org/oinkcodes oinkcode - https://www.snort.org/oinkcodes
config_dir - Location of the base snort configuration files. Default /etc/snort config_dir - Location of the base snort configuration files. Default /etc/snort
@ -138,6 +140,7 @@ snort
action - 'alert', 'block', 'reject' or 'drop' action - 'alert', 'block', 'reject' or 'drop'
method - 'pcap', 'afpacket' or 'nfq' method - 'pcap', 'afpacket' or 'nfq'
snaplen - int daq.snaplen = 1518: set snap length (same as -s) { 0:65535 } snaplen - int daq.snaplen = 1518: set snap length (same as -s) { 0:65535 }
include - User-defined snort configuration, applied at end of generated snort.lua
nfq - https://github.com/snort3/libdaq/blob/master/modules/nfq/README.nfq.md nfq - https://github.com/snort3/libdaq/blob/master/modules/nfq/README.nfq.md
queue_maxlen - nfq's '--daq-var queue_maxlen=int' queue_maxlen - nfq's '--daq-var queue_maxlen=int'
@ -237,7 +240,8 @@ function render_help() {
load_all(); load_all();
switch (getenv("TYPE")) { let table_type = getenv("TYPE");
switch (table_type) {
case "snort": case "snort":
render_snort(); render_snort();
return; return;
@ -255,7 +259,7 @@ switch (getenv("TYPE")) {
return; return;
default: default:
print("Invalid table type.\n"); print(`Invalid table type '${table_type}', should be one of snort, nftables, config, help.\n`);
return; return;
} }
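The new `include` item described in the help text above lets a user-maintained Lua snippet be appended to the generated snort.lua. A hedged example (the file path and its contents are placeholders; the `alert_fast` table mirrors the sample from the removed local.lua; exposing the check step as `snort-mgr check` is an assumption about the helper's CLI):

```
# Illustrative: append a custom Lua fragment to the generated snort.lua.
cat << 'EOF' > /etc/snort/local-overrides.lua
-- appended at the end of the generated configuration
alert_fast = { file = true, packet = false }
EOF

uci set snort.snort.include='/etc/snort/local-overrides.lua'
uci commit snort
snort-mgr check && /etc/init.d/snort restart   # 'check' invocation is assumed
```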

View file

@ -11,8 +11,13 @@ table inet snort {
chain {{ chain_type }}_{{ snort.mode }} { chain {{ chain_type }}_{{ snort.mode }} {
type filter hook {{ chain_type }} priority {{ nfq.chain_priority }} type filter hook {{ chain_type }} priority {{ nfq.chain_priority }}
policy accept policy accept
{% if (nfq.include) { include(nfq.include, { snort, nfq }); } %} {% if (nfq.include) {
# tcp flags ack ct direction original ct state established counter accept // We use the ucode include here, so that the included file is also
// part of the template and can use values passed in from the config.
printf("\n\t\t#-- The following content included from '%s'\n", nfq.include);
include(nfq.include, { snort, nfq });
printf("\t\t#-- End of included file.\n\n");
} %}
counter queue flags bypass to {{ queues }} counter queue flags bypass to {{ queues }}
} }
} }

View file

@ -1,7 +1,7 @@
#!/bin/sh #!/bin/sh
# Copyright (c) 2023 Eric Fahlgren <eric.fahlgren@gmail.com> # Copyright (c) 2023 Eric Fahlgren <eric.fahlgren@gmail.com>
# SPDX-License-Identifier: GPL-2.0 # SPDX-License-Identifier: GPL-2.0
# shellcheck disable=SC2039 # "local" not defined in POSIX sh # shellcheck disable=SC2039,SC2155 # "local" not defined in POSIX sh
PROG="/usr/bin/snort" PROG="/usr/bin/snort"
MAIN="/usr/share/snort/main.uc" MAIN="/usr/share/snort/main.uc"
@@ -26,7 +26,7 @@ disable_offload()
{ {
# From https://forum.openwrt.org/t/snort-3-nfq-with-ips-mode/161172 # From https://forum.openwrt.org/t/snort-3-nfq-with-ips-mode/161172
# https://blog.snort.org/2016/08/running-snort-on-commodity-hardware.html # https://blog.snort.org/2016/08/running-snort-on-commodity-hardware.html
# Not needed when running the nft daq as defragmentation is done by the kernel. # Not needed when running the nfq daq as defragmentation is done by the kernel.
# What about pcap? # What about pcap?
local filter_method=$(uci -q get snort.snort.method) local filter_method=$(uci -q get snort.snort.method)
@@ -55,6 +55,8 @@ nft_add_table() {
setup() { setup() {
# Generates all the configuration, then reports the config file for snort. # Generates all the configuration, then reports the config file for snort.
# Does NOT generate the rules file, you'll need to do 'update-rules' first. # Does NOT generate the rules file, you'll need to do 'update-rules' first.
local log_dir=$(uci get snort.snort.log_dir)
[ ! -e "$log_dir" ] && mkdir -p "$log_dir"
nft_rm_table nft_rm_table
print snort > "$CONF" print snort > "$CONF"
nft_add_table nft_add_table
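setup() now makes sure the UCI log directory exists before anything is generated. The location is user-chosen; for example (value illustrative, not a default from this commit):

    uci set snort.snort.log_dir='/var/log/snort'
    uci commit snort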
@@ -82,13 +84,33 @@ check() {
[ "$manual" = 1 ] && return 0 [ "$manual" = 1 ] && return 0
[ -n "$QUIET" ] && OUT=/dev/null || OUT=$STDOUT [ -n "$QUIET" ] && OUT=/dev/null || OUT=$STDOUT
local test_conf="${CONF_DIR}/test_conf.lua" local warn no_rules
print snort > "${test_conf}" || die "Errors during generation of config." if [ -n "$VERBOSE" ]; then
if $PROG -T -q --warn-all -c "${test_conf}" 2> $OUT ; then warn='--warn-all'
rm "${test_conf}" no_rules=0
return 0 else
warn='-q'
no_rules=1
fi fi
die "Errors in snort config tests."
local test_conf="${CONF_DIR}/test_conf.lua"
_SNORT_WITHOUT_RULES="$no_rules" print snort > "${test_conf}" || die "Errors during generation of snort config."
if $PROG -T $warn -c "${test_conf}" 2> $OUT ; then
rm "${test_conf}"
else
die "Errors in snort config tests. Examine ${test_conf} for issues."
fi
if [ "$(uci -q get snort.snort.method)" = "nfq" ]; then
local test_nft="${CONF_DIR}/test_conf.nft"
print nftables > "${test_nft}" || die "Errors during generation of nftables config."
if nft $VERBOSE --check -f "${test_nft}" ; then
rm "${test_nft}"
else
die "Errors in nftables config tests. Examine ${test_nft} for issues."
fi
fi
} }
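For reference, the two validations that check() performs amount to a parse-only snort run plus an nftables dry run; roughly the following, with illustrative paths in place of the script's CONF_DIR files:

    snort -T -q -c /tmp/test_conf.lua    # test the generated Lua config without starting snort
    nft --check -f /tmp/test_conf.nft    # dry-run the generated nftables script (nfq method only)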
report() { report() {
@@ -120,20 +142,23 @@ report() {
die "Logging is not enabled in snort config." die "Logging is not enabled in snort config."
fi fi
#if [ -z "$pattern" ]; then
# die "Provide a valid IP and try again."
#fi
[ "$NLINES" = 0 ] && output="cat" || output="head -n $NLINES" [ "$NLINES" = 0 ] && output="cat" || output="head -n $NLINES"
# Fix this to use json file. local msg src dst dir
tmp="/tmp/snort.report.$$" tmp="/tmp/snort.report.$$"
echo "Intrusions involving ${pattern:-all IPs}" for file in "${log_dir}"/*alert_json.txt; do
grep "\b${pattern}\b" "$log_dir/alert_fast.txt" \ while read -r line; do
| sed 's/.*"\([^"]*\)".* \([^ :]*\)[: ].*-> \(.*\)/\1#\2#\3/' > "$tmp" eval $(jsonfilter -s "$line" -e 'msg=$.msg' -e 'src=$.src_ap' -e 'dst=$.dst_ap' -e 'dir=$.dir')
src=$(echo "$src" | sed 's/:.*$//') # Delete all source ports.
dst=$(echo "$dst" | sed 's/:0$//') # Delete unspecified dest port.
echo "$msg#$src#$dst#$dir"
done < "$file"
done | grep -i "$pattern" > "$tmp"
echo "Events involving ${pattern:-all IPs}"
n_incidents="$(wc -l < $tmp)" n_incidents="$(wc -l < $tmp)"
lines=$(sort "$tmp" | uniq -c | sort -nr \ lines=$(sort "$tmp" | uniq -c | sort -nr \
| awk -F'#' '{printf "%-80s %-12s -> %s\n", $1, $2, $3}') | awk -F'#' '{printf "%-80s %s %-13s -> %s\n", $1, $4, $2, $3}')
echo "$lines" | $output echo "$lines" | $output
n_lines=$(echo "$lines" | wc -l) n_lines=$(echo "$lines" | wc -l)
[ "$NLINES" -gt 0 ] && [ "$NLINES" -lt "$n_lines" ] && echo " ... Only showing $NLINES of $n_lines most frequent incidents." [ "$NLINES" -gt 0 ] && [ "$NLINES" -lt "$n_lines" ] && echo " ... Only showing $NLINES of $n_lines most frequent incidents."
@@ -142,7 +167,8 @@ report() {
} }
status() { status() {
echo 'tbd' echo -n 'snort is ' ; service snort status
ps w | grep -E 'PID|snort' | grep -v grep
} }
@ -179,7 +205,7 @@ case "$1" in
teardown teardown
;; ;;
resetup) resetup)
QUIET=1 check || die "The generated snort lua configuration contains errors, not restarting." QUIET=1 check || die "The generated snort lua configuration contains errors, not restarting. Run 'snort-mgr check'"
teardown teardown
setup setup
;; ;;
@@ -221,7 +247,7 @@ Usage:
Report on incidents. Note this is somewhat experimental, so suggested Report on incidents. Note this is somewhat experimental, so suggested
improvements are quite welcome. improvements are quite welcome.
pattern = IP or piece of IP or something in the message to filter. pattern = A case-insensitive grep pattern used to filter output.
$0 [-t] update-rules $0 [-t] update-rules
@@ -243,6 +269,7 @@ Usage:
snort = The snort configuration file, which is a lua script. snort = The snort configuration file, which is a lua script.
nftables = The nftables script used to define the input queues when using nftables = The nftables script used to define the input queues when using
the 'nfq' DAQ. the 'nfq' DAQ.
help = Display config file help.
$0 [-q] check $0 [-q] check


@@ -13,7 +13,7 @@
# your lan range, default is '192.168.1.0/24' # your lan range, default is '192.168.1.0/24'
# external_net - IP range external to home. Usually 'any', but if you only # external_net - IP range external to home. Usually 'any', but if you only
# care about true external hosts (trusting all lan devices), # care about true external hosts (trusting all lan devices),
# then '!$HOMENET' or some specific range # then '!$HOME_NET' or some specific range
# mode - 'ids' or 'ips', for detection-only or prevention, respectively # mode - 'ids' or 'ips', for detection-only or prevention, respectively
# oinkcode - https://www.snort.org/oinkcodes # oinkcode - https://www.snort.org/oinkcodes
# config_dir - Location of the base snort configuration files. Default /etc/snort # config_dir - Location of the base snort configuration files. Default /etc/snort
@@ -28,6 +28,7 @@
# action - 'alert', 'block', 'reject' or 'drop' # action - 'alert', 'block', 'reject' or 'drop'
# method - 'pcap', 'afpacket' or 'nfq' # method - 'pcap', 'afpacket' or 'nfq'
# snaplen - int daq.snaplen = 1518: set snap length (same as -s) { 0:65535 } # snaplen - int daq.snaplen = 1518: set snap length (same as -s) { 0:65535 }
# include - User-defined snort configuration, applied at end of generated snort.lua
# #
# nfq - https://github.com/snort3/libdaq/blob/master/modules/nfq/README.nfq.md # nfq - https://github.com/snort3/libdaq/blob/master/modules/nfq/README.nfq.md
# queue_maxlen - nfq's '--daq-var queue_maxlen=int' # queue_maxlen - nfq's '--daq-var queue_maxlen=int'
@@ -61,6 +62,7 @@ config snort 'snort'
option action 'alert' # one of [alert, block, drop, reject] option action 'alert' # one of [alert, block, drop, reject]
option interface 'eth0' # a string option interface 'eth0' # a string
option snaplen '1518' # 1518 <= x <= 65535 option snaplen '1518' # 1518 <= x <= 65535
option include '' # a path string
config nfq 'nfq' config nfq 'nfq'
option queue_count '4' # 1 <= x <= 16 option queue_count '4' # 1 <= x <= 16
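One hypothetical way to wire up the new 'include' option from the shell; the file name is invented for the example:

    uci set snort.snort.include='/etc/snort/local_overrides.lua'
    uci commit snort
    snort-mgr check    # confirm the merged configuration still validates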


@@ -8,6 +8,7 @@ let home_net = snort.home_net == 'any' ? "'any'" : snort.home_net;
let external_net = snort.external_net; let external_net = snort.external_net;
let line_mode = snort.mode == "ids" ? "tap" : "inline"; let line_mode = snort.mode == "ids" ? "tap" : "inline";
let mod_mode = snort.mode == "ids" ? "passive" : "inline";
let inputs = null; let inputs = null;
let vars = null; let vars = null;
@ -32,9 +33,8 @@ case "nfq":
-- Do not edit, automatically generated. See /usr/share/snort/templates. -- Do not edit, automatically generated. See /usr/share/snort/templates.
-- These must be defined before processing snort.lua -- These must be defined before processing snort.lua
-- The default include '/etc/snort/homenet.lua' must not redefine them.
HOME_NET = [[ {{ home_net }} ]] HOME_NET = [[ {{ home_net }} ]]
EXTERNAL_NET = '{{ external_net }}' EXTERNAL_NET = [[ {{ external_net }} ]]
include('{{ snort.config_dir }}/snort.lua') include('{{ snort.config_dir }}/snort.lua')
@@ -42,7 +42,7 @@ snort = {
{% if (snort.mode == 'ips'): %} {% if (snort.mode == 'ips'): %}
['-Q'] = true, ['-Q'] = true,
{% endif %} {% endif %}
['--daq'] = {{ snort.method }}, ['--daq'] = '{{ snort.method }}',
--['--daq-dir'] = '/usr/lib/daq/', --['--daq-dir'] = '/usr/lib/daq/',
{% if (snort.method == 'nfq'): %} {% if (snort.method == 'nfq'): %}
['--max-packet-threads'] = {{ nfq.thread_count }}, ['--max-packet-threads'] = {{ nfq.thread_count }},
@@ -50,10 +50,14 @@ snort = {
} }
ips = { ips = {
mode = {{ line_mode }}, mode = '{{ line_mode }}',
variables = default_variables, variables = default_variables,
action_override = {{ snort.action }}, action_override = '{{ snort.action }}',
include = "{{ snort.config_dir }}/" .. RULE_PATH .. '/snort.rules', {% if (getenv("_SNORT_WITHOUT_RULES") == "1"): %}
-- WARNING: THIS IS A TEST-ONLY CONFIGURATION WITHOUT ANY RULES.
{% else %}
include = '{{ snort.config_dir }}/' .. RULE_PATH .. '/snort.rules',
{% endif -%}
} }
daq = { daq = {
@@ -63,7 +67,7 @@ daq = {
modules = { modules = {
{ {
name = '{{ snort.method }}', name = '{{ snort.method }}',
mode = {{ line_mode }}, mode = '{{ mod_mode }}',
variables = {{ vars }}, variables = {{ vars }},
} }
} }
@@ -75,12 +79,11 @@ alert_syslog = {
{% if (int(snort.logging)): %} {% if (int(snort.logging)): %}
-- Note that this is also the location of the PID file, if you use it. -- Note that this is also the location of the PID file, if you use it.
output.logdir = "{{ snort.log_dir }}" output.logdir = '{{ snort.log_dir }}'
-- Maybe add snort.log_type, 'fast', 'json' and 'full'?
-- Json would be best for reporting, see 'snort-mgr report' code.
-- alert_full = { file = true, } -- alert_full = { file = true, }
--[[
alert_fast = { alert_fast = {
-- bool alert_fast.file = false: output to alert_fast.txt instead of stdout -- bool alert_fast.file = false: output to alert_fast.txt instead of stdout
-- bool alert_fast.packet = false: output packet dump with alert -- bool alert_fast.packet = false: output packet dump with alert
@@ -88,14 +91,40 @@ alert_fast = {
file = true, file = true,
packet = false, packet = false,
} }
--]]
alert_json = { alert_json = {
-- bool alert_json.file = false: output to alert_json.txt instead of stdout -- bool alert_json.file = false: output to alert_json.txt instead of stdout
-- multi alert_json.fields = timestamp pkt_num proto pkt_gen pkt_len dir src_ap dst_ap rule action: selected fields will be output
-- int alert_json.limit = 0: set maximum size in MB before rollover (0 is unlimited) { 0:maxSZ } -- int alert_json.limit = 0: set maximum size in MB before rollover (0 is unlimited) { 0:maxSZ }
-- string alert_json.separator = , : separate fields with this character sequence -- string alert_json.separator = , : separate fields with this character sequence
-- multi alert_json.fields = 'timestamp pkt_num proto pkt_gen pkt_len dir src_ap dst_ap'
-- Rule action: selected fields will be output in given order left to right.
-- { action | class | b64_data | client_bytes | client_pkts | dir
-- | dst_addr | dst_ap | dst_port | eth_dst | eth_len | eth_src
-- | eth_type | flowstart_time | geneve_vni | gid | icmp_code
-- | icmp_id | icmp_seq | icmp_type | iface | ip_id | ip_len
-- | msg | mpls | pkt_gen | pkt_len | pkt_num | priority
-- | proto | rev | rule | seconds | server_bytes | server_pkts
-- | service | sgt | sid | src_addr | src_ap | src_port | target
-- | tcp_ack | tcp_flags | tcp_len | tcp_seq | tcp_win | timestamp
-- | tos | ttl | udp_len | vlan }
-- This is a minimal set of fields that simply supports 'snort-mgr report'
-- and minimizes log size:
fields = 'dir src_ap dst_ap msg',
-- This set also supports the report, but closely matches 'alert_fast' contents.
--fields = 'timestamp pkt_num proto pkt_gen pkt_len dir src_ap dst_ap rule action msg',
file = true, file = true,
} }
--[[
unified2 = {
limit = 10, -- int unified2.limit = 0: set maximum size in MB before rollover (0 is unlimited) { 0:maxSZ }
}
--]]
{% endif -%} {% endif -%}
normalizer = { normalizer = {
@@ -124,3 +153,12 @@ appid = {
app_stats_period = 60, app_stats_period = 60,
} }
{% endif %} {% endif %}
{%
if (snort.include) {
// We use the ucode include here, so that the included file is also
// part of the template and can use values passed in from the config.
printf("-- The following content from included file '%s'\n", snort.include);
include(snort.include, { snort, nfq });
}
%}
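Because the included file is rendered as part of the template, plain Lua placed in it simply lands at the end of the generated snort.lua. A minimal sketch of such an override file, re-enabling the alert_fast output that this commit comments out (path and content are illustrative):

    cat > /etc/snort/local_overrides.lua << 'EOF'
    -- Local additions appended after the generated configuration.
    alert_fast = {
        file = true,    -- write alert_fast.txt into output.logdir
        packet = false,
    }
    EOF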


@@ -1,13 +1,13 @@
include $(TOPDIR)/rules.mk include $(TOPDIR)/rules.mk
PKG_NAME:=snowflake PKG_NAME:=snowflake
PKG_VERSION:=2.7.0 PKG_VERSION:=2.8.0
PKG_RELEASE:=1 PKG_RELEASE:=1
PKG_SOURCE_PROTO:=git PKG_SOURCE_PROTO:=git
PKG_SOURCE_URL=https://gitlab.torproject.org/tpo/anti-censorship/pluggable-transports/snowflake.git PKG_SOURCE_URL=https://gitlab.torproject.org/tpo/anti-censorship/pluggable-transports/snowflake.git
PKG_SOURCE_VERSION:=v$(PKG_VERSION) PKG_SOURCE_VERSION:=v$(PKG_VERSION)
PKG_MIRROR_HASH:=3156dbeffaea82761372c7e64322cf9c24a05894c54ccb0d80eaed61b54e08c6 PKG_MIRROR_HASH:=20ff3c292be6d91f535b009b95578d708daeb8b88cc2290e69feade7b844bf60
PKG_LICENSE:=BSD-3-Clause PKG_LICENSE:=BSD-3-Clause
PKG_LICENSE_FILES:=LICENSE PKG_LICENSE_FILES:=LICENSE
