Improves flow handling and adds runtime stats APIs

Refactors TCP and UDP flow managers to enhance analyzer selection and flow binding accuracy, including O(1) UDP stream rebinding by 5-tuple.
Introduces runtime stats tracking for engine and ruleset operations, exposing new APIs for granular performance and error metrics.
Optimizes GeoMatcher with result caching and supports efficient geosite set matching, reducing redundant computation in ruleset expressions.
This commit is contained in:
2026-05-13 06:10:38 +05:30
parent 3f895adb43
commit 7a3f6e945d
23 changed files with 1440 additions and 152 deletions
+233 -54
View File
@@ -1,52 +1,45 @@
package geo
import (
"container/list"
"net"
"sort"
"strings"
"sync"
)
const (
geoSiteResultCacheSize = 1 << 16
geoSiteSetResultCacheSize = 1 << 16
)
type GeoMatcher struct {
geoLoader GeoLoader
geoSiteMatcher map[string]hostMatcher
siteMatcherLock sync.Mutex
siteMatcherLock sync.RWMutex
geoSiteSets map[string][]hostMatcher
siteSetLock sync.RWMutex
geoIpMatcher map[string]hostMatcher
ipMatcherLock sync.Mutex
ipMatcherLock sync.RWMutex
geoSiteResult *boolLRUCache
geoSiteSetCache *boolLRUCache
}
func NewGeoMatcher(geoSiteFilename, geoIpFilename string) *GeoMatcher {
return &GeoMatcher{
geoLoader: NewDefaultGeoLoader(geoSiteFilename, geoIpFilename),
geoSiteMatcher: make(map[string]hostMatcher),
geoIpMatcher: make(map[string]hostMatcher),
geoLoader: NewDefaultGeoLoader(geoSiteFilename, geoIpFilename),
geoSiteMatcher: make(map[string]hostMatcher),
geoSiteSets: make(map[string][]hostMatcher),
geoIpMatcher: make(map[string]hostMatcher),
geoSiteResult: newBoolLRUCache(geoSiteResultCacheSize),
geoSiteSetCache: newBoolLRUCache(geoSiteSetResultCacheSize),
}
}
func (g *GeoMatcher) MatchGeoIp(ip, condition string) bool {
g.ipMatcherLock.Lock()
defer g.ipMatcherLock.Unlock()
matcher, ok := g.geoIpMatcher[condition]
if !ok {
// GeoIP matcher
condition = strings.ToLower(condition)
country := condition
if len(country) == 0 {
return false
}
gMap, err := g.geoLoader.LoadGeoIP()
if err != nil {
return false
}
list, ok := gMap[country]
if !ok || list == nil {
return false
}
matcher, err = newGeoIPMatcher(list)
if err != nil {
return false
}
g.geoIpMatcher[condition] = matcher
matcher, ok := g.getOrCreateGeoIPMatcher(condition)
if !ok || matcher == nil {
return false
}
parseIp := net.ParseIP(ip)
if parseIp == nil {
@@ -64,32 +57,69 @@ func (g *GeoMatcher) MatchGeoIp(ip, condition string) bool {
}
func (g *GeoMatcher) MatchGeoSite(site, condition string) bool {
g.siteMatcherLock.Lock()
defer g.siteMatcherLock.Unlock()
matcher, ok := g.geoSiteMatcher[condition]
if !ok {
// MatchGeoSite matcher
condition = strings.ToLower(condition)
name, attrs := parseGeoSiteName(condition)
if len(name) == 0 {
return false
}
gMap, err := g.geoLoader.LoadGeoSite()
if err != nil {
return false
}
list, ok := gMap[name]
if !ok || list == nil {
return false
}
matcher, err = newGeositeMatcher(list, attrs)
if err != nil {
return false
}
g.geoSiteMatcher[condition] = matcher
conditionKey := strings.TrimSpace(strings.ToLower(condition))
if conditionKey == "" {
return false
}
return matcher.Match(HostInfo{Name: site})
cacheKey := site + "\x1f" + conditionKey
if v, ok := g.geoSiteResult.Get(cacheKey); ok {
return v
}
matcher, ok := g.getOrCreateGeoSiteMatcher(condition)
if !ok || matcher == nil {
return false
}
result := matcher.Match(HostInfo{Name: site})
g.geoSiteResult.Set(cacheKey, result)
return result
}
// MatchGeoSiteSet reports whether site matches any geosite condition in set.
// Compiled matcher lists are memoized per normalized condition set, and
// boolean results are memoized per (site, set) pair in an LRU cache.
//
// Fix: previously a partial or empty compilation (e.g. a transient geosite
// load failure) was stored in geoSiteSets and its result cached forever, so
// a later successful load could never be picked up. Now only a fully
// compiled set is memoized and only its results are cached.
func (g *GeoMatcher) MatchGeoSiteSet(site string, set *SiteConditionSet) bool {
	if set == nil {
		return false
	}
	conditions := normalizeGeoSiteSetConditions(set.Conditions)
	if len(conditions) == 0 {
		return false
	}
	// \x1f joins conditions (it cannot appear in normalized names);
	// \x1e separates site from the set key to avoid collisions.
	key := strings.Join(conditions, "\x1f")
	cacheKey := site + "\x1e" + key
	if v, ok := g.geoSiteSetCache.Get(cacheKey); ok {
		return v
	}
	g.siteSetLock.RLock()
	matchers, ok := g.geoSiteSets[key]
	g.siteSetLock.RUnlock()
	complete := ok
	if !ok {
		compiled := make([]hostMatcher, 0, len(conditions))
		for _, condition := range conditions {
			m, mok := g.getOrCreateGeoSiteMatcher(condition)
			if mok && m != nil {
				compiled = append(compiled, m)
			}
		}
		// Only memoize a fully compiled set; partial failures are used
		// transiently so the next call retries compilation.
		complete = len(compiled) == len(conditions)
		if complete {
			g.siteSetLock.Lock()
			if existing, exists := g.geoSiteSets[key]; exists {
				matchers = existing
			} else {
				g.geoSiteSets[key] = compiled
				matchers = compiled
			}
			g.siteSetLock.Unlock()
		} else {
			matchers = compiled
		}
	}
	if len(matchers) == 0 {
		return false
	}
	host := HostInfo{Name: site}
	for _, matcher := range matchers {
		if matcher.Match(host) {
			if complete {
				g.geoSiteSetCache.Set(cacheKey, true)
			}
			return true
		}
	}
	if complete {
		g.geoSiteSetCache.Set(cacheKey, false)
	}
	return false
}
func (g *GeoMatcher) LoadGeoSite() error {
@@ -111,3 +141,152 @@ func parseGeoSiteName(s string) (string, []string) {
}
return base, attrs
}
// getOrCreateGeoSiteMatcher returns the memoized matcher for condition,
// compiling and publishing one on first use. The boolean result is false
// when the condition is empty or a matcher cannot be built.
func (g *GeoMatcher) getOrCreateGeoSiteMatcher(condition string) (hostMatcher, bool) {
	condition = strings.TrimSpace(strings.ToLower(condition))
	if condition == "" {
		return nil, false
	}
	// Fast path: already compiled.
	g.siteMatcherLock.RLock()
	cached, found := g.geoSiteMatcher[condition]
	g.siteMatcherLock.RUnlock()
	if found {
		return cached, true
	}
	name, attrs := parseGeoSiteName(condition)
	if name == "" {
		return nil, false
	}
	siteMap, err := g.geoLoader.LoadGeoSite()
	if err != nil {
		return nil, false
	}
	entry, present := siteMap[name]
	if !present || entry == nil {
		return nil, false
	}
	built, err := newGeositeMatcher(entry, attrs)
	if err != nil {
		return nil, false
	}
	// Publish under the write lock; if another goroutine won the race
	// while we were compiling, prefer its matcher.
	g.siteMatcherLock.Lock()
	defer g.siteMatcherLock.Unlock()
	if winner, raced := g.geoSiteMatcher[condition]; raced {
		return winner, true
	}
	g.geoSiteMatcher[condition] = built
	return built, true
}
// getOrCreateGeoIPMatcher returns the memoized GeoIP matcher for condition
// (a country code), compiling and publishing one on first use. The boolean
// result is false when the condition is empty or a matcher cannot be built.
func (g *GeoMatcher) getOrCreateGeoIPMatcher(condition string) (hostMatcher, bool) {
	condition = strings.TrimSpace(strings.ToLower(condition))
	if condition == "" {
		return nil, false
	}
	// Fast path: already compiled.
	g.ipMatcherLock.RLock()
	cached, found := g.geoIpMatcher[condition]
	g.ipMatcherLock.RUnlock()
	if found {
		return cached, true
	}
	ipMap, err := g.geoLoader.LoadGeoIP()
	if err != nil {
		return nil, false
	}
	entry, present := ipMap[condition]
	if !present || entry == nil {
		return nil, false
	}
	built, err := newGeoIPMatcher(entry)
	if err != nil {
		return nil, false
	}
	// Publish under the write lock; if another goroutine won the race
	// while we were compiling, prefer its matcher.
	g.ipMatcherLock.Lock()
	defer g.ipMatcherLock.Unlock()
	if winner, raced := g.geoIpMatcher[condition]; raced {
		return winner, true
	}
	g.geoIpMatcher[condition] = built
	return built, true
}
// normalizeGeoSiteSetConditions lowercases and trims each condition,
// drops empties, deduplicates, and returns the result sorted so that
// equivalent sets produce identical cache keys. Returns nil for no input.
func normalizeGeoSiteSetConditions(in []string) []string {
	if len(in) == 0 {
		return nil
	}
	seen := make(map[string]struct{}, len(in))
	normalized := make([]string, 0, len(in))
	for _, raw := range in {
		c := strings.TrimSpace(strings.ToLower(raw))
		if c == "" {
			continue
		}
		if _, dup := seen[c]; dup {
			continue
		}
		seen[c] = struct{}{}
		normalized = append(normalized, c)
	}
	sort.Strings(normalized)
	return normalized
}
// boolLRUCache is a fixed-capacity, mutex-guarded LRU cache mapping
// string keys to boolean values.
type boolLRUCache struct {
	mu    sync.Mutex
	cap   int
	ll    *list.List // front = most recently used
	items map[string]*list.Element
}

// boolCacheEntry is the payload stored in each list element.
type boolCacheEntry struct {
	key   string
	value bool
}

// newBoolLRUCache returns an LRU cache holding at most capacity entries;
// a non-positive capacity is clamped to 1.
func newBoolLRUCache(capacity int) *boolLRUCache {
	if capacity < 1 {
		capacity = 1
	}
	return &boolLRUCache{
		cap:   capacity,
		ll:    list.New(),
		items: make(map[string]*list.Element, capacity),
	}
}

// Get returns the cached value for key and whether it was present,
// promoting the entry to most recently used on a hit.
func (c *boolLRUCache) Get(key string) (bool, bool) {
	c.mu.Lock()
	defer c.mu.Unlock()
	ele, ok := c.items[key]
	if !ok {
		return false, false
	}
	c.ll.MoveToFront(ele)
	return ele.Value.(boolCacheEntry).value, true
}

// Set inserts or updates key, evicting the least recently used entry
// once the capacity is exceeded.
func (c *boolLRUCache) Set(key string, value bool) {
	c.mu.Lock()
	defer c.mu.Unlock()
	if ele, hit := c.items[key]; hit {
		ele.Value = boolCacheEntry{key: key, value: value}
		c.ll.MoveToFront(ele)
		return
	}
	c.items[key] = c.ll.PushFront(boolCacheEntry{key: key, value: value})
	for c.ll.Len() > c.cap {
		oldest := c.ll.Back()
		if oldest == nil {
			break
		}
		delete(c.items, oldest.Value.(boolCacheEntry).key)
		c.ll.Remove(oldest)
	}
}
+79 -1
View File
@@ -1,13 +1,14 @@
package geo
import (
"sync/atomic"
"testing"
"git.difuse.io/Difuse/Mellaris/ruleset/builtins/geo/v2geo"
)
type fakeGeoLoader struct {
geoip map[string]*v2geo.GeoIP
geoip map[string]*v2geo.GeoIP
geosite map[string]*v2geo.GeoSite
}
@@ -110,6 +111,83 @@ func TestGeoMatcher_MatchGeoSite_MissingSite(t *testing.T) {
}
}
// TestGeoMatcher_MatchGeoSiteSet exercises set matching end-to-end against
// a fake loader providing a "plain" rule (openai) and a root-domain rule
// (google.com), and verifies that padded/duplicate conditions are handled.
func TestGeoMatcher_MatchGeoSiteSet(t *testing.T) {
	loader := &fakeGeoLoader{
		geosite: map[string]*v2geo.GeoSite{
			"openai": {
				Domain: []*v2geo.Domain{
					{Type: v2geo.Domain_Plain, Value: "openai"},
				},
			},
			"google": {
				Domain: []*v2geo.Domain{
					{Type: v2geo.Domain_RootDomain, Value: "google.com"},
				},
			},
		},
	}
	g := NewGeoMatcher("", "")
	g.geoLoader = loader
	// " google " and "OPENAI" exercise trimming, lowercasing and dedup
	// performed by condition normalization.
	set := &SiteConditionSet{Conditions: []string{" google ", "openai", "OPENAI"}}
	if !g.MatchGeoSiteSet("api.openai.com", set) {
		t.Error("MatchGeoSiteSet should match openai")
	}
	if !g.MatchGeoSiteSet("mail.google.com", set) {
		t.Error("MatchGeoSiteSet should match google")
	}
	if g.MatchGeoSiteSet("example.com", set) {
		t.Error("MatchGeoSiteSet should not match unrelated host")
	}
}
// countingMatcher is a test double that records how many times Match is
// invoked (via a shared atomic counter) and returns a fixed result.
type countingMatcher struct {
	calls *atomic.Uint64
	match bool
}

// Match increments the shared call counter and returns the configured result;
// the host argument is ignored.
func (m countingMatcher) Match(host HostInfo) bool {
	_ = host
	m.calls.Add(1)
	return m.match
}
// TestGeoMatcher_MatchGeoSite_UsesResultCache checks that a repeated
// (site, condition) query is answered from the result cache: the
// underlying matcher must run exactly once.
func TestGeoMatcher_MatchGeoSite_UsesResultCache(t *testing.T) {
	g := NewGeoMatcher("", "")
	var calls atomic.Uint64
	// Pre-seed the compiled-matcher map so no geosite file is required.
	g.geoSiteMatcher["openai"] = countingMatcher{calls: &calls, match: true}
	if !g.MatchGeoSite("api.openai.com", "openai") {
		t.Fatal("expected match")
	}
	if !g.MatchGeoSite("api.openai.com", "openai") {
		t.Fatal("expected cached match")
	}
	if got := calls.Load(); got != 1 {
		t.Fatalf("matcher calls=%d want=1", got)
	}
}
// TestGeoMatcher_MatchGeoSiteSet_UsesResultCache checks that a repeated
// set query is answered from the LRU result cache: the two seeded matchers
// run once each on the first call and not at all on the second.
func TestGeoMatcher_MatchGeoSiteSet_UsesResultCache(t *testing.T) {
	g := NewGeoMatcher("", "")
	var calls atomic.Uint64
	// Pre-seed the compiled set under its normalized (sorted, \x1f-joined) key.
	g.geoSiteSets["openai\x1fyoutube"] = []hostMatcher{
		countingMatcher{calls: &calls, match: false},
		countingMatcher{calls: &calls, match: true},
	}
	set := &SiteConditionSet{Conditions: []string{"youtube", "openai"}}
	if !g.MatchGeoSiteSet("www.youtube.com", set) {
		t.Fatal("expected match")
	}
	if !g.MatchGeoSiteSet("www.youtube.com", set) {
		t.Fatal("expected cached match")
	}
	if got := calls.Load(); got != 2 {
		t.Fatalf("matcher calls=%d want=2", got)
	}
}
// ipv4 packs four octets into a 4-byte IPv4 address slice.
func ipv4(a, b, c, d byte) []byte {
	addr := make([]byte, 4)
	addr[0], addr[1], addr[2], addr[3] = a, b, c, d
	return addr
}
+4
View File
@@ -13,6 +13,10 @@ type HostInfo struct {
IPv6 net.IP
}
// SiteConditionSet is a group of geosite condition names that are matched
// together as a single set (see GeoMatcher.MatchGeoSiteSet).
type SiteConditionSet struct {
	// Conditions holds the geosite condition names; they are normalized
	// (trimmed, lowercased, deduplicated) by the matcher before use.
	Conditions []string
}
func (h HostInfo) String() string {
return fmt.Sprintf("%s|%s|%s", h.Name, h.IPv4, h.IPv6)
}
+203 -24
View File
@@ -60,8 +60,8 @@ type compiledExprRule struct {
ModInstance modifier.Instance
Program *vm.Program
GeoSiteConditions []string
StartTimeSecs int // seconds since midnight, -1 if unset
StopTimeSecs int // seconds since midnight, -1 if unset
StartTimeSecs int // seconds since midnight, -1 if unset
StopTimeSecs int // seconds since midnight, -1 if unset
Weekdays []time.Weekday
WeekdaysNegated bool
}
@@ -86,6 +86,7 @@ type exprRuleset struct {
Ans []analyzer.Analyzer
Logger Logger
GeoMatcher *geo.GeoMatcher
stats *statsCounters
}
func (r *exprRuleset) Analyzers(info StreamInfo) []analyzer.Analyzer {
@@ -93,9 +94,24 @@ func (r *exprRuleset) Analyzers(info StreamInfo) []analyzer.Analyzer {
}
func (r *exprRuleset) Match(info StreamInfo) MatchResult {
start := time.Now()
if r.stats != nil {
r.stats.MatchCalls.Add(1)
defer func() {
r.stats.MatchLatencyNanos.Add(uint64(time.Since(start).Nanoseconds()))
}()
}
env := envPool.Get().(map[string]any)
clear(env)
populateExprEnv(env, info)
macMap, ipMap, portMap := populateExprEnv(env, info)
releaseEnv := func() {
clear(env)
envPool.Put(env)
putSubMap(macMap)
putSubMap(ipMap)
putSubMap(portMap)
}
now := time.Now()
for _, rule := range r.Rules {
if !matchTime(now, rule.StartTimeSecs, rule.StopTimeSecs, rule.Weekdays, rule.WeekdaysNegated) {
@@ -103,6 +119,9 @@ func (r *exprRuleset) Match(info StreamInfo) MatchResult {
}
v, err := vm.Run(rule.Program, env)
if err != nil {
if r.stats != nil {
r.stats.MatchErrors.Add(1)
}
r.Logger.MatchError(info, rule.Name, err)
continue
}
@@ -115,7 +134,7 @@ func (r *exprRuleset) Match(info StreamInfo) MatchResult {
r.Logger.Log(logInfo, rule.Name)
}
if rule.Action != nil {
envPool.Put(env)
releaseEnv()
return MatchResult{
Action: *rule.Action,
ModInstance: rule.ModInstance,
@@ -123,12 +142,26 @@ func (r *exprRuleset) Match(info StreamInfo) MatchResult {
}
}
}
envPool.Put(env)
releaseEnv()
return MatchResult{
Action: ActionMaybe,
}
}
// Stats returns a point-in-time snapshot of the ruleset's runtime counters.
// A nil receiver or uninitialized counters yields an all-zero Stats.
func (r *exprRuleset) Stats() Stats {
	if r == nil || r.stats == nil {
		return Stats{}
	}
	c := r.stats
	return Stats{
		MatchCalls:         c.MatchCalls.Load(),
		MatchErrors:        c.MatchErrors.Load(),
		MatchLatencyNanos:  c.MatchLatencyNanos.Load(),
		LookupCalls:        c.LookupCalls.Load(),
		LookupErrors:       c.LookupErrors.Load(),
		LookupLatencyNanos: c.LookupLatencyNanos.Load(),
	}
}
// CompileExprRules compiles a list of expression rules into a ruleset.
// It returns an error if any of the rules are invalid, or if any of the analyzers
// used by the rules are unknown (not provided in the analyzer list).
@@ -137,7 +170,8 @@ func CompileExprRules(rules []ExprRule, ans []analyzer.Analyzer, mods []modifier
fullAnMap := analyzersToMap(ans)
fullModMap := modifiersToMap(mods)
depAnMap := make(map[string]analyzer.Analyzer)
funcMap, geoMatcher := buildFunctionMap(config)
stats := &statsCounters{}
funcMap, geoMatcher := buildFunctionMap(config, stats)
// Compile all rules and build a map of analyzers that are used by the rules.
for _, rule := range rules {
if rule.Action == "" && !rule.Log {
@@ -152,7 +186,7 @@ func CompileExprRules(rules []ExprRule, ans []analyzer.Analyzer, mods []modifier
action = &a
}
visitor := &idVisitor{Variables: make(map[string]bool), Identifiers: make(map[string]bool)}
patcher := &idPatcher{FuncMap: funcMap}
patcher := &idPatcher{FuncMap: funcMap, GeoMatcher: geoMatcher}
program, err := expr.Compile(rule.Expr,
func(c *conf.Config) {
c.Strict = false
@@ -242,29 +276,47 @@ func CompileExprRules(rules []ExprRule, ans []analyzer.Analyzer, mods []modifier
Ans: depAns,
Logger: config.Logger,
GeoMatcher: geoMatcher,
stats: stats,
}, nil
}
func populateExprEnv(m map[string]any, info StreamInfo) {
func populateExprEnv(m map[string]any, info StreamInfo) (macMap, ipMap, portMap map[string]any) {
macMap = getSubMap()
ipMap = getSubMap()
portMap = getSubMap()
macMap["src"] = info.SrcMAC.String()
macMap["dst"] = info.DstMAC.String()
ipMap["src"] = info.SrcIP.String()
ipMap["dst"] = info.DstIP.String()
portMap["src"] = info.SrcPort
portMap["dst"] = info.DstPort
m["id"] = info.ID
m["proto"] = info.Protocol.String()
m["mac"] = map[string]string{
"src": info.SrcMAC.String(),
"dst": info.DstMAC.String(),
}
m["ip"] = map[string]string{
"src": info.SrcIP.String(),
"dst": info.DstIP.String(),
}
m["port"] = map[string]uint16{
"src": info.SrcPort,
"dst": info.DstPort,
}
m["mac"] = macMap
m["ip"] = ipMap
m["port"] = portMap
for anName, anProps := range info.Props {
if len(anProps) != 0 {
m[anName] = anProps
}
}
return macMap, ipMap, portMap
}
// getSubMap takes a scratch map from the pool, cleared and ready for reuse.
func getSubMap() map[string]any {
	sub := subMapPool.Get().(map[string]any)
	clear(sub)
	return sub
}

// putSubMap clears m and returns it to the pool; nil maps are ignored.
func putSubMap(m map[string]any) {
	if m != nil {
		clear(m)
		subMapPool.Put(m)
	}
}
func isBuiltInAnalyzer(name string) bool {
@@ -329,11 +381,15 @@ func (v *idVisitor) Visit(node *ast.Node) {
// idPatcher patches the AST during expr compilation, replacing certain values with
// their internal representations for better runtime performance.
type idPatcher struct {
FuncMap map[string]*Function
Err error
FuncMap map[string]*Function
GeoMatcher *geo.GeoMatcher
Err error
}
func (p *idPatcher) Visit(node *ast.Node) {
if p.tryPatchGeoSiteORChain(node) {
return
}
switch (*node).(type) {
case *ast.CallNode:
callNode := (*node).(*ast.CallNode)
@@ -352,6 +408,108 @@ func (p *idPatcher) Visit(node *ast.Node) {
}
}
// tryPatchGeoSiteORChain collapses an OR chain of geosite(host, "x") calls
// over one common host expression into a single geosite_set call, so the
// whole set is evaluated (and result-cached) in one invocation. It reports
// whether the node was rewritten.
func (p *idPatcher) tryPatchGeoSiteORChain(node *ast.Node) bool {
	if p == nil || p.GeoMatcher == nil {
		return false
	}
	terms, ok := collectGeoSiteORChain(*node)
	if !ok || len(terms) < 2 {
		return false
	}
	host := strings.TrimSpace(terms[0].hostExpr)
	if host == "" {
		return false
	}
	conds := make([]string, 0, len(terms))
	for _, term := range terms {
		// All terms must test the same host expression; otherwise the
		// chain cannot be merged into one set lookup.
		if strings.TrimSpace(term.hostExpr) != host {
			return false
		}
		conds = append(conds, term.condition)
	}
	uniq := normalizeUniqueLowerStrings(conds)
	if len(uniq) < 2 {
		return false
	}
	// Re-parse the host expression so the rewritten call carries a proper
	// AST node rather than a string.
	parsed, err := parser.Parse(host)
	if err != nil || parsed == nil || parsed.Node == nil {
		return false
	}
	replacement := &ast.CallNode{
		Callee: &ast.IdentifierNode{Value: "geosite_set"},
		Arguments: []ast.Node{
			parsed.Node,
			&ast.ConstantNode{Value: &geo.SiteConditionSet{Conditions: uniq}},
		},
	}
	ast.Patch(node, replacement)
	return true
}
// geositeTerm is one geosite(hostExpr, condition) term extracted from an
// OR chain during AST patching.
type geositeTerm struct {
	hostExpr  string // source text of the host argument expression
	condition string // geosite condition name (string literal value)
}

// collectGeoSiteORChain flattens node into the list of geosite terms it
// represents. It returns ok=false if the subtree contains anything other
// than geosite/geosite_set calls joined exclusively by "or"/"||".
func collectGeoSiteORChain(node ast.Node) ([]geositeTerm, bool) {
	switch n := node.(type) {
	case *ast.BinaryNode:
		// Only OR may join terms; any other operator aborts the collapse.
		if n.Operator != "or" && n.Operator != "||" {
			return nil, false
		}
		left, ok := collectGeoSiteORChain(n.Left)
		if !ok {
			return nil, false
		}
		right, ok := collectGeoSiteORChain(n.Right)
		if !ok {
			return nil, false
		}
		out := make([]geositeTerm, 0, len(left)+len(right))
		out = append(out, left...)
		out = append(out, right...)
		return out, true
	case *ast.CallNode:
		idNode, ok := n.Callee.(*ast.IdentifierNode)
		if !ok || len(n.Arguments) < 2 {
			return nil, false
		}
		name := strings.ToLower(idNode.Value)
		if name == "geosite" {
			// The condition must be a literal string; dynamic conditions
			// cannot be collapsed at compile time.
			condNode, ok := n.Arguments[1].(*ast.StringNode)
			if !ok {
				return nil, false
			}
			return []geositeTerm{{
				hostExpr:  n.Arguments[0].String(),
				condition: condNode.Value,
			}}, true
		}
		if name != "geosite_set" {
			return nil, false
		}
		// An already-patched geosite_set call is re-expanded into its
		// terms so nested OR chains can be merged into one set.
		setNode, ok := n.Arguments[1].(*ast.ConstantNode)
		if !ok || setNode.Value == nil {
			return nil, false
		}
		set, ok := setNode.Value.(*geo.SiteConditionSet)
		if !ok || set == nil {
			return nil, false
		}
		if len(set.Conditions) == 0 {
			return nil, false
		}
		out := make([]geositeTerm, 0, len(set.Conditions))
		hostExpr := n.Arguments[0].String()
		for _, condition := range set.Conditions {
			out = append(out, geositeTerm{hostExpr: hostExpr, condition: condition})
		}
		return out, true
	default:
		return nil, false
	}
}
type Function struct {
InitFunc func() error
PatchFunc func(args *[]ast.Node) error
@@ -359,7 +517,7 @@ type Function struct {
Types []reflect.Type
}
func buildFunctionMap(config *BuiltinConfig) (map[string]*Function, *geo.GeoMatcher) {
func buildFunctionMap(config *BuiltinConfig, stats *statsCounters) (map[string]*Function, *geo.GeoMatcher) {
geoMatcher := geo.NewGeoMatcher(config.GeoSiteFilename, config.GeoIpFilename)
return map[string]*Function{
"geoip": {
@@ -378,6 +536,16 @@ func buildFunctionMap(config *BuiltinConfig) (map[string]*Function, *geo.GeoMatc
},
Types: []reflect.Type{reflect.TypeOf(geoMatcher.MatchGeoSite)},
},
"geosite_set": {
InitFunc: geoMatcher.LoadGeoSite,
PatchFunc: nil,
Func: func(params ...any) (any, error) {
return geoMatcher.MatchGeoSiteSet(params[0].(string), params[1].(*geo.SiteConditionSet)), nil
},
Types: []reflect.Type{
reflect.TypeOf((func(string, *geo.SiteConditionSet) bool)(nil)),
},
},
"cidr": {
InitFunc: nil,
PatchFunc: func(args *[]ast.Node) error {
@@ -425,9 +593,20 @@ func buildFunctionMap(config *BuiltinConfig) (map[string]*Function, *geo.GeoMatc
return nil
},
Func: func(params ...any) (any, error) {
start := time.Now()
if stats != nil {
stats.LookupCalls.Add(1)
defer func() {
stats.LookupLatencyNanos.Add(uint64(time.Since(start).Nanoseconds()))
}()
}
ctx, cancel := context.WithTimeout(context.Background(), 4*time.Second)
defer cancel()
return params[1].(*net.Resolver).LookupHost(ctx, params[0].(string))
out, err := params[1].(*net.Resolver).LookupHost(ctx, params[0].(string))
if err != nil && stats != nil {
stats.LookupErrors.Add(1)
}
return out, err
},
Types: []reflect.Type{
reflect.TypeOf((func(string, *net.Resolver) []string)(nil)),
+25
View File
@@ -2,9 +2,14 @@ package ruleset
import (
"reflect"
"strings"
"testing"
"git.difuse.io/Difuse/Mellaris/analyzer"
"git.difuse.io/Difuse/Mellaris/ruleset/builtins/geo"
"github.com/expr-lang/expr/ast"
"github.com/expr-lang/expr/parser"
)
func TestExtractGeoSiteConditions(t *testing.T) {
@@ -63,3 +68,23 @@ func TestMatchGeoSiteConditions(t *testing.T) {
t.Fatalf("matchGeoSiteConditions() = %v, want %v", got, want)
}
}
// TestIDPatcher_PatchesGeoSiteORChainToGeoSiteSet verifies that the AST
// patcher rewrites an OR chain of geosite() calls over one host expression
// into a single geosite_set() call, with no residual OR operators.
func TestIDPatcher_PatchesGeoSiteORChainToGeoSiteSet(t *testing.T) {
	tree, err := parser.Parse(`geosite(tls.req.sni, "google") || geosite(tls.req.sni, "youtube") || geosite(tls.req.sni, "openai")`)
	if err != nil {
		t.Fatalf("parse expression: %v", err)
	}
	root := tree.Node
	patcher := &idPatcher{GeoMatcher: geo.NewGeoMatcher("", "")}
	ast.Walk(&root, patcher)
	if patcher.Err != nil {
		t.Fatalf("patch error: %v", patcher.Err)
	}
	got := root.String()
	if !strings.Contains(got, "geosite_set(") {
		t.Fatalf("expected geosite_set rewrite, got %q", got)
	}
	// Both spellings of OR must be gone once the chain is collapsed.
	if strings.Contains(got, "||") || strings.Contains(got, " or ") {
		t.Fatalf("expected OR chain to be collapsed, got %q", got)
	}
}
+23
View File
@@ -4,6 +4,7 @@ import (
"context"
"net"
"strconv"
"sync/atomic"
"git.difuse.io/Difuse/Mellaris/analyzer"
"git.difuse.io/Difuse/Mellaris/modifier"
@@ -95,6 +96,28 @@ type Ruleset interface {
Match(StreamInfo) MatchResult
}
// Stats is a point-in-time snapshot of ruleset runtime counters, as
// returned by StatsProvider.Stats.
type Stats struct {
	MatchCalls         uint64 // total Match invocations
	MatchErrors        uint64 // Match invocations whose rule evaluation errored
	MatchLatencyNanos  uint64 // cumulative Match latency, in nanoseconds
	LookupCalls        uint64 // total host-lookup invocations
	LookupErrors       uint64 // host lookups that returned an error
	LookupLatencyNanos uint64 // cumulative lookup latency, in nanoseconds
}

// statsCounters holds the live atomic counters behind Stats; it is shared
// between the compiled ruleset and its builtin functions.
type statsCounters struct {
	MatchCalls         atomic.Uint64
	MatchErrors        atomic.Uint64
	MatchLatencyNanos  atomic.Uint64
	LookupCalls        atomic.Uint64
	LookupErrors       atomic.Uint64
	LookupLatencyNanos atomic.Uint64
}

// StatsProvider is implemented by rulesets that expose runtime statistics.
type StatsProvider interface {
	Stats() Stats
}
// Logger is the logging interface for the ruleset.
type Logger interface {
Log(info StreamInfo, name string)