pull/2393/head
withchao 1 year ago
parent 67d17c82d5
commit 9a4f5f78cb

@@ -175,3 +175,5 @@ require (
 	golang.org/x/crypto v0.21.0 // indirect
 	gopkg.in/ini.v1 v1.67.0 // indirect
 )
+
+replace github.com/openimsdk/protocol => /Users/chao/Desktop/project/protocol

@@ -262,8 +262,6 @@ github.com/onsi/gomega v1.25.0 h1:Vw7br2PCDYijJHSfBOWhov+8cAnUf8MfMaIOV323l6Y=
 github.com/onsi/gomega v1.25.0/go.mod h1:r+zV744Re+DiYCIPRlYOTxn0YkOLcAnW8k1xXdMPGhM=
 github.com/openimsdk/gomake v0.0.14-alpha.5 h1:VY9c5x515lTfmdhhPjMvR3BBRrRquAUCFsz7t7vbv7Y=
 github.com/openimsdk/gomake v0.0.14-alpha.5/go.mod h1:PndCozNc2IsQIciyn9mvEblYWZwJmAI+06z94EY+csI=
-github.com/openimsdk/protocol v0.0.69-alpha.24 h1:TYcNJeWOTuE40UQ54eNPdDdy0KTOh9rAOgax8lCyhDc=
-github.com/openimsdk/protocol v0.0.69-alpha.24/go.mod h1:OZQA9FR55lseYoN2Ql1XAHYKHJGu7OMNkUbuekrKCM8=
 github.com/openimsdk/tools v0.0.49-alpha.45 h1:XIzCoef4myybOiIlGuRY9FTtGBisZFC4Uy4PhG0ZWQ0=
 github.com/openimsdk/tools v0.0.49-alpha.45/go.mod h1:HtSRjPTL8PsuZ+PhR5noqzrYBF0sdwW3/O/sWVucWg8=
 github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=

@@ -72,8 +72,8 @@ type Client struct {
 	closed     atomic.Bool
 	closedErr  error
 	token      string
-	//subLock    sync.Mutex
-	//subUserIDs map[string]struct{}
+	subLock    sync.Mutex
+	subUserIDs map[string]struct{}
 }

 // ResetClient updates the client's state with new connection and context information.
@@ -204,6 +204,8 @@ func (c *Client) handleMessage(message []byte) error {
 		resp, messageErr = c.longConnServer.UserLogout(ctx, binaryReq)
 	case WsSetBackgroundStatus:
 		resp, messageErr = c.setAppBackgroundStatus(ctx, binaryReq)
+	case WsSubUserOnlineStatus:
+		resp, messageErr = c.longConnServer.SubUserOnlineStatus(ctx, c, binaryReq)
 	default:
 		return fmt.Errorf(
 			"ReqIdentifier failed,sendID:%s,msgIncr:%s,reqIdentifier:%d",

@@ -16,10 +16,10 @@ package msggateway
 import (
 	"crypto/rand"
-	"github.com/stretchr/testify/assert"
 	"sync"
 	"testing"
+	"unsafe"
+
+	"github.com/stretchr/testify/assert"
 )

 func mockRandom() []byte {
@@ -132,3 +132,8 @@ func BenchmarkDecompressWithSyncPool(b *testing.B) {
 		assert.Equal(b, nil, err)
 	}
 }
+
+func TestName(t *testing.T) {
+	t.Log(unsafe.Sizeof(Client{}))
+}

@@ -43,6 +43,7 @@ const (
 	WSKickOnlineMsg       = 2002
 	WsLogoutMsg           = 2003
 	WsSetBackgroundStatus = 2004
+	WsSubUserOnlineStatus = 2005
 	WSDataError           = 3001
 )

@@ -19,6 +19,7 @@ import (
 	"github.com/openimsdk/open-im-server/v3/pkg/authverify"
 	"github.com/openimsdk/open-im-server/v3/pkg/common/servererrs"
 	"github.com/openimsdk/open-im-server/v3/pkg/common/startrpc"
+	"github.com/openimsdk/open-im-server/v3/pkg/rpcclient"
 	"github.com/openimsdk/protocol/constant"
 	"github.com/openimsdk/protocol/msggateway"
 	"github.com/openimsdk/tools/discovery"
@@ -31,6 +32,10 @@ import (
 func (s *Server) InitServer(ctx context.Context, config *Config, disCov discovery.SvcDiscoveryRegistry, server *grpc.Server) error {
 	s.LongConnServer.SetDiscoveryRegistry(disCov, config)
 	msggateway.RegisterMsgGatewayServer(server, s)
+	s.userRcp = rpcclient.NewUserRpcClient(disCov, config.Share.RpcRegisterName.User, config.Share.IMAdminUserID)
+	if s.ready != nil {
+		return s.ready(s)
+	}
 	return nil
 }
@@ -50,18 +55,21 @@ type Server struct {
 	LongConnServer LongConnServer
 	config         *Config
 	pushTerminal   map[int]struct{}
+	ready          func(srv *Server) error
+	userRcp        rpcclient.UserRpcClient
 }

 func (s *Server) SetLongConnServer(LongConnServer LongConnServer) {
 	s.LongConnServer = LongConnServer
 }

-func NewServer(rpcPort int, longConnServer LongConnServer, conf *Config) *Server {
+func NewServer(rpcPort int, longConnServer LongConnServer, conf *Config, ready func(srv *Server) error) *Server {
 	s := &Server{
 		rpcPort:        rpcPort,
 		LongConnServer: longConnServer,
 		pushTerminal:   make(map[int]struct{}),
 		config:         conf,
+		ready:          ready,
 	}
 	s.pushTerminal[constant.IOSPlatformID] = struct{}{}
 	s.pushTerminal[constant.AndroidPlatformID] = struct{}{}

@@ -17,6 +17,7 @@ package msggateway
 import (
 	"context"
 	"github.com/openimsdk/open-im-server/v3/pkg/common/config"
+	"github.com/openimsdk/open-im-server/v3/pkg/rpccache"
 	"github.com/openimsdk/tools/db/redisutil"
 	"github.com/openimsdk/tools/utils/datautil"
 	"time"
@@ -56,11 +57,13 @@ func Start(ctx context.Context, index int, conf *Config) error {
 		WithMessageMaxMsgLength(conf.MsgGateway.LongConnSvr.WebsocketMaxMsgLen),
 	)

-	go longServer.ChangeOnlineStatus(4)
-	go longServer.SubscriberUserOnlineStatusChanges(rdb)
-	hubServer := NewServer(rpcPort, longServer, conf)
+	hubServer := NewServer(rpcPort, longServer, conf, func(srv *Server) error {
+		longServer.online = rpccache.NewOnlineCache(srv.userRcp, nil, rdb, longServer.subscriberUserOnlineStatusChanges)
+		return nil
+	})
+	go longServer.ChangeOnlineStatus(4)
 	netDone := make(chan error)
 	go func() {
 		err = hubServer.Start(ctx, index, conf)

@@ -18,6 +18,7 @@ import (
 	"context"
 	"fmt"
 	"github.com/openimsdk/open-im-server/v3/pkg/common/webhook"
+	"github.com/openimsdk/open-im-server/v3/pkg/rpccache"
 	pbAuth "github.com/openimsdk/protocol/auth"
 	"github.com/openimsdk/tools/mcontext"
 	"net/http"
@@ -48,20 +49,22 @@ type LongConnServer interface {
 	KickUserConn(client *Client) error
 	UnRegister(c *Client)
 	SetKickHandlerInfo(i *kickHandler)
+	SubUserOnlineStatus(ctx context.Context, client *Client, data *Req) ([]byte, error)
 	Compressor
 	Encoder
 	MessageHandler
 }

 type WsServer struct {
 	msgGatewayConfig  *Config
 	port              int
 	wsMaxConnNum      int64
 	registerChan      chan *Client
 	unregisterChan    chan *Client
 	kickHandlerChan   chan *kickHandler
 	clients           UserMap
-	//subscription      *Subscription
+	online            *rpccache.OnlineCache
+	subscription      *Subscription
 	clientPool        sync.Pool
 	onlineUserNum     atomic.Int64
 	onlineUserConnNum atomic.Int64
@@ -125,6 +128,8 @@ func NewWsServer(msgGatewayConfig *Config, opts ...Option) *WsServer {
 	for _, o := range opts {
 		o(&config)
 	}
-	//userRpcClient := rpcclient.NewUserRpcClient(client, config.Share.RpcRegisterName.User, config.Share.IMAdminUserID)
 	v := validator.New()
 	return &WsServer{
 		msgGatewayConfig: msgGatewayConfig,
@@ -142,10 +147,10 @@ func NewWsServer(msgGatewayConfig *Config, opts ...Option) *WsServer {
 		kickHandlerChan:  make(chan *kickHandler, 1000),
 		validate:         v,
 		clients:          newUserMap(),
-		//subscription:     newSubscription(),
+		subscription:     newSubscription(),
 		Compressor:       NewGzipCompressor(),
 		Encoder:          NewGobEncoder(),
 		webhookClient:    webhook.NewWebhookClient(msgGatewayConfig.WebhooksConfig.URL),
 	}
 }
@@ -353,6 +358,9 @@ func (ws *WsServer) unregisterClient(client *Client) {
 		prommetrics.OnlineUserGauge.Dec()
 	}
 	ws.onlineUserConnNum.Add(-1)
+	client.subLock.Lock()
+	clear(client.subUserIDs)
+	client.subLock.Unlock()
 	//ws.SetUserOnlineStatus(client.ctx, client, constant.Offline)
 	log.ZInfo(client.ctx, "user offline", "close reason", client.closedErr, "online user Num",
 		ws.onlineUserNum.Load(), "online user conn Num",

@@ -2,174 +2,180 @@ package msggateway

 import (
 	"context"
-	"github.com/openimsdk/open-im-server/v3/pkg/common/storage/cache/cachekey"
-	"github.com/openimsdk/open-im-server/v3/pkg/util/useronline"
+	"encoding/json"
+	"github.com/openimsdk/protocol/constant"
+	"github.com/openimsdk/protocol/sdkws"
 	"github.com/openimsdk/tools/log"
-	"github.com/openimsdk/tools/mcontext"
-	"github.com/redis/go-redis/v9"
-	"math/rand"
-	"strconv"
+	"github.com/openimsdk/tools/utils/datautil"
+	"github.com/openimsdk/tools/utils/idutil"
+	"google.golang.org/protobuf/proto"
+	"sync"
 	"time"
 )

-func (ws *WsServer) SubscriberUserOnlineStatusChanges(rdb redis.UniversalClient) {
-	ctx := mcontext.SetOperationID(context.Background(), cachekey.OnlineChannel+strconv.FormatUint(rand.Uint64(), 10))
-	for message := range rdb.Subscribe(ctx, cachekey.OnlineChannel).Channel() {
-		userID, platformIDs, err := useronline.ParseUserOnlineStatus(message.Payload)
-		if err != nil {
-			log.ZError(ctx, "OnlineCache redis subscribe parseUserOnlineStatus", err, "payload", message.Payload, "channel", message.Channel)
-			continue
-		}
-		if ws.clients.RecvSubChange(userID, platformIDs) {
-			log.ZDebug(ctx, "gateway receive subscription message and go back online", "userID", userID, "platformIDs", platformIDs)
-		} else {
-			log.ZDebug(ctx, "gateway ignore user online status changes", "userID", userID, "platformIDs", platformIDs)
-		}
-	}
-}
[... also removed here: the previously commented-out draft of this Subscription implementation (~150 lines), which is re-enabled as the live code added below ...]
+func (ws *WsServer) subscriberUserOnlineStatusChanges(ctx context.Context, userID string, platformIDs []int32) {
+	if ws.clients.RecvSubChange(userID, platformIDs) {
+		log.ZDebug(ctx, "gateway receive subscription message and go back online", "userID", userID, "platformIDs", platformIDs)
+	} else {
+		log.ZDebug(ctx, "gateway ignore user online status changes", "userID", userID, "platformIDs", platformIDs)
+	}
+	ws.pushUserIDOnlineStatus(ctx, userID, platformIDs)
+}
+
+func (ws *WsServer) SubUserOnlineStatus(ctx context.Context, client *Client, data *Req) ([]byte, error) {
+	var sub sdkws.SubUserOnlineStatus
+	if err := proto.Unmarshal(data.Data, &sub); err != nil {
+		return nil, err
+	}
+	ws.subscription.Sub(client, sub.SubscribeUserID, sub.UnsubscribeUserID)
+	var resp sdkws.SubUserOnlineStatusTips
+	if len(sub.SubscribeUserID) > 0 {
+		resp.Subscribers = make([]*sdkws.SubUserOnlineStatusElem, 0, len(sub.SubscribeUserID))
+		for _, userID := range sub.SubscribeUserID {
+			platformIDs, err := ws.online.GetUserOnlinePlatform(ctx, userID)
+			if err != nil {
+				return nil, err
+			}
+			resp.Subscribers = append(resp.Subscribers, &sdkws.SubUserOnlineStatusElem{
+				UserID:            userID,
+				OnlinePlatformIDs: platformIDs,
+			})
+		}
+	}
+	return proto.Marshal(&resp)
+}
+
+type subClient struct {
+	clients map[string]*Client
+}
+
+func newSubscription() *Subscription {
+	return &Subscription{
+		userIDs: make(map[string]*subClient),
+	}
+}
+
+type Subscription struct {
+	lock    sync.RWMutex
+	userIDs map[string]*subClient
+}
+
+func (s *Subscription) GetClient(userID string) []*Client {
+	s.lock.RLock()
+	defer s.lock.RUnlock()
+	cs, ok := s.userIDs[userID]
+	if !ok {
+		return nil
+	}
+	clients := make([]*Client, 0, len(cs.clients))
+	for _, client := range cs.clients {
+		clients = append(clients, client)
+	}
+	return clients
+}
+
+func (s *Subscription) DelClient(client *Client) {
+	client.subLock.Lock()
+	userIDs := datautil.Keys(client.subUserIDs)
+	for _, userID := range userIDs {
+		delete(client.subUserIDs, userID)
+	}
+	client.subLock.Unlock()
+	if len(userIDs) == 0 {
+		return
+	}
+	addr := client.ctx.GetRemoteAddr()
+	s.lock.Lock()
+	defer s.lock.Unlock()
+	for _, userID := range userIDs {
+		sub, ok := s.userIDs[userID]
+		if !ok {
+			continue
+		}
+		delete(sub.clients, addr)
+		if len(sub.clients) == 0 {
+			delete(s.userIDs, userID)
+		}
+	}
+}
+
+func (s *Subscription) Sub(client *Client, addUserIDs, delUserIDs []string) {
+	if len(addUserIDs)+len(delUserIDs) == 0 {
+		return
+	}
+	var (
+		del = make(map[string]struct{})
+		add = make(map[string]struct{})
+	)
+	client.subLock.Lock()
+	for _, userID := range delUserIDs {
+		if _, ok := client.subUserIDs[userID]; !ok {
+			continue
+		}
+		del[userID] = struct{}{}
+		delete(client.subUserIDs, userID)
+	}
+	for _, userID := range addUserIDs {
+		delete(del, userID)
+		if _, ok := client.subUserIDs[userID]; ok {
+			continue
+		}
+		client.subUserIDs[userID] = struct{}{}
+		add[userID] = struct{}{}
+	}
+	client.subLock.Unlock()
+	if len(del)+len(add) == 0 {
+		return
+	}
+	addr := client.ctx.GetRemoteAddr()
+	s.lock.Lock()
+	defer s.lock.Unlock()
+	for userID := range del {
+		sub, ok := s.userIDs[userID]
+		if !ok {
+			continue
+		}
+		delete(sub.clients, addr)
+		if len(sub.clients) == 0 {
+			delete(s.userIDs, userID)
+		}
+	}
+	for userID := range add {
+		sub, ok := s.userIDs[userID]
+		if !ok {
+			sub = &subClient{clients: make(map[string]*Client)}
+			s.userIDs[userID] = sub
+		}
+		sub.clients[addr] = client
+	}
+}
+
+func (ws *WsServer) pushUserIDOnlineStatus(ctx context.Context, userID string, platformIDs []int32) {
+	clients := ws.subscription.GetClient(userID)
+	if len(clients) == 0 {
+		return
+	}
+	msgContent, err := json.Marshal(platformIDs)
+	if err != nil {
+		log.ZError(ctx, "pushUserIDOnlineStatus json.Marshal", err)
+		return
+	}
+	now := time.Now().UnixMilli()
+	msgID := idutil.GetMsgIDByMD5(userID)
+	msg := &sdkws.MsgData{
+		SendID:           userID,
+		ClientMsgID:      msgID,
+		ServerMsgID:      msgID,
+		SenderPlatformID: constant.AdminPlatformID,
+		SessionType:      constant.NotificationChatType,
+		ContentType:      constant.UserSubscribeOnlineStatusNotification,
+		Content:          msgContent,
+		SendTime:         now,
+		CreateTime:       now,
+	}
+	for _, client := range clients {
+		msg.RecvID = client.UserID
+		if err := client.PushMessage(ctx, msg); err != nil {
+			log.ZError(ctx, "UserSubscribeOnlineStatusNotification push failed", err, "userID", client.UserID, "platformID", client.PlatformID, "changeUserID", userID, "content", msgContent)
+		}
+	}
+}
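For context, a minimal sketch of how a subscriber might decode the notification produced by pushUserIDOnlineStatus above. The handler is hypothetical and not part of this change; the ContentType constant, the JSON-encoded platform-ID list in Content, and the use of SendID for the changed user are taken from the code above.

    package example

    import (
        "encoding/json"

        "github.com/openimsdk/protocol/constant"
        "github.com/openimsdk/protocol/sdkws"
    )

    // handleOnlineStatusNotification extracts the changed user's online platform IDs
    // from a UserSubscribeOnlineStatusNotification message. An empty list means the
    // user identified by msg.SendID has no online platforms left.
    func handleOnlineStatusNotification(msg *sdkws.MsgData) ([]int32, error) {
        if msg.ContentType != int32(constant.UserSubscribeOnlineStatusNotification) {
            return nil, nil // not an online-status notification
        }
        var platformIDs []int32
        if err := json.Unmarshal(msg.Content, &platformIDs); err != nil {
            return nil, err
        }
        return platformIDs, nil
    }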

@@ -75,7 +75,7 @@ func NewConsumerHandler(config *Config, offlinePusher offlinepush.OfflinePusher,
 	consumerHandler.conversationLocalCache = rpccache.NewConversationLocalCache(consumerHandler.conversationRpcClient, &config.LocalCacheConfig, rdb)
 	consumerHandler.webhookClient = webhook.NewWebhookClient(config.WebhooksConfig.URL)
 	consumerHandler.config = config
-	consumerHandler.onlineCache = rpccache.NewOnlineCache(userRpcClient, consumerHandler.groupLocalCache, rdb)
+	consumerHandler.onlineCache = rpccache.NewOnlineCache(userRpcClient, consumerHandler.groupLocalCache, rdb, nil)
 	return &consumerHandler, nil
 }

@@ -15,7 +15,7 @@ import (
 	"time"
 )

-func NewOnlineCache(user rpcclient.UserRpcClient, group *GroupLocalCache, rdb redis.UniversalClient) *OnlineCache {
+func NewOnlineCache(user rpcclient.UserRpcClient, group *GroupLocalCache, rdb redis.UniversalClient, fn func(ctx context.Context, userID string, platformIDs []int32)) *OnlineCache {
 	x := &OnlineCache{
 		user:  user,
 		group: group,
@@ -33,6 +33,9 @@ func NewOnlineCache(user rpcclient.UserRpcClient, group *GroupLocalCache, rdb re
 			}
 			storageCache := x.setUserOnline(userID, platformIDs)
 			log.ZDebug(ctx, "OnlineCache setUserOnline", "userID", userID, "platformIDs", platformIDs, "payload", message.Payload, "storageCache", storageCache)
+			if fn != nil {
+				fn(ctx, userID, platformIDs)
+			}
 		}
 	}()
 	return x
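A brief sketch of how the new fourth argument can be used by a caller. This wiring is hypothetical; in this change the gateway passes longServer.subscriberUserOnlineStatusChanges here (see the Start hunk above) while the push consumer passes nil.

    package example

    import (
        "context"

        "github.com/openimsdk/open-im-server/v3/pkg/rpccache"
        "github.com/openimsdk/open-im-server/v3/pkg/rpcclient"
        "github.com/openimsdk/tools/log"
        "github.com/redis/go-redis/v9"
    )

    // newOnlineCacheWithHook builds an OnlineCache whose callback fires once per
    // online-status change the cache consumes from the Redis subscription; callers
    // that do not need per-change notifications pass nil instead.
    func newOnlineCacheWithHook(userClient rpcclient.UserRpcClient, rdb redis.UniversalClient) *rpccache.OnlineCache {
        return rpccache.NewOnlineCache(userClient, nil, rdb, func(ctx context.Context, userID string, platformIDs []int32) {
            log.ZDebug(ctx, "user online status changed", "userID", userID, "platformIDs", platformIDs)
        })
    }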
