merge: main

pull/2559/head
icey-yu 1 year ago
commit 1f6058d547

@@ -33,8 +33,8 @@ jobs:
 remote-repository-name: cla
 create-file-commit-message: 'Creating file for storing CLA Signatures'
 # signed-commit-message: '$contributorName has signed the CLA in $owner/$repo#$pullRequestNo'
-custom-notsigned-prcomment: '💕 Thank you for your contribution and please kindly read and sign our [CLA Docs](https://github.com/OpenIM-Robot/cla/blob/main/README.md)'
+custom-notsigned-prcomment: '💕 Thank you for your contribution and please kindly read and sign our CLA. [CLA Docs](https://github.com/OpenIM-Robot/cla/blob/main/README.md)'
-custom-pr-sign-comment: 'The signature to be committed in order to sign the CLA'
+custom-pr-sign-comment: 'I have read the CLA Document and I hereby sign the CLA'
-custom-allsigned-prcomment: '🤖 All Contributors have signed the [CLA](https://github.com/OpenIM-Robot/cla/blob/main/README.md).<br> The signed information is recorded [🤖here](https://github.com/openim-sigs/cla/tree/main/signatures/cla.json)'
+custom-allsigned-prcomment: '🤖 All Contributors have signed the [CLA](https://github.com/OpenIM-Robot/cla/blob/main/README.md).<br> The signed information is recorded [**here**](https://github.com/OpenIM-Robot/cla/blob/main/signatures/cla.json)'
 #lock-pullrequest-aftermerge: false - if you don't want this bot to automatically lock the pull request after merging (default - true)
 #use-dco-flag: true - If you are using DCO instead of CLA

@@ -7,6 +7,9 @@ on:
 pull_request:
 branches:
 - main
+paths-ignore:
+- '**/*.md'
 workflow_dispatch:
 jobs:

@@ -29,7 +29,7 @@ jobs:
 uses: peter-evans/create-or-update-comment@v4
 with:
 issue-number: ${{ github.event.issue.number }}
-token: ${{ secrets.BOT_GITHUB_TOKEN }}
+token: ${{ secrets.BOT_TOKEN }}
 body: |
 This issue is available for anyone to work on. **Make sure to reference this issue in your pull request.** :sparkles: Thank you for your contribution! :sparkles:
 [Join slack 🤖](https://join.slack.com/t/openimsdk/shared_invite/zt-22720d66b-o_FvKxMTGXtcnnnHiMqe9Q) to connect and communicate with our developers.

@@ -0,0 +1,19 @@
+name: 'issue-translator'
+on:
+issue_comment:
+types: [created]
+issues:
+types: [opened]
+jobs:
+build:
+runs-on: ubuntu-latest
+steps:
+- uses: usthe/issues-translate-action@v2.7
+with:
+BOT_GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }}
+IS_MODIFY_TITLE: true
+# not require, default false, . Decide whether to modify the issue title
+# if true, the robot account @Issues-translate-bot must have modification permissions, invite @Issues-translate-bot to your project or use your custom bot.
+CUSTOM_BOT_NOTE: Bot detected the issue body's language is not English, translate it automatically. 👯👭🏻🧑‍🤝‍🧑👫🧑🏿‍🤝‍🧑🏻👩🏾‍🤝‍👨🏿👬🏿
+# not require. Customize the translation robot prefix message.

@@ -1,4 +1,4 @@
-enable: "etcd"
+enable: etcd
 etcd:
 rootDirectory: openim
 address: [ localhost:12379 ]

@@ -3,17 +3,17 @@ username: ''
 # Password for authentication
 password: ''
 # Producer acknowledgment settings
-producerAck: ""
+producerAck:
 # Compression type to use (e.g., none, gzip, snappy)
-compressType: "none"
+compressType: none
 # List of Kafka broker addresses
 address: [ localhost:19094 ]
 # Kafka topic for Redis integration
-toRedisTopic: "toRedis"
+toRedisTopic: toRedis
 # Kafka topic for MongoDB integration
-toMongoTopic: "toMongo"
+toMongoTopic: toMongo
 # Kafka topic for push notifications
-toPushTopic: "toPush"
+toPushTopic: toPush
 # Consumer group ID for Redis topic
 toRedisGroupID: redis
 # Consumer group ID for MongoDB topic
@@ -25,12 +25,12 @@ tls:
 # Enable or disable TLS
 enableTLS: false
 # CA certificate file path
-caCrt: ""
+caCrt:
 # Client certificate file path
-clientCrt: ""
+clientCrt:
 # Client key file path
-clientKey: ""
+clientKey:
 # Client key password
-clientKeyPwd: ""
+clientKeyPwd:
 # Whether to skip TLS verification (not recommended for production)
 insecureSkipVerify: false

@@ -1,15 +1,15 @@
 # Name of the bucket in MinIO
-bucket: "openim"
+bucket: openim
 # Access key ID for MinIO authentication
-accessKeyID: "root"
+accessKeyID: root
 # Secret access key for MinIO authentication
-secretAccessKey: "openIM123"
+secretAccessKey: openIM123
 # Session token for MinIO authentication (optional)
-sessionToken: ''
+sessionToken:
 # Internal address of the MinIO server
-internalAddress: "localhost:10005"
+internalAddress: localhost:10005
 # External address of the MinIO server, accessible from outside. Supports both HTTP and HTTPS using a domain name
-externalAddress: "http://external_ip:10005"
+externalAddress: http://external_ip:10005
 # Flag to enable or disable public read access to the bucket
 publicRead: false

@@ -1,5 +1,5 @@
 # URI for database connection, leave empty if using address and credential settings directly
-uri: ''
+uri:
 # List of MongoDB server addresses
 address: [ localhost:37017 ]
 # Name of the database

@@ -28,11 +28,11 @@ groupCreated:
 # Enables or disables offline push notifications.
 enable: false
 # Title for the notification when a group is created.
-title: "create group title"
+title: create group title
 # Description for the notification.
-desc: "create group desc"
+desc: create group desc
 # Additional information for the notification.
-ext: "create group ext"
+ext: create group ext
 groupInfoSet:
 isSendMsg: false
@@ -40,9 +40,9 @@ groupInfoSet:
 unreadCount: false
 offlinePush:
 enable: false
-title: "groupInfoSet title"
+title: groupInfoSet title
-desc: "groupInfoSet desc"
+desc: groupInfoSet desc
-ext: "groupInfoSet ext"
+ext: groupInfoSet ext
 joinGroupApplication:
@@ -51,9 +51,9 @@ joinGroupApplication:
 unreadCount: false
 offlinePush:
 enable: false
-title: "joinGroupApplication title"
+title: joinGroupApplication title
-desc: "joinGroupApplication desc"
+desc: joinGroupApplication desc
-ext: "joinGroupApplication ext"
+ext: joinGroupApplication ext
 memberQuit:
 isSendMsg: true
@@ -61,9 +61,9 @@ memberQuit:
 unreadCount: false
 offlinePush:
 enable: false
-title: "memberQuit title"
+title: memberQuit title
-desc: "memberQuit desc"
+desc: memberQuit desc
-ext: "memberQuit ext"
+ext: memberQuit ext
 groupApplicationAccepted:
 isSendMsg: false
@@ -71,9 +71,9 @@ groupApplicationAccepted:
 unreadCount: false
 offlinePush:
 enable: false
-title: "groupApplicationAccepted title"
+title: groupApplicationAccepted title
-desc: "groupApplicationAccepted desc"
+desc: groupApplicationAccepted desc
-ext: "groupApplicationAccepted ext"
+ext: groupApplicationAccepted ext
 groupApplicationRejected:
 isSendMsg: false
@@ -81,9 +81,9 @@ groupApplicationRejected:
 unreadCount: false
 offlinePush:
 enable: false
-title: "groupApplicationRejected title"
+title: groupApplicationRejected title
-desc: "groupApplicationRejected desc"
+desc: groupApplicationRejected desc
-ext: "groupApplicationRejected ext"
+ext: groupApplicationRejected ext
 groupOwnerTransferred:
@@ -92,9 +92,9 @@ groupOwnerTransferred:
 unreadCount: false
 offlinePush:
 enable: false
-title: "groupOwnerTransferred title"
+title: groupOwnerTransferred title
-desc: "groupOwnerTransferred desc"
+desc: groupOwnerTransferred desc
-ext: "groupOwnerTransferred ext"
+ext: groupOwnerTransferred ext
 memberKicked:
 isSendMsg: true
@@ -102,9 +102,9 @@ memberKicked:
 unreadCount: false
 offlinePush:
 enable: false
-title: "memberKicked title"
+title: memberKicked title
-desc: "memberKicked desc"
+desc: memberKicked desc
-ext: "memberKicked ext"
+ext: memberKicked ext
 memberInvited:
 isSendMsg: true
@@ -112,9 +112,9 @@ memberInvited:
 unreadCount: false
 offlinePush:
 enable: false
-title: "memberInvited title"
+title: memberInvited title
-desc: "memberInvited desc"
+desc: memberInvited desc
-ext: "memberInvited ext"
+ext: memberInvited ext
 memberEnter:
 isSendMsg: true
@@ -122,9 +122,9 @@ memberEnter:
 unreadCount: false
 offlinePush:
 enable: false
-title: "memberEnter title"
+title: memberEnter title
-desc: "memberEnter desc"
+desc: memberEnter desc
-ext: "memberEnter ext"
+ext: memberEnter ext
 groupDismissed:
 isSendMsg: true
@@ -132,9 +132,9 @@ groupDismissed:
 unreadCount: false
 offlinePush:
 enable: false
-title: "groupDismissed title"
+title: groupDismissed title
-desc: "groupDismissed desc"
+desc: groupDismissed desc
-ext: "groupDismissed ext"
+ext: groupDismissed ext
 groupMuted:
 isSendMsg: true
@@ -142,9 +142,9 @@ groupMuted:
 unreadCount: false
 offlinePush:
 enable: false
-title: "groupMuted title"
+title: groupMuted title
-desc: "groupMuted desc"
+desc: groupMuted desc
-ext: "groupMuted ext"
+ext: groupMuted ext
 groupCancelMuted:
 isSendMsg: true
@@ -152,11 +152,11 @@ groupCancelMuted:
 unreadCount: false
 offlinePush:
 enable: false
-title: "groupCancelMuted title"
+title: groupCancelMuted title
-desc: "groupCancelMuted desc"
+desc: groupCancelMuted desc
-ext: "groupCancelMuted ext"
+ext: groupCancelMuted ext
 defaultTips:
-tips: "group Cancel Muted"
+tips: group Cancel Muted
 groupMemberMuted:
@@ -165,9 +165,9 @@ groupMemberMuted:
 unreadCount: false
 offlinePush:
 enable: false
-title: "groupMemberMuted title"
+title: groupMemberMuted title
-desc: "groupMemberMuted desc"
+desc: groupMemberMuted desc
-ext: "groupMemberMuted ext"
+ext: groupMemberMuted ext
 groupMemberCancelMuted:
 isSendMsg: true
@@ -175,9 +175,9 @@ groupMemberCancelMuted:
 unreadCount: false
 offlinePush:
 enable: false
-title: "groupMemberCancelMuted title"
+title: groupMemberCancelMuted title
-desc: "groupMemberCancelMuted desc"
+desc: groupMemberCancelMuted desc
-ext: "groupMemberCancelMuted ext"
+ext: groupMemberCancelMuted ext
 groupMemberInfoSet:
 isSendMsg: false
@@ -185,9 +185,9 @@ groupMemberInfoSet:
 unreadCount: false
 offlinePush:
 enable: false
-title: "groupMemberInfoSet title"
+title: groupMemberInfoSet title
-desc: "groupMemberInfoSet desc"
+desc: groupMemberInfoSet desc
-ext: "groupMemberInfoSet ext"
+ext: groupMemberInfoSet ext
 groupInfoSetAnnouncement:
 isSendMsg: true
@@ -195,9 +195,9 @@ groupInfoSetAnnouncement:
 unreadCount: false
 offlinePush:
 enable: false
-title: "groupInfoSetAnnouncement title"
+title: groupInfoSetAnnouncement title
-desc: "groupInfoSetAnnouncement desc"
+desc: groupInfoSetAnnouncement desc
-ext: "groupInfoSetAnnouncement ext"
+ext: groupInfoSetAnnouncement ext
 groupInfoSetName:
@@ -206,9 +206,9 @@ groupInfoSetName:
 unreadCount: false
 offlinePush:
 enable: false
-title: "groupInfoSetName title"
+title: groupInfoSetName title
-desc: "groupInfoSetName desc"
+desc: groupInfoSetName desc
-ext: "groupInfoSetName ext"
+ext: groupInfoSetName ext
 #############################friend#################################
@@ -218,9 +218,9 @@ friendApplicationAdded:
 unreadCount: false
 offlinePush:
 enable: false
-title: "Somebody applies to add you as a friend"
+title: Somebody applies to add you as a friend
-desc: "Somebody applies to add you as a friend"
+desc: Somebody applies to add you as a friend
-ext: "Somebody applies to add you as a friend"
+ext: Somebody applies to add you as a friend
 friendApplicationApproved:
 isSendMsg: true
@@ -228,9 +228,9 @@ friendApplicationApproved:
 unreadCount: false
 offlinePush:
 enable: true
-title: "Someone applies to add your friend application"
+title: Someone applies to add your friend application
-desc: "Someone applies to add your friend application"
+desc: Someone applies to add your friend application
-ext: "Someone applies to add your friend application"
+ext: Someone applies to add your friend application
 friendApplicationRejected:
 isSendMsg: false
@@ -238,9 +238,9 @@ friendApplicationRejected:
 unreadCount: false
 offlinePush:
 enable: true
-title: "Someone rejected your friend application"
+title: Someone rejected your friend application
-desc: "Someone rejected your friend application"
+desc: Someone rejected your friend application
-ext: "Someone rejected your friend application"
+ext: Someone rejected your friend application
 friendAdded:
 isSendMsg: false
@@ -248,9 +248,9 @@ friendAdded:
 unreadCount: false
 offlinePush:
 enable: true
-title: "We have become friends"
+title: We have become friends
-desc: "We have become friends"
+desc: We have become friends
-ext: "We have become friends"
+ext: We have become friends
 friendDeleted:
 isSendMsg: false
@@ -258,9 +258,9 @@ friendDeleted:
 unreadCount: false
 offlinePush:
 enable: true
-title: "deleted a friend"
+title: deleted a friend
-desc: "deleted a friend"
+desc: deleted a friend
-ext: "deleted a friend"
+ext: deleted a friend
 friendRemarkSet:
 isSendMsg: false
@@ -268,9 +268,9 @@ friendRemarkSet:
 unreadCount: false
 offlinePush:
 enable: true
-title: "Your friend's profile has been changed"
+title: Your friend's profile has been changed
-desc: "Your friend's profile has been changed"
+desc: Your friend's profile has been changed
-ext: "Your friend's profile has been changed"
+ext: Your friend's profile has been changed
 blackAdded:
 isSendMsg: false
@@ -278,9 +278,9 @@ blackAdded:
 unreadCount: false
 offlinePush:
 enable: true
-title: "blocked a user"
+title: blocked a user
-desc: "blocked a user"
+desc: blocked a user
-ext: "blocked a user"
+ext: blocked a user
 blackDeleted:
 isSendMsg: false
@@ -288,9 +288,9 @@ blackDeleted:
 unreadCount: false
 offlinePush:
 enable: true
-title: "Remove a blocked user"
+title: Remove a blocked user
-desc: "Remove a blocked user"
+desc: Remove a blocked user
-ext: "Remove a blocked user"
+ext: Remove a blocked user
 friendInfoUpdated:
 isSendMsg: false
@@ -298,9 +298,9 @@ friendInfoUpdated:
 unreadCount: false
 offlinePush:
 enable: true
-title: "friend info updated"
+title: friend info updated
-desc: "friend info updated"
+desc: friend info updated
-ext: "friend info updated"
+ext: friend info updated
 #####################user#########################
 userInfoUpdated:
@@ -309,9 +309,9 @@ userInfoUpdated:
 unreadCount: false
 offlinePush:
 enable: true
-title: "Remove a blocked user"
+title: Remove a blocked user
-desc: "Remove a blocked user"
+desc: Remove a blocked user
-ext: "Remove a blocked user"
+ext: Remove a blocked user
 userStatusChanged:
 isSendMsg: false
@@ -319,9 +319,9 @@ userStatusChanged:
 unreadCount: false
 offlinePush:
 enable: false
-title: "user status changed"
+title: user status changed
-desc: "user status changed"
+desc: user status changed
-ext: "user status changed"
+ext: user status changed
 #####################conversation#########################
 conversationChanged:
@@ -330,9 +330,9 @@ conversationChanged:
 unreadCount: false
 offlinePush:
 enable: true
-title: "conversation changed"
+title: conversation changed
-desc: "conversation changed"
+desc: conversation changed
-ext: "conversation changed"
+ext: conversation changed
 conversationSetPrivate:
 isSendMsg: true
@@ -340,6 +340,6 @@ conversationSetPrivate:
 unreadCount: false
 offlinePush:
 enable: true
-title: "burn after reading"
+title: burn after reading
-desc: "burn after reading"
+desc: burn after reading
-ext: "burn after reading"
+ext: burn after reading

@@ -1,3 +1,3 @@
-cronExecuteTime: "0 2 * * *"
+cronExecuteTime: 0 2 * * *
 retainChatRecords: 365
 fileExpireTime: 90

@@ -1,6 +1,6 @@
 rpc:
 # The IP address where this RPC service registers itself; if left blank, it defaults to the internal network IP
-registerIP: ''
+registerIP:
 # List of ports that the RPC service listens on; configuring multiple ports will launch multiple instances. These must match the number of configured prometheus ports
 ports: [ 10140 ]

@@ -1,6 +1,6 @@
 rpc:
 # The IP address where this RPC service registers itself; if left blank, it defaults to the internal network IP
-registerIP: ''
+registerIP:
 # IP address that the RPC service listens on; setting to 0.0.0.0 listens on both internal and external IPs. If left blank, it automatically uses the internal network IP
 listenIP: 0.0.0.0
 # List of ports that the RPC service listens on; configuring multiple ports will launch multiple instances. These must match the number of configured prometheus ports
@@ -13,28 +13,28 @@ prometheus:
 ports: [ 20670, 20671, 20672, 20673 ]
 maxConcurrentWorkers: 3
-#"Use geTui for offline push notifications, or choose fcm or jpns; corresponding configuration settings must be specified."
+#Use geTui for offline push notifications, or choose fcm or jpns; corresponding configuration settings must be specified.
-enable: "geTui"
+enable: geTui
 geTui:
-pushUrl: "https://restapi.getui.com/v2/$appId"
+pushUrl: https://restapi.getui.com/v2/$appId
-masterSecret: ''
+masterSecret:
-appKey: ''
+appKey:
-intent: ''
+intent:
-channelID: ''
+channelID:
-channelName: ''
+channelName:
 fcm:
 # Prioritize using file paths. If the file path is empty, use URL
-filePath: "" # File path is concatenated with the parameters passed in through - c(`mage` default pass in `config/`) and filePath.
+filePath: # File path is concatenated with the parameters passed in through - c(`mage` default pass in `config/`) and filePath.
-authURL: "" # Must start with https or http.
+authURL: # Must start with https or http.
 jpns:
-appKey: ''
+appKey:
-masterSecret: ''
+masterSecret:
-pushURL: ''
+pushURL:
-pushIntent: ''
+pushIntent:
 # iOS system push sound and badge count
 iosPush:
-pushSound: "xxx"
+pushSound: xxx
 badgeCount: true
 production: false

@@ -1,6 +1,6 @@
 rpc:
 # The IP address where this RPC service registers itself; if left blank, it defaults to the internal network IP
-registerIP: ''
+registerIP:
 # IP address that the RPC service listens on; setting to 0.0.0.0 listens on both internal and external IPs. If left blank, it automatically uses the internal network IP
 listenIP: 0.0.0.0
 # List of ports that the RPC service listens on; configuring multiple ports will launch multiple instances. These must match the number of configured prometheus ports

@@ -1,6 +1,6 @@
 rpc:
 # The IP address where this RPC service registers itself; if left blank, it defaults to the internal network IP
-registerIP: ''
+registerIP:
 # IP address that the RPC service listens on; setting to 0.0.0.0 listens on both internal and external IPs. If left blank, it automatically uses the internal network IP
 listenIP: 0.0.0.0
 # List of ports that the RPC service listens on; configuring multiple ports will launch multiple instances. These must match the number of configured prometheus ports

@@ -1,6 +1,6 @@
 rpc:
 # The IP address where this RPC service registers itself; if left blank, it defaults to the internal network IP
-registerIP: ''
+registerIP:
 # IP address that the RPC service listens on; setting to 0.0.0.0 listens on both internal and external IPs. If left blank, it automatically uses the internal network IP
 listenIP: 0.0.0.0
 # List of ports that the RPC service listens on; configuring multiple ports will launch multiple instances. These must match the number of configured prometheus ports

@@ -1,6 +1,6 @@
 rpc:
 # The IP address where this RPC service registers itself; if left blank, it defaults to the internal network IP
-registerIP: ''
+registerIP:
 # IP address that the RPC service listens on; setting to 0.0.0.0 listens on both internal and external IPs. If left blank, it automatically uses the internal network IP
 listenIP: 0.0.0.0
 # List of ports that the RPC service listens on; configuring multiple ports will launch multiple instances. These must match the number of configured prometheus ports
@@ -11,3 +11,6 @@ prometheus:
 enable: true
 # List of ports that Prometheus listens on; these must match the number of rpc.ports to ensure correct monitoring setup
 ports: [ 20650 ]
+enableHistoryForNewMembers: true

@@ -1,6 +1,6 @@
 rpc:
 # The IP address where this RPC service registers itself; if left blank, it defaults to the internal network IP
-registerIP: ''
+registerIP:
 # IP address that the RPC service listens on; setting to 0.0.0.0 listens on both internal and external IPs. If left blank, it automatically uses the internal network IP
 listenIP: 0.0.0.0
 # List of ports that the RPC service listens on; configuring multiple ports will launch multiple instances. These must match the number of configured prometheus ports

@@ -1,6 +1,6 @@
 rpc:
 # The IP address where this RPC service registers itself; if left blank, it defaults to the internal network IP
-registerIP: ''
+registerIP:
 # IP address that the RPC service listens on; setting to 0.0.0.0 listens on both internal and external IPs. If left blank, it automatically uses the internal network IP
 listenIP: 0.0.0.0
 # List of ports that the RPC service listens on; configuring multiple ports will launch multiple instances. These must match the number of configured prometheus ports
@@ -15,26 +15,26 @@ prometheus:
 object:
 # Use MinIO as object storage, or set to "cos", "oss", "kodo", "aws", while also configuring the corresponding settings
-enable: "minio"
+enable: minio
 cos:
 bucketURL: https://temp-1252357374.cos.ap-chengdu.myqcloud.com
-secretID: ''
+secretID:
-secretKey: ''
+secretKey:
-sessionToken: ''
+sessionToken:
 publicRead: false
 oss:
-endpoint: "https://oss-cn-chengdu.aliyuncs.com"
+endpoint: https://oss-cn-chengdu.aliyuncs.com
-bucket: "demo-9999999"
+bucket: demo-9999999
-bucketURL: "https://demo-9999999.oss-cn-chengdu.aliyuncs.com"
+bucketURL: https://demo-9999999.oss-cn-chengdu.aliyuncs.com
-accessKeyID: ''
+accessKeyID:
-accessKeySecret: ''
+accessKeySecret:
-sessionToken: ''
+sessionToken:
 publicRead: false
 kodo:
-endpoint: "http://s3.cn-south-1.qiniucs.com"
+endpoint: http://s3.cn-south-1.qiniucs.com
-bucket: "kodo-bucket-test"
+bucket: kodo-bucket-test
-bucketURL: "http://kodo-bucket-test-oetobfb.qiniudns.com"
+bucketURL: http://kodo-bucket-test-oetobfb.qiniudns.com
-accessKeyID: ''
+accessKeyID:
-accessKeySecret: ''
+accessKeySecret:
-sessionToken: ''
+sessionToken:
 publicRead: false

@@ -1,6 +1,6 @@
 rpc:
 # API or other RPCs can access this RPC through this IP; if left blank, the internal network IP is obtained by default
-registerIP: ''
+registerIP:
 # Listening IP; 0.0.0.0 means both internal and external IPs are listened to, if blank, the internal network IP is automatically obtained by default
 listenIP: 0.0.0.0
 # Listening ports; if multiple are configured, multiple instances will be launched, and must be consistent with the number of prometheus.ports

@@ -8,9 +8,9 @@ global:
 alerting:
 alertmanagers:
 - static_configs:
-- targets: ['internal_ip:19093']
+- targets: [internal_ip:19093]
-# Load rules once and periodically evaluate them according to the global 'evaluation_interval'.
+# Load rules once and periodically evaluate them according to the global evaluation_interval.
 rule_files:
 - "instance-down-rules.yml"
 # - "first_rules.yml"
@@ -19,65 +19,65 @@ rule_files:
 # A scrape configuration containing exactly one endpoint to scrape:
 # Here it's Prometheus itself.
 scrape_configs:
-# The job name is added as a label "job='job_name'"" to any timeseries scraped from this config.
+# The job name is added as a label "job=job_name"" to any timeseries scraped from this config.
 # Monitored information captured by prometheus
 # prometheus fetches application services
-- job_name: 'node_exporter'
+- job_name: node_exporter
 static_configs:
-- targets: [ 'internal_ip:20500' ]
+- targets: [ internal_ip:20500 ]
-- job_name: 'openimserver-openim-api'
+- job_name: openimserver-openim-api
 static_configs:
-- targets: [ 'internal_ip:20502' ]
+- targets: [ internal_ip:20502 ]
 labels:
-namespace: 'default'
+namespace: default
-- job_name: 'openimserver-openim-msggateway'
+- job_name: openimserver-openim-msggateway
 static_configs:
-- targets: [ 'internal_ip:20640' ]
+- targets: [ internal_ip:20640 ]
 labels:
-namespace: 'default'
+namespace: default
-- job_name: 'openimserver-openim-msgtransfer'
+- job_name: openimserver-openim-msgtransfer
 static_configs:
-- targets: [ 'internal_ip:20600', 'internal_ip:20601', 'internal_ip:20602', 'internal_ip:20603' ]
+- targets: [ internal_ip:20600, internal_ip:20601, internal_ip:20602, internal_ip:20603 ]
 labels:
-namespace: 'default'
+namespace: default
-- job_name: 'openimserver-openim-push'
+- job_name: openimserver-openim-push
 static_configs:
-- targets: [ 'internal_ip:20670', 'internal_ip:20671', 'internal_ip:20672', 'internal_ip:20673']
+- targets: [ internal_ip:20670, internal_ip:20671, internal_ip:20672, internal_ip:20673]
 labels:
-namespace: 'default'
+namespace: default
-- job_name: 'openimserver-openim-rpc-auth'
+- job_name: openimserver-openim-rpc-auth
 static_configs:
-- targets: [ 'internal_ip:20600' ]
+- targets: [ internal_ip:20600 ]
 labels:
-namespace: 'default'
+namespace: default
-- job_name: 'openimserver-openim-rpc-conversation'
+- job_name: openimserver-openim-rpc-conversation
 static_configs:
-- targets: [ 'internal_ip:20680' ]
+- targets: [ internal_ip:20680 ]
 labels:
-namespace: 'default'
+namespace: default
-- job_name: 'openimserver-openim-rpc-friend'
+- job_name: openimserver-openim-rpc-friend
 static_configs:
-- targets: [ 'internal_ip:20620' ]
+- targets: [ internal_ip:20620 ]
 labels:
-namespace: 'default'
+namespace: default
-- job_name: 'openimserver-openim-rpc-group'
+- job_name: openimserver-openim-rpc-group
 static_configs:
-- targets: [ 'internal_ip:20650' ]
+- targets: [ internal_ip:20650 ]
 labels:
-namespace: 'default'
+namespace: default
-- job_name: 'openimserver-openim-rpc-msg'
+- job_name: openimserver-openim-rpc-msg
 static_configs:
-- targets: [ 'internal_ip:20630' ]
+- targets: [ internal_ip:20630 ]
 labels:
-namespace: 'default'
+namespace: default
-- job_name: 'openimserver-openim-rpc-third'
+- job_name: openimserver-openim-rpc-third
 static_configs:
-- targets: [ 'internal_ip:20690' ]
+- targets: [ internal_ip:20690 ]
 labels:
-namespace: 'default'
+namespace: default
-- job_name: 'openimserver-openim-rpc-user'
+- job_name: openimserver-openim-rpc-user
 static_configs:
-- targets: [ 'internal_ip:20610' ]
+- targets: [ internal_ip:20610 ]
 labels:
-namespace: 'default'
+namespace: default

@@ -1,5 +1,5 @@
 address: [ localhost:16379 ]
-username: ''
+username:
 password: openIM123
 clusterMode: false
 db: 0

@@ -10,5 +10,5 @@ rpcRegisterName:
 conversation: conversation
 third: third
-imAdminUserID: [ "imAdmin" ]
+imAdminUserID: [ imAdmin ]

@@ -1,4 +1,4 @@
-url: "webhook://127.0.0.1:10008/callbackExample"
+url: webhook://127.0.0.1:10008/callbackExample
 beforeSendSingleMsg:
 enable: false
 timeout: 5
@@ -130,6 +130,13 @@ beforeSetGroupInfo:
 enable: false
 timeout: 5
 failedContinue: true
+afterSetGroupInfoEX:
+enable: false
+timeout: 5
+beforeSetGroupInfoEX:
+enable: false
+timeout: 5
+failedContinue: true
 afterRevokeMsg:
 enable: false
 timeout: 5

@@ -140,50 +140,50 @@ services:
 networks:
 - openim
-prometheus:
+# prometheus:
-image: ${PROMETHEUS_IMAGE}
+# image: ${PROMETHEUS_IMAGE}
-container_name: prometheus
+# container_name: prometheus
-restart: always
+# restart: always
-volumes:
+# volumes:
-- ./config/prometheus.yml:/etc/prometheus/prometheus.yml
+# - ./config/prometheus.yml:/etc/prometheus/prometheus.yml
-- ./config/instance-down-rules.yml:/etc/prometheus/instance-down-rules.yml
+# - ./config/instance-down-rules.yml:/etc/prometheus/instance-down-rules.yml
-- ${DATA_DIR}/components/prometheus/data:/prometheus
+# - ${DATA_DIR}/components/prometheus/data:/prometheus
-command:
+# command:
-- '--config.file=/etc/prometheus/prometheus.yml'
+# - '--config.file=/etc/prometheus/prometheus.yml'
-- '--storage.tsdb.path=/prometheus'
+# - '--storage.tsdb.path=/prometheus'
-ports:
+# ports:
-- "19091:9090"
+# - "19091:9090"
-networks:
+# networks:
-- openim
+# - openim
+#
-alertmanager:
+# alertmanager:
-image: ${ALERTMANAGER_IMAGE}
+# image: ${ALERTMANAGER_IMAGE}
-container_name: alertmanager
+# container_name: alertmanager
-restart: always
+# restart: always
-volumes:
+# volumes:
-- ./config/alertmanager.yml:/etc/alertmanager/alertmanager.yml
+# - ./config/alertmanager.yml:/etc/alertmanager/alertmanager.yml
-- ./config/email.tmpl:/etc/alertmanager/email.tmpl
+# - ./config/email.tmpl:/etc/alertmanager/email.tmpl
-ports:
+# ports:
-- "19093:9093"
+# - "19093:9093"
-networks:
+# networks:
-- openim
+# - openim
+#
-grafana:
+# grafana:
-image: ${GRAFANA_IMAGE}
+# image: ${GRAFANA_IMAGE}
-container_name: grafana
+# container_name: grafana
-user: root
+# user: root
-restart: always
+# restart: always
-environment:
+# environment:
-- GF_SECURITY_ALLOW_EMBEDDING=true
+# - GF_SECURITY_ALLOW_EMBEDDING=true
-- GF_SESSION_COOKIE_SAMESITE=none
+# - GF_SESSION_COOKIE_SAMESITE=none
-- GF_SESSION_COOKIE_SECURE=true
+# - GF_SESSION_COOKIE_SECURE=true
-- GF_AUTH_ANONYMOUS_ENABLED=true
+# - GF_AUTH_ANONYMOUS_ENABLED=true
-- GF_AUTH_ANONYMOUS_ORG_ROLE=Admin
+# - GF_AUTH_ANONYMOUS_ORG_ROLE=Admin
-ports:
+# ports:
-- "13000:3000"
+# - "13000:3000"
-volumes:
+# volumes:
-- ${DATA_DIR:-./}/components/grafana:/var/lib/grafana
+# - ${DATA_DIR:-./}/components/grafana:/var/lib/grafana
-networks:
+# networks:
-- openim
+# - openim

@@ -12,8 +12,8 @@ require (
 github.com/gorilla/websocket v1.5.1
 github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0
 github.com/mitchellh/mapstructure v1.5.0
-github.com/openimsdk/protocol v0.0.69
+github.com/openimsdk/protocol v0.0.72-alpha.9
-github.com/openimsdk/tools v0.0.50-alpha.3
+github.com/openimsdk/tools v0.0.49-alpha.55
 github.com/pkg/errors v0.9.1 // indirect
 github.com/prometheus/client_golang v1.18.0
 github.com/stretchr/testify v1.9.0
@@ -41,6 +41,7 @@ require (
 github.com/spf13/viper v1.18.2
 github.com/stathat/consistent v1.0.0
 go.uber.org/automaxprocs v1.5.3
+golang.org/x/exp v0.0.0-20230905200255-921286631fa9
 golang.org/x/sync v0.6.0
 )
@@ -74,7 +75,6 @@ require (
 github.com/beorn7/perks v1.0.1 // indirect
 github.com/bytedance/sonic v1.9.1 // indirect
 github.com/cespare/xxhash/v2 v2.2.0 // indirect
-github.com/chai2010/webp v1.1.1 // indirect
 github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect
 github.com/clbanning/mxj v1.8.4 // indirect
 github.com/coreos/go-semver v0.3.0 // indirect
@@ -170,7 +170,6 @@ require (
 go.uber.org/atomic v1.9.0 // indirect
 go.uber.org/multierr v1.11.0 // indirect
 golang.org/x/arch v0.3.0 // indirect
-golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect
 golang.org/x/image v0.15.0 // indirect
 golang.org/x/net v0.22.0 // indirect
 golang.org/x/oauth2 v0.17.0 // indirect

@@ -71,8 +71,6 @@ github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZX
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
 github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
 github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
-github.com/chai2010/webp v1.1.1 h1:jTRmEccAJ4MGrhFOrPMpNGIJ/eybIgwKpcACsrTEapk=
-github.com/chai2010/webp v1.1.1/go.mod h1:0XVwvZWdjjdxpUEIf7b9g9VkHFnInUSYujwqTLEuldU=
 github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
 github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams=
 github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
@@ -321,10 +319,11 @@ github.com/onsi/gomega v1.25.0 h1:Vw7br2PCDYijJHSfBOWhov+8cAnUf8MfMaIOV323l6Y=
 github.com/onsi/gomega v1.25.0/go.mod h1:r+zV744Re+DiYCIPRlYOTxn0YkOLcAnW8k1xXdMPGhM=
 github.com/openimsdk/gomake v0.0.14-alpha.5 h1:VY9c5x515lTfmdhhPjMvR3BBRrRquAUCFsz7t7vbv7Y=
 github.com/openimsdk/gomake v0.0.14-alpha.5/go.mod h1:PndCozNc2IsQIciyn9mvEblYWZwJmAI+06z94EY+csI=
-github.com/openimsdk/protocol v0.0.69 h1:dVi8meSg8kmUzSH1XQab4MjihqKkkcCAmt1BYXPJuXo=
-github.com/openimsdk/protocol v0.0.69/go.mod h1:OZQA9FR55lseYoN2Ql1XAHYKHJGu7OMNkUbuekrKCM8=
-github.com/openimsdk/tools v0.0.50-alpha.3 h1:PGXccSevL+xOg08JKy9oS4poagr85nfZiB+I1jvv2/U=
-github.com/openimsdk/tools v0.0.50-alpha.3/go.mod h1:oiSQU5Z6fzjxKFjbqDHImD8EmCIwClU1Rkur1sK12Po=
+github.com/openimsdk/protocol v0.0.72-alpha.8 h1:MhxSsdxXx2ZaeSLQk4uFftsB5L2rPh1Qup+dURQNzXQ=
+github.com/openimsdk/protocol v0.0.72-alpha.8/go.mod h1:OZQA9FR55lseYoN2Ql1XAHYKHJGu7OMNkUbuekrKCM8=
+github.com/openimsdk/protocol v0.0.72-alpha.9/go.mod h1:OZQA9FR55lseYoN2Ql1XAHYKHJGu7OMNkUbuekrKCM8=
+github.com/openimsdk/tools v0.0.49-alpha.55 h1:KPgC53oqiwZYssLKljhtXbWXifMlTj2SSQEusj4Uf4k=
+github.com/openimsdk/tools v0.0.49-alpha.55/go.mod h1:h1cYmfyaVtgFbKmb1Cfsl8XwUOMTt8ubVUQrdGtsUh4=
 github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
 github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
 github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ=

@@ -35,6 +35,10 @@ func (o *GroupApi) SetGroupInfo(c *gin.Context) {
     a2r.Call(group.GroupClient.SetGroupInfo, o.Client, c)
 }
+
+func (o *GroupApi) SetGroupInfoEX(c *gin.Context) {
+    a2r.Call(group.GroupClient.SetGroupInfoEX, o.Client, c)
+}
 func (o *GroupApi) JoinGroup(c *gin.Context) {
     a2r.Call(group.GroupClient.JoinGroup, o.Client, c)
 }

@@ -49,14 +49,14 @@ func NewMessageApi(msgRpcClient *rpcclient.Message, userRpcClient *rpcclient.Use
         userRpcClient: rpcclient.NewUserRpcClientByUser(userRpcClient), imAdminUserID: imAdminUserID}
 }
-func (MessageApi) SetOptions(options map[string]bool, value bool) {
+func (*MessageApi) SetOptions(options map[string]bool, value bool) {
     datautil.SetSwitchFromOptions(options, constant.IsHistory, value)
     datautil.SetSwitchFromOptions(options, constant.IsPersistent, value)
     datautil.SetSwitchFromOptions(options, constant.IsSenderSync, value)
     datautil.SetSwitchFromOptions(options, constant.IsConversationUpdate, value)
 }
-func (m MessageApi) newUserSendMsgReq(_ *gin.Context, params *apistruct.SendMsg) *msg.SendMsgReq {
+func (m *MessageApi) newUserSendMsgReq(_ *gin.Context, params *apistruct.SendMsg) *msg.SendMsgReq {
     var newContent string
     options := make(map[string]bool, 5)
     switch params.ContentType {
@@ -231,7 +231,7 @@ func (m *MessageApi) SendMessage(c *gin.Context) {
     }
     // Set the status to successful if the message is sent.
-    var status int = constant.MsgSendSuccessed
+    var status = constant.MsgSendSuccessed
     // Attempt to update the message sending status in the system.
     _, err = m.Client.SetSendMsgStatus(c, &msg.SetSendMsgStatusReq{

@@ -2,12 +2,16 @@ package api
 import (
     "fmt"
     "github.com/gin-gonic/gin"
     "github.com/gin-gonic/gin/binding"
     "github.com/go-playground/validator/v10"
     "google.golang.org/grpc"
     "google.golang.org/grpc/credentials/insecure"
+    "net/http"
+    "strings"
     "github.com/openimsdk/open-im-server/v3/pkg/common/prommetrics"
     "github.com/openimsdk/open-im-server/v3/pkg/common/servererrs"
     "github.com/openimsdk/open-im-server/v3/pkg/rpcclient"
@@ -16,8 +20,6 @@ import (
     "github.com/openimsdk/tools/discovery"
     "github.com/openimsdk/tools/log"
     "github.com/openimsdk/tools/mw"
-    "net/http"
-    "strings"
 )
 func prommetricsGin() gin.HandlerFunc {
@@ -112,6 +114,7 @@ func newGinRouter(disCov discovery.SvcDiscoveryRegistry, config *Config) *gin.En
 {
     groupRouterGroup.POST("/create_group", g.CreateGroup)
     groupRouterGroup.POST("/set_group_info", g.SetGroupInfo)
+    groupRouterGroup.POST("/set_group_info_ex", g.SetGroupInfoEX)
     groupRouterGroup.POST("/join_group", g.JoinGroup)
     groupRouterGroup.POST("/quit_group", g.QuitGroup)
     groupRouterGroup.POST("/group_application_response", g.ApplicationGroupResponse)

@@ -36,9 +36,11 @@ func (u *UserApi) UserRegister(c *gin.Context) {
     a2r.Call(user.UserClient.UserRegister, u.Client, c)
 }
+// UpdateUserInfo is deprecated. Use UpdateUserInfoEx
 func (u *UserApi) UpdateUserInfo(c *gin.Context) {
     a2r.Call(user.UserClient.UpdateUserInfo, u.Client, c)
 }
 func (u *UserApi) UpdateUserInfoEx(c *gin.Context) {
     a2r.Call(user.UserClient.UpdateUserInfoEx, u.Client, c)
 }

@@ -275,7 +275,7 @@ func (ws *WsServer) registerClient(client *Client) {
     }
     wg := sync.WaitGroup{}
-    log.ZDebug(client.ctx, "ws.msgGatewayConfig.Discovery.Enable", ws.msgGatewayConfig.Discovery.Enable)
+    log.ZDebug(client.ctx, "ws.msgGatewayConfig.Discovery.Enable", "discoveryEnable", ws.msgGatewayConfig.Discovery.Enable)
     if ws.msgGatewayConfig.Discovery.Enable != "k8s" {
         wg.Add(1)

@@ -16,6 +16,7 @@ package msgtransfer
 import (
     "context"
+    "errors"
     "fmt"
     "github.com/openimsdk/open-im-server/v3/pkg/common/prommetrics"
     "github.com/openimsdk/open-im-server/v3/pkg/common/storage/cache/redis"
@@ -137,7 +138,7 @@ func (m *MsgTransfer) Start(index int, config *Config) error {
         return
     }
-    if err := prommetrics.TransferInit(prometheusPort); err != nil && err != http.ErrServerClosed {
+    if err := prommetrics.TransferInit(prometheusPort); err != nil && !errors.Is(err, http.ErrServerClosed) {
         netErr = errs.WrapMsg(err, "prometheus start error", "prometheusPort", prometheusPort)
         netDone <- struct{}{}
     }

@@ -16,6 +16,7 @@ package msgtransfer
 import (
     "context"
+    "errors"
     "github.com/IBM/sarama"
     "github.com/go-redis/redis"
     "github.com/openimsdk/open-im-server/v3/pkg/common/config"
@@ -187,7 +188,7 @@ func (och *OnlineHistoryRedisConsumerHandler) handleMsg(ctx context.Context, key
     if len(storageMessageList) > 0 {
         msg := storageMessageList[0]
         lastSeq, isNewConversation, err := och.msgDatabase.BatchInsertChat2Cache(ctx, conversationID, storageMessageList)
-        if err != nil && errs.Unwrap(err) != redis.Nil {
+        if err != nil && !errors.Is(errs.Unwrap(err), redis.Nil) {
            log.ZError(ctx, "batch data insert to redis err", err, "storageMsgList", storageMessageList)
            return
         }

@@ -91,13 +91,13 @@ func (mc *OnlineHistoryMongoConsumerHandler) handleChatWs2Mongo(ctx context.Cont
     }
 }
-func (OnlineHistoryMongoConsumerHandler) Setup(_ sarama.ConsumerGroupSession) error { return nil }
+func (*OnlineHistoryMongoConsumerHandler) Setup(_ sarama.ConsumerGroupSession) error { return nil }
-func (OnlineHistoryMongoConsumerHandler) Cleanup(_ sarama.ConsumerGroupSession) error { return nil }
+func (*OnlineHistoryMongoConsumerHandler) Cleanup(_ sarama.ConsumerGroupSession) error { return nil }
 func (mc *OnlineHistoryMongoConsumerHandler) ConsumeClaim(
     sess sarama.ConsumerGroupSession,
     claim sarama.ConsumerGroupClaim,
-) error { // a instance in the consumer group
+) error { // an instance in the consumer group
     log.ZDebug(context.Background(), "online new session msg come", "highWaterMarkOffset",
         claim.HighWaterMarkOffset(), "topic", claim.Topic(), "partition", claim.Partition())
     for msg := range claim.Messages() {

@@ -19,20 +19,20 @@ type OnlinePusher interface {
     pushToUserIDs *[]string) []string
 }
-type emptyOnlinePUsher struct{}
+type emptyOnlinePusher struct{}
-func newEmptyOnlinePUsher() *emptyOnlinePUsher {
-    return &emptyOnlinePUsher{}
+func newEmptyOnlinePusher() *emptyOnlinePusher {
+    return &emptyOnlinePusher{}
 }
-func (emptyOnlinePUsher) GetConnsAndOnlinePush(ctx context.Context, msg *sdkws.MsgData,
+func (emptyOnlinePusher) GetConnsAndOnlinePush(ctx context.Context, msg *sdkws.MsgData,
     pushToUserIDs []string) (wsResults []*msggateway.SingleMsgToUserResults, err error) {
-    log.ZWarn(ctx, "emptyOnlinePUsher GetConnsAndOnlinePush", nil)
+    log.ZWarn(ctx, "emptyOnlinePusher GetConnsAndOnlinePush", nil)
     return nil, nil
 }
-func (u emptyOnlinePUsher) GetOnlinePushFailedUserIDs(ctx context.Context, msg *sdkws.MsgData,
+func (u emptyOnlinePusher) GetOnlinePushFailedUserIDs(ctx context.Context, msg *sdkws.MsgData,
     wsResults []*msggateway.SingleMsgToUserResults, pushToUserIDs *[]string) []string {
-    log.ZWarn(ctx, "emptyOnlinePUsher GetOnlinePushFailedUserIDs", nil)
+    log.ZWarn(ctx, "emptyOnlinePusher GetOnlinePushFailedUserIDs", nil)
     return nil
 }
@@ -45,7 +45,7 @@ func NewOnlinePusher(disCov discovery.SvcDiscoveryRegistry, config *Config) Onli
     case "etcd":
         return NewDefaultAllNode(disCov, config)
     default:
-        return newEmptyOnlinePUsher()
+        return newEmptyOnlinePusher()
     }
 }

@@ -154,17 +154,17 @@ func (c *ConsumerHandler) Push2User(ctx context.Context, userIDs []string, msg *
         return nil
       }
     }
-    offlinePUshUserID := []string{msg.RecvID}
+    offlinePushUserID := []string{msg.RecvID}
     //receiver offline push
     if err = c.webhookBeforeOfflinePush(ctx, &c.config.WebhooksConfig.BeforeOfflinePush,
-        offlinePUshUserID, msg, nil); err != nil {
+        offlinePushUserID, msg, nil); err != nil {
         return err
     }
-    err = c.offlinePushMsg(ctx, msg, offlinePUshUserID)
+    err = c.offlinePushMsg(ctx, msg, offlinePushUserID)
     if err != nil {
-        log.ZWarn(ctx, "offlinePushMsg failed", err, "offlinePUshUserID", offlinePUshUserID, "msg", msg)
+        log.ZWarn(ctx, "offlinePushMsg failed", err, "offlinePushUserID", offlinePushUserID, "msg", msg)
         return nil
     }

@ -221,11 +221,11 @@ func (c *conversationServer) SetConversation(ctx context.Context, req *pbconvers
return resp, nil return resp, nil
} }
// nolint
func (c *conversationServer) SetConversations(ctx context.Context, req *pbconversation.SetConversationsReq) (*pbconversation.SetConversationsResp, error) { func (c *conversationServer) SetConversations(ctx context.Context, req *pbconversation.SetConversationsReq) (*pbconversation.SetConversationsResp, error) {
if req.Conversation == nil { if req.Conversation == nil {
return nil, errs.ErrArgs.WrapMsg("conversation must not be nil") return nil, errs.ErrArgs.WrapMsg("conversation must not be nil")
} }
if req.Conversation.ConversationType == constant.WriteGroupChatType { if req.Conversation.ConversationType == constant.WriteGroupChatType {
groupInfo, err := c.groupRpcClient.GetGroupInfo(ctx, req.Conversation.GroupID) groupInfo, err := c.groupRpcClient.GetGroupInfo(ctx, req.Conversation.GroupID)
if err != nil { if err != nil {
@ -235,100 +235,144 @@ func (c *conversationServer) SetConversations(ctx context.Context, req *pbconver
return nil, servererrs.ErrDismissedAlready.WrapMsg("group dismissed") return nil, servererrs.ErrDismissedAlready.WrapMsg("group dismissed")
} }
} }
-	var unequal int
-	var conv dbModel.Conversation
-	if len(req.UserIDs) == 1 {
-		cs, err := c.conversationDatabase.FindConversations(ctx, req.UserIDs[0], []string{req.Conversation.ConversationID})
-		if err != nil {
-			return nil, err
-		}
-		if len(cs) == 0 {
-			return nil, errs.ErrRecordNotFound.WrapMsg("conversation not found")
-		}
-		conv = *cs[0]
-	}
+	conversationMap := make(map[string]*dbModel.Conversation)
+	var needUpdateUsersList []string
+	for _, userID := range req.UserIDs {
+		conversationList, err := c.conversationDatabase.FindConversations(ctx, userID, []string{req.Conversation.ConversationID})
+		if err != nil {
+			return nil, err
+		}
+		if len(conversationList) != 0 {
+			conversationMap[userID] = conversationList[0]
+		} else {
+			needUpdateUsersList = append(needUpdateUsersList, userID)
+		}
+	}
	var conversation dbModel.Conversation
	conversation.ConversationID = req.Conversation.ConversationID
	conversation.ConversationType = req.Conversation.ConversationType
	conversation.UserID = req.Conversation.UserID
	conversation.GroupID = req.Conversation.GroupID
	m := make(map[string]any)
-	if req.Conversation.RecvMsgOpt != nil {
-		m["recv_msg_opt"] = req.Conversation.RecvMsgOpt.Value
-		if req.Conversation.RecvMsgOpt.Value != conv.RecvMsgOpt {
-			unequal++
-		}
-	}
-	if req.Conversation.AttachedInfo != nil {
-		m["attached_info"] = req.Conversation.AttachedInfo.Value
-		if req.Conversation.AttachedInfo.Value != conv.AttachedInfo {
-			unequal++
-		}
-	}
-	if req.Conversation.Ex != nil {
-		m["ex"] = req.Conversation.Ex.Value
-		if req.Conversation.Ex.Value != conv.Ex {
-			unequal++
-		}
-	}
-	if req.Conversation.IsPinned != nil {
-		m["is_pinned"] = req.Conversation.IsPinned.Value
-		if req.Conversation.IsPinned.Value != conv.IsPinned {
-			unequal++
-		}
-	}
-	if req.Conversation.GroupAtType != nil {
-		m["group_at_type"] = req.Conversation.GroupAtType.Value
-		if req.Conversation.GroupAtType.Value != conv.GroupAtType {
-			unequal++
-		}
-	}
-	if req.Conversation.MsgDestructTime != nil {
-		m["msg_destruct_time"] = req.Conversation.MsgDestructTime.Value
-		if req.Conversation.MsgDestructTime.Value != conv.MsgDestructTime {
-			unequal++
-		}
-	}
-	if req.Conversation.IsMsgDestruct != nil {
-		m["is_msg_destruct"] = req.Conversation.IsMsgDestruct.Value
-		if req.Conversation.IsMsgDestruct.Value != conv.IsMsgDestruct {
-			unequal++
-		}
-	}
+	setConversationFieldsFunc := func() {
+		if req.Conversation.RecvMsgOpt != nil {
+			m["recv_msg_opt"] = req.Conversation.RecvMsgOpt.Value
+		}
+		if req.Conversation.AttachedInfo != nil {
+			m["attached_info"] = req.Conversation.AttachedInfo.Value
+		}
+		if req.Conversation.Ex != nil {
+			m["ex"] = req.Conversation.Ex.Value
+		}
+		if req.Conversation.IsPinned != nil {
+			m["is_pinned"] = req.Conversation.IsPinned.Value
+		}
+		if req.Conversation.GroupAtType != nil {
+			m["group_at_type"] = req.Conversation.GroupAtType.Value
+		}
+		if req.Conversation.MsgDestructTime != nil {
+			m["msg_destruct_time"] = req.Conversation.MsgDestructTime.Value
+		}
+		if req.Conversation.MsgDestructTime != nil {
+			m["msg_destruct_time"] = req.Conversation.MsgDestructTime.Value
+		}
+		if req.Conversation.BurnDuration != nil {
+			m["burn_duration"] = req.Conversation.BurnDuration.Value
+		}
+	}
+	// set need set field in conversation
+	setConversationFieldsFunc()
+	for userID := range conversationMap {
+		unequal := len(m)
+		if req.Conversation.RecvMsgOpt != nil {
+			if req.Conversation.RecvMsgOpt.Value != conversationMap[userID].RecvMsgOpt {
+				unequal--
+			}
+		}
+		if req.Conversation.AttachedInfo != nil {
+			if req.Conversation.AttachedInfo.Value != conversationMap[userID].AttachedInfo {
+				unequal--
+			}
+		}
+		if req.Conversation.Ex != nil {
+			if req.Conversation.Ex.Value != conversationMap[userID].Ex {
+				unequal--
+			}
+		}
+		if req.Conversation.IsPinned != nil {
+			m["is_pinned"] = req.Conversation.IsPinned.Value
+			if req.Conversation.IsPinned.Value != conversationMap[userID].IsPinned {
+				unequal--
+			}
+		}
+		if req.Conversation.GroupAtType != nil {
+			if req.Conversation.GroupAtType.Value != conversationMap[userID].GroupAtType {
+				unequal--
+			}
+		}
+		if req.Conversation.MsgDestructTime != nil {
+			if req.Conversation.MsgDestructTime.Value != conversationMap[userID].MsgDestructTime {
+				unequal--
+			}
+		}
+		if req.Conversation.IsMsgDestruct != nil {
+			if req.Conversation.IsMsgDestruct.Value != conversationMap[userID].IsMsgDestruct {
+				unequal--
+			}
+		}
+		if req.Conversation.BurnDuration != nil {
+			if req.Conversation.BurnDuration.Value != conversationMap[userID].BurnDuration {
+				unequal--
+			}
+		}
+		if unequal > 0 {
+			needUpdateUsersList = append(needUpdateUsersList, userID)
+		}
+	}
	if req.Conversation.IsPrivateChat != nil && req.Conversation.ConversationType != constant.ReadGroupChatType {
		var conversations []*dbModel.Conversation
		for _, ownerUserID := range req.UserIDs {
-			conversation2 := conversation
-			conversation2.OwnerUserID = ownerUserID
-			conversation2.IsPrivateChat = req.Conversation.IsPrivateChat.Value
-			conversations = append(conversations, &conversation2)
+			transConversation := conversation
+			transConversation.OwnerUserID = ownerUserID
+			transConversation.IsPrivateChat = req.Conversation.IsPrivateChat.Value
+			conversations = append(conversations, &transConversation)
		}
		if err := c.conversationDatabase.SyncPeerUserPrivateConversationTx(ctx, conversations); err != nil {
			return nil, err
		}
		for _, userID := range req.UserIDs {
			c.conversationNotificationSender.ConversationSetPrivateNotification(ctx, userID, req.Conversation.UserID,
				req.Conversation.IsPrivateChat.Value, req.Conversation.ConversationID)
		}
-	}
-	if len(m) != 0 {
-		if req.Conversation.BurnDuration != nil {
-			m["burn_duration"] = req.Conversation.BurnDuration.Value
-			if req.Conversation.BurnDuration.Value != conv.BurnDuration {
-				unequal++
-			}
-		}
-		if err := c.conversationDatabase.SetUsersConversationFieldTx(ctx, req.UserIDs, &conversation, m); err != nil {
-			return nil, err
-		}
-		if unequal > 0 {
-			for _, v := range req.UserIDs {
-				c.conversationNotificationSender.ConversationChangeNotification(ctx, v, []string{req.Conversation.ConversationID})
-			}
-		}
-	}
+	} else {
+		if err := c.conversationDatabase.SetUsersConversationFieldTx(ctx, needUpdateUsersList, &conversation, m); err != nil {
+			return nil, err
+		}
+		for _, v := range needUpdateUsersList {
+			c.conversationNotificationSender.ConversationChangeNotification(ctx, v, []string{req.Conversation.ConversationID})
+		}
+	}
	return &pbconversation.SetConversationsResp{}, nil
}
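Not part of the diff above: a minimal, self-contained Go sketch of the per-user dirty check that the rewritten SetConversations performs. Only users whose stored conversation would actually change (or who have no stored row yet) end up in needUpdateUsersList and receive a change notification. The types below (setReq, conversation) are illustrative stand-ins, not the server's real structs.

```go
package main

import "fmt"

// conversation stands in for the stored per-user conversation row (illustrative only).
type conversation struct {
	RecvMsgOpt int32
	IsPinned   bool
	Ex         string
}

// setReq stands in for the optional fields of the SetConversations request:
// a nil pointer means "leave this field alone".
type setReq struct {
	RecvMsgOpt *int32
	IsPinned   *bool
	Ex         *string
}

// needsUpdate mirrors the idea behind the new "unequal" counter: a user is only
// written to and notified if at least one requested field differs from what is stored.
func needsUpdate(req setReq, cur conversation) bool {
	if req.RecvMsgOpt != nil && *req.RecvMsgOpt != cur.RecvMsgOpt {
		return true
	}
	if req.IsPinned != nil && *req.IsPinned != cur.IsPinned {
		return true
	}
	if req.Ex != nil && *req.Ex != cur.Ex {
		return true
	}
	return false
}

func main() {
	pinned := true
	cur := conversation{RecvMsgOpt: 0, IsPinned: true, Ex: ""}
	// Already pinned, so no DB write and no ConversationChangeNotification for this user.
	fmt.Println(needsUpdate(setReq{IsPinned: &pinned}, cur)) // false
}
```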
@ -392,6 +436,14 @@ func (c *conversationServer) SetConversationMaxSeq(ctx context.Context, req *pbc
return &pbconversation.SetConversationMaxSeqResp{}, nil return &pbconversation.SetConversationMaxSeqResp{}, nil
} }
func (c *conversationServer) SetConversationMinSeq(ctx context.Context, req *pbconversation.SetConversationMinSeqReq) (*pbconversation.SetConversationMinSeqResp, error) {
if err := c.conversationDatabase.UpdateUsersConversationField(ctx, req.OwnerUserID, req.ConversationID,
map[string]any{"min_seq": req.MinSeq}); err != nil {
return nil, err
}
return &pbconversation.SetConversationMinSeqResp{}, nil
}
func (c *conversationServer) GetConversationIDs(ctx context.Context, req *pbconversation.GetConversationIDsReq) (*pbconversation.GetConversationIDsResp, error) { func (c *conversationServer) GetConversationIDs(ctx context.Context, req *pbconversation.GetConversationIDsReq) (*pbconversation.GetConversationIDsResp, error) {
conversationIDs, err := c.conversationDatabase.GetConversationIDs(ctx, req.UserID) conversationIDs, err := c.conversationDatabase.GetConversationIDs(ctx, req.UserID)
if err != nil { if err != nil {
@ -634,11 +686,11 @@ func (c *conversationServer) GetConversationsNeedDestructMsgs(ctx context.Contex
		conversationIDs, err := c.conversationDatabase.PageConversationIDs(ctx, pagination)
		if err != nil {
-			log.ZError(ctx, "PageConversationIDs failed", err, "pageNumber", pageNumber)
+			// log.ZError(ctx, "PageConversationIDs failed", err, "pageNumber", pageNumber)
			continue
		}
-		log.ZDebug(ctx, "PageConversationIDs success", "pageNumber", pageNumber, "conversationIDsNum", len(conversationIDs), "conversationIDs", conversationIDs)
+		// log.ZDebug(ctx, "PageConversationIDs success", "pageNumber", pageNumber, "conversationIDsNum", len(conversationIDs), "conversationIDs", conversationIDs)
		if len(conversationIDs) == 0 {
			continue
		}

@ -358,3 +358,74 @@ func (s *groupServer) webhookAfterSetGroupInfo(ctx context.Context, after *confi
} }
s.webhookClient.AsyncPost(ctx, cbReq.GetCallbackCommand(), cbReq, &callbackstruct.CallbackAfterSetGroupInfoResp{}, after) s.webhookClient.AsyncPost(ctx, cbReq.GetCallbackCommand(), cbReq, &callbackstruct.CallbackAfterSetGroupInfoResp{}, after)
} }
func (s *groupServer) webhookBeforeSetGroupInfoEX(ctx context.Context, before *config.BeforeConfig, req *group.SetGroupInfoEXReq) error {
return webhook.WithCondition(ctx, before, func(ctx context.Context) error {
cbReq := &callbackstruct.CallbackBeforeSetGroupInfoEXReq{
CallbackCommand: callbackstruct.CallbackBeforeSetGroupInfoCommand,
GroupID: req.GroupInfoForSetEX.GroupID,
GroupName: req.GroupInfoForSetEX.GroupName,
Notification: req.GroupInfoForSetEX.Notification,
Introduction: req.GroupInfoForSetEX.Introduction,
FaceURL: req.GroupInfoForSetEX.FaceURL,
}
if req.GroupInfoForSetEX.Ex != nil {
cbReq.Ex = req.GroupInfoForSetEX.Ex
}
log.ZDebug(ctx, "debug CallbackBeforeSetGroupInfoEX", "ex", cbReq.Ex)
if req.GroupInfoForSetEX.NeedVerification != nil {
cbReq.NeedVerification = req.GroupInfoForSetEX.NeedVerification
}
if req.GroupInfoForSetEX.LookMemberInfo != nil {
cbReq.LookMemberInfo = req.GroupInfoForSetEX.LookMemberInfo
}
if req.GroupInfoForSetEX.ApplyMemberFriend != nil {
cbReq.ApplyMemberFriend = req.GroupInfoForSetEX.ApplyMemberFriend
}
resp := &callbackstruct.CallbackBeforeSetGroupInfoEXResp{}
if err := s.webhookClient.SyncPost(ctx, cbReq.GetCallbackCommand(), cbReq, resp, before); err != nil {
return err
}
datautil.NotNilReplace(&req.GroupInfoForSetEX.GroupID, &resp.GroupID)
datautil.NotNilReplace(&req.GroupInfoForSetEX.GroupName, &resp.GroupName)
datautil.NotNilReplace(&req.GroupInfoForSetEX.FaceURL, &resp.FaceURL)
datautil.NotNilReplace(&req.GroupInfoForSetEX.Introduction, &resp.Introduction)
datautil.NotNilReplace(&req.GroupInfoForSetEX.Ex, &resp.Ex)
datautil.NotNilReplace(&req.GroupInfoForSetEX.NeedVerification, &resp.NeedVerification)
datautil.NotNilReplace(&req.GroupInfoForSetEX.LookMemberInfo, &resp.LookMemberInfo)
datautil.NotNilReplace(&req.GroupInfoForSetEX.ApplyMemberFriend, &resp.ApplyMemberFriend)
return nil
})
}
func (s *groupServer) webhookAfterSetGroupInfoEX(ctx context.Context, after *config.AfterConfig, req *group.SetGroupInfoEXReq) {
cbReq := &callbackstruct.CallbackAfterSetGroupInfoEXReq{
CallbackCommand: callbackstruct.CallbackAfterSetGroupInfoCommand,
GroupID: req.GroupInfoForSetEX.GroupID,
GroupName: req.GroupInfoForSetEX.GroupName,
Notification: req.GroupInfoForSetEX.Notification,
Introduction: req.GroupInfoForSetEX.Introduction,
FaceURL: req.GroupInfoForSetEX.FaceURL,
}
if req.GroupInfoForSetEX.Ex != nil {
cbReq.Ex = req.GroupInfoForSetEX.Ex
}
if req.GroupInfoForSetEX.NeedVerification != nil {
cbReq.NeedVerification = req.GroupInfoForSetEX.NeedVerification
}
if req.GroupInfoForSetEX.LookMemberInfo != nil {
cbReq.LookMemberInfo = req.GroupInfoForSetEX.LookMemberInfo
}
if req.GroupInfoForSetEX.ApplyMemberFriend != nil {
cbReq.ApplyMemberFriend = req.GroupInfoForSetEX.ApplyMemberFriend
}
s.webhookClient.AsyncPost(ctx, cbReq.GetCallbackCommand(), cbReq, &callbackstruct.CallbackAfterSetGroupInfoEXResp{}, after)
}

@ -54,6 +54,39 @@ func UpdateGroupInfoMap(ctx context.Context, group *sdkws.GroupInfoForSet) map[s
return m return m
} }
func UpdateGroupInfoEXMap(ctx context.Context, group *sdkws.GroupInfoForSetEX) map[string]any {
m := make(map[string]any)
if group.GroupName != "" {
m["group_name"] = group.GroupName
}
if group.Notification != nil {
m["notification"] = group.Notification.Value
m["notification_update_time"] = time.Now()
m["notification_user_id"] = mcontext.GetOpUserID(ctx)
}
if group.Introduction != nil {
m["introduction"] = group.Introduction.Value
}
if group.FaceURL != nil {
m["face_url"] = group.FaceURL.Value
}
if group.NeedVerification != nil {
m["need_verification"] = group.NeedVerification.Value
}
if group.LookMemberInfo != nil {
m["look_member_info"] = group.LookMemberInfo.Value
}
if group.ApplyMemberFriend != nil {
m["apply_member_friend"] = group.ApplyMemberFriend.Value
}
if group.Ex != nil {
m["ex"] = group.Ex.Value
}
return m
}
func UpdateGroupStatusMap(status int) map[string]any { func UpdateGroupStatusMap(status int) map[string]any {
return map[string]any{ return map[string]any{
"status": status, "status": status,

File diff suppressed because it is too large

@ -21,7 +21,9 @@ import (
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/database" "github.com/openimsdk/open-im-server/v3/pkg/common/storage/database"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/model" "github.com/openimsdk/open-im-server/v3/pkg/common/storage/model"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/versionctx" "github.com/openimsdk/open-im-server/v3/pkg/common/storage/versionctx"
"github.com/openimsdk/open-im-server/v3/pkg/msgprocessor"
"github.com/openimsdk/open-im-server/v3/pkg/rpcclient/notification" "github.com/openimsdk/open-im-server/v3/pkg/rpcclient/notification"
"github.com/openimsdk/protocol/msg"
"github.com/openimsdk/open-im-server/v3/pkg/authverify" "github.com/openimsdk/open-im-server/v3/pkg/authverify"
"github.com/openimsdk/open-im-server/v3/pkg/common/servererrs" "github.com/openimsdk/open-im-server/v3/pkg/common/servererrs"
@ -43,12 +45,22 @@ const (
adminReceiver adminReceiver
) )
-func NewGroupNotificationSender(db controller.GroupDatabase, msgRpcClient *rpcclient.MessageRpcClient, userRpcClient *rpcclient.UserRpcClient, config *Config, fn func(ctx context.Context, userIDs []string) ([]notification.CommonUser, error)) *GroupNotificationSender {
+func NewGroupNotificationSender(
db controller.GroupDatabase,
msgRpcClient *rpcclient.MessageRpcClient,
userRpcClient *rpcclient.UserRpcClient,
conversationRpcClient *rpcclient.ConversationRpcClient,
config *Config,
fn func(ctx context.Context, userIDs []string) ([]notification.CommonUser, error),
) *GroupNotificationSender {
return &GroupNotificationSender{ return &GroupNotificationSender{
NotificationSender: rpcclient.NewNotificationSender(&config.NotificationConfig, rpcclient.WithRpcClient(msgRpcClient), rpcclient.WithUserRpcClient(userRpcClient)), NotificationSender: rpcclient.NewNotificationSender(&config.NotificationConfig, rpcclient.WithRpcClient(msgRpcClient), rpcclient.WithUserRpcClient(userRpcClient)),
getUsersInfo: fn, getUsersInfo: fn,
db: db, db: db,
config: config, config: config,
conversationRpcClient: conversationRpcClient,
msgRpcClient: msgRpcClient,
} }
} }
@ -57,6 +69,9 @@ type GroupNotificationSender struct {
getUsersInfo func(ctx context.Context, userIDs []string) ([]notification.CommonUser, error) getUsersInfo func(ctx context.Context, userIDs []string) ([]notification.CommonUser, error)
db controller.GroupDatabase db controller.GroupDatabase
config *Config config *Config
conversationRpcClient *rpcclient.ConversationRpcClient
msgRpcClient *rpcclient.MessageRpcClient
} }
func (g *GroupNotificationSender) PopulateGroupMember(ctx context.Context, members ...*model.GroupMember) error { func (g *GroupNotificationSender) PopulateGroupMember(ctx context.Context, members ...*model.GroupMember) error {
@ -494,50 +509,46 @@ func (g *GroupNotificationSender) MemberKickedNotification(ctx context.Context,
g.Notification(ctx, mcontext.GetOpUserID(ctx), tips.Group.GroupID, constant.MemberKickedNotification, tips) g.Notification(ctx, mcontext.GetOpUserID(ctx), tips.Group.GroupID, constant.MemberKickedNotification, tips)
} }
-func (g *GroupNotificationSender) MemberInvitedNotification(ctx context.Context, groupID, reason string, invitedUserIDList []string) {
+func (g *GroupNotificationSender) MemberEnterNotification(ctx context.Context, groupID string, entrantUserID ...string) error {
	var err error
	defer func() {
		if err != nil {
			log.ZError(ctx, stringutil.GetFuncName(1)+" failed", err)
		}
	}()
-	var group *sdkws.GroupInfo
-	group, err = g.getGroupInfo(ctx, groupID)
-	if err != nil {
-		return
-	}
-	var users []*sdkws.GroupMemberFullInfo
-	users, err = g.getGroupMembers(ctx, groupID, invitedUserIDList)
-	if err != nil {
-		return
-	}
-	tips := &sdkws.MemberInvitedTips{Group: group, InvitedUserList: users}
-	err = g.fillOpUser(ctx, &tips.OpUser, tips.Group.GroupID)
-	g.setVersion(ctx, &tips.GroupMemberVersion, &tips.GroupMemberVersionID, database.GroupMemberVersionName, tips.Group.GroupID)
-	g.Notification(ctx, mcontext.GetOpUserID(ctx), group.GroupID, constant.MemberInvitedNotification, tips)
-}
-func (g *GroupNotificationSender) MemberEnterNotification(ctx context.Context, groupID string, entrantUserID string) {
-	var err error
-	defer func() {
-		if err != nil {
-			log.ZError(ctx, stringutil.GetFuncName(1)+" failed", err)
-		}
-	}()
+	if !g.config.RpcConfig.EnableHistoryForNewMembers {
+		conversationID := msgprocessor.GetConversationIDBySessionType(constant.ReadGroupChatType, groupID)
+		maxSeq, err := g.msgRpcClient.GetConversationMaxSeq(ctx, conversationID)
+		if err != nil {
+			return err
+		}
+		if _, err = g.msgRpcClient.SetUserConversationsMinSeq(ctx, &msg.SetUserConversationsMinSeqReq{
+			UserIDs:        entrantUserID,
+			ConversationID: conversationID,
+			Seq:            maxSeq,
+		}); err != nil {
+			return err
+		}
+	}
+	if err := g.conversationRpcClient.GroupChatFirstCreateConversation(ctx, groupID, entrantUserID); err != nil {
+		return err
+	}
	var group *sdkws.GroupInfo
	group, err = g.getGroupInfo(ctx, groupID)
	if err != nil {
-		return
+		return err
	}
-	var user *sdkws.GroupMemberFullInfo
-	user, err = g.getGroupMember(ctx, groupID, entrantUserID)
+	users, err := g.getGroupMembers(ctx, groupID, entrantUserID)
	if err != nil {
-		return
+		return err
	}
-	tips := &sdkws.MemberEnterTips{Group: group, EntrantUser: user}
+	tips := &sdkws.MemberInvitedTips{Group: group, InvitedUserList: users}
	g.setVersion(ctx, &tips.GroupMemberVersion, &tips.GroupMemberVersionID, database.GroupMemberVersionName, tips.Group.GroupID)
-	g.Notification(ctx, mcontext.GetOpUserID(ctx), group.GroupID, constant.MemberEnterNotification, tips)
+	g.Notification(ctx, mcontext.GetOpUserID(ctx), group.GroupID, constant.MemberInvitedNotification, tips)
+	return nil
}
func (g *GroupNotificationSender) GroupDismissedNotification(ctx context.Context, tips *sdkws.GroupDismissedTips) { func (g *GroupNotificationSender) GroupDismissedNotification(ctx context.Context, tips *sdkws.GroupDismissedTips) {
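Not part of the diff above: the reworked MemberEnterNotification hides pre-join history when enableHistoryForNewMembers is false by raising each entrant's per-conversation min seq to the conversation's current max seq (through the GetConversationMaxSeq and SetUserConversationsMinSeq RPCs). A toy model of that visibility rule, with made-up types and plain values in place of the RPC calls:

```go
package main

import "fmt"

// conversationSeq is a toy stand-in: maxSeq is the last sequence number written to the
// group conversation, userMinSeq is each member's floor below which messages stay hidden.
type conversationSeq struct {
	maxSeq     int64
	userMinSeq map[string]int64
}

// hideHistory is what the change effectively does for new members when history is disabled:
// set their floor to the current max seq so everything sent earlier stays invisible.
func (c *conversationSeq) hideHistory(newMembers ...string) {
	for _, userID := range newMembers {
		c.userMinSeq[userID] = c.maxSeq
	}
}

// visible reports whether a message with the given seq can still be synced by the user.
func (c *conversationSeq) visible(userID string, seq int64) bool {
	return seq > c.userMinSeq[userID]
}

func main() {
	c := &conversationSeq{maxSeq: 120, userMinSeq: map[string]int64{}}
	c.hideHistory("newMember")
	fmt.Println(c.visible("newMember", 100)) // false: sent before the member joined
	fmt.Println(c.visible("newMember", 121)) // true: sent after joining
}
```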

@ -30,8 +30,14 @@ func (m *msgServer) ClearMsg(ctx context.Context, req *msg.ClearMsgReq) (_ *msg.
		msgNum int
		start = time.Now()
	)

	clearMsg := func(ctx context.Context) (bool, error) {
-		msgs, err := m.MsgDatabase.GetBeforeMsg(ctx, req.Timestamp, 100)
+		docIDs, err := m.MsgDatabase.GetDocIDs(ctx)
+		if err != nil {
+			return false, err
+		}
+		msgs, err := m.MsgDatabase.GetBeforeMsg(ctx, req.Timestamp, docIDs, 5000)
		if err != nil {
			return false, err
		}
@ -55,19 +61,14 @@ func (m *msgServer) ClearMsg(ctx context.Context, req *msg.ClearMsgReq) (_ *msg.
		return true, nil
	}

-	for {
-		keep, err := clearMsg(ctx)
-		if err != nil {
-			log.ZError(ctx, "clear msg failed", err, "docNum", docNum, "msgNum", msgNum, "cost", time.Since(start))
-			return nil, err
-		}
-		if !keep {
-			log.ZInfo(ctx, "clear msg success", "docNum", docNum, "msgNum", msgNum, "cost", time.Since(start))
-			break
-		}
-		log.ZInfo(ctx, "clearing message", "docNum", docNum, "msgNum", msgNum, "cost", time.Since(start))
-	}
+	_, err = clearMsg(ctx)
+	if err != nil {
+		log.ZError(ctx, "clear msg failed", err, "docNum", docNum, "msgNum", msgNum, "cost", time.Since(start))
+		return nil, err
+	}
+	log.ZInfo(ctx, "clearing message", "docNum", docNum, "msgNum", msgNum, "cost", time.Since(start))

	return &msg.ClearMsgResp{}, nil
}

@ -53,3 +53,12 @@ func (m *msgServer) GetMsgByConversationIDs(ctx context.Context, req *pbmsg.GetM
} }
return &pbmsg.GetMsgByConversationIDsResp{MsgDatas: Msgs}, nil return &pbmsg.GetMsgByConversationIDsResp{MsgDatas: Msgs}, nil
} }
func (m *msgServer) SetUserConversationsMinSeq(ctx context.Context, req *pbmsg.SetUserConversationsMinSeqReq) (*pbmsg.SetUserConversationsMinSeqResp, error) {
for _, userID := range req.UserIDs {
if err := m.MsgDatabase.SetUserConversationsMinSeqs(ctx, userID, map[string]int64{req.ConversationID: req.Seq}); err != nil {
return nil, err
}
}
return &pbmsg.SetUserConversationsMinSeqResp{}, nil
}

@ -290,6 +290,7 @@ func (t *thirdServer) apiAddress(prefix, name string) string {
func (t *thirdServer) DeleteOutdatedData(ctx context.Context, req *third.DeleteOutdatedDataReq) (*third.DeleteOutdatedDataResp, error) { func (t *thirdServer) DeleteOutdatedData(ctx context.Context, req *third.DeleteOutdatedDataReq) (*third.DeleteOutdatedDataResp, error) {
var conf config.Third var conf config.Third
expireTime := time.UnixMilli(req.ExpireTime) expireTime := time.UnixMilli(req.ExpireTime)
var deltotal int
findPagination := &sdkws.RequestPagination{ findPagination := &sdkws.RequestPagination{
PageNumber: 1, PageNumber: 1,
ShowNumber: 1000, ShowNumber: 1000,
@ -311,10 +312,8 @@ func (t *thirdServer) DeleteOutdatedData(ctx context.Context, req *third.DeleteO
return nil, errs.Wrap(err) return nil, errs.Wrap(err)
} }
if int(count) < 1 && t.minio != nil { if int(count) < 1 && t.minio != nil {
-			thumbnailKey, err := t.getMinioImageThumbnailKey(ctx, key)
+			thumbnailKey, _ := t.getMinioImageThumbnailKey(ctx, key)
if err != nil {
return nil, errs.Wrap(err)
}
t.s3dataBase.DeleteObject(ctx, thumbnailKey) t.s3dataBase.DeleteObject(ctx, thumbnailKey)
t.s3dataBase.DelS3Key(ctx, conf.Object.Enable, needDelObjectKeys...) t.s3dataBase.DelS3Key(ctx, conf.Object.Enable, needDelObjectKeys...)
t.s3dataBase.DeleteObject(ctx, key) t.s3dataBase.DeleteObject(ctx, key)
@ -329,7 +328,9 @@ func (t *thirdServer) DeleteOutdatedData(ctx context.Context, req *third.DeleteO
if total < int64(findPagination.ShowNumber) { if total < int64(findPagination.ShowNumber) {
break break
} }
deltotal += int(total)
} }
log.ZDebug(ctx, "DeleteOutdatedData", "delete Total", deltotal)
return &third.DeleteOutdatedDataResp{}, nil return &third.DeleteOutdatedDataResp{}, nil
} }

@ -17,6 +17,11 @@ package user
import ( import (
"context" "context"
"errors" "errors"
"math/rand"
"strings"
"sync"
"time"
"github.com/openimsdk/open-im-server/v3/internal/rpc/relation" "github.com/openimsdk/open-im-server/v3/internal/rpc/relation"
"github.com/openimsdk/open-im-server/v3/pkg/common/config" "github.com/openimsdk/open-im-server/v3/pkg/common/config"
"github.com/openimsdk/open-im-server/v3/pkg/common/prommetrics" "github.com/openimsdk/open-im-server/v3/pkg/common/prommetrics"
@ -29,10 +34,6 @@ import (
"github.com/openimsdk/protocol/group" "github.com/openimsdk/protocol/group"
friendpb "github.com/openimsdk/protocol/relation" friendpb "github.com/openimsdk/protocol/relation"
"github.com/openimsdk/tools/db/redisutil" "github.com/openimsdk/tools/db/redisutil"
"math/rand"
"strings"
"sync"
"time"
"github.com/openimsdk/open-im-server/v3/pkg/authverify" "github.com/openimsdk/open-im-server/v3/pkg/authverify"
"github.com/openimsdk/open-im-server/v3/pkg/common/convert" "github.com/openimsdk/open-im-server/v3/pkg/common/convert"
@ -147,41 +148,35 @@ func (s *userServer) UpdateUserInfo(ctx context.Context, req *pbuser.UpdateUserI
return nil, err return nil, err
} }
s.friendNotificationSender.UserInfoUpdatedNotification(ctx, req.UserInfo.UserID) s.friendNotificationSender.UserInfoUpdatedNotification(ctx, req.UserInfo.UserID)
//friends, err := s.friendRpcClient.GetFriendIDs(ctx, req.UserInfo.UserID)
//if err != nil {
// return nil, err
//}
//if req.UserInfo.Nickname != "" || req.UserInfo.FaceURL != "" {
// if err = s.NotificationUserInfoUpdate(ctx, req.UserInfo.UserID,oldUser); err != nil {
// return nil, err
// }
//}
//for _, friendID := range friends {
// s.friendNotificationSender.FriendInfoUpdatedNotification(ctx, req.UserInfo.UserID, friendID)
//}
s.webhookAfterUpdateUserInfo(ctx, &s.config.WebhooksConfig.AfterUpdateUserInfo, req) s.webhookAfterUpdateUserInfo(ctx, &s.config.WebhooksConfig.AfterUpdateUserInfo, req)
if err = s.NotificationUserInfoUpdate(ctx, req.UserInfo.UserID, oldUser); err != nil { if err = s.NotificationUserInfoUpdate(ctx, req.UserInfo.UserID, oldUser); err != nil {
return nil, err return nil, err
} }
return resp, nil return resp, nil
} }
func (s *userServer) UpdateUserInfoEx(ctx context.Context, req *pbuser.UpdateUserInfoExReq) (resp *pbuser.UpdateUserInfoExResp, err error) { func (s *userServer) UpdateUserInfoEx(ctx context.Context, req *pbuser.UpdateUserInfoExReq) (resp *pbuser.UpdateUserInfoExResp, err error) {
resp = &pbuser.UpdateUserInfoExResp{} resp = &pbuser.UpdateUserInfoExResp{}
err = authverify.CheckAccessV3(ctx, req.UserInfo.UserID, s.config.Share.IMAdminUserID) err = authverify.CheckAccessV3(ctx, req.UserInfo.UserID, s.config.Share.IMAdminUserID)
if err != nil { if err != nil {
return nil, err return nil, err
} }
if err = s.webhookBeforeUpdateUserInfoEx(ctx, &s.config.WebhooksConfig.BeforeUpdateUserInfoEx, req); err != nil { if err = s.webhookBeforeUpdateUserInfoEx(ctx, &s.config.WebhooksConfig.BeforeUpdateUserInfoEx, req); err != nil {
return nil, err return nil, err
} }
oldUser, err := s.db.GetUserByID(ctx, req.UserInfo.UserID) oldUser, err := s.db.GetUserByID(ctx, req.UserInfo.UserID)
if err != nil { if err != nil {
return nil, err return nil, err
} }
data := convert.UserPb2DBMapEx(req.UserInfo) data := convert.UserPb2DBMapEx(req.UserInfo)
if err = s.db.UpdateByMap(ctx, req.UserInfo.UserID, data); err != nil { if err = s.db.UpdateByMap(ctx, req.UserInfo.UserID, data); err != nil {
return nil, err return nil, err
} }
s.friendNotificationSender.UserInfoUpdatedNotification(ctx, req.UserInfo.UserID) s.friendNotificationSender.UserInfoUpdatedNotification(ctx, req.UserInfo.UserID)
//friends, err := s.friendRpcClient.GetFriendIDs(ctx, req.UserInfo.UserID) //friends, err := s.friendRpcClient.GetFriendIDs(ctx, req.UserInfo.UserID)
//if err != nil { //if err != nil {
@ -199,6 +194,7 @@ func (s *userServer) UpdateUserInfoEx(ctx context.Context, req *pbuser.UpdateUse
if err := s.NotificationUserInfoUpdate(ctx, req.UserInfo.UserID, oldUser); err != nil { if err := s.NotificationUserInfoUpdate(ctx, req.UserInfo.UserID, oldUser); err != nil {
return nil, err return nil, err
} }
return resp, nil return resp, nil
} }
func (s *userServer) SetGlobalRecvMessageOpt(ctx context.Context, req *pbuser.SetGlobalRecvMessageOptReq) (resp *pbuser.SetGlobalRecvMessageOptResp, err error) { func (s *userServer) SetGlobalRecvMessageOpt(ctx context.Context, req *pbuser.SetGlobalRecvMessageOptReq) (resp *pbuser.SetGlobalRecvMessageOptResp, err error) {

@ -81,6 +81,7 @@ func Start(ctx context.Context, config *CronTaskConfig) error {
deltime := now.Add(-time.Hour * 24 * time.Duration(config.CronTask.RetainChatRecords)) deltime := now.Add(-time.Hour * 24 * time.Duration(config.CronTask.RetainChatRecords))
ctx := mcontext.SetOperationID(ctx, fmt.Sprintf("cron_%d_%d", os.Getpid(), deltime.UnixMilli())) ctx := mcontext.SetOperationID(ctx, fmt.Sprintf("cron_%d_%d", os.Getpid(), deltime.UnixMilli()))
log.ZInfo(ctx, "clear chat records", "deltime", deltime, "timestamp", deltime.UnixMilli()) log.ZInfo(ctx, "clear chat records", "deltime", deltime, "timestamp", deltime.UnixMilli())
if _, err := msgClient.ClearMsg(ctx, &msg.ClearMsgReq{Timestamp: deltime.UnixMilli()}); err != nil { if _, err := msgClient.ClearMsg(ctx, &msg.ClearMsgReq{Timestamp: deltime.UnixMilli()}); err != nil {
log.ZError(ctx, "cron clear chat records failed", err, "deltime", deltime, "cont", time.Since(now)) log.ZError(ctx, "cron clear chat records failed", err, "deltime", deltime, "cont", time.Since(now))
return return

@ -18,7 +18,9 @@ const (
CallbackBeforeInviteJoinGroupCommand = "callbackBeforeInviteJoinGroupCommand" CallbackBeforeInviteJoinGroupCommand = "callbackBeforeInviteJoinGroupCommand"
CallbackAfterJoinGroupCommand = "callbackAfterJoinGroupCommand" CallbackAfterJoinGroupCommand = "callbackAfterJoinGroupCommand"
CallbackAfterSetGroupInfoCommand = "callbackAfterSetGroupInfoCommand" CallbackAfterSetGroupInfoCommand = "callbackAfterSetGroupInfoCommand"
CallbackAfterSetGroupInfoEXCommand = "callbackAfterSetGroupInfoCommandEX"
CallbackBeforeSetGroupInfoCommand = "callbackBeforeSetGroupInfoCommand" CallbackBeforeSetGroupInfoCommand = "callbackBeforeSetGroupInfoCommand"
CallbackBeforeSetGroupInfoEXCommand = "callbackBeforeSetGroupInfoEXCommand"
CallbackAfterRevokeMsgCommand = "callbackBeforeAfterMsgCommand" CallbackAfterRevokeMsgCommand = "callbackBeforeAfterMsgCommand"
CallbackBeforeAddBlackCommand = "callbackBeforeAddBlackCommand" CallbackBeforeAddBlackCommand = "callbackBeforeAddBlackCommand"
CallbackAfterAddFriendCommand = "callbackAfterAddFriendCommand" CallbackAfterAddFriendCommand = "callbackAfterAddFriendCommand"

@ -17,6 +17,7 @@ package callbackstruct
import ( import (
"github.com/openimsdk/open-im-server/v3/pkg/apistruct" "github.com/openimsdk/open-im-server/v3/pkg/apistruct"
common "github.com/openimsdk/protocol/sdkws" common "github.com/openimsdk/protocol/sdkws"
"github.com/openimsdk/protocol/wrapperspb"
) )
type CallbackCommand string type CallbackCommand string
@ -242,3 +243,48 @@ type CallbackAfterSetGroupInfoReq struct {
type CallbackAfterSetGroupInfoResp struct { type CallbackAfterSetGroupInfoResp struct {
CommonCallbackResp CommonCallbackResp
} }
type CallbackBeforeSetGroupInfoEXReq struct {
CallbackCommand `json:"callbackCommand"`
OperationID string `json:"operationID"`
GroupID string `json:"groupID"`
GroupName string `json:"groupName"`
Notification *wrapperspb.StringValue `json:"notification"`
Introduction *wrapperspb.StringValue `json:"introduction"`
FaceURL *wrapperspb.StringValue `json:"faceURL"`
Ex *wrapperspb.StringValue `json:"ex"`
NeedVerification *wrapperspb.Int32Value `json:"needVerification"`
LookMemberInfo *wrapperspb.Int32Value `json:"lookMemberInfo"`
ApplyMemberFriend *wrapperspb.Int32Value `json:"applyMemberFriend"`
}
type CallbackBeforeSetGroupInfoEXResp struct {
CommonCallbackResp
GroupID string `json:"groupID"`
GroupName string `json:"groupName"`
Notification *wrapperspb.StringValue `json:"notification"`
Introduction *wrapperspb.StringValue `json:"introduction"`
FaceURL *wrapperspb.StringValue `json:"faceURL"`
Ex *wrapperspb.StringValue `json:"ex"`
NeedVerification *wrapperspb.Int32Value `json:"needVerification"`
LookMemberInfo *wrapperspb.Int32Value `json:"lookMemberInfo"`
ApplyMemberFriend *wrapperspb.Int32Value `json:"applyMemberFriend"`
}
type CallbackAfterSetGroupInfoEXReq struct {
CallbackCommand `json:"callbackCommand"`
OperationID string `json:"operationID"`
GroupID string `json:"groupID"`
GroupName string `json:"groupName"`
Notification *wrapperspb.StringValue `json:"notification"`
Introduction *wrapperspb.StringValue `json:"introduction"`
FaceURL *wrapperspb.StringValue `json:"faceURL"`
Ex *wrapperspb.StringValue `json:"ex"`
NeedVerification *wrapperspb.Int32Value `json:"needVerification"`
LookMemberInfo *wrapperspb.Int32Value `json:"lookMemberInfo"`
ApplyMemberFriend *wrapperspb.Int32Value `json:"applyMemberFriend"`
}
type CallbackAfterSetGroupInfoEXResp struct {
CommonCallbackResp
}

@ -259,6 +259,7 @@ type Group struct {
Ports []int `mapstructure:"ports"` Ports []int `mapstructure:"ports"`
} `mapstructure:"rpc"` } `mapstructure:"rpc"`
Prometheus Prometheus `mapstructure:"prometheus"` Prometheus Prometheus `mapstructure:"prometheus"`
EnableHistoryForNewMembers bool `mapstructure:"enableHistoryForNewMembers"`
} }
type Msg struct { type Msg struct {
@ -421,6 +422,8 @@ type Webhooks struct {
BeforeInviteUserToGroup BeforeConfig `mapstructure:"beforeInviteUserToGroup"` BeforeInviteUserToGroup BeforeConfig `mapstructure:"beforeInviteUserToGroup"`
AfterSetGroupInfo AfterConfig `mapstructure:"afterSetGroupInfo"` AfterSetGroupInfo AfterConfig `mapstructure:"afterSetGroupInfo"`
BeforeSetGroupInfo BeforeConfig `mapstructure:"beforeSetGroupInfo"` BeforeSetGroupInfo BeforeConfig `mapstructure:"beforeSetGroupInfo"`
AfterSetGroupInfoEX AfterConfig `mapstructure:"afterSetGroupInfoEX"`
BeforeSetGroupInfoEX BeforeConfig `mapstructure:"beforeSetGroupInfoEX"`
AfterRevokeMsg AfterConfig `mapstructure:"afterRevokeMsg"` AfterRevokeMsg AfterConfig `mapstructure:"afterRevokeMsg"`
BeforeAddBlack BeforeConfig `mapstructure:"beforeAddBlack"` BeforeAddBlack BeforeConfig `mapstructure:"beforeAddBlack"`
AfterAddFriend AfterConfig `mapstructure:"afterAddFriend"` AfterAddFriend AfterConfig `mapstructure:"afterAddFriend"`

@ -36,3 +36,26 @@ func TestLoadOpenIMRpcUserConfig(t *testing.T) {
//export IMENV_OPENIM_RPC_USER_RPC_PORTS="10110,10111,10112" //export IMENV_OPENIM_RPC_USER_RPC_PORTS="10110,10111,10112"
assert.Equal(t, []int{10110, 10111, 10112}, user.RPC.Ports) assert.Equal(t, []int{10110, 10111, 10112}, user.RPC.Ports)
} }
func TestLoadNotificationConfig(t *testing.T) {
var noti Notification
err := LoadConfig("../../../config/notification.yml", "IMENV_NOTIFICATION", &noti)
assert.Nil(t, err)
assert.Equal(t, "Your friend's profile has been changed", noti.FriendRemarkSet.OfflinePush.Title)
}
func TestLoadOpenIMThirdConfig(t *testing.T) {
var third Third
err := LoadConfig("../../../config/openim-rpc-third.yml", "IMENV_OPENIM_RPC_THIRD", &third)
assert.Nil(t, err)
assert.Equal(t, "enabled", third.Object.Enable)
assert.Equal(t, "https://oss-cn-chengdu.aliyuncs.com", third.Object.Oss.Endpoint)
assert.Equal(t, "my_bucket_name", third.Object.Oss.Bucket)
assert.Equal(t, "https://my_bucket_name.oss-cn-chengdu.aliyuncs.com", third.Object.Oss.BucketURL)
assert.Equal(t, "AKID1234567890", third.Object.Oss.AccessKeyID)
assert.Equal(t, "abc123xyz789", third.Object.Oss.AccessKeySecret)
assert.Equal(t, "session_token_value", third.Object.Oss.SessionToken) // Uncomment if session token is needed
assert.Equal(t, true, third.Object.Oss.PublicRead)
// Environment: IMENV_OPENIM_RPC_THIRD_OBJECT_ENABLE=enabled;IMENV_OPENIM_RPC_THIRD_OBJECT_OSS_ENDPOINT=https://oss-cn-chengdu.aliyuncs.com;IMENV_OPENIM_RPC_THIRD_OBJECT_OSS_BUCKET=my_bucket_name;IMENV_OPENIM_RPC_THIRD_OBJECT_OSS_BUCKETURL=https://my_bucket_name.oss-cn-chengdu.aliyuncs.com;IMENV_OPENIM_RPC_THIRD_OBJECT_OSS_ACCESSKEYID=AKID1234567890;IMENV_OPENIM_RPC_THIRD_OBJECT_OSS_ACCESSKEYSECRET=abc123xyz789;IMENV_OPENIM_RPC_THIRD_OBJECT_OSS_SESSIONTOKEN=session_token_value;IMENV_OPENIM_RPC_THIRD_OBJECT_OSS_PUBLICREAD=true
}

@ -18,11 +18,12 @@ import (
"context" "context"
"encoding/json" "encoding/json"
"errors" "errors"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/database"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/model"
"strings" "strings"
"time" "time"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/database"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/model"
"github.com/openimsdk/open-im-server/v3/pkg/common/config" "github.com/openimsdk/open-im-server/v3/pkg/common/config"
"github.com/openimsdk/open-im-server/v3/pkg/common/convert" "github.com/openimsdk/open-im-server/v3/pkg/common/convert"
"github.com/openimsdk/open-im-server/v3/pkg/common/prommetrics" "github.com/openimsdk/open-im-server/v3/pkg/common/prommetrics"
@ -89,16 +90,18 @@ type CommonMsgDatabase interface {
	// to mq
	MsgToMQ(ctx context.Context, key string, msg2mq *sdkws.MsgData) error
-	MsgToPushMQ(ctx context.Context, key, conversarionID string, msg2mq *sdkws.MsgData) (int32, int64, error)
-	MsgToMongoMQ(ctx context.Context, key, conversarionID string, msgs []*sdkws.MsgData, lastSeq int64) error
+	MsgToPushMQ(ctx context.Context, key, conversationID string, msg2mq *sdkws.MsgData) (int32, int64, error)
+	MsgToMongoMQ(ctx context.Context, key, conversationID string, msgs []*sdkws.MsgData, lastSeq int64) error
	RangeUserSendCount(ctx context.Context, start time.Time, end time.Time, group bool, ase bool, pageNumber int32, showNumber int32) (msgCount int64, userCount int64, users []*model.UserCount, dateCount map[string]int64, err error)
	RangeGroupSendCount(ctx context.Context, start time.Time, end time.Time, ase bool, pageNumber int32, showNumber int32) (msgCount int64, userCount int64, groups []*model.GroupCount, dateCount map[string]int64, err error)
	ConvertMsgsDocLen(ctx context.Context, conversationIDs []string)
	// clear msg
-	GetBeforeMsg(ctx context.Context, ts int64, limit int) ([]*model.MsgDocModel, error)
+	GetBeforeMsg(ctx context.Context, ts int64, docIds []string, limit int) ([]*model.MsgDocModel, error)
	DeleteDocMsgBefore(ctx context.Context, ts int64, doc *model.MsgDocModel) ([]int, error)
+	GetDocIDs(ctx context.Context) ([]string, error)
}
func NewCommonMsgDatabase(msgDocModel database.Msg, msg cache.MsgCache, seqUser cache.SeqUser, seqConversation cache.SeqConversationCache, kafkaConf *config.Kafka) (CommonMsgDatabase, error) { func NewCommonMsgDatabase(msgDocModel database.Msg, msg cache.MsgCache, seqUser cache.SeqUser, seqConversation cache.SeqConversationCache, kafkaConf *config.Kafka) (CommonMsgDatabase, error) {
@ -912,8 +915,25 @@ func (db *commonMsgDatabase) ConvertMsgsDocLen(ctx context.Context, conversation
db.msgDocDatabase.ConvertMsgsDocLen(ctx, conversationIDs) db.msgDocDatabase.ConvertMsgsDocLen(ctx, conversationIDs)
} }
-func (db *commonMsgDatabase) GetBeforeMsg(ctx context.Context, ts int64, limit int) ([]*model.MsgDocModel, error) {
-	return db.msgDocDatabase.GetBeforeMsg(ctx, ts, limit)
+func (db *commonMsgDatabase) GetBeforeMsg(ctx context.Context, ts int64, docIDs []string, limit int) ([]*model.MsgDocModel, error) {
+	var msgs []*model.MsgDocModel
+	for i := 0; i < len(docIDs); i += 1000 {
+		end := i + 1000
+		if end > len(docIDs) {
+			end = len(docIDs)
+		}
+		res, err := db.msgDocDatabase.GetBeforeMsg(ctx, ts, docIDs[i:end], limit)
+		if err != nil {
+			return nil, err
+		}
+		msgs = append(msgs, res...)
+		if len(msgs) >= limit {
+			return msgs[:limit], nil
+		}
+	}
+	return msgs, nil
}
func (db *commonMsgDatabase) DeleteDocMsgBefore(ctx context.Context, ts int64, doc *model.MsgDocModel) ([]int, error) { func (db *commonMsgDatabase) DeleteDocMsgBefore(ctx context.Context, ts int64, doc *model.MsgDocModel) ([]int, error) {
@ -936,8 +956,10 @@ func (db *commonMsgDatabase) DeleteDocMsgBefore(ctx context.Context, ts int64, d
return index, err return index, err
} }
if len(index) == notNull { if len(index) == notNull {
log.ZDebug(ctx, "Delete db in Doc", "DocID", doc.DocID, "index", index, "maxSeq", maxSeq)
return index, db.msgDocDatabase.DeleteDoc(ctx, doc.DocID) return index, db.msgDocDatabase.DeleteDoc(ctx, doc.DocID)
} else { } else {
log.ZDebug(ctx, "delete db in index", "DocID", doc.DocID, "index", index, "maxSeq", maxSeq)
return index, db.msgDocDatabase.DeleteMsgByIndex(ctx, doc.DocID, index) return index, db.msgDocDatabase.DeleteMsgByIndex(ctx, doc.DocID, index)
} }
} }
@ -955,3 +977,7 @@ func (db *commonMsgDatabase) setMinSeq(ctx context.Context, conversationID strin
} }
return db.seqConversation.SetMinSeq(ctx, conversationID, seq) return db.seqConversation.SetMinSeq(ctx, conversationID, seq)
} }
func (db *commonMsgDatabase) GetDocIDs(ctx context.Context) ([]string, error) {
return db.msgDocDatabase.GetDocIDs(ctx)
}

@ -8,6 +8,7 @@ import (
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/database" "github.com/openimsdk/open-im-server/v3/pkg/common/storage/database"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/model" "github.com/openimsdk/open-im-server/v3/pkg/common/storage/model"
"github.com/openimsdk/tools/utils/datautil" "github.com/openimsdk/tools/utils/datautil"
"golang.org/x/exp/rand"
"github.com/openimsdk/protocol/constant" "github.com/openimsdk/protocol/constant"
"github.com/openimsdk/protocol/msg" "github.com/openimsdk/protocol/msg"
@ -117,9 +118,9 @@ func (m *MsgMgo) GetMsgBySeqIndexIn1Doc(ctx context.Context, userID, docID strin
} }
func (m *MsgMgo) getMsgBySeqIndexIn1Doc(ctx context.Context, userID, docID string, seqs []int64) ([]*model.MsgInfoModel, error) { func (m *MsgMgo) getMsgBySeqIndexIn1Doc(ctx context.Context, userID, docID string, seqs []int64) ([]*model.MsgInfoModel, error) {
indexs := make([]int64, 0, len(seqs)) indexes := make([]int64, 0, len(seqs))
for _, seq := range seqs { for _, seq := range seqs {
indexs = append(indexs, m.model.GetMsgIndex(seq)) indexes = append(indexes, m.model.GetMsgIndex(seq))
} }
pipeline := mongo.Pipeline{ pipeline := mongo.Pipeline{
bson.D{{Key: "$match", Value: bson.D{ bson.D{{Key: "$match", Value: bson.D{
@ -130,7 +131,7 @@ func (m *MsgMgo) getMsgBySeqIndexIn1Doc(ctx context.Context, userID, docID strin
{Key: "doc_id", Value: 1}, {Key: "doc_id", Value: 1},
{Key: "msgs", Value: bson.D{ {Key: "msgs", Value: bson.D{
{Key: "$map", Value: bson.D{ {Key: "$map", Value: bson.D{
{Key: "input", Value: indexs}, {Key: "input", Value: indexes},
{Key: "as", Value: "index"}, {Key: "as", Value: "index"},
{Key: "in", Value: bson.D{ {Key: "in", Value: bson.D{
{Key: "$arrayElemAt", Value: bson.A{"$msgs", "$$index"}}, {Key: "$arrayElemAt", Value: bson.A{"$msgs", "$$index"}},
@ -1226,10 +1227,53 @@ func (m *MsgMgo) ConvertMsgsDocLen(ctx context.Context, conversationIDs []string
} }
} }
-func (m *MsgMgo) GetBeforeMsg(ctx context.Context, ts int64, limit int) ([]*model.MsgDocModel, error) {
+func (m *MsgMgo) GetDocIDs(ctx context.Context) ([]string, error) {
limit := 5000
var skip int
var docIDs []string
var offset int
count, err := m.coll.CountDocuments(ctx, bson.M{})
if err != nil {
return nil, err
}
if count < int64(limit) {
skip = 0
} else {
rand.Seed(uint64(time.Now().UnixMilli()))
skip = rand.Intn(int(count / int64(limit)))
offset = skip * limit
}
log.ZDebug(ctx, "offset", "skip", skip, "offset", offset)
res, err := mongoutil.Aggregate[*model.MsgDocModel](ctx, m.coll, []bson.M{
{
"$project": bson.M{
"doc_id": 1,
},
},
{
"$skip": offset,
},
{
"$limit": limit,
},
})
for _, doc := range res {
docIDs = append(docIDs, doc.DocID)
}
return docIDs, errs.Wrap(err)
}
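Not part of the diff: GetDocIDs above samples a window of at most 5000 doc IDs starting at a random multiple of the window size, so repeated cleanup runs sweep different slices of the msg collection. The offset arithmetic in isolation (standard math/rand here instead of golang.org/x/exp/rand; the counts are made up):

```go
package main

import (
	"fmt"
	"math/rand"
)

// pickWindow mirrors the skip/offset computation: with fewer documents than one window
// it starts at zero, otherwise it skips a random number of whole windows.
func pickWindow(count, limit int) (skip, offset int) {
	if count < limit {
		return 0, 0
	}
	skip = rand.Intn(count / limit)
	return skip, skip * limit
}

func main() {
	skip, offset := pickWindow(23500, 5000)
	fmt.Printf("skip=%d offset=%d (documents %d..%d)\n", skip, offset, offset, offset+4999)
}
```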
+func (m *MsgMgo) GetBeforeMsg(ctx context.Context, ts int64, docIDs []string, limit int) ([]*model.MsgDocModel, error) {
	return mongoutil.Aggregate[*model.MsgDocModel](ctx, m.coll, []bson.M{
return mongoutil.Aggregate[*model.MsgDocModel](ctx, m.coll, []bson.M{ return mongoutil.Aggregate[*model.MsgDocModel](ctx, m.coll, []bson.M{
{ {
"$match": bson.M{ "$match": bson.M{
"doc_id": bson.M{
"$in": docIDs,
},
"msgs.msg.send_time": bson.M{ "msgs.msg.send_time": bson.M{
"$lt": ts, "$lt": ts,
}, },

@ -167,6 +167,10 @@ func (u *UserMgo) DeleteUserCommand(ctx context.Context, userID string, Type int
filter := bson.M{"userID": userID, "type": Type, "uuid": UUID} filter := bson.M{"userID": userID, "type": Type, "uuid": UUID}
result, err := collection.DeleteOne(ctx, filter) result, err := collection.DeleteOne(ctx, filter)
// when err is not nil, result might be nil
if err != nil {
return errs.Wrap(err)
}
if result.DeletedCount == 0 { if result.DeletedCount == 0 {
// No records found to update // No records found to update
return errs.Wrap(errs.ErrRecordNotFound) return errs.Wrap(errs.ErrRecordNotFound)

@ -16,10 +16,11 @@ package database
import ( import (
"context" "context"
"time"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/model" "github.com/openimsdk/open-im-server/v3/pkg/common/storage/model"
"github.com/openimsdk/protocol/msg" "github.com/openimsdk/protocol/msg"
"go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo"
"time"
) )
type Msg interface { type Msg interface {
@ -44,5 +45,7 @@ type Msg interface {
DeleteDoc(ctx context.Context, docID string) error DeleteDoc(ctx context.Context, docID string) error
DeleteMsgByIndex(ctx context.Context, docID string, index []int) error DeleteMsgByIndex(ctx context.Context, docID string, index []int) error
GetBeforeMsg(ctx context.Context, ts int64, limit int) ([]*model.MsgDocModel, error) GetBeforeMsg(ctx context.Context, ts int64, docIDs []string, limit int) ([]*model.MsgDocModel, error)
GetDocIDs(ctx context.Context) ([]string, error)
} }

@ -92,15 +92,15 @@ type GroupCount struct {
Count int64 `bson:"count"` Count int64 `bson:"count"`
} }
func (MsgDocModel) TableName() string { func (*MsgDocModel) TableName() string {
return MsgTableName return MsgTableName
} }
func (MsgDocModel) GetSingleGocMsgNum() int64 { func (*MsgDocModel) GetSingleGocMsgNum() int64 {
return singleGocMsgNum return singleGocMsgNum
} }
func (MsgDocModel) GetSingleGocMsgNum5000() int64 { func (*MsgDocModel) GetSingleGocMsgNum5000() int64 {
return singleGocMsgNum5000 return singleGocMsgNum5000
} }
@ -108,12 +108,12 @@ func (m *MsgDocModel) IsFull() bool {
return m.Msg[len(m.Msg)-1].Msg != nil return m.Msg[len(m.Msg)-1].Msg != nil
} }
func (m MsgDocModel) GetDocID(conversationID string, seq int64) string { func (m *MsgDocModel) GetDocID(conversationID string, seq int64) string {
seqSuffix := (seq - 1) / singleGocMsgNum seqSuffix := (seq - 1) / singleGocMsgNum
return m.indexGen(conversationID, seqSuffix) return m.indexGen(conversationID, seqSuffix)
} }
func (m MsgDocModel) GetDocIDSeqsMap(conversationID string, seqs []int64) map[string][]int64 { func (m *MsgDocModel) GetDocIDSeqsMap(conversationID string, seqs []int64) map[string][]int64 {
t := make(map[string][]int64) t := make(map[string][]int64)
for i := 0; i < len(seqs); i++ { for i := 0; i < len(seqs); i++ {
docID := m.GetDocID(conversationID, seqs[i]) docID := m.GetDocID(conversationID, seqs[i])
@ -127,15 +127,15 @@ func (m MsgDocModel) GetDocIDSeqsMap(conversationID string, seqs []int64) map[st
return t return t
} }
func (MsgDocModel) GetMsgIndex(seq int64) int64 { func (*MsgDocModel) GetMsgIndex(seq int64) int64 {
return (seq - 1) % singleGocMsgNum return (seq - 1) % singleGocMsgNum
} }
func (MsgDocModel) indexGen(conversationID string, seqSuffix int64) string { func (*MsgDocModel) indexGen(conversationID string, seqSuffix int64) string {
return conversationID + ":" + strconv.FormatInt(seqSuffix, 10) return conversationID + ":" + strconv.FormatInt(seqSuffix, 10)
} }
func (MsgDocModel) GenExceptionMessageBySeqs(seqs []int64) (exceptionMsg []*sdkws.MsgData) { func (*MsgDocModel) GenExceptionMessageBySeqs(seqs []int64) (exceptionMsg []*sdkws.MsgData) {
for _, v := range seqs { for _, v := range seqs {
msgModel := new(sdkws.MsgData) msgModel := new(sdkws.MsgData)
msgModel.Seq = v msgModel.Seq = v

@ -77,6 +77,11 @@ func (c *ConversationRpcClient) SetConversationMaxSeq(ctx context.Context, owner
return err return err
} }
func (c *ConversationRpcClient) SetConversationMinSeq(ctx context.Context, ownerUserIDs []string, conversationID string, minSeq int64) error {
_, err := c.Client.SetConversationMinSeq(ctx, &pbconversation.SetConversationMinSeqReq{OwnerUserID: ownerUserIDs, ConversationID: conversationID, MinSeq: minSeq})
return err
}
func (c *ConversationRpcClient) SetConversations(ctx context.Context, userIDs []string, conversation *pbconversation.ConversationReq) error { func (c *ConversationRpcClient) SetConversations(ctx context.Context, userIDs []string, conversation *pbconversation.ConversationReq) error {
_, err := c.Client.SetConversations(ctx, &pbconversation.SetConversationsReq{UserIDs: userIDs, Conversation: conversation}) _, err := c.Client.SetConversations(ctx, &pbconversation.SetConversationsReq{UserIDs: userIDs, Conversation: conversation})
return err return err

@ -159,6 +159,15 @@ func (m *MessageRpcClient) SendMsg(ctx context.Context, req *msg.SendMsgReq) (*m
return resp, nil return resp, nil
} }
// SetUserConversationsMinSeq set min seq
func (m *MessageRpcClient) SetUserConversationsMinSeq(ctx context.Context, req *msg.SetUserConversationsMinSeqReq) (*msg.SetUserConversationsMinSeqResp, error) {
resp, err := m.Client.SetUserConversationsMinSeq(ctx, req)
if err != nil {
return nil, err
}
return resp, nil
}
// GetMaxSeq retrieves the maximum sequence number from the gRPC client. // GetMaxSeq retrieves the maximum sequence number from the gRPC client.
// Errors during the gRPC call are wrapped to provide additional context. // Errors during the gRPC call are wrapped to provide additional context.
func (m *MessageRpcClient) GetMaxSeq(ctx context.Context, req *sdkws.GetMaxSeqReq) (*sdkws.GetMaxSeqResp, error) { func (m *MessageRpcClient) GetMaxSeq(ctx context.Context, req *sdkws.GetMaxSeqReq) (*sdkws.GetMaxSeqResp, error) {
@ -174,6 +183,9 @@ func (m *MessageRpcClient) GetMaxSeqs(ctx context.Context, conversationIDs []str
resp, err := m.Client.GetMaxSeqs(ctx, &msg.GetMaxSeqsReq{ resp, err := m.Client.GetMaxSeqs(ctx, &msg.GetMaxSeqsReq{
ConversationIDs: conversationIDs, ConversationIDs: conversationIDs,
}) })
if err != nil {
return nil, err
}
return resp.MaxSeqs, err return resp.MaxSeqs, err
} }
@ -182,6 +194,9 @@ func (m *MessageRpcClient) GetHasReadSeqs(ctx context.Context, userID string, co
UserID: userID, UserID: userID,
ConversationIDs: conversationIDs, ConversationIDs: conversationIDs,
}) })
if err != nil {
return nil, err
}
return resp.MaxSeqs, err return resp.MaxSeqs, err
} }
@ -190,6 +205,9 @@ func (m *MessageRpcClient) GetMsgByConversationIDs(ctx context.Context, docIDs [
ConversationIDs: docIDs, ConversationIDs: docIDs,
MaxSeqs: seqs, MaxSeqs: seqs,
}) })
if err != nil {
return nil, err
}
return resp.MsgDatas, err return resp.MsgDatas, err
} }

@ -234,7 +234,7 @@ func (m *Manage) RunTask(ctx context.Context, task Task) (string, error) {
} }
for i, currentPartSize := range part.PartSizes { for i, currentPartSize := range part.PartSizes {
md5Reader := NewMd5Reader(io.LimitReader(reader, currentPartSize)) md5Reader := NewMd5Reader(io.LimitReader(reader, currentPartSize))
-		if m.doPut(ctx, m.api.Client, initiateMultipartUploadResp.Upload.Sign, uploadParts[i], md5Reader, currentPartSize); err != nil {
+		if err := m.doPut(ctx, m.api.Client, initiateMultipartUploadResp.Upload.Sign, uploadParts[i], md5Reader, currentPartSize); err != nil {
return "", err return "", err
} }
if md5val := md5Reader.Md5(); md5val != part.PartMd5s[i] { if md5val := md5Reader.Md5(); md5val != part.PartMd5s[i] {
