commit 1f6058d547 ("merge: main"), branch pull/2559/head, authored by icey-yu, 1 year ago

@ -33,8 +33,8 @@ jobs:
remote-repository-name: cla
create-file-commit-message: 'Creating file for storing CLA Signatures'
# signed-commit-message: '$contributorName has signed the CLA in $owner/$repo#$pullRequestNo'
custom-notsigned-prcomment: '💕 Thank you for your contribution and please kindly read and sign our [CLA Docs](https://github.com/OpenIM-Robot/cla/blob/main/README.md)'
custom-pr-sign-comment: 'The signature to be committed in order to sign the CLA'
custom-allsigned-prcomment: '🤖 All Contributors have signed the [CLA](https://github.com/OpenIM-Robot/cla/blob/main/README.md).<br> The signed information is recorded [🤖here](https://github.com/openim-sigs/cla/tree/main/signatures/cla.json)'
custom-notsigned-prcomment: '💕 Thank you for your contribution and please kindly read and sign our CLA. [CLA Docs](https://github.com/OpenIM-Robot/cla/blob/main/README.md)'
custom-pr-sign-comment: 'I have read the CLA Document and I hereby sign the CLA'
custom-allsigned-prcomment: '🤖 All Contributors have signed the [CLA](https://github.com/OpenIM-Robot/cla/blob/main/README.md).<br> The signed information is recorded [**here**](https://github.com/OpenIM-Robot/cla/blob/main/signatures/cla.json)'
#lock-pullrequest-aftermerge: false - if you don't want this bot to automatically lock the pull request after merging (default - true)
#use-dco-flag: true - If you are using DCO instead of CLA

@ -7,6 +7,9 @@ on:
pull_request:
branches:
- main
paths-ignore:
- '**/*.md'
workflow_dispatch:
jobs:

@ -29,7 +29,7 @@ jobs:
uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ github.event.issue.number }}
token: ${{ secrets.BOT_GITHUB_TOKEN }}
token: ${{ secrets.BOT_TOKEN }}
body: |
This issue is available for anyone to work on. **Make sure to reference this issue in your pull request.** :sparkles: Thank you for your contribution! :sparkles:
[Join slack 🤖](https://join.slack.com/t/openimsdk/shared_invite/zt-22720d66b-o_FvKxMTGXtcnnnHiMqe9Q) to connect and communicate with our developers.

@ -0,0 +1,19 @@
name: 'issue-translator'
on:
issue_comment:
types: [created]
issues:
types: [opened]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: usthe/issues-translate-action@v2.7
with:
BOT_GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }}
IS_MODIFY_TITLE: true
# not required, default false. Decide whether to modify the issue title.
# If true, the robot account @Issues-translate-bot must have modification permissions: invite @Issues-translate-bot to your project or use your own custom bot.
CUSTOM_BOT_NOTE: Bot detected the issue body's language is not English, translate it automatically. 👯👭🏻🧑‍🤝‍🧑👫🧑🏿‍🤝‍🧑🏻👩🏾‍🤝‍👨🏿👬🏿
# not required. Customizes the translation bot's prefix message.

@ -1,4 +1,4 @@
enable: "etcd"
enable: etcd
etcd:
rootDirectory: openim
address: [ localhost:12379 ]
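For context, here is a minimal sketch of connecting to the etcd endpoint configured above with go.etcd.io/etcd/client/v3; it is illustrative only and not part of this diff, and the key prefix is an assumption based on rootDirectory.

package main

import (
	"context"
	"fmt"
	"time"

	clientv3 "go.etcd.io/etcd/client/v3"
)

func main() {
	// Endpoint taken from the discovery config above; timeouts are arbitrary.
	cli, err := clientv3.New(clientv3.Config{
		Endpoints:   []string{"localhost:12379"},
		DialTimeout: 5 * time.Second,
	})
	if err != nil {
		panic(err)
	}
	defer cli.Close()

	ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
	defer cancel()
	// Assumed layout: service registrations live under the configured rootDirectory ("openim").
	resp, err := cli.Get(ctx, "openim/", clientv3.WithPrefix())
	if err != nil {
		panic(err)
	}
	for _, kv := range resp.Kvs {
		fmt.Printf("%s -> %s\n", kv.Key, kv.Value)
	}
}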

@ -3,17 +3,17 @@ username: ''
# Password for authentication
password: ''
# Producer acknowledgment settings
producerAck: ""
producerAck:
# Compression type to use (e.g., none, gzip, snappy)
compressType: "none"
compressType: none
# List of Kafka broker addresses
address: [ localhost:19094 ]
# Kafka topic for Redis integration
toRedisTopic: "toRedis"
toRedisTopic: toRedis
# Kafka topic for MongoDB integration
toMongoTopic: "toMongo"
toMongoTopic: toMongo
# Kafka topic for push notifications
toPushTopic: "toPush"
toPushTopic: toPush
# Consumer group ID for Redis topic
toRedisGroupID: redis
# Consumer group ID for MongoDB topic
@ -25,12 +25,12 @@ tls:
# Enable or disable TLS
enableTLS: false
# CA certificate file path
caCrt: ""
caCrt:
# Client certificate file path
clientCrt: ""
clientCrt:
# Client key file path
clientKey: ""
clientKey:
# Client key password
clientKeyPwd: ""
clientKeyPwd:
# Whether to skip TLS verification (not recommended for production)
insecureSkipVerify: false
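As a rough illustration of how the caCrt/clientCrt/clientKey fields above typically become a TLS configuration, here is a sketch using only the Go standard library; the real wiring lives in the openimsdk tools packages and is not shown in this diff, and password-protected client keys (clientKeyPwd) are deliberately left out.

package main

import (
	"crypto/tls"
	"crypto/x509"
	"os"
)

// buildTLSConfig is a hypothetical helper; the parameter names mirror the YAML keys above.
func buildTLSConfig(caCrt, clientCrt, clientKey string, insecureSkipVerify bool) (*tls.Config, error) {
	cfg := &tls.Config{InsecureSkipVerify: insecureSkipVerify}
	if caCrt != "" {
		caPEM, err := os.ReadFile(caCrt)
		if err != nil {
			return nil, err
		}
		pool := x509.NewCertPool()
		pool.AppendCertsFromPEM(caPEM)
		cfg.RootCAs = pool
	}
	if clientCrt != "" && clientKey != "" {
		cert, err := tls.LoadX509KeyPair(clientCrt, clientKey)
		if err != nil {
			return nil, err
		}
		cfg.Certificates = []tls.Certificate{cert}
	}
	return cfg, nil
}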

@ -1,15 +1,15 @@
# Name of the bucket in MinIO
bucket: "openim"
bucket: openim
# Access key ID for MinIO authentication
accessKeyID: "root"
accessKeyID: root
# Secret access key for MinIO authentication
secretAccessKey: "openIM123"
secretAccessKey: openIM123
# Session token for MinIO authentication (optional)
sessionToken: ''
sessionToken:
# Internal address of the MinIO server
internalAddress: "localhost:10005"
internalAddress: localhost:10005
# External address of the MinIO server, accessible from outside. Supports both HTTP and HTTPS using a domain name
externalAddress: "http://external_ip:10005"
externalAddress: http://external_ip:10005
# Flag to enable or disable public read access to the bucket
publicRead: false
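For reference, a minimal sketch of building a client against the MinIO settings above, assuming the minio-go/v7 SDK (the SDK choice is an assumption; none of this code is part of the change):

package main

import (
	"context"
	"log"

	"github.com/minio/minio-go/v7"
	"github.com/minio/minio-go/v7/pkg/credentials"
)

func main() {
	// Values mirror the YAML above: internalAddress, accessKeyID, secretAccessKey, bucket.
	client, err := minio.New("localhost:10005", &minio.Options{
		Creds:  credentials.NewStaticV4("root", "openIM123", ""),
		Secure: false, // the internal address in this config is plain HTTP
	})
	if err != nil {
		log.Fatal(err)
	}
	exists, err := client.BucketExists(context.Background(), "openim")
	if err != nil {
		log.Fatal(err)
	}
	log.Println("bucket openim exists:", exists)
}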

@ -1,5 +1,5 @@
# URI for database connection, leave empty if using address and credential settings directly
uri: ''
uri:
# List of MongoDB server addresses
address: [ localhost:37017 ]
# Name of the database

@ -28,11 +28,11 @@ groupCreated:
# Enables or disables offline push notifications.
enable: false
# Title for the notification when a group is created.
title: "create group title"
title: create group title
# Description for the notification.
desc: "create group desc"
desc: create group desc
# Additional information for the notification.
ext: "create group ext"
ext: create group ext
groupInfoSet:
isSendMsg: false
@ -40,9 +40,9 @@ groupInfoSet:
unreadCount: false
offlinePush:
enable: false
title: "groupInfoSet title"
desc: "groupInfoSet desc"
ext: "groupInfoSet ext"
title: groupInfoSet title
desc: groupInfoSet desc
ext: groupInfoSet ext
joinGroupApplication:
@ -51,9 +51,9 @@ joinGroupApplication:
unreadCount: false
offlinePush:
enable: false
title: "joinGroupApplication title"
desc: "joinGroupApplication desc"
ext: "joinGroupApplication ext"
title: joinGroupApplication title
desc: joinGroupApplication desc
ext: joinGroupApplication ext
memberQuit:
isSendMsg: true
@ -61,9 +61,9 @@ memberQuit:
unreadCount: false
offlinePush:
enable: false
title: "memberQuit title"
desc: "memberQuit desc"
ext: "memberQuit ext"
title: memberQuit title
desc: memberQuit desc
ext: memberQuit ext
groupApplicationAccepted:
isSendMsg: false
@ -71,9 +71,9 @@ groupApplicationAccepted:
unreadCount: false
offlinePush:
enable: false
title: "groupApplicationAccepted title"
desc: "groupApplicationAccepted desc"
ext: "groupApplicationAccepted ext"
title: groupApplicationAccepted title
desc: groupApplicationAccepted desc
ext: groupApplicationAccepted ext
groupApplicationRejected:
isSendMsg: false
@ -81,9 +81,9 @@ groupApplicationRejected:
unreadCount: false
offlinePush:
enable: false
title: "groupApplicationRejected title"
desc: "groupApplicationRejected desc"
ext: "groupApplicationRejected ext"
title: groupApplicationRejected title
desc: groupApplicationRejected desc
ext: groupApplicationRejected ext
groupOwnerTransferred:
@ -92,9 +92,9 @@ groupOwnerTransferred:
unreadCount: false
offlinePush:
enable: false
title: "groupOwnerTransferred title"
desc: "groupOwnerTransferred desc"
ext: "groupOwnerTransferred ext"
title: groupOwnerTransferred title
desc: groupOwnerTransferred desc
ext: groupOwnerTransferred ext
memberKicked:
isSendMsg: true
@ -102,9 +102,9 @@ memberKicked:
unreadCount: false
offlinePush:
enable: false
title: "memberKicked title"
desc: "memberKicked desc"
ext: "memberKicked ext"
title: memberKicked title
desc: memberKicked desc
ext: memberKicked ext
memberInvited:
isSendMsg: true
@ -112,9 +112,9 @@ memberInvited:
unreadCount: false
offlinePush:
enable: false
title: "memberInvited title"
desc: "memberInvited desc"
ext: "memberInvited ext"
title: memberInvited title
desc: memberInvited desc
ext: memberInvited ext
memberEnter:
isSendMsg: true
@ -122,9 +122,9 @@ memberEnter:
unreadCount: false
offlinePush:
enable: false
title: "memberEnter title"
desc: "memberEnter desc"
ext: "memberEnter ext"
title: memberEnter title
desc: memberEnter desc
ext: memberEnter ext
groupDismissed:
isSendMsg: true
@ -132,9 +132,9 @@ groupDismissed:
unreadCount: false
offlinePush:
enable: false
title: "groupDismissed title"
desc: "groupDismissed desc"
ext: "groupDismissed ext"
title: groupDismissed title
desc: groupDismissed desc
ext: groupDismissed ext
groupMuted:
isSendMsg: true
@ -142,9 +142,9 @@ groupMuted:
unreadCount: false
offlinePush:
enable: false
title: "groupMuted title"
desc: "groupMuted desc"
ext: "groupMuted ext"
title: groupMuted title
desc: groupMuted desc
ext: groupMuted ext
groupCancelMuted:
isSendMsg: true
@ -152,11 +152,11 @@ groupCancelMuted:
unreadCount: false
offlinePush:
enable: false
title: "groupCancelMuted title"
desc: "groupCancelMuted desc"
ext: "groupCancelMuted ext"
title: groupCancelMuted title
desc: groupCancelMuted desc
ext: groupCancelMuted ext
defaultTips:
tips: "group Cancel Muted"
tips: group Cancel Muted
groupMemberMuted:
@ -165,9 +165,9 @@ groupMemberMuted:
unreadCount: false
offlinePush:
enable: false
title: "groupMemberMuted title"
desc: "groupMemberMuted desc"
ext: "groupMemberMuted ext"
title: groupMemberMuted title
desc: groupMemberMuted desc
ext: groupMemberMuted ext
groupMemberCancelMuted:
isSendMsg: true
@ -175,9 +175,9 @@ groupMemberCancelMuted:
unreadCount: false
offlinePush:
enable: false
title: "groupMemberCancelMuted title"
desc: "groupMemberCancelMuted desc"
ext: "groupMemberCancelMuted ext"
title: groupMemberCancelMuted title
desc: groupMemberCancelMuted desc
ext: groupMemberCancelMuted ext
groupMemberInfoSet:
isSendMsg: false
@ -185,9 +185,9 @@ groupMemberInfoSet:
unreadCount: false
offlinePush:
enable: false
title: "groupMemberInfoSet title"
desc: "groupMemberInfoSet desc"
ext: "groupMemberInfoSet ext"
title: groupMemberInfoSet title
desc: groupMemberInfoSet desc
ext: groupMemberInfoSet ext
groupInfoSetAnnouncement:
isSendMsg: true
@ -195,9 +195,9 @@ groupInfoSetAnnouncement:
unreadCount: false
offlinePush:
enable: false
title: "groupInfoSetAnnouncement title"
desc: "groupInfoSetAnnouncement desc"
ext: "groupInfoSetAnnouncement ext"
title: groupInfoSetAnnouncement title
desc: groupInfoSetAnnouncement desc
ext: groupInfoSetAnnouncement ext
groupInfoSetName:
@ -206,9 +206,9 @@ groupInfoSetName:
unreadCount: false
offlinePush:
enable: false
title: "groupInfoSetName title"
desc: "groupInfoSetName desc"
ext: "groupInfoSetName ext"
title: groupInfoSetName title
desc: groupInfoSetName desc
ext: groupInfoSetName ext
#############################friend#################################
@ -218,9 +218,9 @@ friendApplicationAdded:
unreadCount: false
offlinePush:
enable: false
title: "Somebody applies to add you as a friend"
desc: "Somebody applies to add you as a friend"
ext: "Somebody applies to add you as a friend"
title: Somebody applies to add you as a friend
desc: Somebody applies to add you as a friend
ext: Somebody applies to add you as a friend
friendApplicationApproved:
isSendMsg: true
@ -228,9 +228,9 @@ friendApplicationApproved:
unreadCount: false
offlinePush:
enable: true
title: "Someone applies to add your friend application"
desc: "Someone applies to add your friend application"
ext: "Someone applies to add your friend application"
title: Someone applies to add your friend application
desc: Someone applies to add your friend application
ext: Someone applies to add your friend application
friendApplicationRejected:
isSendMsg: false
@ -238,9 +238,9 @@ friendApplicationRejected:
unreadCount: false
offlinePush:
enable: true
title: "Someone rejected your friend application"
desc: "Someone rejected your friend application"
ext: "Someone rejected your friend application"
title: Someone rejected your friend application
desc: Someone rejected your friend application
ext: Someone rejected your friend application
friendAdded:
isSendMsg: false
@ -248,9 +248,9 @@ friendAdded:
unreadCount: false
offlinePush:
enable: true
title: "We have become friends"
desc: "We have become friends"
ext: "We have become friends"
title: We have become friends
desc: We have become friends
ext: We have become friends
friendDeleted:
isSendMsg: false
@ -258,9 +258,9 @@ friendDeleted:
unreadCount: false
offlinePush:
enable: true
title: "deleted a friend"
desc: "deleted a friend"
ext: "deleted a friend"
title: deleted a friend
desc: deleted a friend
ext: deleted a friend
friendRemarkSet:
isSendMsg: false
@ -268,9 +268,9 @@ friendRemarkSet:
unreadCount: false
offlinePush:
enable: true
title: "Your friend's profile has been changed"
desc: "Your friend's profile has been changed"
ext: "Your friend's profile has been changed"
title: Your friend's profile has been changed
desc: Your friend's profile has been changed
ext: Your friend's profile has been changed
blackAdded:
isSendMsg: false
@ -278,9 +278,9 @@ blackAdded:
unreadCount: false
offlinePush:
enable: true
title: "blocked a user"
desc: "blocked a user"
ext: "blocked a user"
title: blocked a user
desc: blocked a user
ext: blocked a user
blackDeleted:
isSendMsg: false
@ -288,9 +288,9 @@ blackDeleted:
unreadCount: false
offlinePush:
enable: true
title: "Remove a blocked user"
desc: "Remove a blocked user"
ext: "Remove a blocked user"
title: Remove a blocked user
desc: Remove a blocked user
ext: Remove a blocked user
friendInfoUpdated:
isSendMsg: false
@ -298,9 +298,9 @@ friendInfoUpdated:
unreadCount: false
offlinePush:
enable: true
title: "friend info updated"
desc: "friend info updated"
ext: "friend info updated"
title: friend info updated
desc: friend info updated
ext: friend info updated
#####################user#########################
userInfoUpdated:
@ -309,9 +309,9 @@ userInfoUpdated:
unreadCount: false
offlinePush:
enable: true
title: "Remove a blocked user"
desc: "Remove a blocked user"
ext: "Remove a blocked user"
title: Remove a blocked user
desc: Remove a blocked user
ext: Remove a blocked user
userStatusChanged:
isSendMsg: false
@ -319,9 +319,9 @@ userStatusChanged:
unreadCount: false
offlinePush:
enable: false
title: "user status changed"
desc: "user status changed"
ext: "user status changed"
title: user status changed
desc: user status changed
ext: user status changed
#####################conversation#########################
conversationChanged:
@ -330,9 +330,9 @@ conversationChanged:
unreadCount: false
offlinePush:
enable: true
title: "conversation changed"
desc: "conversation changed"
ext: "conversation changed"
title: conversation changed
desc: conversation changed
ext: conversation changed
conversationSetPrivate:
isSendMsg: true
@ -340,6 +340,6 @@ conversationSetPrivate:
unreadCount: false
offlinePush:
enable: true
title: "burn after reading"
desc: "burn after reading"
ext: "burn after reading"
title: burn after reading
desc: burn after reading
ext: burn after reading

@ -1,3 +1,3 @@
cronExecuteTime: "0 2 * * *"
cronExecuteTime: 0 2 * * *
retainChatRecords: 365
fileExpireTime: 90
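The cronExecuteTime value above is a standard five-field cron expression ("0 2 * * *" runs once a day at 02:00). A minimal scheduling sketch, assuming the robfig/cron/v3 package (an assumption; the actual crontask service is not shown here):

package main

import (
	"log"

	"github.com/robfig/cron/v3"
)

func main() {
	c := cron.New()
	// "0 2 * * *": minute 0, hour 2, every day of month, every month, every weekday.
	_, err := c.AddFunc("0 2 * * *", func() {
		log.Println("purge chat records older than retainChatRecords days and files older than fileExpireTime days")
	})
	if err != nil {
		log.Fatal(err)
	}
	c.Start()
	select {} // block forever; a real service manages its own lifecycle
}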

@ -1,6 +1,6 @@
rpc:
# The IP address where this RPC service registers itself; if left blank, it defaults to the internal network IP
registerIP: ''
registerIP:
# List of ports that the RPC service listens on; configuring multiple ports will launch multiple instances. These must match the number of configured prometheus ports
ports: [ 10140 ]

@ -1,6 +1,6 @@
rpc:
# The IP address where this RPC service registers itself; if left blank, it defaults to the internal network IP
registerIP: ''
registerIP:
# IP address that the RPC service listens on; setting to 0.0.0.0 listens on both internal and external IPs. If left blank, it automatically uses the internal network IP
listenIP: 0.0.0.0
# List of ports that the RPC service listens on; configuring multiple ports will launch multiple instances. These must match the number of configured prometheus ports
@ -13,28 +13,28 @@ prometheus:
ports: [ 20670, 20671, 20672, 20673 ]
maxConcurrentWorkers: 3
#"Use geTui for offline push notifications, or choose fcm or jpns; corresponding configuration settings must be specified."
enable: "geTui"
#Use geTui for offline push notifications, or choose fcm or jpns; corresponding configuration settings must be specified.
enable: geTui
geTui:
pushUrl: "https://restapi.getui.com/v2/$appId"
masterSecret: ''
appKey: ''
intent: ''
channelID: ''
channelName: ''
pushUrl: https://restapi.getui.com/v2/$appId
masterSecret:
appKey:
intent:
channelID:
channelName:
fcm:
# Prioritize using file paths. If the file path is empty, use URL
filePath: "" # File path is concatenated with the parameters passed in through - c(`mage` default pass in `config/`) and filePath.
authURL: "" # Must start with https or http.
filePath: # File path is concatenated with the parameters passed in through - c(`mage` default pass in `config/`) and filePath.
authURL: # Must start with https or http.
jpns:
appKey: ''
masterSecret: ''
pushURL: ''
pushIntent: ''
appKey:
masterSecret:
pushURL:
pushIntent:
# iOS system push sound and badge count
iosPush:
pushSound: "xxx"
pushSound: xxx
badgeCount: true
production: false
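A rough sketch of how the fcm.filePath credential described above might be consumed, assuming the Firebase Admin Go SDK; the config directory, file name, and SDK choice are all assumptions rather than facts from this diff:

package main

import (
	"context"
	"log"
	"path/filepath"

	firebase "firebase.google.com/go/v4"
	"google.golang.org/api/option"
)

func main() {
	configDir := "config/"                 // what `mage` passes via -c, per the comment above
	credFile := "fcm-service-account.json" // hypothetical filePath value
	credPath := filepath.Join(configDir, credFile)

	app, err := firebase.NewApp(context.Background(), nil, option.WithCredentialsFile(credPath))
	if err != nil {
		log.Fatal(err)
	}
	if _, err := app.Messaging(context.Background()); err != nil {
		log.Fatal(err)
	}
	log.Println("FCM messaging client ready")
}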

@ -1,6 +1,6 @@
rpc:
# The IP address where this RPC service registers itself; if left blank, it defaults to the internal network IP
registerIP: ''
registerIP:
# IP address that the RPC service listens on; setting to 0.0.0.0 listens on both internal and external IPs. If left blank, it automatically uses the internal network IP
listenIP: 0.0.0.0
# List of ports that the RPC service listens on; configuring multiple ports will launch multiple instances. These must match the number of configured prometheus ports

@ -1,6 +1,6 @@
rpc:
# The IP address where this RPC service registers itself; if left blank, it defaults to the internal network IP
registerIP: ''
registerIP:
# IP address that the RPC service listens on; setting to 0.0.0.0 listens on both internal and external IPs. If left blank, it automatically uses the internal network IP
listenIP: 0.0.0.0
# List of ports that the RPC service listens on; configuring multiple ports will launch multiple instances. These must match the number of configured prometheus ports

@ -1,6 +1,6 @@
rpc:
# The IP address where this RPC service registers itself; if left blank, it defaults to the internal network IP
registerIP: ''
registerIP:
# IP address that the RPC service listens on; setting to 0.0.0.0 listens on both internal and external IPs. If left blank, it automatically uses the internal network IP
listenIP: 0.0.0.0
# List of ports that the RPC service listens on; configuring multiple ports will launch multiple instances. These must match the number of configured prometheus ports

@ -1,6 +1,6 @@
rpc:
# The IP address where this RPC service registers itself; if left blank, it defaults to the internal network IP
registerIP: ''
registerIP:
# IP address that the RPC service listens on; setting to 0.0.0.0 listens on both internal and external IPs. If left blank, it automatically uses the internal network IP
listenIP: 0.0.0.0
# List of ports that the RPC service listens on; configuring multiple ports will launch multiple instances. These must match the number of configured prometheus ports
@ -11,3 +11,6 @@ prometheus:
enable: true
# List of ports that Prometheus listens on; these must match the number of rpc.ports to ensure correct monitoring setup
ports: [ 20650 ]
enableHistoryForNewMembers: true

@ -1,6 +1,6 @@
rpc:
# The IP address where this RPC service registers itself; if left blank, it defaults to the internal network IP
registerIP: ''
registerIP:
# IP address that the RPC service listens on; setting to 0.0.0.0 listens on both internal and external IPs. If left blank, it automatically uses the internal network IP
listenIP: 0.0.0.0
# List of ports that the RPC service listens on; configuring multiple ports will launch multiple instances. These must match the number of configured prometheus ports

@ -1,6 +1,6 @@
rpc:
# The IP address where this RPC service registers itself; if left blank, it defaults to the internal network IP
registerIP: ''
registerIP:
# IP address that the RPC service listens on; setting to 0.0.0.0 listens on both internal and external IPs. If left blank, it automatically uses the internal network IP
listenIP: 0.0.0.0
# List of ports that the RPC service listens on; configuring multiple ports will launch multiple instances. These must match the number of configured prometheus ports
@ -15,26 +15,26 @@ prometheus:
object:
# Use MinIO as object storage, or set to "cos", "oss", "kodo", "aws", while also configuring the corresponding settings
enable: "minio"
enable: minio
cos:
bucketURL: https://temp-1252357374.cos.ap-chengdu.myqcloud.com
secretID: ''
secretKey: ''
sessionToken: ''
secretID:
secretKey:
sessionToken:
publicRead: false
oss:
endpoint: "https://oss-cn-chengdu.aliyuncs.com"
bucket: "demo-9999999"
bucketURL: "https://demo-9999999.oss-cn-chengdu.aliyuncs.com"
accessKeyID: ''
accessKeySecret: ''
sessionToken: ''
endpoint: https://oss-cn-chengdu.aliyuncs.com
bucket: demo-9999999
bucketURL: https://demo-9999999.oss-cn-chengdu.aliyuncs.com
accessKeyID:
accessKeySecret:
sessionToken:
publicRead: false
kodo:
endpoint: "http://s3.cn-south-1.qiniucs.com"
bucket: "kodo-bucket-test"
bucketURL: "http://kodo-bucket-test-oetobfb.qiniudns.com"
accessKeyID: ''
accessKeySecret: ''
sessionToken: ''
endpoint: http://s3.cn-south-1.qiniucs.com
bucket: kodo-bucket-test
bucketURL: http://kodo-bucket-test-oetobfb.qiniudns.com
accessKeyID:
accessKeySecret:
sessionToken:
publicRead: false

@ -1,6 +1,6 @@
rpc:
# API or other RPCs can access this RPC through this IP; if left blank, the internal network IP is obtained by default
registerIP: ''
registerIP:
# Listening IP; 0.0.0.0 means both internal and external IPs are listened to, if blank, the internal network IP is automatically obtained by default
listenIP: 0.0.0.0
# Listening ports; if multiple are configured, multiple instances will be launched, and must be consistent with the number of prometheus.ports

@ -8,9 +8,9 @@ global:
alerting:
alertmanagers:
- static_configs:
- targets: ['internal_ip:19093']
- targets: [internal_ip:19093]
# Load rules once and periodically evaluate them according to the global 'evaluation_interval'.
# Load rules once and periodically evaluate them according to the global evaluation_interval.
rule_files:
- "instance-down-rules.yml"
# - "first_rules.yml"
@ -19,65 +19,65 @@ rule_files:
# A scrape configuration containing exactly one endpoint to scrape:
# Here it's Prometheus itself.
scrape_configs:
# The job name is added as a label "job='job_name'"" to any timeseries scraped from this config.
# The job name is added as a label "job=job_name"" to any timeseries scraped from this config.
# Monitored information captured by prometheus
# prometheus fetches application services
- job_name: 'node_exporter'
- job_name: node_exporter
static_configs:
- targets: [ 'internal_ip:20500' ]
- job_name: 'openimserver-openim-api'
- targets: [ internal_ip:20500 ]
- job_name: openimserver-openim-api
static_configs:
- targets: [ 'internal_ip:20502' ]
- targets: [ internal_ip:20502 ]
labels:
namespace: 'default'
- job_name: 'openimserver-openim-msggateway'
namespace: default
- job_name: openimserver-openim-msggateway
static_configs:
- targets: [ 'internal_ip:20640' ]
- targets: [ internal_ip:20640 ]
labels:
namespace: 'default'
- job_name: 'openimserver-openim-msgtransfer'
namespace: default
- job_name: openimserver-openim-msgtransfer
static_configs:
- targets: [ 'internal_ip:20600', 'internal_ip:20601', 'internal_ip:20602', 'internal_ip:20603' ]
- targets: [ internal_ip:20600, internal_ip:20601, internal_ip:20602, internal_ip:20603 ]
labels:
namespace: 'default'
- job_name: 'openimserver-openim-push'
namespace: default
- job_name: openimserver-openim-push
static_configs:
- targets: [ 'internal_ip:20670', 'internal_ip:20671', 'internal_ip:20672', 'internal_ip:20673']
- targets: [ internal_ip:20670, internal_ip:20671, internal_ip:20672, internal_ip:20673]
labels:
namespace: 'default'
- job_name: 'openimserver-openim-rpc-auth'
namespace: default
- job_name: openimserver-openim-rpc-auth
static_configs:
- targets: [ 'internal_ip:20600' ]
- targets: [ internal_ip:20600 ]
labels:
namespace: 'default'
- job_name: 'openimserver-openim-rpc-conversation'
namespace: default
- job_name: openimserver-openim-rpc-conversation
static_configs:
- targets: [ 'internal_ip:20680' ]
- targets: [ internal_ip:20680 ]
labels:
namespace: 'default'
- job_name: 'openimserver-openim-rpc-friend'
namespace: default
- job_name: openimserver-openim-rpc-friend
static_configs:
- targets: [ 'internal_ip:20620' ]
- targets: [ internal_ip:20620 ]
labels:
namespace: 'default'
- job_name: 'openimserver-openim-rpc-group'
namespace: default
- job_name: openimserver-openim-rpc-group
static_configs:
- targets: [ 'internal_ip:20650' ]
- targets: [ internal_ip:20650 ]
labels:
namespace: 'default'
- job_name: 'openimserver-openim-rpc-msg'
namespace: default
- job_name: openimserver-openim-rpc-msg
static_configs:
- targets: [ 'internal_ip:20630' ]
- targets: [ internal_ip:20630 ]
labels:
namespace: 'default'
- job_name: 'openimserver-openim-rpc-third'
namespace: default
- job_name: openimserver-openim-rpc-third
static_configs:
- targets: [ 'internal_ip:20690' ]
- targets: [ internal_ip:20690 ]
labels:
namespace: 'default'
- job_name: 'openimserver-openim-rpc-user'
namespace: default
- job_name: openimserver-openim-rpc-user
static_configs:
- targets: [ 'internal_ip:20610' ]
- targets: [ internal_ip:20610 ]
labels:
namespace: 'default'
namespace: default
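Each scrape target listed above is expected to serve a Prometheus /metrics endpoint. A minimal sketch of exposing one in Go with prometheus/client_golang (already a dependency in go.mod below); the port is just one of the scraped examples, not a requirement:

package main

import (
	"log"
	"net/http"

	"github.com/prometheus/client_golang/prometheus/promhttp"
)

func main() {
	// Serves the default registry; e.g. the openim-api job above scrapes internal_ip:20502.
	http.Handle("/metrics", promhttp.Handler())
	log.Fatal(http.ListenAndServe(":20502", nil))
}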

@ -1,5 +1,5 @@
address: [ localhost:16379 ]
username: ''
username:
password: openIM123
clusterMode: false
db: 0

@ -10,5 +10,5 @@ rpcRegisterName:
conversation: conversation
third: third
imAdminUserID: [ "imAdmin" ]
imAdminUserID: [ imAdmin ]

@ -1,4 +1,4 @@
url: "webhook://127.0.0.1:10008/callbackExample"
url: webhook://127.0.0.1:10008/callbackExample
beforeSendSingleMsg:
enable: false
timeout: 5
@ -130,6 +130,13 @@ beforeSetGroupInfo:
enable: false
timeout: 5
failedContinue: true
afterSetGroupInfoEX:
enable: false
timeout: 5
beforeSetGroupInfoEX:
enable: false
timeout: 5
failedContinue: true
afterRevokeMsg:
enable: false
timeout: 5

@ -140,50 +140,50 @@ services:
networks:
- openim
prometheus:
image: ${PROMETHEUS_IMAGE}
container_name: prometheus
restart: always
volumes:
- ./config/prometheus.yml:/etc/prometheus/prometheus.yml
- ./config/instance-down-rules.yml:/etc/prometheus/instance-down-rules.yml
- ${DATA_DIR}/components/prometheus/data:/prometheus
command:
- '--config.file=/etc/prometheus/prometheus.yml'
- '--storage.tsdb.path=/prometheus'
ports:
- "19091:9090"
networks:
- openim
alertmanager:
image: ${ALERTMANAGER_IMAGE}
container_name: alertmanager
restart: always
volumes:
- ./config/alertmanager.yml:/etc/alertmanager/alertmanager.yml
- ./config/email.tmpl:/etc/alertmanager/email.tmpl
ports:
- "19093:9093"
networks:
- openim
grafana:
image: ${GRAFANA_IMAGE}
container_name: grafana
user: root
restart: always
environment:
- GF_SECURITY_ALLOW_EMBEDDING=true
- GF_SESSION_COOKIE_SAMESITE=none
- GF_SESSION_COOKIE_SECURE=true
- GF_AUTH_ANONYMOUS_ENABLED=true
- GF_AUTH_ANONYMOUS_ORG_ROLE=Admin
ports:
- "13000:3000"
volumes:
- ${DATA_DIR:-./}/components/grafana:/var/lib/grafana
networks:
- openim
# prometheus:
# image: ${PROMETHEUS_IMAGE}
# container_name: prometheus
# restart: always
# volumes:
# - ./config/prometheus.yml:/etc/prometheus/prometheus.yml
# - ./config/instance-down-rules.yml:/etc/prometheus/instance-down-rules.yml
# - ${DATA_DIR}/components/prometheus/data:/prometheus
# command:
# - '--config.file=/etc/prometheus/prometheus.yml'
# - '--storage.tsdb.path=/prometheus'
# ports:
# - "19091:9090"
# networks:
# - openim
#
# alertmanager:
# image: ${ALERTMANAGER_IMAGE}
# container_name: alertmanager
# restart: always
# volumes:
# - ./config/alertmanager.yml:/etc/alertmanager/alertmanager.yml
# - ./config/email.tmpl:/etc/alertmanager/email.tmpl
# ports:
# - "19093:9093"
# networks:
# - openim
#
# grafana:
# image: ${GRAFANA_IMAGE}
# container_name: grafana
# user: root
# restart: always
# environment:
# - GF_SECURITY_ALLOW_EMBEDDING=true
# - GF_SESSION_COOKIE_SAMESITE=none
# - GF_SESSION_COOKIE_SECURE=true
# - GF_AUTH_ANONYMOUS_ENABLED=true
# - GF_AUTH_ANONYMOUS_ORG_ROLE=Admin
# ports:
# - "13000:3000"
# volumes:
# - ${DATA_DIR:-./}/components/grafana:/var/lib/grafana
# networks:
# - openim

@ -12,8 +12,8 @@ require (
github.com/gorilla/websocket v1.5.1
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0
github.com/mitchellh/mapstructure v1.5.0
github.com/openimsdk/protocol v0.0.69
github.com/openimsdk/tools v0.0.50-alpha.3
github.com/openimsdk/protocol v0.0.72-alpha.9
github.com/openimsdk/tools v0.0.49-alpha.55
github.com/pkg/errors v0.9.1 // indirect
github.com/prometheus/client_golang v1.18.0
github.com/stretchr/testify v1.9.0
@ -41,6 +41,7 @@ require (
github.com/spf13/viper v1.18.2
github.com/stathat/consistent v1.0.0
go.uber.org/automaxprocs v1.5.3
golang.org/x/exp v0.0.0-20230905200255-921286631fa9
golang.org/x/sync v0.6.0
)
@ -74,7 +75,6 @@ require (
github.com/beorn7/perks v1.0.1 // indirect
github.com/bytedance/sonic v1.9.1 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/chai2010/webp v1.1.1 // indirect
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect
github.com/clbanning/mxj v1.8.4 // indirect
github.com/coreos/go-semver v0.3.0 // indirect
@ -170,7 +170,6 @@ require (
go.uber.org/atomic v1.9.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
golang.org/x/arch v0.3.0 // indirect
golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect
golang.org/x/image v0.15.0 // indirect
golang.org/x/net v0.22.0 // indirect
golang.org/x/oauth2 v0.17.0 // indirect

@ -71,8 +71,6 @@ github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZX
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chai2010/webp v1.1.1 h1:jTRmEccAJ4MGrhFOrPMpNGIJ/eybIgwKpcACsrTEapk=
github.com/chai2010/webp v1.1.1/go.mod h1:0XVwvZWdjjdxpUEIf7b9g9VkHFnInUSYujwqTLEuldU=
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams=
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
@ -321,10 +319,11 @@ github.com/onsi/gomega v1.25.0 h1:Vw7br2PCDYijJHSfBOWhov+8cAnUf8MfMaIOV323l6Y=
github.com/onsi/gomega v1.25.0/go.mod h1:r+zV744Re+DiYCIPRlYOTxn0YkOLcAnW8k1xXdMPGhM=
github.com/openimsdk/gomake v0.0.14-alpha.5 h1:VY9c5x515lTfmdhhPjMvR3BBRrRquAUCFsz7t7vbv7Y=
github.com/openimsdk/gomake v0.0.14-alpha.5/go.mod h1:PndCozNc2IsQIciyn9mvEblYWZwJmAI+06z94EY+csI=
github.com/openimsdk/protocol v0.0.69 h1:dVi8meSg8kmUzSH1XQab4MjihqKkkcCAmt1BYXPJuXo=
github.com/openimsdk/protocol v0.0.69/go.mod h1:OZQA9FR55lseYoN2Ql1XAHYKHJGu7OMNkUbuekrKCM8=
github.com/openimsdk/tools v0.0.50-alpha.3 h1:PGXccSevL+xOg08JKy9oS4poagr85nfZiB+I1jvv2/U=
github.com/openimsdk/tools v0.0.50-alpha.3/go.mod h1:oiSQU5Z6fzjxKFjbqDHImD8EmCIwClU1Rkur1sK12Po=
github.com/openimsdk/protocol v0.0.72-alpha.8 h1:MhxSsdxXx2ZaeSLQk4uFftsB5L2rPh1Qup+dURQNzXQ=
github.com/openimsdk/protocol v0.0.72-alpha.8/go.mod h1:OZQA9FR55lseYoN2Ql1XAHYKHJGu7OMNkUbuekrKCM8=
github.com/openimsdk/protocol v0.0.72-alpha.9/go.mod h1:OZQA9FR55lseYoN2Ql1XAHYKHJGu7OMNkUbuekrKCM8=
github.com/openimsdk/tools v0.0.49-alpha.55 h1:KPgC53oqiwZYssLKljhtXbWXifMlTj2SSQEusj4Uf4k=
github.com/openimsdk/tools v0.0.49-alpha.55/go.mod h1:h1cYmfyaVtgFbKmb1Cfsl8XwUOMTt8ubVUQrdGtsUh4=
github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ=

@ -35,6 +35,10 @@ func (o *GroupApi) SetGroupInfo(c *gin.Context) {
a2r.Call(group.GroupClient.SetGroupInfo, o.Client, c)
}
func (o *GroupApi) SetGroupInfoEX(c *gin.Context) {
a2r.Call(group.GroupClient.SetGroupInfoEX, o.Client, c)
}
func (o *GroupApi) JoinGroup(c *gin.Context) {
a2r.Call(group.GroupClient.JoinGroup, o.Client, c)
}

@ -49,14 +49,14 @@ func NewMessageApi(msgRpcClient *rpcclient.Message, userRpcClient *rpcclient.Use
userRpcClient: rpcclient.NewUserRpcClientByUser(userRpcClient), imAdminUserID: imAdminUserID}
}
func (MessageApi) SetOptions(options map[string]bool, value bool) {
func (*MessageApi) SetOptions(options map[string]bool, value bool) {
datautil.SetSwitchFromOptions(options, constant.IsHistory, value)
datautil.SetSwitchFromOptions(options, constant.IsPersistent, value)
datautil.SetSwitchFromOptions(options, constant.IsSenderSync, value)
datautil.SetSwitchFromOptions(options, constant.IsConversationUpdate, value)
}
func (m MessageApi) newUserSendMsgReq(_ *gin.Context, params *apistruct.SendMsg) *msg.SendMsgReq {
func (m *MessageApi) newUserSendMsgReq(_ *gin.Context, params *apistruct.SendMsg) *msg.SendMsgReq {
var newContent string
options := make(map[string]bool, 5)
switch params.ContentType {
@ -231,7 +231,7 @@ func (m *MessageApi) SendMessage(c *gin.Context) {
}
// Set the status to successful if the message is sent.
var status int = constant.MsgSendSuccessed
var status = constant.MsgSendSuccessed
// Attempt to update the message sending status in the system.
_, err = m.Client.SetSendMsgStatus(c, &msg.SetSendMsgStatusReq{

@ -2,12 +2,16 @@ package api
import (
"fmt"
"github.com/gin-gonic/gin"
"github.com/gin-gonic/gin/binding"
"github.com/go-playground/validator/v10"
"google.golang.org/grpc"
"google.golang.org/grpc/credentials/insecure"
"net/http"
"strings"
"github.com/openimsdk/open-im-server/v3/pkg/common/prommetrics"
"github.com/openimsdk/open-im-server/v3/pkg/common/servererrs"
"github.com/openimsdk/open-im-server/v3/pkg/rpcclient"
@ -16,8 +20,6 @@ import (
"github.com/openimsdk/tools/discovery"
"github.com/openimsdk/tools/log"
"github.com/openimsdk/tools/mw"
"net/http"
"strings"
)
func prommetricsGin() gin.HandlerFunc {
@ -112,6 +114,7 @@ func newGinRouter(disCov discovery.SvcDiscoveryRegistry, config *Config) *gin.En
{
groupRouterGroup.POST("/create_group", g.CreateGroup)
groupRouterGroup.POST("/set_group_info", g.SetGroupInfo)
groupRouterGroup.POST("/set_group_info_ex", g.SetGroupInfoEX)
groupRouterGroup.POST("/join_group", g.JoinGroup)
groupRouterGroup.POST("/quit_group", g.QuitGroup)
groupRouterGroup.POST("/group_application_response", g.ApplicationGroupResponse)

@ -36,9 +36,11 @@ func (u *UserApi) UserRegister(c *gin.Context) {
a2r.Call(user.UserClient.UserRegister, u.Client, c)
}
// UpdateUserInfo is deprecated. Use UpdateUserInfoEx
func (u *UserApi) UpdateUserInfo(c *gin.Context) {
a2r.Call(user.UserClient.UpdateUserInfo, u.Client, c)
}
func (u *UserApi) UpdateUserInfoEx(c *gin.Context) {
a2r.Call(user.UserClient.UpdateUserInfoEx, u.Client, c)
}

@ -275,7 +275,7 @@ func (ws *WsServer) registerClient(client *Client) {
}
wg := sync.WaitGroup{}
log.ZDebug(client.ctx, "ws.msgGatewayConfig.Discovery.Enable", ws.msgGatewayConfig.Discovery.Enable)
log.ZDebug(client.ctx, "ws.msgGatewayConfig.Discovery.Enable", "discoveryEnable", ws.msgGatewayConfig.Discovery.Enable)
if ws.msgGatewayConfig.Discovery.Enable != "k8s" {
wg.Add(1)

@ -16,6 +16,7 @@ package msgtransfer
import (
"context"
"errors"
"fmt"
"github.com/openimsdk/open-im-server/v3/pkg/common/prommetrics"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/cache/redis"
@ -137,7 +138,7 @@ func (m *MsgTransfer) Start(index int, config *Config) error {
return
}
if err := prommetrics.TransferInit(prometheusPort); err != nil && err != http.ErrServerClosed {
if err := prommetrics.TransferInit(prometheusPort); err != nil && !errors.Is(err, http.ErrServerClosed) {
netErr = errs.WrapMsg(err, "prometheus start error", "prometheusPort", prometheusPort)
netDone <- struct{}{}
}

@ -16,6 +16,7 @@ package msgtransfer
import (
"context"
"errors"
"github.com/IBM/sarama"
"github.com/go-redis/redis"
"github.com/openimsdk/open-im-server/v3/pkg/common/config"
@ -187,7 +188,7 @@ func (och *OnlineHistoryRedisConsumerHandler) handleMsg(ctx context.Context, key
if len(storageMessageList) > 0 {
msg := storageMessageList[0]
lastSeq, isNewConversation, err := och.msgDatabase.BatchInsertChat2Cache(ctx, conversationID, storageMessageList)
if err != nil && errs.Unwrap(err) != redis.Nil {
if err != nil && !errors.Is(errs.Unwrap(err), redis.Nil) {
log.ZError(ctx, "batch data insert to redis err", err, "storageMsgList", storageMessageList)
return
}
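The change above swaps a direct == comparison against redis.Nil for errors.Is, which also matches errors that have been wrapped along the way. A self-contained illustration using only the standard library:

package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("not found")

func lookup() error {
	return fmt.Errorf("lookup failed: %w", errNotFound) // wraps the sentinel error
}

func main() {
	err := lookup()
	fmt.Println(err == errNotFound)          // false: wrapping breaks direct comparison
	fmt.Println(errors.Is(err, errNotFound)) // true: errors.Is walks the wrap chain
}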

@ -91,13 +91,13 @@ func (mc *OnlineHistoryMongoConsumerHandler) handleChatWs2Mongo(ctx context.Cont
}
}
func (OnlineHistoryMongoConsumerHandler) Setup(_ sarama.ConsumerGroupSession) error { return nil }
func (OnlineHistoryMongoConsumerHandler) Cleanup(_ sarama.ConsumerGroupSession) error { return nil }
func (*OnlineHistoryMongoConsumerHandler) Setup(_ sarama.ConsumerGroupSession) error { return nil }
func (*OnlineHistoryMongoConsumerHandler) Cleanup(_ sarama.ConsumerGroupSession) error { return nil }
func (mc *OnlineHistoryMongoConsumerHandler) ConsumeClaim(
sess sarama.ConsumerGroupSession,
claim sarama.ConsumerGroupClaim,
) error { // a instance in the consumer group
) error { // an instance in the consumer group
log.ZDebug(context.Background(), "online new session msg come", "highWaterMarkOffset",
claim.HighWaterMarkOffset(), "topic", claim.Topic(), "partition", claim.Partition())
for msg := range claim.Messages() {

@ -19,20 +19,20 @@ type OnlinePusher interface {
pushToUserIDs *[]string) []string
}
type emptyOnlinePUsher struct{}
type emptyOnlinePusher struct{}
func newEmptyOnlinePUsher() *emptyOnlinePUsher {
return &emptyOnlinePUsher{}
func newEmptyOnlinePusher() *emptyOnlinePusher {
return &emptyOnlinePusher{}
}
func (emptyOnlinePUsher) GetConnsAndOnlinePush(ctx context.Context, msg *sdkws.MsgData,
func (emptyOnlinePusher) GetConnsAndOnlinePush(ctx context.Context, msg *sdkws.MsgData,
pushToUserIDs []string) (wsResults []*msggateway.SingleMsgToUserResults, err error) {
log.ZWarn(ctx, "emptyOnlinePUsher GetConnsAndOnlinePush", nil)
log.ZWarn(ctx, "emptyOnlinePusher GetConnsAndOnlinePush", nil)
return nil, nil
}
func (u emptyOnlinePUsher) GetOnlinePushFailedUserIDs(ctx context.Context, msg *sdkws.MsgData,
func (u emptyOnlinePusher) GetOnlinePushFailedUserIDs(ctx context.Context, msg *sdkws.MsgData,
wsResults []*msggateway.SingleMsgToUserResults, pushToUserIDs *[]string) []string {
log.ZWarn(ctx, "emptyOnlinePUsher GetOnlinePushFailedUserIDs", nil)
log.ZWarn(ctx, "emptyOnlinePusher GetOnlinePushFailedUserIDs", nil)
return nil
}
@ -45,7 +45,7 @@ func NewOnlinePusher(disCov discovery.SvcDiscoveryRegistry, config *Config) Onli
case "etcd":
return NewDefaultAllNode(disCov, config)
default:
return newEmptyOnlinePUsher()
return newEmptyOnlinePusher()
}
}

@ -154,17 +154,17 @@ func (c *ConsumerHandler) Push2User(ctx context.Context, userIDs []string, msg *
return nil
}
}
offlinePUshUserID := []string{msg.RecvID}
offlinePushUserID := []string{msg.RecvID}
//receiver offline push
if err = c.webhookBeforeOfflinePush(ctx, &c.config.WebhooksConfig.BeforeOfflinePush,
offlinePUshUserID, msg, nil); err != nil {
offlinePushUserID, msg, nil); err != nil {
return err
}
err = c.offlinePushMsg(ctx, msg, offlinePUshUserID)
err = c.offlinePushMsg(ctx, msg, offlinePushUserID)
if err != nil {
log.ZWarn(ctx, "offlinePushMsg failed", err, "offlinePUshUserID", offlinePUshUserID, "msg", msg)
log.ZWarn(ctx, "offlinePushMsg failed", err, "offlinePushUserID", offlinePushUserID, "msg", msg)
return nil
}

@ -221,11 +221,11 @@ func (c *conversationServer) SetConversation(ctx context.Context, req *pbconvers
return resp, nil
}
// nolint
func (c *conversationServer) SetConversations(ctx context.Context, req *pbconversation.SetConversationsReq) (*pbconversation.SetConversationsResp, error) {
if req.Conversation == nil {
return nil, errs.ErrArgs.WrapMsg("conversation must not be nil")
}
if req.Conversation.ConversationType == constant.WriteGroupChatType {
groupInfo, err := c.groupRpcClient.GetGroupInfo(ctx, req.Conversation.GroupID)
if err != nil {
@ -235,98 +235,142 @@ func (c *conversationServer) SetConversations(ctx context.Context, req *pbconver
return nil, servererrs.ErrDismissedAlready.WrapMsg("group dismissed")
}
}
var unequal int
var conv dbModel.Conversation
if len(req.UserIDs) == 1 {
cs, err := c.conversationDatabase.FindConversations(ctx, req.UserIDs[0], []string{req.Conversation.ConversationID})
conversationMap := make(map[string]*dbModel.Conversation)
var needUpdateUsersList []string
for _, userID := range req.UserIDs {
conversationList, err := c.conversationDatabase.FindConversations(ctx, userID, []string{req.Conversation.ConversationID})
if err != nil {
return nil, err
}
if len(cs) == 0 {
return nil, errs.ErrRecordNotFound.WrapMsg("conversation not found")
if len(conversationList) != 0 {
conversationMap[userID] = conversationList[0]
} else {
needUpdateUsersList = append(needUpdateUsersList, userID)
}
conv = *cs[0]
}
var conversation dbModel.Conversation
conversation.ConversationID = req.Conversation.ConversationID
conversation.ConversationType = req.Conversation.ConversationType
conversation.UserID = req.Conversation.UserID
conversation.GroupID = req.Conversation.GroupID
m := make(map[string]any)
if req.Conversation.RecvMsgOpt != nil {
m["recv_msg_opt"] = req.Conversation.RecvMsgOpt.Value
if req.Conversation.RecvMsgOpt.Value != conv.RecvMsgOpt {
unequal++
setConversationFieldsFunc := func() {
if req.Conversation.RecvMsgOpt != nil {
m["recv_msg_opt"] = req.Conversation.RecvMsgOpt.Value
}
}
if req.Conversation.AttachedInfo != nil {
m["attached_info"] = req.Conversation.AttachedInfo.Value
if req.Conversation.AttachedInfo.Value != conv.AttachedInfo {
unequal++
if req.Conversation.AttachedInfo != nil {
m["attached_info"] = req.Conversation.AttachedInfo.Value
}
}
if req.Conversation.Ex != nil {
m["ex"] = req.Conversation.Ex.Value
if req.Conversation.Ex.Value != conv.Ex {
unequal++
if req.Conversation.Ex != nil {
m["ex"] = req.Conversation.Ex.Value
}
}
if req.Conversation.IsPinned != nil {
m["is_pinned"] = req.Conversation.IsPinned.Value
if req.Conversation.IsPinned.Value != conv.IsPinned {
unequal++
if req.Conversation.IsPinned != nil {
m["is_pinned"] = req.Conversation.IsPinned.Value
}
}
if req.Conversation.GroupAtType != nil {
m["group_at_type"] = req.Conversation.GroupAtType.Value
if req.Conversation.GroupAtType.Value != conv.GroupAtType {
unequal++
if req.Conversation.GroupAtType != nil {
m["group_at_type"] = req.Conversation.GroupAtType.Value
}
}
if req.Conversation.MsgDestructTime != nil {
m["msg_destruct_time"] = req.Conversation.MsgDestructTime.Value
if req.Conversation.MsgDestructTime.Value != conv.MsgDestructTime {
unequal++
if req.Conversation.MsgDestructTime != nil {
m["msg_destruct_time"] = req.Conversation.MsgDestructTime.Value
}
if req.Conversation.MsgDestructTime != nil {
m["msg_destruct_time"] = req.Conversation.MsgDestructTime.Value
}
if req.Conversation.BurnDuration != nil {
m["burn_duration"] = req.Conversation.BurnDuration.Value
}
}
if req.Conversation.IsMsgDestruct != nil {
m["is_msg_destruct"] = req.Conversation.IsMsgDestruct.Value
if req.Conversation.IsMsgDestruct.Value != conv.IsMsgDestruct {
unequal++
// collect the conversation fields that need to be set
setConversationFieldsFunc()
for userID := range conversationMap {
unequal := len(m)
if req.Conversation.RecvMsgOpt != nil {
if req.Conversation.RecvMsgOpt.Value != conversationMap[userID].RecvMsgOpt {
unequal--
}
}
if req.Conversation.AttachedInfo != nil {
if req.Conversation.AttachedInfo.Value != conversationMap[userID].AttachedInfo {
unequal--
}
}
if req.Conversation.Ex != nil {
if req.Conversation.Ex.Value != conversationMap[userID].Ex {
unequal--
}
}
if req.Conversation.IsPinned != nil {
m["is_pinned"] = req.Conversation.IsPinned.Value
if req.Conversation.IsPinned.Value != conversationMap[userID].IsPinned {
unequal--
}
}
if req.Conversation.GroupAtType != nil {
if req.Conversation.GroupAtType.Value != conversationMap[userID].GroupAtType {
unequal--
}
}
if req.Conversation.MsgDestructTime != nil {
if req.Conversation.MsgDestructTime.Value != conversationMap[userID].MsgDestructTime {
unequal--
}
}
if req.Conversation.IsMsgDestruct != nil {
if req.Conversation.IsMsgDestruct.Value != conversationMap[userID].IsMsgDestruct {
unequal--
}
}
if req.Conversation.BurnDuration != nil {
if req.Conversation.BurnDuration.Value != conversationMap[userID].BurnDuration {
unequal--
}
}
if unequal > 0 {
needUpdateUsersList = append(needUpdateUsersList, userID)
}
}
if req.Conversation.IsPrivateChat != nil && req.Conversation.ConversationType != constant.ReadGroupChatType {
var conversations []*dbModel.Conversation
for _, ownerUserID := range req.UserIDs {
conversation2 := conversation
conversation2.OwnerUserID = ownerUserID
conversation2.IsPrivateChat = req.Conversation.IsPrivateChat.Value
conversations = append(conversations, &conversation2)
transConversation := conversation
transConversation.OwnerUserID = ownerUserID
transConversation.IsPrivateChat = req.Conversation.IsPrivateChat.Value
conversations = append(conversations, &transConversation)
}
if err := c.conversationDatabase.SyncPeerUserPrivateConversationTx(ctx, conversations); err != nil {
return nil, err
}
for _, userID := range req.UserIDs {
c.conversationNotificationSender.ConversationSetPrivateNotification(ctx, userID, req.Conversation.UserID,
req.Conversation.IsPrivateChat.Value, req.Conversation.ConversationID)
}
}
if req.Conversation.BurnDuration != nil {
m["burn_duration"] = req.Conversation.BurnDuration.Value
if req.Conversation.BurnDuration.Value != conv.BurnDuration {
unequal++
}
}
if err := c.conversationDatabase.SetUsersConversationFieldTx(ctx, req.UserIDs, &conversation, m); err != nil {
return nil, err
}
} else {
if len(m) != 0 {
if err := c.conversationDatabase.SetUsersConversationFieldTx(ctx, needUpdateUsersList, &conversation, m); err != nil {
return nil, err
}
if unequal > 0 {
for _, v := range req.UserIDs {
c.conversationNotificationSender.ConversationChangeNotification(ctx, v, []string{req.Conversation.ConversationID})
for _, v := range needUpdateUsersList {
c.conversationNotificationSender.ConversationChangeNotification(ctx, v, []string{req.Conversation.ConversationID})
}
}
}
@ -392,6 +436,14 @@ func (c *conversationServer) SetConversationMaxSeq(ctx context.Context, req *pbc
return &pbconversation.SetConversationMaxSeqResp{}, nil
}
func (c *conversationServer) SetConversationMinSeq(ctx context.Context, req *pbconversation.SetConversationMinSeqReq) (*pbconversation.SetConversationMinSeqResp, error) {
if err := c.conversationDatabase.UpdateUsersConversationField(ctx, req.OwnerUserID, req.ConversationID,
map[string]any{"min_seq": req.MinSeq}); err != nil {
return nil, err
}
return &pbconversation.SetConversationMinSeqResp{}, nil
}
func (c *conversationServer) GetConversationIDs(ctx context.Context, req *pbconversation.GetConversationIDsReq) (*pbconversation.GetConversationIDsResp, error) {
conversationIDs, err := c.conversationDatabase.GetConversationIDs(ctx, req.UserID)
if err != nil {
@ -634,11 +686,11 @@ func (c *conversationServer) GetConversationsNeedDestructMsgs(ctx context.Contex
conversationIDs, err := c.conversationDatabase.PageConversationIDs(ctx, pagination)
if err != nil {
log.ZError(ctx, "PageConversationIDs failed", err, "pageNumber", pageNumber)
// log.ZError(ctx, "PageConversationIDs failed", err, "pageNumber", pageNumber)
continue
}
log.ZDebug(ctx, "PageConversationIDs success", "pageNumber", pageNumber, "conversationIDsNum", len(conversationIDs), "conversationIDs", conversationIDs)
// log.ZDebug(ctx, "PageConversationIDs success", "pageNumber", pageNumber, "conversationIDsNum", len(conversationIDs), "conversationIDs", conversationIDs)
if len(conversationIDs) == 0 {
continue
}

@ -358,3 +358,74 @@ func (s *groupServer) webhookAfterSetGroupInfo(ctx context.Context, after *confi
}
s.webhookClient.AsyncPost(ctx, cbReq.GetCallbackCommand(), cbReq, &callbackstruct.CallbackAfterSetGroupInfoResp{}, after)
}
func (s *groupServer) webhookBeforeSetGroupInfoEX(ctx context.Context, before *config.BeforeConfig, req *group.SetGroupInfoEXReq) error {
return webhook.WithCondition(ctx, before, func(ctx context.Context) error {
cbReq := &callbackstruct.CallbackBeforeSetGroupInfoEXReq{
CallbackCommand: callbackstruct.CallbackBeforeSetGroupInfoCommand,
GroupID: req.GroupInfoForSetEX.GroupID,
GroupName: req.GroupInfoForSetEX.GroupName,
Notification: req.GroupInfoForSetEX.Notification,
Introduction: req.GroupInfoForSetEX.Introduction,
FaceURL: req.GroupInfoForSetEX.FaceURL,
}
if req.GroupInfoForSetEX.Ex != nil {
cbReq.Ex = req.GroupInfoForSetEX.Ex
}
log.ZDebug(ctx, "debug CallbackBeforeSetGroupInfoEX", "ex", cbReq.Ex)
if req.GroupInfoForSetEX.NeedVerification != nil {
cbReq.NeedVerification = req.GroupInfoForSetEX.NeedVerification
}
if req.GroupInfoForSetEX.LookMemberInfo != nil {
cbReq.LookMemberInfo = req.GroupInfoForSetEX.LookMemberInfo
}
if req.GroupInfoForSetEX.ApplyMemberFriend != nil {
cbReq.ApplyMemberFriend = req.GroupInfoForSetEX.ApplyMemberFriend
}
resp := &callbackstruct.CallbackBeforeSetGroupInfoEXResp{}
if err := s.webhookClient.SyncPost(ctx, cbReq.GetCallbackCommand(), cbReq, resp, before); err != nil {
return err
}
datautil.NotNilReplace(&req.GroupInfoForSetEX.GroupID, &resp.GroupID)
datautil.NotNilReplace(&req.GroupInfoForSetEX.GroupName, &resp.GroupName)
datautil.NotNilReplace(&req.GroupInfoForSetEX.FaceURL, &resp.FaceURL)
datautil.NotNilReplace(&req.GroupInfoForSetEX.Introduction, &resp.Introduction)
datautil.NotNilReplace(&req.GroupInfoForSetEX.Ex, &resp.Ex)
datautil.NotNilReplace(&req.GroupInfoForSetEX.NeedVerification, &resp.NeedVerification)
datautil.NotNilReplace(&req.GroupInfoForSetEX.LookMemberInfo, &resp.LookMemberInfo)
datautil.NotNilReplace(&req.GroupInfoForSetEX.ApplyMemberFriend, &resp.ApplyMemberFriend)
return nil
})
}
func (s *groupServer) webhookAfterSetGroupInfoEX(ctx context.Context, after *config.AfterConfig, req *group.SetGroupInfoEXReq) {
cbReq := &callbackstruct.CallbackAfterSetGroupInfoEXReq{
CallbackCommand: callbackstruct.CallbackAfterSetGroupInfoCommand,
GroupID: req.GroupInfoForSetEX.GroupID,
GroupName: req.GroupInfoForSetEX.GroupName,
Notification: req.GroupInfoForSetEX.Notification,
Introduction: req.GroupInfoForSetEX.Introduction,
FaceURL: req.GroupInfoForSetEX.FaceURL,
}
if req.GroupInfoForSetEX.Ex != nil {
cbReq.Ex = req.GroupInfoForSetEX.Ex
}
if req.GroupInfoForSetEX.NeedVerification != nil {
cbReq.NeedVerification = req.GroupInfoForSetEX.NeedVerification
}
if req.GroupInfoForSetEX.LookMemberInfo != nil {
cbReq.LookMemberInfo = req.GroupInfoForSetEX.LookMemberInfo
}
if req.GroupInfoForSetEX.ApplyMemberFriend != nil {
cbReq.ApplyMemberFriend = req.GroupInfoForSetEX.ApplyMemberFriend
}
s.webhookClient.AsyncPost(ctx, cbReq.GetCallbackCommand(), cbReq, &callbackstruct.CallbackAfterSetGroupInfoEXResp{}, after)
}

@ -54,6 +54,39 @@ func UpdateGroupInfoMap(ctx context.Context, group *sdkws.GroupInfoForSet) map[s
return m
}
func UpdateGroupInfoEXMap(ctx context.Context, group *sdkws.GroupInfoForSetEX) map[string]any {
m := make(map[string]any)
if group.GroupName != "" {
m["group_name"] = group.GroupName
}
if group.Notification != nil {
m["notification"] = group.Notification.Value
m["notification_update_time"] = time.Now()
m["notification_user_id"] = mcontext.GetOpUserID(ctx)
}
if group.Introduction != nil {
m["introduction"] = group.Introduction.Value
}
if group.FaceURL != nil {
m["face_url"] = group.FaceURL.Value
}
if group.NeedVerification != nil {
m["need_verification"] = group.NeedVerification.Value
}
if group.LookMemberInfo != nil {
m["look_member_info"] = group.LookMemberInfo.Value
}
if group.ApplyMemberFriend != nil {
m["apply_member_friend"] = group.ApplyMemberFriend.Value
}
if group.Ex != nil {
m["ex"] = group.Ex.Value
}
return m
}
func UpdateGroupStatusMap(status int) map[string]any {
return map[string]any{
"status": status,

File diff suppressed because it is too large.

@ -21,7 +21,9 @@ import (
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/database"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/model"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/versionctx"
"github.com/openimsdk/open-im-server/v3/pkg/msgprocessor"
"github.com/openimsdk/open-im-server/v3/pkg/rpcclient/notification"
"github.com/openimsdk/protocol/msg"
"github.com/openimsdk/open-im-server/v3/pkg/authverify"
"github.com/openimsdk/open-im-server/v3/pkg/common/servererrs"
@ -43,12 +45,22 @@ const (
adminReceiver
)
func NewGroupNotificationSender(db controller.GroupDatabase, msgRpcClient *rpcclient.MessageRpcClient, userRpcClient *rpcclient.UserRpcClient, config *Config, fn func(ctx context.Context, userIDs []string) ([]notification.CommonUser, error)) *GroupNotificationSender {
func NewGroupNotificationSender(
db controller.GroupDatabase,
msgRpcClient *rpcclient.MessageRpcClient,
userRpcClient *rpcclient.UserRpcClient,
conversationRpcClient *rpcclient.ConversationRpcClient,
config *Config,
fn func(ctx context.Context, userIDs []string) ([]notification.CommonUser, error),
) *GroupNotificationSender {
return &GroupNotificationSender{
NotificationSender: rpcclient.NewNotificationSender(&config.NotificationConfig, rpcclient.WithRpcClient(msgRpcClient), rpcclient.WithUserRpcClient(userRpcClient)),
getUsersInfo: fn,
db: db,
config: config,
conversationRpcClient: conversationRpcClient,
msgRpcClient: msgRpcClient,
}
}
@ -57,6 +69,9 @@ type GroupNotificationSender struct {
getUsersInfo func(ctx context.Context, userIDs []string) ([]notification.CommonUser, error)
db controller.GroupDatabase
config *Config
conversationRpcClient *rpcclient.ConversationRpcClient
msgRpcClient *rpcclient.MessageRpcClient
}
func (g *GroupNotificationSender) PopulateGroupMember(ctx context.Context, members ...*model.GroupMember) error {
@ -494,50 +509,46 @@ func (g *GroupNotificationSender) MemberKickedNotification(ctx context.Context,
g.Notification(ctx, mcontext.GetOpUserID(ctx), tips.Group.GroupID, constant.MemberKickedNotification, tips)
}
func (g *GroupNotificationSender) MemberInvitedNotification(ctx context.Context, groupID, reason string, invitedUserIDList []string) {
func (g *GroupNotificationSender) MemberEnterNotification(ctx context.Context, groupID string, entrantUserID ...string) error {
var err error
defer func() {
if err != nil {
log.ZError(ctx, stringutil.GetFuncName(1)+" failed", err)
}
}()
var group *sdkws.GroupInfo
group, err = g.getGroupInfo(ctx, groupID)
if err != nil {
return
if !g.config.RpcConfig.EnableHistoryForNewMembers {
conversationID := msgprocessor.GetConversationIDBySessionType(constant.ReadGroupChatType, groupID)
maxSeq, err := g.msgRpcClient.GetConversationMaxSeq(ctx, conversationID)
if err != nil {
return err
}
if _, err = g.msgRpcClient.SetUserConversationsMinSeq(ctx, &msg.SetUserConversationsMinSeqReq{
UserIDs: entrantUserID,
ConversationID: conversationID,
Seq: maxSeq,
}); err != nil {
return err
}
}
var users []*sdkws.GroupMemberFullInfo
users, err = g.getGroupMembers(ctx, groupID, invitedUserIDList)
if err != nil {
return
if err := g.conversationRpcClient.GroupChatFirstCreateConversation(ctx, groupID, entrantUserID); err != nil {
return err
}
tips := &sdkws.MemberInvitedTips{Group: group, InvitedUserList: users}
err = g.fillOpUser(ctx, &tips.OpUser, tips.Group.GroupID)
g.setVersion(ctx, &tips.GroupMemberVersion, &tips.GroupMemberVersionID, database.GroupMemberVersionName, tips.Group.GroupID)
g.Notification(ctx, mcontext.GetOpUserID(ctx), group.GroupID, constant.MemberInvitedNotification, tips)
}
func (g *GroupNotificationSender) MemberEnterNotification(ctx context.Context, groupID string, entrantUserID string) {
var err error
defer func() {
if err != nil {
log.ZError(ctx, stringutil.GetFuncName(1)+" failed", err)
}
}()
var group *sdkws.GroupInfo
group, err = g.getGroupInfo(ctx, groupID)
if err != nil {
return
return err
}
var user *sdkws.GroupMemberFullInfo
user, err = g.getGroupMember(ctx, groupID, entrantUserID)
users, err := g.getGroupMembers(ctx, groupID, entrantUserID)
if err != nil {
return
return err
}
tips := &sdkws.MemberEnterTips{Group: group, EntrantUser: user}
tips := &sdkws.MemberInvitedTips{Group: group, InvitedUserList: users}
g.setVersion(ctx, &tips.GroupMemberVersion, &tips.GroupMemberVersionID, database.GroupMemberVersionName, tips.Group.GroupID)
g.Notification(ctx, mcontext.GetOpUserID(ctx), group.GroupID, constant.MemberEnterNotification, tips)
g.Notification(ctx, mcontext.GetOpUserID(ctx), group.GroupID, constant.MemberInvitedNotification, tips)
return nil
}
func (g *GroupNotificationSender) GroupDismissedNotification(ctx context.Context, tips *sdkws.GroupDismissedTips) {

@ -30,8 +30,14 @@ func (m *msgServer) ClearMsg(ctx context.Context, req *msg.ClearMsgReq) (_ *msg.
msgNum int
start = time.Now()
)
clearMsg := func(ctx context.Context) (bool, error) {
msgs, err := m.MsgDatabase.GetBeforeMsg(ctx, req.Timestamp, 100)
docIDs, err := m.MsgDatabase.GetDocIDs(ctx)
if err != nil {
return false, err
}
msgs, err := m.MsgDatabase.GetBeforeMsg(ctx, req.Timestamp, docIDs, 5000)
if err != nil {
return false, err
}
@ -55,19 +61,14 @@ func (m *msgServer) ClearMsg(ctx context.Context, req *msg.ClearMsgReq) (_ *msg.
return true, nil
}
for {
keep, err := clearMsg(ctx)
if err != nil {
log.ZError(ctx, "clear msg failed", err, "docNum", docNum, "msgNum", msgNum, "cost", time.Since(start))
return nil, err
}
if !keep {
log.ZInfo(ctx, "clear msg success", "docNum", docNum, "msgNum", msgNum, "cost", time.Since(start))
break
}
log.ZInfo(ctx, "clearing message", "docNum", docNum, "msgNum", msgNum, "cost", time.Since(start))
_, err = clearMsg(ctx)
if err != nil {
log.ZError(ctx, "clear msg failed", err, "docNum", docNum, "msgNum", msgNum, "cost", time.Since(start))
return nil, err
}
log.ZInfo(ctx, "clearing message", "docNum", docNum, "msgNum", msgNum, "cost", time.Since(start))
return &msg.ClearMsgResp{}, nil
}

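With this change ClearMsg does a single bounded pass per invocation instead of looping until the collection is exhausted; the cron job that drives it picks up the remaining documents on its next tick. The overall shape of one pass, with hypothetical function parameters standing in for GetDocIDs, GetBeforeMsg and DeleteDocMsgBefore:

package sketch

import "context"

// clearOnce samples candidate doc IDs, loads documents whose messages predate
// ts, purges them, and reports how much was removed.
func clearOnce(
	ctx context.Context,
	ts int64,
	fetchDocIDs func(ctx context.Context) ([]string, error),
	fetchBefore func(ctx context.Context, ts int64, docIDs []string, limit int) ([]string, error),
	purge func(ctx context.Context, docID string) (msgCount int, err error),
) (docNum, msgNum int, err error) {
	docIDs, err := fetchDocIDs(ctx)
	if err != nil {
		return 0, 0, err
	}
	docs, err := fetchBefore(ctx, ts, docIDs, 5000)
	if err != nil {
		return 0, 0, err
	}
	for _, docID := range docs {
		n, err := purge(ctx, docID)
		if err != nil {
			return docNum, msgNum, err
		}
		docNum++
		msgNum += n
	}
	return docNum, msgNum, nil
}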
@ -53,3 +53,12 @@ func (m *msgServer) GetMsgByConversationIDs(ctx context.Context, req *pbmsg.GetM
}
return &pbmsg.GetMsgByConversationIDsResp{MsgDatas: Msgs}, nil
}
func (m *msgServer) SetUserConversationsMinSeq(ctx context.Context, req *pbmsg.SetUserConversationsMinSeqReq) (*pbmsg.SetUserConversationsMinSeqResp, error) {
for _, userID := range req.UserIDs {
if err := m.MsgDatabase.SetUserConversationsMinSeqs(ctx, userID, map[string]int64{req.ConversationID: req.Seq}); err != nil {
return nil, err
}
}
return &pbmsg.SetUserConversationsMinSeqResp{}, nil
}

@ -290,6 +290,7 @@ func (t *thirdServer) apiAddress(prefix, name string) string {
func (t *thirdServer) DeleteOutdatedData(ctx context.Context, req *third.DeleteOutdatedDataReq) (*third.DeleteOutdatedDataResp, error) {
var conf config.Third
expireTime := time.UnixMilli(req.ExpireTime)
var deltotal int
findPagination := &sdkws.RequestPagination{
PageNumber: 1,
ShowNumber: 1000,
@ -311,10 +312,8 @@ func (t *thirdServer) DeleteOutdatedData(ctx context.Context, req *third.DeleteO
return nil, errs.Wrap(err)
}
if int(count) < 1 && t.minio != nil {
thumbnailKey, err := t.getMinioImageThumbnailKey(ctx, key)
if err != nil {
return nil, errs.Wrap(err)
}
thumbnailKey, _ := t.getMinioImageThumbnailKey(ctx, key)
t.s3dataBase.DeleteObject(ctx, thumbnailKey)
t.s3dataBase.DelS3Key(ctx, conf.Object.Enable, needDelObjectKeys...)
t.s3dataBase.DeleteObject(ctx, key)
@ -329,7 +328,9 @@ func (t *thirdServer) DeleteOutdatedData(ctx context.Context, req *third.DeleteO
if total < int64(findPagination.ShowNumber) {
break
}
deltotal += int(total)
}
log.ZDebug(ctx, "DeleteOutdatedData", "delete Total", deltotal)
return &third.DeleteOutdatedDataResp{}, nil
}

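DeleteOutdatedData keeps querying the first page of expired objects (deleting as it goes, so each query returns fresh rows), accumulates a running total for the final debug log, and stops once a page comes back short. The paging pattern in isolation, with hypothetical findExpired and remove helpers:

package sketch

import "context"

// sweepExpired deletes expired object keys page by page and returns how many
// were handled; findExpired and remove stand in for the model query and the
// S3/object deletion used above.
func sweepExpired(
	ctx context.Context,
	pageSize int,
	findExpired func(ctx context.Context, limit int) ([]string, error),
	remove func(ctx context.Context, key string) error,
) (int, error) {
	deleted := 0
	for {
		keys, err := findExpired(ctx, pageSize)
		if err != nil {
			return deleted, err
		}
		for _, key := range keys {
			if err := remove(ctx, key); err != nil {
				return deleted, err
			}
			deleted++
		}
		// A short page means the expired set has been drained.
		if len(keys) < pageSize {
			break
		}
	}
	return deleted, nil
}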
@ -17,6 +17,11 @@ package user
import (
"context"
"errors"
"math/rand"
"strings"
"sync"
"time"
"github.com/openimsdk/open-im-server/v3/internal/rpc/relation"
"github.com/openimsdk/open-im-server/v3/pkg/common/config"
"github.com/openimsdk/open-im-server/v3/pkg/common/prommetrics"
@ -29,10 +34,6 @@ import (
"github.com/openimsdk/protocol/group"
friendpb "github.com/openimsdk/protocol/relation"
"github.com/openimsdk/tools/db/redisutil"
"math/rand"
"strings"
"sync"
"time"
"github.com/openimsdk/open-im-server/v3/pkg/authverify"
"github.com/openimsdk/open-im-server/v3/pkg/common/convert"
@ -147,41 +148,35 @@ func (s *userServer) UpdateUserInfo(ctx context.Context, req *pbuser.UpdateUserI
return nil, err
}
s.friendNotificationSender.UserInfoUpdatedNotification(ctx, req.UserInfo.UserID)
//friends, err := s.friendRpcClient.GetFriendIDs(ctx, req.UserInfo.UserID)
//if err != nil {
// return nil, err
//}
//if req.UserInfo.Nickname != "" || req.UserInfo.FaceURL != "" {
// if err = s.NotificationUserInfoUpdate(ctx, req.UserInfo.UserID,oldUser); err != nil {
// return nil, err
// }
//}
//for _, friendID := range friends {
// s.friendNotificationSender.FriendInfoUpdatedNotification(ctx, req.UserInfo.UserID, friendID)
//}
s.webhookAfterUpdateUserInfo(ctx, &s.config.WebhooksConfig.AfterUpdateUserInfo, req)
if err = s.NotificationUserInfoUpdate(ctx, req.UserInfo.UserID, oldUser); err != nil {
return nil, err
}
return resp, nil
}
func (s *userServer) UpdateUserInfoEx(ctx context.Context, req *pbuser.UpdateUserInfoExReq) (resp *pbuser.UpdateUserInfoExResp, err error) {
resp = &pbuser.UpdateUserInfoExResp{}
err = authverify.CheckAccessV3(ctx, req.UserInfo.UserID, s.config.Share.IMAdminUserID)
if err != nil {
return nil, err
}
if err = s.webhookBeforeUpdateUserInfoEx(ctx, &s.config.WebhooksConfig.BeforeUpdateUserInfoEx, req); err != nil {
return nil, err
}
oldUser, err := s.db.GetUserByID(ctx, req.UserInfo.UserID)
if err != nil {
return nil, err
}
data := convert.UserPb2DBMapEx(req.UserInfo)
if err = s.db.UpdateByMap(ctx, req.UserInfo.UserID, data); err != nil {
return nil, err
}
s.friendNotificationSender.UserInfoUpdatedNotification(ctx, req.UserInfo.UserID)
//friends, err := s.friendRpcClient.GetFriendIDs(ctx, req.UserInfo.UserID)
//if err != nil {
@ -199,6 +194,7 @@ func (s *userServer) UpdateUserInfoEx(ctx context.Context, req *pbuser.UpdateUse
if err := s.NotificationUserInfoUpdate(ctx, req.UserInfo.UserID, oldUser); err != nil {
return nil, err
}
return resp, nil
}
func (s *userServer) SetGlobalRecvMessageOpt(ctx context.Context, req *pbuser.SetGlobalRecvMessageOptReq) (resp *pbuser.SetGlobalRecvMessageOptResp, err error) {

@ -81,6 +81,7 @@ func Start(ctx context.Context, config *CronTaskConfig) error {
deltime := now.Add(-time.Hour * 24 * time.Duration(config.CronTask.RetainChatRecords))
ctx := mcontext.SetOperationID(ctx, fmt.Sprintf("cron_%d_%d", os.Getpid(), deltime.UnixMilli()))
log.ZInfo(ctx, "clear chat records", "deltime", deltime, "timestamp", deltime.UnixMilli())
if _, err := msgClient.ClearMsg(ctx, &msg.ClearMsgReq{Timestamp: deltime.UnixMilli()}); err != nil {
log.ZError(ctx, "cron clear chat records failed", err, "deltime", deltime, "cont", time.Since(now))
return

@ -18,7 +18,9 @@ const (
CallbackBeforeInviteJoinGroupCommand = "callbackBeforeInviteJoinGroupCommand"
CallbackAfterJoinGroupCommand = "callbackAfterJoinGroupCommand"
CallbackAfterSetGroupInfoCommand = "callbackAfterSetGroupInfoCommand"
CallbackAfterSetGroupInfoEXCommand = "callbackAfterSetGroupInfoCommandEX"
CallbackBeforeSetGroupInfoCommand = "callbackBeforeSetGroupInfoCommand"
CallbackBeforeSetGroupInfoEXCommand = "callbackBeforeSetGroupInfoEXCommand"
CallbackAfterRevokeMsgCommand = "callbackBeforeAfterMsgCommand"
CallbackBeforeAddBlackCommand = "callbackBeforeAddBlackCommand"
CallbackAfterAddFriendCommand = "callbackAfterAddFriendCommand"

@ -17,6 +17,7 @@ package callbackstruct
import (
"github.com/openimsdk/open-im-server/v3/pkg/apistruct"
common "github.com/openimsdk/protocol/sdkws"
"github.com/openimsdk/protocol/wrapperspb"
)
type CallbackCommand string
@ -242,3 +243,48 @@ type CallbackAfterSetGroupInfoReq struct {
type CallbackAfterSetGroupInfoResp struct {
CommonCallbackResp
}
type CallbackBeforeSetGroupInfoEXReq struct {
CallbackCommand `json:"callbackCommand"`
OperationID string `json:"operationID"`
GroupID string `json:"groupID"`
GroupName string `json:"groupName"`
Notification *wrapperspb.StringValue `json:"notification"`
Introduction *wrapperspb.StringValue `json:"introduction"`
FaceURL *wrapperspb.StringValue `json:"faceURL"`
Ex *wrapperspb.StringValue `json:"ex"`
NeedVerification *wrapperspb.Int32Value `json:"needVerification"`
LookMemberInfo *wrapperspb.Int32Value `json:"lookMemberInfo"`
ApplyMemberFriend *wrapperspb.Int32Value `json:"applyMemberFriend"`
}
type CallbackBeforeSetGroupInfoEXResp struct {
CommonCallbackResp
GroupID string `json:"groupID"`
GroupName string `json:"groupName"`
Notification *wrapperspb.StringValue `json:"notification"`
Introduction *wrapperspb.StringValue `json:"introduction"`
FaceURL *wrapperspb.StringValue `json:"faceURL"`
Ex *wrapperspb.StringValue `json:"ex"`
NeedVerification *wrapperspb.Int32Value `json:"needVerification"`
LookMemberInfo *wrapperspb.Int32Value `json:"lookMemberInfo"`
ApplyMemberFriend *wrapperspb.Int32Value `json:"applyMemberFriend"`
}
type CallbackAfterSetGroupInfoEXReq struct {
CallbackCommand `json:"callbackCommand"`
OperationID string `json:"operationID"`
GroupID string `json:"groupID"`
GroupName string `json:"groupName"`
Notification *wrapperspb.StringValue `json:"notification"`
Introduction *wrapperspb.StringValue `json:"introduction"`
FaceURL *wrapperspb.StringValue `json:"faceURL"`
Ex *wrapperspb.StringValue `json:"ex"`
NeedVerification *wrapperspb.Int32Value `json:"needVerification"`
LookMemberInfo *wrapperspb.Int32Value `json:"lookMemberInfo"`
ApplyMemberFriend *wrapperspb.Int32Value `json:"applyMemberFriend"`
}
type CallbackAfterSetGroupInfoEXResp struct {
CommonCallbackResp
}

@ -258,7 +258,8 @@ type Group struct {
ListenIP string `mapstructure:"listenIP"`
Ports []int `mapstructure:"ports"`
} `mapstructure:"rpc"`
Prometheus Prometheus `mapstructure:"prometheus"`
Prometheus Prometheus `mapstructure:"prometheus"`
EnableHistoryForNewMembers bool `mapstructure:"enableHistoryForNewMembers"`
}
type Msg struct {
@ -421,6 +422,8 @@ type Webhooks struct {
BeforeInviteUserToGroup BeforeConfig `mapstructure:"beforeInviteUserToGroup"`
AfterSetGroupInfo AfterConfig `mapstructure:"afterSetGroupInfo"`
BeforeSetGroupInfo BeforeConfig `mapstructure:"beforeSetGroupInfo"`
AfterSetGroupInfoEX AfterConfig `mapstructure:"afterSetGroupInfoEX"`
BeforeSetGroupInfoEX BeforeConfig `mapstructure:"beforeSetGroupInfoEX"`
AfterRevokeMsg AfterConfig `mapstructure:"afterRevokeMsg"`
BeforeAddBlack BeforeConfig `mapstructure:"beforeAddBlack"`
AfterAddFriend AfterConfig `mapstructure:"afterAddFriend"`

@ -36,3 +36,26 @@ func TestLoadOpenIMRpcUserConfig(t *testing.T) {
//export IMENV_OPENIM_RPC_USER_RPC_PORTS="10110,10111,10112"
assert.Equal(t, []int{10110, 10111, 10112}, user.RPC.Ports)
}
func TestLoadNotificationConfig(t *testing.T) {
var noti Notification
err := LoadConfig("../../../config/notification.yml", "IMENV_NOTIFICATION", &noti)
assert.Nil(t, err)
assert.Equal(t, "Your friend's profile has been changed", noti.FriendRemarkSet.OfflinePush.Title)
}
func TestLoadOpenIMThirdConfig(t *testing.T) {
var third Third
err := LoadConfig("../../../config/openim-rpc-third.yml", "IMENV_OPENIM_RPC_THIRD", &third)
assert.Nil(t, err)
assert.Equal(t, "enabled", third.Object.Enable)
assert.Equal(t, "https://oss-cn-chengdu.aliyuncs.com", third.Object.Oss.Endpoint)
assert.Equal(t, "my_bucket_name", third.Object.Oss.Bucket)
assert.Equal(t, "https://my_bucket_name.oss-cn-chengdu.aliyuncs.com", third.Object.Oss.BucketURL)
assert.Equal(t, "AKID1234567890", third.Object.Oss.AccessKeyID)
assert.Equal(t, "abc123xyz789", third.Object.Oss.AccessKeySecret)
assert.Equal(t, "session_token_value", third.Object.Oss.SessionToken) // Uncomment if session token is needed
assert.Equal(t, true, third.Object.Oss.PublicRead)
// Environment: IMENV_OPENIM_RPC_THIRD_OBJECT_ENABLE=enabled;IMENV_OPENIM_RPC_THIRD_OBJECT_OSS_ENDPOINT=https://oss-cn-chengdu.aliyuncs.com;IMENV_OPENIM_RPC_THIRD_OBJECT_OSS_BUCKET=my_bucket_name;IMENV_OPENIM_RPC_THIRD_OBJECT_OSS_BUCKETURL=https://my_bucket_name.oss-cn-chengdu.aliyuncs.com;IMENV_OPENIM_RPC_THIRD_OBJECT_OSS_ACCESSKEYID=AKID1234567890;IMENV_OPENIM_RPC_THIRD_OBJECT_OSS_ACCESSKEYSECRET=abc123xyz789;IMENV_OPENIM_RPC_THIRD_OBJECT_OSS_SESSIONTOKEN=session_token_value;IMENV_OPENIM_RPC_THIRD_OBJECT_OSS_PUBLICREAD=true
}

@ -18,11 +18,12 @@ import (
"context"
"encoding/json"
"errors"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/database"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/model"
"strings"
"time"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/database"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/model"
"github.com/openimsdk/open-im-server/v3/pkg/common/config"
"github.com/openimsdk/open-im-server/v3/pkg/common/convert"
"github.com/openimsdk/open-im-server/v3/pkg/common/prommetrics"
@ -89,16 +90,18 @@ type CommonMsgDatabase interface {
// to mq
MsgToMQ(ctx context.Context, key string, msg2mq *sdkws.MsgData) error
MsgToPushMQ(ctx context.Context, key, conversarionID string, msg2mq *sdkws.MsgData) (int32, int64, error)
MsgToMongoMQ(ctx context.Context, key, conversarionID string, msgs []*sdkws.MsgData, lastSeq int64) error
MsgToPushMQ(ctx context.Context, key, conversationID string, msg2mq *sdkws.MsgData) (int32, int64, error)
MsgToMongoMQ(ctx context.Context, key, conversationID string, msgs []*sdkws.MsgData, lastSeq int64) error
RangeUserSendCount(ctx context.Context, start time.Time, end time.Time, group bool, ase bool, pageNumber int32, showNumber int32) (msgCount int64, userCount int64, users []*model.UserCount, dateCount map[string]int64, err error)
RangeGroupSendCount(ctx context.Context, start time.Time, end time.Time, ase bool, pageNumber int32, showNumber int32) (msgCount int64, userCount int64, groups []*model.GroupCount, dateCount map[string]int64, err error)
ConvertMsgsDocLen(ctx context.Context, conversationIDs []string)
// clear msg
GetBeforeMsg(ctx context.Context, ts int64, limit int) ([]*model.MsgDocModel, error)
GetBeforeMsg(ctx context.Context, ts int64, docIds []string, limit int) ([]*model.MsgDocModel, error)
DeleteDocMsgBefore(ctx context.Context, ts int64, doc *model.MsgDocModel) ([]int, error)
GetDocIDs(ctx context.Context) ([]string, error)
}
func NewCommonMsgDatabase(msgDocModel database.Msg, msg cache.MsgCache, seqUser cache.SeqUser, seqConversation cache.SeqConversationCache, kafkaConf *config.Kafka) (CommonMsgDatabase, error) {
@ -912,8 +915,25 @@ func (db *commonMsgDatabase) ConvertMsgsDocLen(ctx context.Context, conversation
db.msgDocDatabase.ConvertMsgsDocLen(ctx, conversationIDs)
}
func (db *commonMsgDatabase) GetBeforeMsg(ctx context.Context, ts int64, limit int) ([]*model.MsgDocModel, error) {
return db.msgDocDatabase.GetBeforeMsg(ctx, ts, limit)
func (db *commonMsgDatabase) GetBeforeMsg(ctx context.Context, ts int64, docIDs []string, limit int) ([]*model.MsgDocModel, error) {
var msgs []*model.MsgDocModel
for i := 0; i < len(docIDs); i += 1000 {
end := i + 1000
if end > len(docIDs) {
end = len(docIDs)
}
res, err := db.msgDocDatabase.GetBeforeMsg(ctx, ts, docIDs[i:end], limit)
if err != nil {
return nil, err
}
msgs = append(msgs, res...)
if len(msgs) >= limit {
return msgs[:limit], nil
}
}
return msgs, nil
}
func (db *commonMsgDatabase) DeleteDocMsgBefore(ctx context.Context, ts int64, doc *model.MsgDocModel) ([]int, error) {
@ -936,8 +956,10 @@ func (db *commonMsgDatabase) DeleteDocMsgBefore(ctx context.Context, ts int64, d
return index, err
}
if len(index) == notNull {
log.ZDebug(ctx, "Delete db in Doc", "DocID", doc.DocID, "index", index, "maxSeq", maxSeq)
return index, db.msgDocDatabase.DeleteDoc(ctx, doc.DocID)
} else {
log.ZDebug(ctx, "delete db in index", "DocID", doc.DocID, "index", index, "maxSeq", maxSeq)
return index, db.msgDocDatabase.DeleteMsgByIndex(ctx, doc.DocID, index)
}
}
@ -955,3 +977,7 @@ func (db *commonMsgDatabase) setMinSeq(ctx context.Context, conversationID strin
}
return db.seqConversation.SetMinSeq(ctx, conversationID, seq)
}
func (db *commonMsgDatabase) GetDocIDs(ctx context.Context) ([]string, error) {
return db.msgDocDatabase.GetDocIDs(ctx)
}

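The batched GetBeforeMsg above walks the doc ID list in slices of 1000 so the Mongo $in filter stays bounded, and stops early once enough documents have been collected. The same slicing as a small generic helper (illustration only, not part of the codebase):

package sketch

// forEachChunk calls fn on consecutive slices of at most size elements and
// stops early when fn reports it has seen enough, mirroring the 1000-ID
// batching in the new GetBeforeMsg.
func forEachChunk[T any](items []T, size int, fn func(batch []T) (done bool, err error)) error {
	for i := 0; i < len(items); i += size {
		end := i + size
		if end > len(items) {
			end = len(items)
		}
		done, err := fn(items[i:end])
		if err != nil {
			return err
		}
		if done {
			return nil
		}
	}
	return nil
}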
@ -8,6 +8,7 @@ import (
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/database"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/model"
"github.com/openimsdk/tools/utils/datautil"
"golang.org/x/exp/rand"
"github.com/openimsdk/protocol/constant"
"github.com/openimsdk/protocol/msg"
@ -117,9 +118,9 @@ func (m *MsgMgo) GetMsgBySeqIndexIn1Doc(ctx context.Context, userID, docID strin
}
func (m *MsgMgo) getMsgBySeqIndexIn1Doc(ctx context.Context, userID, docID string, seqs []int64) ([]*model.MsgInfoModel, error) {
indexs := make([]int64, 0, len(seqs))
indexes := make([]int64, 0, len(seqs))
for _, seq := range seqs {
indexs = append(indexs, m.model.GetMsgIndex(seq))
indexes = append(indexes, m.model.GetMsgIndex(seq))
}
pipeline := mongo.Pipeline{
bson.D{{Key: "$match", Value: bson.D{
@ -130,7 +131,7 @@ func (m *MsgMgo) getMsgBySeqIndexIn1Doc(ctx context.Context, userID, docID strin
{Key: "doc_id", Value: 1},
{Key: "msgs", Value: bson.D{
{Key: "$map", Value: bson.D{
{Key: "input", Value: indexs},
{Key: "input", Value: indexes},
{Key: "as", Value: "index"},
{Key: "in", Value: bson.D{
{Key: "$arrayElemAt", Value: bson.A{"$msgs", "$$index"}},
@ -1226,10 +1227,53 @@ func (m *MsgMgo) ConvertMsgsDocLen(ctx context.Context, conversationIDs []string
}
}
func (m *MsgMgo) GetBeforeMsg(ctx context.Context, ts int64, limit int) ([]*model.MsgDocModel, error) {
func (m *MsgMgo) GetDocIDs(ctx context.Context) ([]string, error) {
limit := 5000
var skip int
var docIDs []string
var offset int
count, err := m.coll.CountDocuments(ctx, bson.M{})
if err != nil {
return nil, err
}
if count < int64(limit) {
skip = 0
} else {
rand.Seed(uint64(time.Now().UnixMilli()))
skip = rand.Intn(int(count / int64(limit)))
offset = skip * limit
}
log.ZDebug(ctx, "offset", "skip", skip, "offset", offset)
res, err := mongoutil.Aggregate[*model.MsgDocModel](ctx, m.coll, []bson.M{
{
"$project": bson.M{
"doc_id": 1,
},
},
{
"$skip": offset,
},
{
"$limit": limit,
},
})
for _, doc := range res {
docIDs = append(docIDs, doc.DocID)
}
return docIDs, errs.Wrap(err)
}
func (m *MsgMgo) GetBeforeMsg(ctx context.Context, ts int64, docIDs []string, limit int) ([]*model.MsgDocModel, error) {
return mongoutil.Aggregate[*model.MsgDocModel](ctx, m.coll, []bson.M{
{
"$match": bson.M{
"doc_id": bson.M{
"$in": docIDs,
},
"msgs.msg.send_time": bson.M{
"$lt": ts,
},

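GetDocIDs caps each scan at 5000 doc IDs and starts from a randomly chosen bucket, so successive cleanup runs sample different parts of the collection instead of always re-reading the same documents. The offset calculation on its own, using the standard library's math/rand for brevity:

package sketch

import "math/rand"

// randomOffset returns the start of a randomly chosen bucket of size limit
// within count documents; with fewer than limit documents it starts at zero.
func randomOffset(count, limit int64) int64 {
	if count < limit {
		return 0
	}
	buckets := count / limit
	return rand.Int63n(buckets) * limit
}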
@ -167,6 +167,10 @@ func (u *UserMgo) DeleteUserCommand(ctx context.Context, userID string, Type int
filter := bson.M{"userID": userID, "type": Type, "uuid": UUID}
result, err := collection.DeleteOne(ctx, filter)
// when err is not nil, result might be nil
if err != nil {
return errs.Wrap(err)
}
if result.DeletedCount == 0 {
// No records found to delete
return errs.Wrap(errs.ErrRecordNotFound)

@ -16,10 +16,11 @@ package database
import (
"context"
"time"
"github.com/openimsdk/open-im-server/v3/pkg/common/storage/model"
"github.com/openimsdk/protocol/msg"
"go.mongodb.org/mongo-driver/mongo"
"time"
)
type Msg interface {
@ -44,5 +45,7 @@ type Msg interface {
DeleteDoc(ctx context.Context, docID string) error
DeleteMsgByIndex(ctx context.Context, docID string, index []int) error
GetBeforeMsg(ctx context.Context, ts int64, limit int) ([]*model.MsgDocModel, error)
GetBeforeMsg(ctx context.Context, ts int64, docIDs []string, limit int) ([]*model.MsgDocModel, error)
GetDocIDs(ctx context.Context) ([]string, error)
}

@ -92,15 +92,15 @@ type GroupCount struct {
Count int64 `bson:"count"`
}
func (MsgDocModel) TableName() string {
func (*MsgDocModel) TableName() string {
return MsgTableName
}
func (MsgDocModel) GetSingleGocMsgNum() int64 {
func (*MsgDocModel) GetSingleGocMsgNum() int64 {
return singleGocMsgNum
}
func (MsgDocModel) GetSingleGocMsgNum5000() int64 {
func (*MsgDocModel) GetSingleGocMsgNum5000() int64 {
return singleGocMsgNum5000
}
@ -108,12 +108,12 @@ func (m *MsgDocModel) IsFull() bool {
return m.Msg[len(m.Msg)-1].Msg != nil
}
func (m MsgDocModel) GetDocID(conversationID string, seq int64) string {
func (m *MsgDocModel) GetDocID(conversationID string, seq int64) string {
seqSuffix := (seq - 1) / singleGocMsgNum
return m.indexGen(conversationID, seqSuffix)
}
func (m MsgDocModel) GetDocIDSeqsMap(conversationID string, seqs []int64) map[string][]int64 {
func (m *MsgDocModel) GetDocIDSeqsMap(conversationID string, seqs []int64) map[string][]int64 {
t := make(map[string][]int64)
for i := 0; i < len(seqs); i++ {
docID := m.GetDocID(conversationID, seqs[i])
@ -127,15 +127,15 @@ func (m MsgDocModel) GetDocIDSeqsMap(conversationID string, seqs []int64) map[st
return t
}
func (MsgDocModel) GetMsgIndex(seq int64) int64 {
func (*MsgDocModel) GetMsgIndex(seq int64) int64 {
return (seq - 1) % singleGocMsgNum
}
func (MsgDocModel) indexGen(conversationID string, seqSuffix int64) string {
func (*MsgDocModel) indexGen(conversationID string, seqSuffix int64) string {
return conversationID + ":" + strconv.FormatInt(seqSuffix, 10)
}
func (MsgDocModel) GenExceptionMessageBySeqs(seqs []int64) (exceptionMsg []*sdkws.MsgData) {
func (*MsgDocModel) GenExceptionMessageBySeqs(seqs []int64) (exceptionMsg []*sdkws.MsgData) {
for _, v := range seqs {
msgModel := new(sdkws.MsgData)
msgModel.Seq = v

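Moving MsgDocModel's helpers from value to pointer receivers keeps the receiver kind consistent with the existing pointer-receiver methods such as IsFull; mixing the two on one type is a common lint warning. The distinction in miniature, on a hypothetical doc type:

package sketch

type doc struct {
	msgs []int
}

// Value receiver: each call works on a copy of the struct.
func (d doc) lenByValue() int { return len(d.msgs) }

// Pointer receiver: no copy, and the method belongs to *doc's method set.
func (d *doc) lenByPointer() int { return len(d.msgs) }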
@ -77,6 +77,11 @@ func (c *ConversationRpcClient) SetConversationMaxSeq(ctx context.Context, owner
return err
}
func (c *ConversationRpcClient) SetConversationMinSeq(ctx context.Context, ownerUserIDs []string, conversationID string, minSeq int64) error {
_, err := c.Client.SetConversationMinSeq(ctx, &pbconversation.SetConversationMinSeqReq{OwnerUserID: ownerUserIDs, ConversationID: conversationID, MinSeq: minSeq})
return err
}
func (c *ConversationRpcClient) SetConversations(ctx context.Context, userIDs []string, conversation *pbconversation.ConversationReq) error {
_, err := c.Client.SetConversations(ctx, &pbconversation.SetConversationsReq{UserIDs: userIDs, Conversation: conversation})
return err

@ -159,6 +159,15 @@ func (m *MessageRpcClient) SendMsg(ctx context.Context, req *msg.SendMsgReq) (*m
return resp, nil
}
// SetUserConversationsMinSeq set min seq
func (m *MessageRpcClient) SetUserConversationsMinSeq(ctx context.Context, req *msg.SetUserConversationsMinSeqReq) (*msg.SetUserConversationsMinSeqResp, error) {
resp, err := m.Client.SetUserConversationsMinSeq(ctx, req)
if err != nil {
return nil, err
}
return resp, nil
}
// GetMaxSeq retrieves the maximum sequence number from the gRPC client.
// Errors during the gRPC call are wrapped to provide additional context.
func (m *MessageRpcClient) GetMaxSeq(ctx context.Context, req *sdkws.GetMaxSeqReq) (*sdkws.GetMaxSeqResp, error) {
@ -174,6 +183,9 @@ func (m *MessageRpcClient) GetMaxSeqs(ctx context.Context, conversationIDs []str
resp, err := m.Client.GetMaxSeqs(ctx, &msg.GetMaxSeqsReq{
ConversationIDs: conversationIDs,
})
if err != nil {
return nil, err
}
return resp.MaxSeqs, err
}
@ -182,6 +194,9 @@ func (m *MessageRpcClient) GetHasReadSeqs(ctx context.Context, userID string, co
UserID: userID,
ConversationIDs: conversationIDs,
})
if err != nil {
return nil, err
}
return resp.MaxSeqs, err
}
@ -190,6 +205,9 @@ func (m *MessageRpcClient) GetMsgByConversationIDs(ctx context.Context, docIDs [
ConversationIDs: docIDs,
MaxSeqs: seqs,
})
if err != nil {
return nil, err
}
return resp.MsgDatas, err
}

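The err checks added to GetMaxSeqs, GetHasReadSeqs and GetMsgByConversationIDs guard against dereferencing a nil response: when a gRPC call fails the returned resp is typically nil, so the previous return of resp.MaxSeqs alongside err could panic before the error was ever surfaced. The safe pattern, with a hypothetical response type in place of the generated protobuf message:

package sketch

import "context"

// getMaxSeqsResp is a hypothetical stand-in for the generated response struct.
type getMaxSeqsResp struct {
	MaxSeqs map[string]int64
}

// getMaxSeqs dereferences the response only after the error has been checked,
// so a failed RPC returns an error instead of panicking on a nil response.
func getMaxSeqs(ctx context.Context, call func(ctx context.Context, ids []string) (*getMaxSeqsResp, error), ids []string) (map[string]int64, error) {
	resp, err := call(ctx, ids)
	if err != nil {
		return nil, err
	}
	return resp.MaxSeqs, nil
}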
@ -234,7 +234,7 @@ func (m *Manage) RunTask(ctx context.Context, task Task) (string, error) {
}
for i, currentPartSize := range part.PartSizes {
md5Reader := NewMd5Reader(io.LimitReader(reader, currentPartSize))
if m.doPut(ctx, m.api.Client, initiateMultipartUploadResp.Upload.Sign, uploadParts[i], md5Reader, currentPartSize); err != nil {
if err := m.doPut(ctx, m.api.Client, initiateMultipartUploadResp.Upload.Sign, uploadParts[i], md5Reader, currentPartSize); err != nil {
return "", err
}
if md5val := md5Reader.Md5(); md5val != part.PartMd5s[i] {
