Merge branch 'v2.3.0release' of github.com:OpenIMSDK/Open-IM-Server into v2.3.0release

pull/351/head
wangchuxiao 2 years ago
commit 978a7e209e

@@ -0,0 +1,3 @@
USER=root
PASSWORD=openIM
DATA_DIR=./
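
The new .env file holds the defaults that docker-compose substitutes into the ${USER}, ${PASSWORD} and ${DATA_DIR} references introduced below; compose picks it up automatically from the directory containing docker-compose.yml. Purely to illustrate the file format (plain KEY=VALUE lines), here is a minimal Go sketch of a parser; loadEnvFile is a hypothetical helper and not part of this commit, since compose performs the substitution itself.

package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

// loadEnvFile parses simple KEY=VALUE lines (the format of the .env above),
// skipping blank lines and "#" comments. Hypothetical helper for illustration only.
func loadEnvFile(path string) (map[string]string, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	vars := make(map[string]string)
	sc := bufio.NewScanner(f)
	for sc.Scan() {
		line := strings.TrimSpace(sc.Text())
		if line == "" || strings.HasPrefix(line, "#") {
			continue
		}
		parts := strings.SplitN(line, "=", 2)
		if len(parts) != 2 {
			continue
		}
		vars[strings.TrimSpace(parts[0])] = strings.TrimSpace(parts[1])
	}
	return vars, sc.Err()
}

func main() {
	vars, err := loadEnvFile(".env")
	if err != nil {
		panic(err)
	}
	fmt.Println(vars["USER"], vars["PASSWORD"], vars["DATA_DIR"])
}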

@@ -1,6 +1,5 @@
version: "3"
#fixme Clone the openIM Server project before using docker-compose; project address: https://github.com/OpenIMSDK/Open-IM-Server.git
version: "3"
services:
mysql:
image: mysql:5.7
@@ -9,10 +8,10 @@ services:
- 23306:33060
container_name: mysql
volumes:
- ./components/mysql/data:/var/lib/mysql
- ${DATA_DIR}/components/mysql/data:/var/lib/mysql
- /etc/localtime:/etc/localtime
environment:
MYSQL_ROOT_PASSWORD: openIM
MYSQL_ROOT_PASSWORD: ${PASSWORD}
restart: always
mongodb:
@@ -22,19 +21,17 @@ services:
container_name: mongo
command: --wiredTigerCacheSizeGB 1
volumes:
- ./components/mongodb/data/db:/data/db
- ./components/mongodb/data/logs:/data/logs
- ./components/mongodb/data/conf:/etc/mongo
# - ./script/mongo-init.sh:/docker-entrypoint-initdb.d/mongo-init.sh
- ${DATA_DIR}/components/mongodb/data/db:/data/db
- ${DATA_DIR}/components/mongodb/data/logs:/data/logs
- ${DATA_DIR}/components/mongodb/data/conf:/etc/mongo
- ./script/mongo-init.sh:/docker-entrypoint-initdb.d/mongo-init.sh:ro
environment:
- TZ=Asia/Shanghai
# cache
- wiredTigerCacheSizeGB=1
# - MONGO_USERNAME=openIM
# - MONGO_PASSWORD=openIM
# - MONGO_INITDB_ROOT_USERNAME=root
# - MONGO_INITDB_ROOT_PASSWORD=root
# - MONGO_INITDB_DATABASE=openIM
- MONGO_INITDB_ROOT_USERNAME=${USER}
- MONGO_INITDB_ROOT_PASSWORD=${PASSWORD}
- MONGO_INITDB_DATABASE=openIM
restart: always
redis:
@@ -43,15 +40,15 @@ services:
- 16379:6379
container_name: redis
volumes:
- ./components/redis/data:/data
- ${DATA_DIR}/components/redis/data:/data
#redis config file
- ./components/redis/config/redis.conf:/usr/local/redis/config/redis.conf
- ${DATA_DIR}/components/redis/config/redis.conf:/usr/local/redis/config/redis.conf
environment:
TZ: Asia/Shanghai
restart: always
sysctls:
net.core.somaxconn: 1024
command: redis-server --requirepass openIM --appendonly yes
command: redis-server --requirepass ${PASSWORD} --appendonly yes
zookeeper:
@@ -108,19 +105,19 @@ services:
- /mnt/data:/data
- /mnt/config:/root/.minio
environment:
MINIO_ROOT_USER: user12345
MINIO_ROOT_PASSWORD: key12345
MINIO_ROOT_USER: ${USER}
MINIO_ROOT_PASSWORD: ${PASSWORD}
restart: always
command: minio server /data --console-address ':9090'
open_im_server:
image: openim/open_im_server:v2.3.3
image: openim/open_im_server:v2.3.2
container_name: open_im_server
volumes:
- ./logs:/Open-IM-Server/logs
- ./config/config.yaml:/Open-IM-Server/config/config.yaml
- ./db/sdk:/Open-IM-Server/db/sdk
- ${DATA_DIR}/db/sdk:/Open-IM-Server/db/sdk
- ./script:/Open-IM-Server/script
restart: always
depends_on:
@@ -140,8 +137,8 @@ services:
prometheus:
image: prom/prometheus
volumes:
- ./docker-compose_cfg/prometheus-compose.yml:/etc/prometheus/prometheus.yml
# - ./components/prometheus_data:/prometheus
- ${DATA_DIR}/docker-compose_cfg/prometheus-compose.yml:/etc/prometheus/prometheus.yml
# - ${DATA_DIR}/components/prometheus_data:/prometheus
container_name: prometheus
ports:
- 9091:9091
@@ -153,20 +150,17 @@ services:
grafana:
image: grafana/grafana
volumes:
- ./docker-compose_cfg/datasource-compose.yaml:/etc/grafana/provisioning/datasources/datasource.yaml
- ./docker-compose_cfg/grafana.ini:/etc/grafana/grafana.ini
# - ./docker-compose_cfg/node-exporter-full_rev1.json:/var/lib/grafana/dashboards/node-exporter-full_rev1.json
# - ./components/grafana:/var/lib/grafana
- ./docker-compose_cfg/grafana.db:/var/lib/grafana/grafana.db
# - ./grafana/dashboards/dashboard.json:/var/lib/grafana/dashboards/dashboard.json
# - ./grafana/provisioning/dashboard.yaml:/etc/grafana/provisioning/dashboards/dashboard.yaml
- ${DATA_DIR}/docker-compose_cfg/datasource-compose.yaml:/etc/grafana/provisioning/datasources/datasource.yaml
- ${DATA_DIR}/docker-compose_cfg/grafana.ini:/etc/grafana/grafana.ini
- ${DATA_DIR}/docker-compose_cfg/node-exporter-full_rev1.json:/var/lib/grafana/dashboards/node-exporter-full_rev1.json
container_name: grafana
ports:
- 10007:10007
depends_on:
- prometheus
network_mode: "host"
privileged: true
user: root
# -rw-r-----
node-exporter:
image: quay.io/prometheus/node-exporter
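
With the MySQL root password, the MongoDB root credentials, the redis requirepass and the MinIO keys above all driven by ${USER} and ${PASSWORD}, client code can read the same values from the environment when connecting. A minimal sketch, assuming USER and PASSWORD are exported to the process and that gorm v2 is used (gorm.io/gorm with gorm.io/driver/mysql, the driver imported later in this commit); host, port and database name are placeholders, not values from this diff.

package main

import (
	"fmt"
	"os"

	"gorm.io/driver/mysql"
	"gorm.io/gorm"
)

func main() {
	// Assumption: the same USER/PASSWORD values as in .env are exported to this process.
	user := os.Getenv("USER")
	password := os.Getenv("PASSWORD")

	// Host, port and database name are placeholders, not taken from this commit.
	dsn := fmt.Sprintf("%s:%s@tcp(127.0.0.1:13306)/openIM?charset=utf8mb4&parseTime=true",
		user, password)

	db, err := gorm.Open(mysql.Open(dsn), &gorm.Config{})
	if err != nil {
		panic(err)
	}
	_ = db // ready for queries; see the gorm hunks further down
}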

@@ -578,7 +578,7 @@ func init() {
panic(err.Error())
}
} else {
bytes, err := ioutil.ReadFile(filepath.Join(Root, "config", "config.yaml"))
bytes, err := ioutil.ReadFile("../config/config.yaml")
if err != nil {
panic(err.Error())
}
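
The two lines in this hunk differ in how the fallback branch locates config.yaml: one builds the path from Root, the other resolves "../config/config.yaml" against the process working directory. A small sketch of that difference, with Root as a placeholder standing in for the variable used by the surrounding init(); paths and prints are illustrative only.

package main

import (
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
)

func main() {
	// The relative form is resolved against the current working directory,
	// so the binary has to be started one level below the project root.
	wd, _ := os.Getwd()
	fmt.Println("relative form resolves to:", filepath.Join(wd, "..", "config", "config.yaml"))

	// A Root-based form does not depend on where the process was started.
	Root := "/Open-IM-Server" // placeholder value for illustration
	rootPath := filepath.Join(Root, "config", "config.yaml")
	fmt.Println("root-based form:", rootPath)

	// Either path is then read the same way as in the hunk above.
	if _, err := ioutil.ReadFile(rootPath); err != nil {
		fmt.Println("read failed:", err)
	}
}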

@@ -3,7 +3,6 @@ package db
import (
"Open_IM/pkg/common/config"
"fmt"
"sync"
"time"
"gorm.io/driver/mysql"
@@ -12,7 +11,7 @@
)
type mysqlDB struct {
sync.RWMutex
//sync.RWMutex
db *gorm.DB
}
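
Commenting out the embedded sync.RWMutex (and dropping the "sync" import in the hunk above) relies on *gorm.DB being safe for concurrent use by multiple goroutines: it handles its own connection pooling, so the wrapper needs no lock of its own. A hypothetical sketch of that pattern, not code from this commit:

package main

import (
	"sync"

	"gorm.io/gorm"
)

// queryConcurrently is a hypothetical helper: several goroutines share one
// *gorm.DB without any explicit locking, because *gorm.DB is safe for
// concurrent use and manages its own connection pool internally.
func queryConcurrently(gormDB *gorm.DB) {
	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			var count int64
			// Table name is illustrative only.
			gormDB.Table("client_init_config").Count(&count)
		}()
	}
	wg.Wait()
}

func main() {
	// Opening the *gorm.DB (gorm.Open) is shown in the earlier MySQL sketch; omitted here.
}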

@@ -17,6 +17,6 @@ func SetClientInitConfig(m map[string]interface{}) error {
func GetClientInitConfig() (db.ClientInitConfig, error) {
var config db.ClientInitConfig
err := db.DB.MysqlDB.DefaultGormDB().Model((&db.ClientInitConfig{})).First(&config).Error
err := db.DB.MysqlDB.DefaultGormDB().Model(&db.ClientInitConfig{}).First(&config).Error
return config, err
}
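
Model(&db.ClientInitConfig{}).First(&config) returns gorm.ErrRecordNotFound when the table is empty, so callers of GetClientInitConfig may want to tell that apart from a genuine database error. A hedged sketch of such a caller; the import path for the package that hosts GetClientInitConfig is an assumption, and the messages are illustrative.

package main

import (
	"errors"
	"fmt"

	"gorm.io/gorm"

	// Import path for the model package holding GetClientInitConfig is an
	// assumption for this sketch; adjust it to wherever the function lives.
	imdb "Open_IM/pkg/common/db/mysql_model/im_mysql_model"
)

func main() {
	cfg, err := imdb.GetClientInitConfig()
	switch {
	case err == nil:
		fmt.Printf("client init config: %+v\n", cfg)
	case errors.Is(err, gorm.ErrRecordNotFound):
		// First returns gorm.ErrRecordNotFound on an empty table; callers may
		// prefer to treat that as "not configured yet" rather than a failure.
		fmt.Println("no client init config stored yet")
	default:
		fmt.Println("query failed:", err)
	}
}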

@@ -31,7 +31,6 @@ message UserTokenResp {
}
message ForceLogoutReq {
int32 Platform = 1;
string FromUserID = 2;
@@ -56,8 +55,6 @@ message ParseTokenResp{
}
service Auth {
rpc UserRegister(UserRegisterReq) returns(UserRegisterResp);
rpc UserToken(UserTokenReq) returns(UserTokenResp);
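
For the Auth service above, the usual protoc-gen-go / protoc-gen-go-grpc output is a client with one method per rpc, and messages such as ForceLogoutReq become structs with Platform and FromUserID fields. A rough sketch of calling UserToken over gRPC, assuming the generated package lives at Open_IM/pkg/proto/auth (an assumption, as is the listen address), with the request left empty because UserTokenReq's fields are not shown in this diff.

package main

import (
	"context"
	"fmt"

	"google.golang.org/grpc"

	pbAuth "Open_IM/pkg/proto/auth" // generated package path is an assumption
)

func main() {
	// Address of the auth RPC server is a placeholder.
	conn, err := grpc.Dial("127.0.0.1:10160", grpc.WithInsecure())
	if err != nil {
		panic(err)
	}
	defer conn.Close()

	client := pbAuth.NewAuthClient(conn)

	// UserTokenReq fields are not visible in this hunk, so the request is left empty here.
	resp, err := client.UserToken(context.Background(), &pbAuth.UserTokenReq{})
	if err != nil {
		fmt.Println("UserToken failed:", err)
		return
	}
	fmt.Printf("UserToken resp: %+v\n", resp)
}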
