mirror of https://github.com/rocboss/paopao-ce
commit
91f56c26c5
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -0,0 +1,17 @@
|
||||
//go:build docs
|
||||
// +build docs
|
||||
|
||||
package docs
|
||||
|
||||
import (
|
||||
"embed"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
//go:embed index.html openapi.json **/*
|
||||
var files embed.FS
|
||||
|
||||
// NewFileSystem get an embed static assets http.FileSystem instance.
|
||||
func NewFileSystem() http.FileSystem {
|
||||
return http.FS(files)
|
||||
}
|
@ -0,0 +1,21 @@
|
||||
<!DOCTYPE html>
|
||||
<!-- Important: must specify -->
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<link rel="icon" href="/favicon.ico" />
|
||||
<title>paopao-ce develop documents</title>
|
||||
<!-- Important: rapi-doc uses utf8 charecters -->
|
||||
<script
|
||||
type="module"
|
||||
src="/docs/assets/rapidoc-min.js"
|
||||
></script>
|
||||
</head>
|
||||
<body>
|
||||
<rapi-doc
|
||||
spec-url="/docs/openapi.json"
|
||||
render-style="read"
|
||||
>
|
||||
</rapi-doc>
|
||||
</body>
|
||||
</html>
|
@ -0,0 +1,334 @@
|
||||
{
|
||||
"openapi": "3.0.0",
|
||||
"info": {
|
||||
"title": "paopao-ce API",
|
||||
"description": "# The paopao-ce API documentation\n\nWelcome to the paopao-ce API documentation!\n\n",
|
||||
"version": "0.1.0"
|
||||
},
|
||||
"servers": [
|
||||
{
|
||||
"url": "https://api.paopao.info"
|
||||
}
|
||||
],
|
||||
"tags": [],
|
||||
"paths": {
|
||||
"/{bucket}": {
|
||||
"post": {
|
||||
"summary": "Upload Image",
|
||||
"description": "Upload an image to the given bucket.\nThe `content-type` header must be provided as well\nas the `content-length` header otherwise the request will be rejected.\n\nThe uploaded file must also not exceed the given `content-length`.",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "bucket",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
},
|
||||
"in": "path",
|
||||
"description": "The bucket that the image should be uploaded.",
|
||||
"required": true,
|
||||
"deprecated": false
|
||||
},
|
||||
{
|
||||
"name": "content-length",
|
||||
"schema": {
|
||||
"type": "integer",
|
||||
"format": "uint64"
|
||||
},
|
||||
"in": "header",
|
||||
"description": "The total size of the image in bytes.",
|
||||
"required": true,
|
||||
"deprecated": false
|
||||
},
|
||||
{
|
||||
"name": "format",
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/ImageKind"
|
||||
},
|
||||
"in": "query",
|
||||
"description": "The format that the uploaded image is encoded in.\n\nIf not provided, lust will guess the encoding.",
|
||||
"required": false,
|
||||
"deprecated": false
|
||||
}
|
||||
],
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/octet-stream": {
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"format": "binary"
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": true
|
||||
},
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/UploadInfo"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Bucket not found"
|
||||
},
|
||||
"400": {
|
||||
"description": "The image format was incorrect or the system was\nunable to guess the format of the image."
|
||||
},
|
||||
"413": {
|
||||
"description": "The upload exceeds the configured maximum file size."
|
||||
},
|
||||
"401": {
|
||||
"description": "You are not authorized to complete this action.\n\nThis normally means the `Authorization` bearer has been left out\nof the request or is invalid."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/{bucket}/{image_id}": {
|
||||
"get": {
|
||||
"summary": "Fetch Image",
|
||||
"description": "Fetch the image from the storage backend and apply and additional affects\nif required.",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "bucket",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
},
|
||||
"in": "path",
|
||||
"description": "The bucket to try fetch the image from.",
|
||||
"required": true,
|
||||
"deprecated": false
|
||||
},
|
||||
{
|
||||
"name": "image_id",
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"in": "path",
|
||||
"description": "The id of the image.",
|
||||
"required": true,
|
||||
"deprecated": false
|
||||
},
|
||||
{
|
||||
"name": "format",
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/ImageKind"
|
||||
},
|
||||
"in": "query",
|
||||
"description": "The encoding format that the image should be returned as.",
|
||||
"required": false,
|
||||
"deprecated": false
|
||||
},
|
||||
{
|
||||
"name": "size",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
},
|
||||
"in": "query",
|
||||
"description": "The size preset that should be used when returning the image.",
|
||||
"required": false,
|
||||
"deprecated": false
|
||||
},
|
||||
{
|
||||
"name": "width",
|
||||
"schema": {
|
||||
"type": "integer",
|
||||
"format": "uint32"
|
||||
},
|
||||
"in": "query",
|
||||
"description": "A custom width to resize the returned image to.",
|
||||
"required": false,
|
||||
"deprecated": false
|
||||
},
|
||||
{
|
||||
"name": "height",
|
||||
"schema": {
|
||||
"type": "integer",
|
||||
"format": "uint32"
|
||||
},
|
||||
"in": "query",
|
||||
"description": "A custom height to resize the returned image to.",
|
||||
"required": false,
|
||||
"deprecated": false
|
||||
},
|
||||
{
|
||||
"name": "accept",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
},
|
||||
"in": "header",
|
||||
"description": "A set of `,` seperated content-types that could be sent as a response.\nE.g. `image/png,image/webp,image/gif`",
|
||||
"required": false,
|
||||
"deprecated": false
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "",
|
||||
"content": {
|
||||
"application/octet-stream": {
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"format": "binary"
|
||||
}
|
||||
}
|
||||
},
|
||||
"headers": {
|
||||
"CONTENT-TYPE": {
|
||||
"required": true,
|
||||
"deprecated": false,
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "The request is invalid with the current configuration.\n\nSee the detail section for more info.",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Detail"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Bucket does not exist or image does not exist.\n\nSee the detail section for more info.",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Detail"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"summary": "Delete Image",
|
||||
"description": "Delete the given image.\nThis will purge all variants of the image including sizing presets and formats.\n\nImages that do not exist already will be ignored and will not return a 404.",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "bucket",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
},
|
||||
"in": "path",
|
||||
"description": "The bucket to try delete the image from.",
|
||||
"required": true,
|
||||
"deprecated": false
|
||||
},
|
||||
{
|
||||
"name": "image_id",
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"in": "path",
|
||||
"description": "The image to delete try delete.",
|
||||
"required": true,
|
||||
"deprecated": false
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": ""
|
||||
},
|
||||
"401": {
|
||||
"description": "You are not authorized to complete this action.\n\nThis normally means the `Authorization` bearer has been left out\nof the request or is invalid."
|
||||
},
|
||||
"404": {
|
||||
"description": "Bucket does not exist."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"components": {
|
||||
"schemas": {
|
||||
"Detail": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"detail"
|
||||
],
|
||||
"properties": {
|
||||
"detail": {
|
||||
"type": "string",
|
||||
"description": "Additional information regarding the response."
|
||||
}
|
||||
}
|
||||
},
|
||||
"ImageKind": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"png",
|
||||
"jpeg",
|
||||
"webp",
|
||||
"gif"
|
||||
]
|
||||
},
|
||||
"ImageUploadInfo": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"sizing_id"
|
||||
],
|
||||
"properties": {
|
||||
"sizing_id": {
|
||||
"type": "integer",
|
||||
"format": "uint32",
|
||||
"description": "The computed image sizing id.\n\nThis is useful for tracking files outside of lust as this is\ngenerally used for filtering within the storage systems."
|
||||
}
|
||||
}
|
||||
},
|
||||
"UploadInfo": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"image_id",
|
||||
"processing_time",
|
||||
"io_time",
|
||||
"checksum",
|
||||
"images",
|
||||
"bucket_id"
|
||||
],
|
||||
"properties": {
|
||||
"image_id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "The generated ID for the file.\n\nThis can be used to access the file for the given bucket."
|
||||
},
|
||||
"processing_time": {
|
||||
"type": "number",
|
||||
"format": "float",
|
||||
"description": "The time spent processing the image in seconds."
|
||||
},
|
||||
"io_time": {
|
||||
"type": "number",
|
||||
"format": "float",
|
||||
"description": "The time spent uploading the image to the persistent store."
|
||||
},
|
||||
"checksum": {
|
||||
"type": "integer",
|
||||
"format": "uint32",
|
||||
"description": "The crc32 checksum of the uploaded image."
|
||||
},
|
||||
"images": {
|
||||
"type": "array",
|
||||
"description": "The information that is specific to the image.",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/ImageUploadInfo"
|
||||
}
|
||||
},
|
||||
"bucket_id": {
|
||||
"type": "integer",
|
||||
"format": "uint32",
|
||||
"description": "The id of the bucket the image was stored in.\n\nThis is useful for tracking files outside of lust as this is\ngenerally used for filtering within the storage systems."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,71 @@
|
||||
package conf
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/rocboss/paopao-ce/pkg/json"
|
||||
"github.com/sirupsen/logrus"
|
||||
"gopkg.in/resty.v1"
|
||||
)
|
||||
|
||||
type zincLogData struct {
|
||||
Time time.Time `json:"time"`
|
||||
Level logrus.Level `json:"level"`
|
||||
Message string `json:"message"`
|
||||
Data logrus.Fields `json:"data"`
|
||||
}
|
||||
|
||||
type zincLogIndex struct {
|
||||
Index map[string]string `json:"index"`
|
||||
}
|
||||
|
||||
type zincLogHook struct {
|
||||
host string
|
||||
index string
|
||||
user string
|
||||
password string
|
||||
}
|
||||
|
||||
func (h *zincLogHook) Fire(entry *logrus.Entry) error {
|
||||
index := &zincLogIndex{
|
||||
Index: map[string]string{
|
||||
"_index": h.index,
|
||||
},
|
||||
}
|
||||
indexBytes, _ := json.Marshal(index)
|
||||
|
||||
data := &zincLogData{
|
||||
Time: entry.Time,
|
||||
Level: entry.Level,
|
||||
Message: entry.Message,
|
||||
Data: entry.Data,
|
||||
}
|
||||
dataBytes, _ := json.Marshal(data)
|
||||
|
||||
logStr := string(indexBytes) + "\n" + string(dataBytes) + "\n"
|
||||
client := resty.New()
|
||||
|
||||
if _, err := client.SetDisableWarn(true).R().
|
||||
SetHeader("Content-Type", "application/json").
|
||||
SetBasicAuth(h.user, h.password).
|
||||
SetBody(logStr).
|
||||
Post(h.host); err != nil {
|
||||
fmt.Println(err.Error())
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (h *zincLogHook) Levels() []logrus.Level {
|
||||
return logrus.AllLevels
|
||||
}
|
||||
|
||||
func newZincLogHook() *zincLogHook {
|
||||
return &zincLogHook{
|
||||
host: loggerZincSetting.Endpoint() + "/es/_bulk",
|
||||
index: loggerZincSetting.Index,
|
||||
user: loggerZincSetting.User,
|
||||
password: loggerZincSetting.Password,
|
||||
}
|
||||
}
|
@ -0,0 +1,10 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"github.com/Masterminds/semver/v3"
|
||||
)
|
||||
|
||||
type VersionInfo interface {
|
||||
Name() string
|
||||
Version() *semver.Version
|
||||
}
|
@ -0,0 +1,40 @@
|
||||
package dao
|
||||
|
||||
import (
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/allegro/bigcache/v3"
|
||||
"github.com/rocboss/paopao-ce/internal/core"
|
||||
"github.com/rocboss/paopao-ce/internal/model"
|
||||
)
|
||||
|
||||
var (
|
||||
_ core.CacheIndexService = (*simpleCacheIndexServant)(nil)
|
||||
_ core.CacheIndexService = (*bigCacheIndexServant)(nil)
|
||||
)
|
||||
|
||||
type postsEntry struct {
|
||||
key string
|
||||
posts []*model.PostFormated
|
||||
}
|
||||
|
||||
type indexPostsFunc func(int64, int, int) ([]*model.PostFormated, error)
|
||||
|
||||
type bigCacheIndexServant struct {
|
||||
getIndexPosts indexPostsFunc
|
||||
indexActionCh chan core.IndexActionT
|
||||
cachePostsCh chan *postsEntry
|
||||
cache *bigcache.BigCache
|
||||
lastCacheResetTime time.Time
|
||||
}
|
||||
|
||||
type simpleCacheIndexServant struct {
|
||||
getIndexPosts indexPostsFunc
|
||||
indexActionCh chan core.IndexActionT
|
||||
indexPosts []*model.PostFormated
|
||||
atomicIndex atomic.Value
|
||||
maxIndexSize int
|
||||
checkTick *time.Ticker
|
||||
expireIndexTick *time.Ticker
|
||||
}
|
@ -0,0 +1,55 @@
|
||||
package dao
|
||||
|
||||
import (
|
||||
"github.com/aliyun/aliyun-oss-go-sdk/oss"
|
||||
"github.com/minio/minio-go/v7"
|
||||
"github.com/rocboss/paopao-ce/internal/conf"
|
||||
"github.com/rocboss/paopao-ce/internal/core"
|
||||
"github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
var (
|
||||
_ core.ObjectStorageService = (*aliossServant)(nil)
|
||||
_ core.ObjectStorageService = (*minioServant)(nil)
|
||||
_ core.ObjectStorageService = (*s3Servant)(nil)
|
||||
_ core.ObjectStorageService = (*localossServant)(nil)
|
||||
)
|
||||
|
||||
type localossServant struct {
|
||||
savePath string
|
||||
domain string
|
||||
}
|
||||
|
||||
type aliossServant struct {
|
||||
bucket *oss.Bucket
|
||||
domain string
|
||||
}
|
||||
|
||||
type minioServant struct {
|
||||
client *minio.Client
|
||||
bucket string
|
||||
domain string
|
||||
}
|
||||
|
||||
type s3Servant = minioServant
|
||||
|
||||
func NewObjectStorageService() (oss core.ObjectStorageService) {
|
||||
if conf.CfgIf("AliOSS") {
|
||||
oss = newAliossServent()
|
||||
} else if conf.CfgIf("MinIO") {
|
||||
oss = newMinioServeant()
|
||||
} else if conf.CfgIf("S3") {
|
||||
oss = newS3Servent()
|
||||
logrus.Infof("use S3 as object storage by version %s", oss.Version())
|
||||
return
|
||||
} else if conf.CfgIf("LocalOSS") {
|
||||
oss = newLocalossServent()
|
||||
} else {
|
||||
// default use AliOSS as object storage service
|
||||
oss = newAliossServent()
|
||||
logrus.Infof("use default AliOSS as object storage by version %s", oss.Version())
|
||||
return
|
||||
}
|
||||
logrus.Infof("use %s as object storage by version %s", oss.Name(), oss.Version())
|
||||
return
|
||||
}
|
@ -1,165 +1,71 @@
|
||||
package dao
|
||||
|
||||
import (
|
||||
"github.com/meilisearch/meilisearch-go"
|
||||
"github.com/rocboss/paopao-ce/internal/conf"
|
||||
"github.com/rocboss/paopao-ce/internal/core"
|
||||
"github.com/rocboss/paopao-ce/pkg/zinc"
|
||||
"github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
func (d *dataServant) CreateSearchIndex(indexName string) {
|
||||
// 不存在则创建索引
|
||||
d.zinc.CreateIndex(indexName, &zinc.ZincIndexProperty{
|
||||
"id": &zinc.ZincIndexPropertyT{
|
||||
Type: "numeric",
|
||||
Index: true,
|
||||
Store: true,
|
||||
Sortable: true,
|
||||
},
|
||||
"user_id": &zinc.ZincIndexPropertyT{
|
||||
Type: "numeric",
|
||||
Index: true,
|
||||
Store: true,
|
||||
},
|
||||
"comment_count": &zinc.ZincIndexPropertyT{
|
||||
Type: "numeric",
|
||||
Index: true,
|
||||
Sortable: true,
|
||||
Store: true,
|
||||
},
|
||||
"collection_count": &zinc.ZincIndexPropertyT{
|
||||
Type: "numeric",
|
||||
Index: true,
|
||||
Sortable: true,
|
||||
Store: true,
|
||||
},
|
||||
"upvote_count": &zinc.ZincIndexPropertyT{
|
||||
Type: "numeric",
|
||||
Index: true,
|
||||
Sortable: true,
|
||||
Store: true,
|
||||
},
|
||||
"is_top": &zinc.ZincIndexPropertyT{
|
||||
Type: "numeric",
|
||||
Index: true,
|
||||
Sortable: true,
|
||||
Store: true,
|
||||
},
|
||||
"is_essence": &zinc.ZincIndexPropertyT{
|
||||
Type: "numeric",
|
||||
Index: true,
|
||||
Sortable: true,
|
||||
Store: true,
|
||||
},
|
||||
"content": &zinc.ZincIndexPropertyT{
|
||||
Type: "text",
|
||||
Index: true,
|
||||
Store: true,
|
||||
Aggregatable: true,
|
||||
Highlightable: true,
|
||||
Analyzer: "gse_search",
|
||||
SearchAnalyzer: "gse_standard",
|
||||
},
|
||||
"tags": &zinc.ZincIndexPropertyT{
|
||||
Type: "keyword",
|
||||
Index: true,
|
||||
Store: true,
|
||||
},
|
||||
"ip_loc": &zinc.ZincIndexPropertyT{
|
||||
Type: "keyword",
|
||||
Index: true,
|
||||
Store: true,
|
||||
},
|
||||
"latest_replied_on": &zinc.ZincIndexPropertyT{
|
||||
Type: "numeric",
|
||||
Index: true,
|
||||
Sortable: true,
|
||||
Store: true,
|
||||
},
|
||||
"attachment_price": &zinc.ZincIndexPropertyT{
|
||||
Type: "numeric",
|
||||
Sortable: true,
|
||||
Store: true,
|
||||
},
|
||||
"created_on": &zinc.ZincIndexPropertyT{
|
||||
Type: "numeric",
|
||||
Index: true,
|
||||
Sortable: true,
|
||||
Store: true,
|
||||
},
|
||||
"modified_on": &zinc.ZincIndexPropertyT{
|
||||
Type: "numeric",
|
||||
Index: true,
|
||||
Sortable: true,
|
||||
Store: true,
|
||||
},
|
||||
})
|
||||
var (
|
||||
_ core.TweetSearchService = (*zincTweetSearchServant)(nil)
|
||||
_ core.TweetSearchService = (*bridgeTweetSearchServant)(nil)
|
||||
)
|
||||
|
||||
type documents struct {
|
||||
primaryKey []string
|
||||
docItems core.DocItems
|
||||
identifiers []string
|
||||
}
|
||||
|
||||
func (d *dataServant) BulkPushDoc(data []map[string]interface{}) (bool, error) {
|
||||
return d.zinc.BulkPushDoc(data)
|
||||
type bridgeTweetSearchServant struct {
|
||||
ts core.TweetSearchService
|
||||
updateDocsCh chan *documents
|
||||
}
|
||||
|
||||
func (d *dataServant) DelDoc(indexName, id string) error {
|
||||
return d.zinc.DelDoc(indexName, id)
|
||||
type zincTweetSearchServant struct {
|
||||
indexName string
|
||||
client *zinc.ZincClient
|
||||
}
|
||||
|
||||
func (d *dataServant) QueryAll(q *core.QueryT, indexName string, offset, limit int) (*zinc.QueryResultT, error) {
|
||||
// 普通搜索
|
||||
if q.Type == core.SearchTypeDefault && q.Query != "" {
|
||||
return d.QuerySearch(indexName, q.Query, offset, limit)
|
||||
}
|
||||
// Tag分类
|
||||
if q.Type == core.SearchTypeTag && q.Query != "" {
|
||||
return d.QueryTagSearch(indexName, q.Query, offset, limit)
|
||||
}
|
||||
|
||||
queryMap := map[string]interface{}{
|
||||
"query": map[string]interface{}{
|
||||
"match_all": map[string]string{},
|
||||
},
|
||||
"sort": []string{"-is_top", "-latest_replied_on"},
|
||||
"from": offset,
|
||||
"size": limit,
|
||||
}
|
||||
rsp, err := d.zinc.EsQuery(indexName, queryMap)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return rsp, err
|
||||
type meiliTweetSearchServant struct {
|
||||
client *meilisearch.Client
|
||||
index *meilisearch.Index
|
||||
}
|
||||
|
||||
func (d *dataServant) QuerySearch(indexName, query string, offset, limit int) (*zinc.QueryResultT, error) {
|
||||
rsp, err := d.zinc.EsQuery(indexName, map[string]interface{}{
|
||||
"query": map[string]interface{}{
|
||||
"match_phrase": map[string]interface{}{
|
||||
"content": query,
|
||||
},
|
||||
},
|
||||
"sort": []string{"-is_top", "-latest_replied_on"},
|
||||
"from": offset,
|
||||
"size": limit,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
func NewTweetSearchService() core.TweetSearchService {
|
||||
bts := &bridgeTweetSearchServant{}
|
||||
|
||||
capacity := conf.TweetSearchSetting.MaxUpdateQPS
|
||||
if capacity < 10 {
|
||||
capacity = 10
|
||||
} else if capacity > 10000 {
|
||||
capacity = 10000
|
||||
}
|
||||
bts.updateDocsCh = make(chan *documents, capacity)
|
||||
|
||||
return rsp, err
|
||||
}
|
||||
if conf.CfgIf("Zinc") {
|
||||
bts.ts = newZincTweetSearchServant()
|
||||
} else if conf.CfgIf("Meili") {
|
||||
bts.ts = newMeiliTweetSearchServant()
|
||||
} else {
|
||||
// default use Zinc as tweet search service
|
||||
bts.ts = newZincTweetSearchServant()
|
||||
}
|
||||
logrus.Infof("use %s as tweet search serice by version %s", bts.ts.Name(), bts.ts.Version())
|
||||
|
||||
func (d *dataServant) QueryTagSearch(indexName, query string, offset, limit int) (*zinc.QueryResultT, error) {
|
||||
rsp, err := d.zinc.ApiQuery(indexName, map[string]interface{}{
|
||||
"search_type": "querystring",
|
||||
"query": map[string]interface{}{
|
||||
"term": "tags." + query + ":1",
|
||||
},
|
||||
"sort_fields": []string{"-is_top", "-latest_replied_on"},
|
||||
"from": offset,
|
||||
"max_results": limit,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
numWorker := conf.TweetSearchSetting.MinWorker
|
||||
if numWorker < 5 {
|
||||
numWorker = 5
|
||||
} else if numWorker > 1000 {
|
||||
numWorker = 1000
|
||||
}
|
||||
logrus.Debugf("use %d backend worker to update documents to search engine", numWorker)
|
||||
// 启动文档更新器
|
||||
for ; numWorker > 0; numWorker-- {
|
||||
go bts.startUpdateDocs()
|
||||
}
|
||||
|
||||
return rsp, err
|
||||
return bts
|
||||
}
|
||||
|
@ -0,0 +1,71 @@
|
||||
package dao
|
||||
|
||||
import (
|
||||
"github.com/Masterminds/semver/v3"
|
||||
"github.com/rocboss/paopao-ce/internal/core"
|
||||
"github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
func (s *bridgeTweetSearchServant) Name() string {
|
||||
return "BridgeTweetSearch"
|
||||
}
|
||||
|
||||
func (s *bridgeTweetSearchServant) Version() *semver.Version {
|
||||
return semver.MustParse("v0.1.0")
|
||||
}
|
||||
|
||||
func (s *bridgeTweetSearchServant) IndexName() string {
|
||||
return s.ts.IndexName()
|
||||
}
|
||||
|
||||
func (s *bridgeTweetSearchServant) AddDocuments(data core.DocItems, primaryKey ...string) (bool, error) {
|
||||
s.updateDocs(&documents{
|
||||
primaryKey: primaryKey,
|
||||
docItems: data,
|
||||
})
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (s *bridgeTweetSearchServant) DeleteDocuments(identifiers []string) error {
|
||||
s.updateDocs(&documents{
|
||||
identifiers: identifiers,
|
||||
})
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *bridgeTweetSearchServant) Search(q *core.QueryReq, offset, limit int) (*core.QueryResp, error) {
|
||||
return s.ts.Search(q, offset, limit)
|
||||
}
|
||||
|
||||
func (s *bridgeTweetSearchServant) updateDocs(doc *documents) {
|
||||
select {
|
||||
case s.updateDocsCh <- doc:
|
||||
logrus.Debugln("addDocuments send documents by chan")
|
||||
default:
|
||||
go func(item *documents) {
|
||||
if len(item.docItems) > 0 {
|
||||
if _, err := s.ts.AddDocuments(item.docItems, item.primaryKey...); err != nil {
|
||||
logrus.Errorf("addDocuments in gorotine occurs error: %v", err)
|
||||
}
|
||||
} else if len(item.identifiers) > 0 {
|
||||
if err := s.ts.DeleteDocuments(item.identifiers); err != nil {
|
||||
logrus.Errorf("deleteDocuments in gorotine occurs error: %s", err)
|
||||
}
|
||||
}
|
||||
}(doc)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *bridgeTweetSearchServant) startUpdateDocs() {
|
||||
for doc := range s.updateDocsCh {
|
||||
if len(doc.docItems) > 0 {
|
||||
if _, err := s.ts.AddDocuments(doc.docItems, doc.primaryKey...); err != nil {
|
||||
logrus.Errorf("addDocuments occurs error: %v", err)
|
||||
}
|
||||
} else if len(doc.identifiers) > 0 {
|
||||
if err := s.ts.DeleteDocuments(doc.identifiers); err != nil {
|
||||
logrus.Errorf("deleteDocuments occurs error: %s", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,139 @@
|
||||
package dao
|
||||
|
||||
import (
|
||||
"github.com/Masterminds/semver/v3"
|
||||
"github.com/meilisearch/meilisearch-go"
|
||||
"github.com/rocboss/paopao-ce/internal/conf"
|
||||
"github.com/rocboss/paopao-ce/internal/core"
|
||||
"github.com/rocboss/paopao-ce/internal/model"
|
||||
"github.com/rocboss/paopao-ce/pkg/json"
|
||||
"github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
func newMeiliTweetSearchServant() *meiliTweetSearchServant {
|
||||
s := conf.MeiliSetting
|
||||
client := meilisearch.NewClient(meilisearch.ClientConfig{
|
||||
Host: s.Endpoint(),
|
||||
APIKey: s.ApiKey,
|
||||
})
|
||||
|
||||
if _, err := client.Index(s.Index).FetchInfo(); err != nil {
|
||||
logrus.Debugf("create index because fetch index info error: %v", err)
|
||||
client.CreateIndex(&meilisearch.IndexConfig{
|
||||
Uid: s.Index,
|
||||
PrimaryKey: "id",
|
||||
})
|
||||
searchableAttributes := []string{"content", "tags"}
|
||||
sortableAttributes := []string{"is_top", "latest_replied_on"}
|
||||
|
||||
index := client.Index(s.Index)
|
||||
index.UpdateSearchableAttributes(&searchableAttributes)
|
||||
index.UpdateSortableAttributes(&sortableAttributes)
|
||||
}
|
||||
|
||||
return &meiliTweetSearchServant{
|
||||
client: client,
|
||||
index: client.Index(s.Index),
|
||||
}
|
||||
}
|
||||
|
||||
func (s *meiliTweetSearchServant) Name() string {
|
||||
return "Meili"
|
||||
}
|
||||
|
||||
func (s *meiliTweetSearchServant) Version() *semver.Version {
|
||||
return semver.MustParse("v0.1.0")
|
||||
}
|
||||
|
||||
func (s *meiliTweetSearchServant) IndexName() string {
|
||||
return s.index.UID
|
||||
}
|
||||
|
||||
func (s *meiliTweetSearchServant) AddDocuments(data core.DocItems, primaryKey ...string) (bool, error) {
|
||||
task, err := s.index.AddDocuments(data, primaryKey...)
|
||||
if err != nil {
|
||||
logrus.Errorf("meiliTweetSearchServant.AddDocuments error: %v", err)
|
||||
return false, err
|
||||
}
|
||||
logrus.Debugf("meiliTweetSearchServant.AddDocuments task: %+v", task.Details)
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (s *meiliTweetSearchServant) DeleteDocuments(identifiers []string) error {
|
||||
task, err := s.index.DeleteDocuments(identifiers)
|
||||
if err != nil {
|
||||
logrus.Errorf("meiliTweetSearchServant.DeleteDocuments error: %v", err)
|
||||
return err
|
||||
}
|
||||
logrus.Debugf("meiliTweetSearchServant.DeleteDocuments task: %+v", task.Details)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *meiliTweetSearchServant) Search(q *core.QueryReq, offset, limit int) (*core.QueryResp, error) {
|
||||
if q.Type == core.SearchTypeDefault && q.Query != "" {
|
||||
return s.queryByContent(q, offset, limit)
|
||||
} else if q.Type == core.SearchTypeTag && q.Query != "" {
|
||||
return s.queryByTag(q, offset, limit)
|
||||
}
|
||||
return s.queryAny(offset, limit)
|
||||
}
|
||||
|
||||
func (s *meiliTweetSearchServant) queryByContent(q *core.QueryReq, offset, limit int) (*core.QueryResp, error) {
|
||||
resp, err := s.index.Search(q.Query, &meilisearch.SearchRequest{
|
||||
Offset: int64(offset),
|
||||
Limit: int64(limit),
|
||||
AttributesToRetrieve: []string{"*"},
|
||||
Sort: []string{"is_top:desc", "latest_replied_on:desc"},
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return s.postsFrom(resp)
|
||||
}
|
||||
|
||||
func (s *meiliTweetSearchServant) queryByTag(q *core.QueryReq, offset, limit int) (*core.QueryResp, error) {
|
||||
resp, err := s.index.Search("#"+q.Query, &meilisearch.SearchRequest{
|
||||
Offset: int64(offset),
|
||||
Limit: int64(limit),
|
||||
AttributesToRetrieve: []string{"*"},
|
||||
Sort: []string{"is_top:desc", "latest_replied_on:desc"},
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return s.postsFrom(resp)
|
||||
}
|
||||
|
||||
func (s *meiliTweetSearchServant) queryAny(offset, limit int) (*core.QueryResp, error) {
|
||||
resp, err := s.index.Search("", &meilisearch.SearchRequest{
|
||||
Offset: int64(offset),
|
||||
Limit: int64(limit),
|
||||
Matches: true,
|
||||
Sort: []string{"is_top:desc", "latest_replied_on:desc"},
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return s.postsFrom(resp)
|
||||
}
|
||||
|
||||
func (s *meiliTweetSearchServant) postsFrom(resp *meilisearch.SearchResponse) (*core.QueryResp, error) {
|
||||
logrus.Debugf("resp Hits:%d NbHits:%d offset: %d limit:%d ", len(resp.Hits), resp.NbHits, resp.Offset, resp.Limit)
|
||||
posts := make([]*model.PostFormated, 0, len(resp.Hits))
|
||||
for _, hit := range resp.Hits {
|
||||
item := &model.PostFormated{}
|
||||
raw, err := json.Marshal(hit)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = json.Unmarshal(raw, item); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
posts = append(posts, item)
|
||||
}
|
||||
|
||||
return &core.QueryResp{
|
||||
Items: posts,
|
||||
Total: resp.NbHits,
|
||||
}, nil
|
||||
}
|
@ -0,0 +1,235 @@
|
||||
package dao
|
||||
|
||||
import (
|
||||
"github.com/Masterminds/semver/v3"
|
||||
"github.com/rocboss/paopao-ce/internal/conf"
|
||||
"github.com/rocboss/paopao-ce/internal/core"
|
||||
"github.com/rocboss/paopao-ce/internal/model"
|
||||
"github.com/rocboss/paopao-ce/pkg/json"
|
||||
"github.com/rocboss/paopao-ce/pkg/zinc"
|
||||
)
|
||||
|
||||
func newZincTweetSearchServant() *zincTweetSearchServant {
|
||||
s := conf.ZincSetting
|
||||
zts := &zincTweetSearchServant{
|
||||
indexName: s.Index,
|
||||
client: zinc.NewClient(s),
|
||||
}
|
||||
zts.createIndex()
|
||||
|
||||
return zts
|
||||
}
|
||||
|
||||
func (s *zincTweetSearchServant) Name() string {
|
||||
return "Zinc"
|
||||
}
|
||||
|
||||
func (s *zincTweetSearchServant) Version() *semver.Version {
|
||||
return semver.MustParse("v0.1.0")
|
||||
}
|
||||
|
||||
func (s *zincTweetSearchServant) IndexName() string {
|
||||
return s.indexName
|
||||
}
|
||||
|
||||
func (s *zincTweetSearchServant) AddDocuments(data core.DocItems, primaryKey ...string) (bool, error) {
|
||||
buf := make(core.DocItems, 0, len(data)+1)
|
||||
if len(primaryKey) > 0 {
|
||||
buf = append(buf, map[string]interface{}{
|
||||
"index": map[string]interface{}{
|
||||
"_index": s.indexName,
|
||||
"_id": primaryKey[0],
|
||||
},
|
||||
})
|
||||
} else {
|
||||
buf = append(buf, map[string]interface{}{
|
||||
"index": map[string]interface{}{
|
||||
"_index": s.indexName,
|
||||
},
|
||||
})
|
||||
}
|
||||
buf = append(buf, data...)
|
||||
return s.client.BulkPushDoc(buf)
|
||||
}
|
||||
|
||||
func (s *zincTweetSearchServant) DeleteDocuments(identifiers []string) error {
|
||||
for _, id := range identifiers {
|
||||
if err := s.client.DelDoc(s.indexName, id); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *zincTweetSearchServant) Search(q *core.QueryReq, offset, limit int) (*core.QueryResp, error) {
|
||||
if q.Type == core.SearchTypeDefault && q.Query != "" {
|
||||
return s.queryByContent(q, offset, limit)
|
||||
} else if q.Type == core.SearchTypeTag && q.Query != "" {
|
||||
return s.queryByTag(q, offset, limit)
|
||||
}
|
||||
return s.queryAny(offset, limit)
|
||||
}
|
||||
|
||||
func (s *zincTweetSearchServant) queryByContent(q *core.QueryReq, offset, limit int) (*core.QueryResp, error) {
|
||||
resp, err := s.client.EsQuery(s.indexName, map[string]interface{}{
|
||||
"query": map[string]interface{}{
|
||||
"match_phrase": map[string]interface{}{
|
||||
"content": q.Query,
|
||||
},
|
||||
},
|
||||
"sort": []string{"-is_top", "-latest_replied_on"},
|
||||
"from": offset,
|
||||
"size": limit,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return s.postsFrom(resp)
|
||||
}
|
||||
|
||||
func (s *zincTweetSearchServant) queryByTag(q *core.QueryReq, offset, limit int) (*core.QueryResp, error) {
|
||||
resp, err := s.client.ApiQuery(s.indexName, map[string]interface{}{
|
||||
"search_type": "querystring",
|
||||
"query": map[string]interface{}{
|
||||
"term": "tags." + q.Query + ":1",
|
||||
},
|
||||
"sort_fields": []string{"-is_top", "-latest_replied_on"},
|
||||
"from": offset,
|
||||
"max_results": limit,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return s.postsFrom(resp)
|
||||
}
|
||||
|
||||
func (s *zincTweetSearchServant) queryAny(offset, limit int) (*core.QueryResp, error) {
|
||||
queryMap := map[string]interface{}{
|
||||
"query": map[string]interface{}{
|
||||
"match_all": map[string]string{},
|
||||
},
|
||||
"sort": []string{"-is_top", "-latest_replied_on"},
|
||||
"from": offset,
|
||||
"size": limit,
|
||||
}
|
||||
resp, err := s.client.EsQuery(s.indexName, queryMap)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return s.postsFrom(resp)
|
||||
}
|
||||
|
||||
func (s *zincTweetSearchServant) postsFrom(resp *zinc.QueryResultT) (*core.QueryResp, error) {
|
||||
posts := make([]*model.PostFormated, 0, len(resp.Hits.Hits))
|
||||
for _, hit := range resp.Hits.Hits {
|
||||
item := &model.PostFormated{}
|
||||
raw, err := json.Marshal(hit.Source)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = json.Unmarshal(raw, item); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
posts = append(posts, item)
|
||||
}
|
||||
|
||||
return &core.QueryResp{
|
||||
Items: posts,
|
||||
Total: resp.Hits.Total.Value,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// createIndex creates the tweet index in Zinc with an explicit property
// mapping for every field the search queries rely on (sortable counters,
// the analyzed "content" text field, keyword tags, etc.).
// NOTE(review): the error returned by CreateIndex is discarded — consider
// at least logging it so a failed index creation is visible.
func (s *zincTweetSearchServant) createIndex() {
	// create the index if it does not exist yet
	s.client.CreateIndex(s.indexName, &zinc.ZincIndexProperty{
		// tweet primary key
		"id": &zinc.ZincIndexPropertyT{
			Type:     "numeric",
			Index:    true,
			Store:    true,
			Sortable: true,
		},
		"user_id": &zinc.ZincIndexPropertyT{
			Type:  "numeric",
			Index: true,
			Store: true,
		},
		"comment_count": &zinc.ZincIndexPropertyT{
			Type:     "numeric",
			Index:    true,
			Sortable: true,
			Store:    true,
		},
		"collection_count": &zinc.ZincIndexPropertyT{
			Type:     "numeric",
			Index:    true,
			Sortable: true,
			Store:    true,
		},
		"upvote_count": &zinc.ZincIndexPropertyT{
			Type:     "numeric",
			Index:    true,
			Sortable: true,
			Store:    true,
		},
		"visibility": &zinc.ZincIndexPropertyT{
			Type:     "numeric",
			Index:    true,
			Sortable: true,
			Store:    true,
		},
		// "-is_top" is the primary sort key of every query above
		"is_top": &zinc.ZincIndexPropertyT{
			Type:     "numeric",
			Index:    true,
			Sortable: true,
			Store:    true,
		},
		"is_essence": &zinc.ZincIndexPropertyT{
			Type:     "numeric",
			Index:    true,
			Sortable: true,
			Store:    true,
		},
		// full-text field targeted by queryByContent's match_phrase;
		// analyzed with the gse analyzers configured in Zinc
		"content": &zinc.ZincIndexPropertyT{
			Type:           "text",
			Index:          true,
			Store:          true,
			Aggregatable:   true,
			Highlightable:  true,
			Analyzer:       "gse_search",
			SearchAnalyzer: "gse_standard",
		},
		// keyword field targeted by queryByTag's "tags.<name>:1" term
		"tags": &zinc.ZincIndexPropertyT{
			Type:  "keyword",
			Index: true,
			Store: true,
		},
		"ip_loc": &zinc.ZincIndexPropertyT{
			Type:  "keyword",
			Index: true,
			Store: true,
		},
		// "-latest_replied_on" is the secondary sort key of every query
		"latest_replied_on": &zinc.ZincIndexPropertyT{
			Type:     "numeric",
			Index:    true,
			Sortable: true,
			Store:    true,
		},
		// NOTE(review): unlike the other numeric fields this one omits
		// Index: true — confirm whether that is intentional.
		"attachment_price": &zinc.ZincIndexPropertyT{
			Type:     "numeric",
			Sortable: true,
			Store:    true,
		},
		"created_on": &zinc.ZincIndexPropertyT{
			Type:     "numeric",
			Index:    true,
			Sortable: true,
			Store:    true,
		},
		"modified_on": &zinc.ZincIndexPropertyT{
			Type:     "numeric",
			Index:    true,
			Sortable: true,
			Store:    true,
		},
	})
}
|
@ -0,0 +1,13 @@
|
||||
//go:build !docs
|
||||
// +build !docs
|
||||
|
||||
package routers
|
||||
|
||||
import (
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
|
||||
// registerDocs stub function for register docs asset route
|
||||
func registerDocs(e *gin.Engine) {
|
||||
// empty
|
||||
}
|
@ -0,0 +1,14 @@
|
||||
//go:build docs
|
||||
// +build docs
|
||||
|
||||
package routers
|
||||
|
||||
import (
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/rocboss/paopao-ce/docs"
|
||||
)
|
||||
|
||||
// registerDocs register docs asset route
|
||||
func registerDocs(e *gin.Engine) {
|
||||
e.StaticFS("/docs", docs.NewFileSystem())
|
||||
}
|
@ -0,0 +1,25 @@
|
||||
package debug
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// version, commitID and buildDate are presumably injected at link time
// (e.g. via -ldflags "-X .../debug.version=...") — TODO confirm in the
// build scripts. They remain empty strings in a plain `go build`.
var version, commitID, buildDate string
|
||||
|
||||
// BuildInfo bundles the link-time build metadata in a structured form,
// as returned by ReadBuildInfo.
type BuildInfo struct {
	Version   string // release version string
	Sum       string // commit id the binary was built from
	BuildDate string // build timestamp
}
|
||||
|
||||
// VersionInfo renders a one-line human-readable banner of the form
// "paopao <version> (build:<commit> <date>)". All three values come from
// the package-level link-time variables and are empty unless set at
// build time.
func VersionInfo() string {
	return fmt.Sprintf("paopao %s (build:%s %s)", version, commitID, buildDate)
}
|
||||
|
||||
func ReadBuildInfo() *BuildInfo {
|
||||
return &BuildInfo{
|
||||
Version: version,
|
||||
Sum: commitID,
|
||||
BuildDate: buildDate,
|
||||
}
|
||||
}
|
@ -0,0 +1,23 @@
|
||||
// Copyright 2017 Bo-Yi Wu. All rights reserved.
|
||||
// Use of this source code is governed by a MIT style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
//go:build go_json
|
||||
// +build go_json
|
||||
|
||||
package json
|
||||
|
||||
import json "github.com/goccy/go-json"
|
||||
|
||||
// The aliases below re-export goccy/go-json under stable local names so
// the rest of the project can swap JSON engines purely through build
// tags; this file is compiled only under the "go_json" tag.
var (
	// Marshal is exported by gin/json package.
	Marshal = json.Marshal
	// Unmarshal is exported by gin/json package.
	Unmarshal = json.Unmarshal
	// MarshalIndent is exported by gin/json package.
	MarshalIndent = json.MarshalIndent
	// NewDecoder is exported by gin/json package.
	NewDecoder = json.NewDecoder
	// NewEncoder is exported by gin/json package.
	NewEncoder = json.NewEncoder
)
|
@ -0,0 +1,23 @@
|
||||
// Copyright 2017 Bo-Yi Wu. All rights reserved.
|
||||
// Use of this source code is governed by a MIT style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
//go:build !jsoniter && !go_json
|
||||
// +build !jsoniter,!go_json
|
||||
|
||||
package json
|
||||
|
||||
import "encoding/json"
|
||||
|
||||
// The aliases below re-export the standard library's encoding/json; this
// is the default engine, compiled when neither the "jsoniter" nor the
// "go_json" build tag is set.
var (
	// Marshal is exported by gin/json package.
	Marshal = json.Marshal
	// Unmarshal is exported by gin/json package.
	Unmarshal = json.Unmarshal
	// MarshalIndent is exported by gin/json package.
	MarshalIndent = json.MarshalIndent
	// NewDecoder is exported by gin/json package.
	NewDecoder = json.NewDecoder
	// NewEncoder is exported by gin/json package.
	NewEncoder = json.NewEncoder
)
|
@ -0,0 +1,24 @@
|
||||
// Copyright 2017 Bo-Yi Wu. All rights reserved.
|
||||
// Use of this source code is governed by a MIT style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
//go:build jsoniter
|
||||
// +build jsoniter
|
||||
|
||||
package json
|
||||
|
||||
import jsoniter "github.com/json-iterator/go"
|
||||
|
||||
// The aliases below re-export json-iterator; this file is compiled only
// under the "jsoniter" build tag.
var (
	// json is a jsoniter instance configured to mimic the standard
	// library's encoding/json behavior.
	json = jsoniter.ConfigCompatibleWithStandardLibrary
	// Marshal is exported by gin/json package.
	Marshal = json.Marshal
	// Unmarshal is exported by gin/json package.
	Unmarshal = json.Unmarshal
	// MarshalIndent is exported by gin/json package.
	MarshalIndent = json.MarshalIndent
	// NewDecoder is exported by gin/json package.
	NewDecoder = json.NewDecoder
	// NewEncoder is exported by gin/json package.
	NewEncoder = json.NewEncoder
)
|
@ -0,0 +1,9 @@
|
||||
-- ----------------------------
-- Table p_post alter add visibility column
-- visibility semantics (also in the column COMMENT below):
-- 0 public, 1 private, 2 visible to friends only.
-- NOTE(review): not idempotent — re-running fails once the column exists.
-- ----------------------------
ALTER TABLE `p_post` ADD COLUMN `visibility` tinyint unsigned NOT NULL DEFAULT '0' COMMENT '可见性 0公开 1私密 2好友可见';

-- ----------------------------
-- Indexes structure for table p_post
-- ----------------------------
CREATE INDEX `idx_visibility` ON `p_post` ( `visibility` ) USING BTREE;
|
@ -0,0 +1,309 @@
|
||||
-- FK checks are disabled while (re)creating the schema and re-enabled at
-- the bottom of this script.
PRAGMA foreign_keys = false;

-- ----------------------------
-- Table structure for p_attachment
-- Uploaded attachment metadata. All tables in this schema share the
-- created_on/modified_on/deleted_on/is_del soft-delete bookkeeping
-- columns (presumably unix timestamps — confirm against the Go models).
-- ----------------------------
DROP TABLE IF EXISTS "p_attachment";
CREATE TABLE "p_attachment" (
  "id" integer NOT NULL,
  "user_id" integer NOT NULL,
  "file_size" integer NOT NULL,
  "img_width" integer NOT NULL,
  "img_height" integer NOT NULL,
  "type" integer NOT NULL,
  "content" text(255) NOT NULL,
  "created_on" integer NOT NULL,
  "modified_on" integer NOT NULL,
  "deleted_on" integer NOT NULL,
  "is_del" integer NOT NULL,
  PRIMARY KEY ("id")
);
|
||||
|
||||
-- ----------------------------
-- Table structure for p_captcha
-- Phone verification codes, with expiry and a usage counter.
-- ----------------------------
DROP TABLE IF EXISTS "p_captcha";
CREATE TABLE "p_captcha" (
  "id" integer NOT NULL,
  "phone" text(16) NOT NULL,
  "captcha" text(16) NOT NULL,
  "use_times" integer NOT NULL,
  "expired_on" integer NOT NULL,
  "created_on" integer NOT NULL,
  "modified_on" integer NOT NULL,
  "deleted_on" integer NOT NULL,
  "is_del" integer NOT NULL,
  PRIMARY KEY ("id")
);

-- ----------------------------
-- Table structure for p_comment
-- A comment on a post; its body items live in p_comment_content.
-- ----------------------------
DROP TABLE IF EXISTS "p_comment";
CREATE TABLE "p_comment" (
  "id" integer NOT NULL,
  "post_id" integer NOT NULL,
  "user_id" integer NOT NULL,
  "ip" text(64) NOT NULL,
  "ip_loc" text(64) NOT NULL,
  "created_on" integer NOT NULL,
  "modified_on" integer NOT NULL,
  "deleted_on" integer NOT NULL,
  "is_del" integer NOT NULL,
  PRIMARY KEY ("id")
);
|
||||
|
||||
-- ----------------------------
-- Table structure for p_comment_content
-- Ordered ("sort"), typed content items making up a comment's body.
-- ----------------------------
DROP TABLE IF EXISTS "p_comment_content";
CREATE TABLE "p_comment_content" (
  "id" integer NOT NULL,
  "comment_id" integer NOT NULL,
  "user_id" integer NOT NULL,
  "content" text(255) NOT NULL,
  "type" integer NOT NULL,
  "sort" integer NOT NULL,
  "created_on" integer NOT NULL,
  "modified_on" integer NOT NULL,
  "deleted_on" integer NOT NULL,
  "is_del" integer NOT NULL,
  PRIMARY KEY ("id")
);

-- ----------------------------
-- Table structure for p_comment_reply
-- A reply under a comment, optionally @-mentioning another user
-- (at_user_id).
-- ----------------------------
DROP TABLE IF EXISTS "p_comment_reply";
CREATE TABLE "p_comment_reply" (
  "id" integer NOT NULL,
  "comment_id" integer NOT NULL,
  "user_id" integer NOT NULL,
  "at_user_id" integer NOT NULL,
  "content" text(255) NOT NULL,
  "ip" text(64) NOT NULL,
  "ip_loc" text(64) NOT NULL,
  "created_on" integer NOT NULL,
  "modified_on" integer NOT NULL,
  "deleted_on" integer NOT NULL,
  "is_del" integer NOT NULL,
  PRIMARY KEY ("id")
);
|
||||
|
||||
-- ----------------------------
-- Table structure for p_message
-- In-app notification/letter; "type" discriminates the kind of message
-- (post / comment / reply / private letter / system notice — TODO confirm
-- against the backend enum).
-- ----------------------------
DROP TABLE IF EXISTS "p_message";
CREATE TABLE "p_message" (
  "id" integer NOT NULL,
  "sender_user_id" integer NOT NULL,
  "receiver_user_id" integer NOT NULL,
  "type" integer NOT NULL,
  "brief" text(255) NOT NULL,
  "content" text(255) NOT NULL,
  "post_id" integer NOT NULL,
  "comment_id" integer NOT NULL,
  "reply_id" integer NOT NULL,
  "is_read" integer NOT NULL,
  "created_on" integer NOT NULL,
  "modified_on" integer NOT NULL,
  "deleted_on" integer NOT NULL,
  "is_del" integer NOT NULL,
  PRIMARY KEY ("id")
);

-- ----------------------------
-- Table structure for p_post
-- Post header and denormalized counters; body items live in
-- p_post_content.
-- NOTE(review): no "visibility" column here — it is added by a separate
-- migration script; confirm fresh installs also apply that migration.
-- ----------------------------
DROP TABLE IF EXISTS "p_post";
CREATE TABLE "p_post" (
  "id" integer NOT NULL,
  "user_id" integer NOT NULL,
  "comment_count" integer NOT NULL,
  "collection_count" integer NOT NULL,
  "upvote_count" integer NOT NULL,
  "is_top" integer NOT NULL,
  "is_essence" integer NOT NULL,
  "is_lock" integer NOT NULL,
  "latest_replied_on" integer NOT NULL,
  "tags" text(255) NOT NULL,
  "attachment_price" integer NOT NULL,
  "ip" text(64) NOT NULL,
  "ip_loc" text(64) NOT NULL,
  "created_on" integer NOT NULL,
  "modified_on" integer NOT NULL,
  "deleted_on" integer NOT NULL,
  "is_del" integer NOT NULL,
  PRIMARY KEY ("id")
);
|
||||
|
||||
-- ----------------------------
-- Table structure for p_post_attachment_bill
-- Record of a user paying for a charged post attachment.
-- ----------------------------
DROP TABLE IF EXISTS "p_post_attachment_bill";
CREATE TABLE "p_post_attachment_bill" (
  "id" integer NOT NULL,
  "post_id" integer NOT NULL,
  "user_id" integer NOT NULL,
  "paid_amount" integer NOT NULL,
  "created_on" integer NOT NULL,
  "modified_on" integer NOT NULL,
  "deleted_on" integer NOT NULL,
  "is_del" integer NOT NULL,
  PRIMARY KEY ("id")
);

-- ----------------------------
-- Table structure for p_post_collection
-- A user's bookmark of a post.
-- ----------------------------
DROP TABLE IF EXISTS "p_post_collection";
CREATE TABLE "p_post_collection" (
  "id" integer NOT NULL,
  "post_id" integer NOT NULL,
  "user_id" integer NOT NULL,
  "created_on" integer NOT NULL,
  "modified_on" integer NOT NULL,
  "deleted_on" integer NOT NULL,
  "is_del" integer NOT NULL,
  PRIMARY KEY ("id")
);

-- ----------------------------
-- Table structure for p_post_content
-- Ordered ("sort"), typed content items making up a post's body.
-- ----------------------------
DROP TABLE IF EXISTS "p_post_content";
CREATE TABLE "p_post_content" (
  "id" integer NOT NULL,
  "post_id" integer NOT NULL,
  "user_id" integer NOT NULL,
  "content" text(2000) NOT NULL,
  "type" integer NOT NULL,
  "sort" integer NOT NULL,
  "created_on" integer NOT NULL,
  "modified_on" integer NOT NULL,
  "deleted_on" integer NOT NULL,
  "is_del" integer NOT NULL,
  PRIMARY KEY ("id")
);
|
||||
|
||||
-- ----------------------------
-- Table structure for p_post_star
-- A user's star/upvote on a post.
-- ----------------------------
DROP TABLE IF EXISTS "p_post_star";
CREATE TABLE "p_post_star" (
  "id" integer NOT NULL,
  "post_id" integer NOT NULL,
  "user_id" integer NOT NULL,
  "created_on" integer NOT NULL,
  "modified_on" integer NOT NULL,
  "deleted_on" integer NOT NULL,
  "is_del" integer NOT NULL,
  PRIMARY KEY ("id")
);

-- ----------------------------
-- Table structure for p_tag
-- Tag dictionary with a usage counter (quote_num).
-- ----------------------------
DROP TABLE IF EXISTS "p_tag";
CREATE TABLE "p_tag" (
  "id" integer NOT NULL,
  "user_id" integer NOT NULL,
  "tag" text(255) NOT NULL,
  "quote_num" integer NOT NULL,
  "created_on" integer NOT NULL,
  "modified_on" integer NOT NULL,
  "deleted_on" integer NOT NULL,
  "is_del" integer NOT NULL,
  PRIMARY KEY ("id")
);

-- ----------------------------
-- Table structure for p_user
-- Account record. "password" is text(32) alongside a "salt" column,
-- presumably a salted hash — TODO confirm the hashing scheme in the Go
-- code.
-- ----------------------------
DROP TABLE IF EXISTS "p_user";
CREATE TABLE "p_user" (
  "id" integer NOT NULL,
  "nickname" text(32) NOT NULL,
  "username" text(32) NOT NULL,
  "phone" text(16) NOT NULL,
  "password" text(32) NOT NULL,
  "salt" text(16) NOT NULL,
  "status" integer NOT NULL,
  "avatar" text(255) NOT NULL,
  "balance" integer NOT NULL,
  "is_admin" integer NOT NULL,
  "created_on" integer NOT NULL,
  "modified_on" integer NOT NULL,
  "deleted_on" integer NOT NULL,
  "is_del" integer NOT NULL,
  PRIMARY KEY ("id")
);
|
||||
|
||||
-- ----------------------------
-- Table structure for p_wallet_recharge
-- A wallet top-up order and its payment-gateway trade number/status.
-- ----------------------------
DROP TABLE IF EXISTS "p_wallet_recharge";
CREATE TABLE "p_wallet_recharge" (
  "id" integer NOT NULL,
  "user_id" integer NOT NULL,
  "amount" integer NOT NULL,
  "trade_no" text(64) NOT NULL,
  "trade_status" text(32) NOT NULL,
  "created_on" integer NOT NULL,
  "modified_on" integer NOT NULL,
  "deleted_on" integer NOT NULL,
  "is_del" integer NOT NULL,
  PRIMARY KEY ("id")
);

-- ----------------------------
-- Table structure for p_wallet_statement
-- Per-transaction wallet ledger entry: the change amount, the balance
-- snapshot after it, a textual reason, and the related post if any.
-- ----------------------------
DROP TABLE IF EXISTS "p_wallet_statement";
CREATE TABLE "p_wallet_statement" (
  "id" integer NOT NULL,
  "user_id" integer NOT NULL,
  "change_amount" integer NOT NULL,
  "balance_snapshot" integer NOT NULL,
  "reason" text(255) NOT NULL,
  "post_id" integer NOT NULL,
  "created_on" integer NOT NULL,
  "modified_on" integer NOT NULL,
  "deleted_on" integer NOT NULL,
  "is_del" integer NOT NULL,
  PRIMARY KEY ("id")
);
|
||||
|
||||
-- ----------------------------
-- Indexes structure for table p_attachment
-- ----------------------------
CREATE INDEX "main"."idx_user"
ON "p_attachment" (
  "user_id" ASC
);

-- ----------------------------
-- Indexes structure for table p_captcha
-- ----------------------------
CREATE INDEX "main"."idx_expired_on"
ON "p_captcha" (
  "expired_on" ASC
);
CREATE INDEX "main"."idx_phone"
ON "p_captcha" (
  "phone" ASC
);
CREATE INDEX "main"."idx_use_times"
ON "p_captcha" (
  "use_times" ASC
);

-- ----------------------------
-- Indexes structure for table p_comment
-- ----------------------------
CREATE INDEX "main"."idx_post"
ON "p_comment" (
  "post_id" ASC
);

-- re-enable the FK enforcement disabled at the top of this script
PRAGMA foreign_keys = true;
|
@ -0,0 +1,12 @@
|
||||
-- ----------------------------
-- Table p_post alter add visibility column
-- visibility: 0 public, 1 private, 2 visible to friends only (SQLite has
-- no column COMMENT syntax, so the semantics are recorded here; they
-- mirror the MySQL migration).
-- ----------------------------
ALTER TABLE `p_post` ADD COLUMN `visibility` integer NOT NULL DEFAULT '0';

-- ----------------------------
-- Indexes structure for table p_post
-- ----------------------------
CREATE INDEX "main"."idx_visibility"
ON "p_post" (
  "visibility" ASC
);
|
@ -1,4 +1,4 @@
|
||||
{
|
||||
"version": "3",
|
||||
"buildTime": "2022-06-08 23:29:43"
|
||||
"version": "8",
|
||||
"buildTime": "2022-06-13 17:16:22"
|
||||
}
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,69 @@
|
||||
/** Post content item type. */
export enum PostItemTypeEnum {
  /** Title */
  TITLE = 1,
  /** Text paragraph */
  TEXT = 2,
  /** Image URL */
  IMAGEURL = 3,
  /** Video URL */
  VIDEOURL = 4,
  /** Audio URL */
  AUDIOURL = 5,
  /** Link URL */
  LINKURL = 6,
  /** Attachment resource */
  ATTACHMENT = 7,
  /** Paid attachment resource */
  CHARGEATTACHMENT = 8
}
|
||||
|
||||
/** Comment/reply content item type. */
export enum CommentItemTypeEnum {
  /** Title */
  TITLE = 1,
  /** Text paragraph */
  TEXT = 2,
  /** Image URL */
  IMAGEURL = 3,
  /** Video URL */
  VIDEOURL = 4,
  /** Audio URL */
  AUDIOURL = 5,
  /** Link URL */
  LINKURL = 6
}
|
||||
|
||||
/** Attachment type. */
export enum AttachmentTypeEnum {
  /** Image */
  IMAGE = 1,
  /** Video */
  VIDEO = 2,
  /** Other */
  OTHER = 3
}
|
||||
|
||||
/** Message type. */
export enum MessageTypeEnum {
  /** Post */
  POST = 1,
  /** Comment */
  COMMENT = 2,
  /** Reply */
  REPLY = 3,
  /** Private letter */
  PRIVATELETTER = 4,
  /** System notice */
  SYSTEMNOTICE = 99
}
|
||||
|
||||
/**
 * Post visibility. Implicit values 0/1/2 match the backend
 * `visibility` column added by the migrations (0 public, 1 private,
 * 2 friends-only) — do not reorder the members.
 */
export enum VisibilityEnum {
  /** Public */
  PUBLIC,
  /** Private */
  PRIVATE,
  /** Visible to friends only */
  FRIEND
}
|
Loading…
Reference in new issue