This commit is contained in:
xing 2023-11-29 00:00:25 +08:00
parent 7c3f8baaa2
commit 9f49a274cd
4 changed files with 36 additions and 21 deletions


@@ -52,20 +52,33 @@ func PostComments(ctx context.Context, postId uint64, _ ...any) ([]uint64, error
 }
 
 func GetCommentByIds(ctx context.Context, ids []uint64, _ ...any) (map[uint64]models.Comments, error) {
+    if len(ids) < 1 {
+        return nil, nil
+    }
     m := make(map[uint64]models.Comments)
-    r, err := model.ChunkFind[models.Comments](ctx, 500, model.Conditions(
-        model.Where(model.SqlBuilder{
-            {"comment_ID", "in", ""}, {"comment_approved", "1"},
-        }),
-        model.Fields("*"),
-        model.In(slice.ToAnySlice(ids)),
-    ))
-    if err != nil {
-        return m, err
-    }
-    return slice.SimpleToMap(r, func(t models.Comments) uint64 {
-        return t.CommentId
-    }), err
+    off := 0
+    for {
+        id := slice.Slice(ids, off, 500)
+        if len(id) < 1 {
+            break
+        }
+        r, err := model.Finds[models.Comments](ctx, model.Conditions(
+            model.Where(model.SqlBuilder{
+                {"comment_ID", "in", ""}, {"comment_approved", "1"},
+            }),
+            model.Fields("*"),
+            model.In(slice.ToAnySlice(id)),
+        ))
+        if err != nil {
+            return m, err
+        }
+        for _, comments := range r {
+            m[comments.CommentId] = comments
+        }
+        off += 500
+    }
+    return m, nil
 }
 
 func GetIncreaseComment(ctx context.Context, currentData []uint64, k uint64, t time.Time, _ ...any) (data []uint64, save bool, refresh bool, err error) {
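
The rewrite above drops model.ChunkFind in favour of an explicit paging loop: take 500 ids at a time with slice.Slice, query them with model.Finds, and merge each batch into the result map. Below is a minimal, self-contained sketch of that batching pattern; fetchBatch and getByIDs are hypothetical stand-ins for model.Finds and GetCommentByIds, not part of the repository.

package main

import (
    "context"
    "fmt"
)

// fetchBatch stands in for the per-chunk database lookup: it resolves
// one slice of ids into a map (hypothetical helper, illustration only).
func fetchBatch(_ context.Context, ids []uint64) (map[uint64]string, error) {
    out := make(map[uint64]string, len(ids))
    for _, id := range ids {
        out[id] = fmt.Sprintf("comment-%d", id)
    }
    return out, nil
}

// getByIDs pages through ids in chunks of batchSize and merges the
// results, mirroring the loop introduced in GetCommentByIds.
func getByIDs(ctx context.Context, ids []uint64, batchSize int) (map[uint64]string, error) {
    if len(ids) < 1 {
        return nil, nil
    }
    m := make(map[uint64]string, len(ids))
    for off := 0; off < len(ids); off += batchSize {
        end := off + batchSize
        if end > len(ids) {
            end = len(ids)
        }
        r, err := fetchBatch(ctx, ids[off:end])
        if err != nil {
            return m, err
        }
        for k, v := range r {
            m[k] = v
        }
    }
    return m, nil
}

func main() {
    m, _ := getByIDs(context.Background(), []uint64{1, 2, 3}, 2)
    fmt.Println(len(m)) // 3
}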

cache/map.go vendored

@@ -229,7 +229,7 @@ func (m *MapCache[K, V]) getBatchToMap(e Expend[K, V]) func(c context.Context, k
         r, er := m.batchCacheFn(ctx, maps.FilterToSlice(needIndex, func(k K, v int) (K, bool) {
             return k, true
         }), params...)
-        if err != nil {
+        if er != nil {
             err = er
             return
         }
@@ -296,7 +296,7 @@ func (m *MapCache[K, V]) getBatchToMapes(c context.Context, key []K, timeout tim
     }
     rr, er := m.batchCacheFn(c, needFlushs, params...)
-    if err != nil {
+    if er != nil {
         err = er
         return
     }
@@ -361,7 +361,7 @@ func (m *MapCache[K, V]) getCacheBatchs(c context.Context, key []K, timeout time
     }
     r, er := m.batchCacheFn(c, needFlushs, params...)
-    if err != nil {
+    if er != nil {
         err = er
         return
     }
@@ -441,7 +441,7 @@ func (m *MapCache[K, V]) getBatches(e Expend[K, V]) func(ctx context.Context, ke
         r, er := m.batchCacheFn(ctx, maps.FilterToSlice(needIndex, func(k K, v int) (K, bool) {
             return k, true
         }), params...)
-        if err != nil {
+        if er != nil {
             err = er
             return
         }
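
All four hunks in cache/map.go fix the same slip: the batch call's error is assigned to a fresh local er, but the guard tested the still-nil named return err, so a failed batchCacheFn call was silently ignored. A small sketch of that failure mode follows, using hypothetical broken/fixed/fetch functions rather than the MapCache API. With named error returns, the local error and the return value are easy to mix up, which is exactly what the commit corrects.

package main

import (
    "errors"
    "fmt"
)

var errFetch = errors.New("fetch failed")

func fetch() (int, error) { return 0, errFetch }

// broken mirrors the old code: the result of fetch is stored in er,
// but the guard tests the named return err, which is still nil,
// so the failure is never propagated.
func broken() (err error) {
    _, er := fetch()
    if err != nil { // bug: should test er
        err = er
        return
    }
    return
}

// fixed tests the local er, as the commit now does.
func fixed() (err error) {
    _, er := fetch()
    if er != nil {
        err = er
        return
    }
    return
}

func main() {
    fmt.Println(broken()) // <nil>  — error swallowed
    fmt.Println(fixed())  // fetch failed
}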


@@ -152,13 +152,11 @@ func Slice[T any](arr []T, offset, length int) (r []T) {
         length = l - offset
     }
     if l > offset && l >= offset+length {
-        r = append(make([]T, 0, length), arr[offset:offset+length]...)
-        arr = append(arr[:offset], arr[offset+length:]...)
+        r = arr[offset : offset+length]
     } else if l <= offset {
         return
     } else if l > offset && l < offset+length {
-        r = append(make([]T, 0, length), arr[offset:]...)
-        arr = arr[:offset]
+        r = arr[offset:]
     }
     return
 }
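
The simplified Slice no longer copies into a fresh slice or shortens the input; it returns a sub-slice that shares arr's backing array, which is cheaper but means writes through the returned page are visible to the caller. A small sketch of those view semantics, assuming the clamping behaviour shown in the hunk (slicePage is an illustrative stand-in, not the repository's helper):

package main

import "fmt"

// slicePage mimics the new Slice semantics: clamp the window to the
// input and return a view that shares arr's backing array.
func slicePage[T any](arr []T, offset, length int) []T {
    l := len(arr)
    if offset >= l {
        return nil
    }
    if offset+length > l {
        length = l - offset
    }
    return arr[offset : offset+length]
}

func main() {
    ids := []uint64{1, 2, 3, 4, 5}
    page := slicePage(ids, 2, 500) // {3, 4, 5}
    page[0] = 99                   // writes through to ids[2]; no copy is made
    fmt.Println(ids)               // [1 2 99 4 5]
}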


@@ -8,6 +8,10 @@ func Splice[T any](a *[]T, offset, length int, replacement []T) []T {
     }
     if offset >= 0 {
         if offset+length > l {
+            if offset == 0 {
+                *a = []T{}
+                return arr[:l]
+            }
             return nil
         } else if l > offset && l < offset+length {
             v := arr[offset:l]