I have a large filter, of which I will provide a snippet. I am trying to pass a map of filters together with pagination, but I get an error reading:
the match filter must be an expression in an object
The getFilter function:
func (app *Courses) getFilter(filter *Filter) ([]bson.M, error) {
    pipeline := make([]bson.M, 0)
    if filter.All {
        // Include all items
    } else {
        // Filter items based on the provided criteria
        if filter.Beginner {
            pipeline = append(pipeline, bson.M{"tags": "beginner"})
        }
        if filter.Advanced {
            pipeline = append(pipeline, bson.M{"tags": "advanced"})
        }
        if filter.Go {
            pipeline = append(pipeline, bson.M{"tags": "go"})
        }
    }
    return pipeline, nil
}
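To illustrate the shape this produces (a sketch, not output from the actual app): with Beginner and Go selected, the function returns a slice of two separate condition documents rather than one combined filter document.

package main

import (
    "fmt"

    "go.mongodb.org/mongo-driver/bson"
)

func main() {
    // Sketch of what getFilter returns for Beginner=true, Go=true:
    // a []bson.M holding separate documents, not a single bson.M.
    conditions := []bson.M{
        {"tags": "beginner"},
        {"tags": "go"},
    }
    fmt.Println(conditions) // [map[tags:beginner] map[tags:go]]
}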
The handler:
func (app *Courses) CoursesAllHandler(w http.ResponseWriter, r *http.Request) {
    ctx := context.Background()
    clog := log.GetLoggerFromContext(ctx)
    p := r.URL.Query().Get("page")
    ps := r.URL.Query().Get("pageSize")
    var filter Filter
    err := json.NewDecoder(r.Body).Decode(&filter)
    if err != nil {
        http.Error(w, "Failed to parse request body", http.StatusBadRequest)
        return
    }
    pipeline := make([]bson.M, 0)
    page, _ := strconv.Atoi(p)
    pageSize, _ := strconv.Atoi(ps)
    // Pagination
    skip := (page - 1) * pageSize
    limit := pageSize
    // Add filter
    pipeline, err = app.getFilter(&filter)
    if err != nil {
        clog.Error(err)
    }
    pipeline = append(pipeline, bson.M{"$match": pipeline})
    // Add pagination stages to the pipeline
    pipeline = append(pipeline, bson.M{"$skip": skip})
    pipeline = append(pipeline, bson.M{"$limit": limit})
    res, err := app.repo.GetAll(ctx, pipeline)
    if err != nil {
        clog.Error(err)
        return
    }
    err = app.helper.WriteJSON(w, http.StatusOK, envelope{"data": res, "metadata": "none"}, nil)
    if err != nil {
        clog.ErrorCtx(err, log.Ctx{
            "header":      w.Header(),
            "request_url": r.URL.String(),
        })
    }
}
How can I take the fields that are set to true or false, put them into a map, and submit that in the query so it matches against the DB, the way I am attempting here?
// Add filter
pipeline, err = app.getFilter(&filter)
if err != nil {
    clog.Error(err)
}
pipeline = append(pipeline, bson.M{"$match": pipeline})
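The stage appended on the last line wraps the whole slice, so what gets sent under $match is an array rather than a single document, which is what the error message complains about. Roughly (illustration only; a fragment in the same package as the snippet above):

// Illustration: the stage built by bson.M{"$match": pipeline} when the
// pipeline slice holds the two condition documents from getFilter.
// $match expects a single document, so an array here is rejected.
stage := bson.M{
    "$match": []bson.M{
        {"tags": "beginner"},
        {"tags": "go"},
    },
}
_ = stage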
- Update -
I now have:
func (app *Courses) CoursesAllHandler(w http.ResponseWriter, r *http.Request) {
    ctx := context.Background()
    clog := log.GetLoggerFromContext(ctx)
    var filter Filter
    err := json.NewDecoder(r.Body).Decode(&filter)
    if err != nil {
        http.Error(w, "Failed to parse request body", http.StatusBadRequest)
        return
    }
    filter.All = true
    pipeline := make([]bson.M, 3)
    // Add filter
    matches, err := app.getFilter(&filter)
    if err != nil {
        clog.Error(err)
    }
    pipeline[0] = bson.M{"$skip": 1}
    pipeline[1] = bson.M{"$limit": 5}
    pipeline[2] = bson.M{"$match": matches}
    res, err := app.repo.GetAll(ctx, pipeline)
    if err != nil {
        clog.Error(err)
        return
    }
    err = app.helper.WriteJSON(w, http.StatusOK, envelope{"data": res, "metadata": "none"}, nil)
    if err != nil {
        clog.ErrorCtx(err, log.Ctx{
            "header":      w.Header(),
            "request_url": r.URL.String(),
        })
    }
}
The filter now looks like this:
func (app *Courses) getFilter(filter *Filter) (bson.M, error) {
    match := bson.M{}
    tags := []string{}
    if filter.All {
        // Include all items
        tags = append(tags, "beginner")
        tags = append(tags, "intermediate")
        .....
    } else {
        // Filter items based on the provided criteria
        if filter.Beginner {
            tags = append(tags, "beginner")
        }
        if filter.Advanced {
            tags = append(tags, "advanced")
        }
        if filter.Go {
            tags = append(tags, "go")
        }
        ........
    }
    match = bson.M{
        "tags": bson.M{"$in": tags},
    }
    return match, nil
}
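With this version the function returns a single document, so bson.M{"$match": matches} is at least structurally valid. For example, with All set it returns something along these lines (sketch; the full tag list is elided above):

// Sketch of the returned match document for filter.All == true.
match := bson.M{
    "tags": bson.M{"$in": []string{"beginner", "intermediate" /* further tags elided */}},
}
_ = match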
It is later used here:
func (r *CourseRepo) GetAll(ctx context.Context, pipeline []bson.M) ([]Course, error) {
    clog := log.GetLoggerFromContext(ctx)
    cur, err := r.collection.Aggregate(ctx, pipeline)
    ...
But the result is empty. Everything in the filter is selected, and there are no errors.
1 Answer
You are getting "the match filter must be an expression in an object" because $match expects an object (bson.M), but you gave it a slice of objects ([]bson.M). Try this:
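Following that suggestion, a minimal sketch of one way to rework the original handler is to collapse the condition slice into a single object (for example with $and) and only add the $match stage when there is something to match. This reuses skip, limit, clog, app and filter from the handler above and is an illustration, not necessarily the exact fix the answer had in mind. Note also that stages run in the order they appear, so matching before $skip/$limit keeps pagination applied to the filtered result:

// Sketch: build $match as a single object from the []bson.M conditions.
conditions, err := app.getFilter(&filter) // original getFilter returning []bson.M
if err != nil {
    clog.Error(err)
}

pipeline := make([]bson.M, 0, 3)
if len(conditions) > 0 {
    // $and takes an array of condition documents, so the slice can be
    // embedded as-is; the value of $match itself is now a single object.
    pipeline = append(pipeline, bson.M{"$match": bson.M{"$and": conditions}})
}
// Paginate after filtering; stages execute in order.
pipeline = append(pipeline, bson.M{"$skip": skip})
pipeline = append(pipeline, bson.M{"$limit": limit})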