new endpoint to return product list

This commit is contained in:
Daniel Goc
2026-03-18 11:39:18 +01:00
parent a0dcb56fda
commit 6cebcacb5d
23 changed files with 1243 additions and 66 deletions

View File

@@ -0,0 +1,150 @@
package filters
import (
"fmt"
"strconv"
"strings"
"time"
"gorm.io/gorm"
)
// FilterFunction is a scope-style transformation applied to a gorm query.
type FilterFunction = func(*gorm.DB) *gorm.DB
// Where builds a WHERE_FILTER Filter that applies db.Where with the given
// statement and bind arguments.
func Where(statement string, args ...interface{}) Filter {
	return Filter{
		category: WHERE_FILTER,
		filter: func(db *gorm.DB) *gorm.DB {
			return db.Where(statement, args...)
		},
	}
}
// StructToWhereScope builds a WHERE_FILTER Filter that lets gorm derive the
// WHERE clause from the non-zero fields of the given model struct.
func StructToWhereScope[T any](model T) Filter {
	return Filter{
		category: WHERE_FILTER,
		filter: func(db *gorm.DB) *gorm.DB {
			return db.Where(model)
		},
	}
}
// Order builds an ORDER_FILTER Filter sorting by field, descending when desc
// is true and ascending otherwise.
func Order(field string, desc bool) Filter {
	// Resolve the ORDER BY expression once; the closure only captures the
	// final string.
	expression := field
	if desc {
		expression = field + " DESC"
	}
	return Filter{
		category: ORDER_FILTER,
		filter: func(d *gorm.DB) *gorm.DB {
			return d.Order(expression)
		},
	}
}
// WhereFromStrings builds a Filter from the string parts of a query
// condition. Recognized value shapes, in priority order:
//   - "~text"            -> case-insensitive LIKE %text% (LIKE_FILTER)
//   - "[from,to]"        -> BETWEEN; bounds parsed as 2006-01-02 dates when
//     possible, otherwise used verbatim
//   - integer/float/bool -> typed comparison using conditionOperator
//   - anything else      -> string comparison using conditionOperator
//
// Columns containing "count(" produce a HAVING clause instead of WHERE.
//
// NOTE: column and conditionOperator are interpolated into the SQL text and
// MUST come from trusted sources (e.g. MapParamsKeyToDbColumn); only value is
// bound as a query parameter.
func WhereFromStrings(column, conditionOperator, value string) Filter {
	if strings.HasPrefix(value, "~") {
		// Strip only the leading marker; inner '~' characters belong to the
		// searched text (ReplaceAll previously dropped all of them, so a
		// search for "a~b" matched "ab" instead).
		needle := strings.TrimPrefix(value, "~")
		return Filter{
			category: LIKE_FILTER,
			filter: func(d *gorm.DB) *gorm.DB {
				return d.Where("lower("+column+`) LIKE lower(?)`, "%"+needle+"%")
			},
		}
	}
	if strings.Contains(value, "]") && strings.Contains(value, "[") {
		period := strings.ReplaceAll(value, "[", "")
		period = strings.ReplaceAll(period, "]", "")
		if vals := strings.Split(period, ","); len(vals) == 2 {
			from, errA := time.Parse("2006-01-02", vals[0])
			to, errB := time.Parse("2006-01-02", vals[1])
			if errA == nil && errB == nil {
				// Both bounds are dates: re-format to normalize them.
				return Filter{
					category: WHERE_FILTER,
					filter: func(d *gorm.DB) *gorm.DB {
						return d.Where(column+` BETWEEN ? AND ?`, from.Format("2006-01-02"), to.Format("2006-01-02"))
					},
				}
			}
			// Not dates: bind the raw bounds as-is.
			return Filter{
				category: WHERE_FILTER,
				filter: func(d *gorm.DB) *gorm.DB {
					return d.Where(column+` BETWEEN ? AND ?`, vals[0], vals[1])
				},
			}
		}
		// Brackets present but not exactly two comma-separated values:
		// fall through to the generic handling below (previous behavior).
	}
	if conditionOperator == "LIKE" {
		value = fmt.Sprintf("%%%s%%", value)
	}
	// Aggregate columns must be filtered with HAVING, not WHERE.
	// In the future add more grouping functions here.
	if strings.Contains(strings.ToLower(column), "count(") {
		return Filter{
			category: WHERE_FILTER,
			filter: func(d *gorm.DB) *gorm.DB {
				return d.Having(column+` `+conditionOperator+` ?`, value)
			},
		}
	}
	// Bind the most specific type the value parses as, so the database
	// compares numbers and booleans natively instead of as strings.
	if i, err := strconv.ParseInt(value, 10, 64); err == nil {
		return Filter{
			category: WHERE_FILTER,
			filter: func(d *gorm.DB) *gorm.DB {
				return d.Where(column+` `+conditionOperator+` ?`, i)
			},
		}
	}
	if f, err := strconv.ParseFloat(value, 64); err == nil {
		return Filter{
			category: WHERE_FILTER,
			filter: func(d *gorm.DB) *gorm.DB {
				return d.Where(column+` `+conditionOperator+` ?`, f)
			},
		}
	}
	if b, err := strconv.ParseBool(value); err == nil {
		return Filter{
			category: WHERE_FILTER,
			filter: func(d *gorm.DB) *gorm.DB {
				return d.Where(column+` `+conditionOperator+` ?`, b)
			},
		}
	}
	return Filter{
		category: WHERE_FILTER,
		filter: func(d *gorm.DB) *gorm.DB {
			return d.Where(column+` `+conditionOperator+` ?`, value)
		},
	}
}

View File

@@ -0,0 +1,107 @@
package filters
import (
"fmt"
"github.com/samber/lo"
"gorm.io/gorm"
)
// Category of a Filter. Use one of the constants declared in this package to
// instantiate the type.
type filterCategory = string

// Enumeration of known types of filters. The assumption is that all filters
// belonging to a single category (type) can be used together at a particular
// step in the query process.
const (
	// Should be safe to use at any step of longer query series to reduce the
	// number of results. If it is not, choose a different filter type.
	WHERE_FILTER filterCategory = "where"
	// A LIKE filter.
	// NOTE(review): shares the "where" value with WHERE_FILTER, so
	// OfCategory(WHERE_FILTER) also returns LIKE filters — confirm this is
	// intentional.
	LIKE_FILTER filterCategory = "where"
	// An ORDER BY clause which can be used at any final step of a complex
	// query to change the order of results.
	ORDER_FILTER filterCategory = "order"
	// TODO: document the special case of filters on products
	FEAT_VAL_PRODUCT_FILTER filterCategory = "featval_product"
)
// Filter couples a gorm scope function with the category describing at which
// query stage it may be applied.
type Filter struct {
	category filterCategory
	filter   func(*gorm.DB) *gorm.DB
}

// NewFilter constructs a Filter of the given category wrapping filter.
func NewFilter(category filterCategory, filter func(*gorm.DB) *gorm.DB) Filter {
	return Filter{
		category: category,
		filter:   filter,
	}
}
// FiltersList is an ordered collection of Filters gathered while parsing a
// request, later applied to gorm queries by category or all at once.
type FiltersList struct {
	filters []Filter
}

// NewFiltersList returns an empty list.
func NewFiltersList() FiltersList {
	return FiltersList{
		// we allocate some extra space beforehand to reduce the overhead of resizing
		filters: make([]Filter, 0, 3),
	}
}

// NewListWithFilter returns a list seeded with the single given filter.
func NewListWithFilter(filt Filter) FiltersList {
	l := NewFiltersList()
	l.filters = append(l.filters, filt)
	return l
}
// NewFilter constructs a Filter of the given category and appends it.
func (f *FiltersList) NewFilter(category filterCategory, filter func(*gorm.DB) *gorm.DB) {
	f.filters = append(f.filters, NewFilter(category, filter))
}

// Append adds the given filters to the end of the list, preserving order.
func (f *FiltersList) Append(filter ...Filter) {
	f.filters = append(f.filters, filter...)
}
// All returns every stored filter as a plain []func(*gorm.DB) *gorm.DB,
// regardless of category.
func (f *FiltersList) All() []func(*gorm.DB) *gorm.DB {
	return lo.Map(f.filters, func(entry Filter, _ int) func(*gorm.DB) *gorm.DB {
		return entry.filter
	})
}

// OfCategory returns only the filter functions registered under cat.
func (f *FiltersList) OfCategory(cat filterCategory) []func(*gorm.DB) *gorm.DB {
	// Filter and project in a single pass.
	return lo.FilterMap(f.filters, func(entry Filter, _ int) (func(*gorm.DB) *gorm.DB, bool) {
		return entry.filter, entry.category == cat
	})
}
// ApplyAll registers every stored filter as a scope on d.
func (f *FiltersList) ApplyAll(d *gorm.DB) {
	d.Scopes(f.All()...)
}

// Apply registers only the filters of category cat as scopes on d.
func (f *FiltersList) Apply(d *gorm.DB, cat filterCategory) {
	d.Scopes(f.OfCategory(cat)...)
}

// Merge appends all filters from another list, preserving their order.
func (f *FiltersList) Merge(another FiltersList) {
	f.filters = append(f.filters, another.filters...)
}
// String implements fmt.Stringer for debug display. Categories are listed in
// order of their first appearance so the output is deterministic; the
// previous implementation ranged over a lo.GroupBy map, which printed the
// categories in random order on every call.
func (f FiltersList) String() string {
	counts := make(map[string]int, len(f.filters))
	var order []string
	for _, flt := range f.filters {
		if _, seen := counts[flt.category]; !seen {
			order = append(order, flt.category)
		}
		counts[flt.category]++
	}
	res := "FiltersList{"
	for _, cat := range order {
		res += fmt.Sprintf(" \"%s\": %d filters", cat, counts[cat])
	}
	res += " }"
	return res
}

View File

@@ -0,0 +1,159 @@
package find
import (
"errors"
"reflect"
"strings"
"git.ma-al.com/goc_daniel/b2b/app/utils/i18n"
"gorm.io/gorm"
)
// Paging describes a 1-based page request parsed from query parameters.
type Paging struct {
	Page     uint `json:"page_number" example:"5"`
	Elements uint `json:"elements_per_page" example:"30"`
}

// Offset converts the 1-based page number into a row offset for SQL OFFSET.
// Page values of 0 and 1 both yield offset 0: previously Page == 0
// underflowed the uint subtraction and produced a negative offset.
func (p Paging) Offset() int {
	if p.Page <= 1 {
		return 0
	}
	return int(p.Elements) * int(p.Page-1)
}

// Limit returns the page size as an int for SQL LIMIT.
func (p Paging) Limit() int {
	return int(p.Elements)
}
// Found wraps one page of query results together with the total number of
// matching rows (ignoring LIMIT) and an optional column specification map.
type Found[T any] struct {
	Items []T                    `json:"items,omitempty"`
	Count uint                   `json:"items_count" example:"56"`
	Spec  map[string]interface{} `json:"spec,omitempty"`
}
// Paginate wraps the given query with LIMIT, OFFSET and the
// SQL_CALC_FOUND_ROWS clause, then reads the total row count (ignoring
// LIMIT) that the registered callback stored in the statement context under
// FOUND_ROWS_CTX_KEY. Results are wrapped into [find.Found] together with a
// column spec generated for type T.
func Paginate[T any](langID uint, paging Paging, stmt *gorm.DB) (Found[T], error) {
	var page []T
	if err := stmt.
		Clauses(SqlCalcFound()).
		Offset(paging.Offset()).
		Limit(paging.Limit()).
		Find(&page).
		Error; err != nil {
		return Found[T]{}, err
	}
	raw, ok := stmt.Get(FOUND_ROWS_CTX_KEY)
	if !ok {
		return Found[T]{}, errors.New(FOUND_ROWS_CTX_KEY + " value was not found in the gorm db context")
	}
	total, ok := raw.(uint64)
	if !ok {
		return Found[T]{}, errors.New("failed to cast value under " + FOUND_ROWS_CTX_KEY + " to uint64")
	}
	return Found[T]{
		Items: page,
		Count: uint(total),
		Spec: map[string]interface{}{
			"columns": GetColumnsSpec[T](langID),
		},
	}, nil
}
// GetColumnsSpec generates a column specification map for struct type T.
// Each key is the JSON property name, and the value is a map containing:
//   - "filter_type": suggested filter type based on the field type or the
//     `filt` tag (set `filt:"none"` to disable filtering for a field)
//   - "sortable"/"display"/"hidden"/"title": read from the matching tags
//   - "order": position of the field as it appears in the struct
//
// Returns nil if T is not a struct.
func GetColumnsSpec[T any](langID uint) map[string]map[string]interface{} {
	typ := reflect.TypeOf((*T)(nil)).Elem()
	if typ.Kind() != reflect.Struct {
		return nil
	}
	spec := make(map[string]map[string]interface{})
	position := 1
	processStructFields(langID, typ, spec, &position)
	return spec
}
// FilterType names the filter widget suggested for a column in the spec.
type FilterType string

const (
	FilterTypeRange     FilterType = "range"
	FilterTypeTimerange FilterType = "timerange"
	FilterTypeLike      FilterType = "like"
	FilterTypeSwitch    FilterType = "switch"
	FilterTypeNone      FilterType = "none"
)

// isValidFilterType reports whether ft names a usable filter type from the
// enumeration above. "none" and unknown strings are not considered valid.
func isValidFilterType(ft string) bool {
	t := FilterType(ft)
	return t == FilterTypeRange || t == FilterTypeTimerange ||
		t == FilterTypeLike || t == FilterTypeSwitch
}
// processStructFields recursively walks typ's fields and fills result with a
// spec entry per JSON-visible field. It handles `json:",inline"` structs by
// recursing into them, reads the `filt`, `sortable`, `title`, `display` and
// `hidden` tags, and derives a filter type from the Go type when no `filt`
// tag is present. *order is incremented for each emitted field to track
// field ordering across recursion.
func processStructFields(langID uint, typ reflect.Type, result map[string]map[string]interface{}, order *int) {
	for i := 0; i < typ.NumField(); i++ {
		field := typ.Field(i)
		jsonTag := field.Tag.Get("json")
		// Fields without a json tag (or explicitly excluded with "-") are not
		// part of the API payload, so they get no spec entry.
		if jsonTag == "" || jsonTag == "-" {
			continue
		}
		propName := strings.Split(jsonTag, ",")[0]
		if propName == "" {
			propName = field.Name
		}
		// Inlined embedded structs contribute their own fields directly,
		// sharing the same order counter.
		if strings.Contains(jsonTag, ",inline") && field.Type.Kind() == reflect.Struct {
			processStructFields(langID, field.Type, result, order)
			continue
		}
		filterType := field.Tag.Get("filt")
		if filterType != "" {
			// An unrecognized tag value disables filtering instead of failing.
			if !isValidFilterType(filterType) {
				filterType = string(FilterTypeNone)
			}
		} else {
			// No tag: infer a filter widget from the field's Go type name.
			// NOTE(review): HasPrefix "int" also matches "interface {}" —
			// confirm no such fields occur in spec'd models.
			fieldType := field.Type.String()
			switch {
			case strings.HasPrefix(fieldType, "int"), strings.HasPrefix(fieldType, "uint"), strings.HasPrefix(fieldType, "float"), strings.HasPrefix(fieldType, "decimal.Decimal"):
				filterType = string(FilterTypeRange)
			case strings.Contains(fieldType, "Time"):
				filterType = string(FilterTypeTimerange)
			case fieldType == "string":
				filterType = string(FilterTypeLike)
			case fieldType == "bool":
				filterType = string(FilterTypeSwitch)
			default:
				filterType = string(FilterTypeNone)
			}
		}
		result[propName] = map[string]interface{}{
			"filter_type": filterType,
			// "sortable" and "display" default to true when the tag is absent.
			"sortable": func() bool { val, ok := field.Tag.Lookup("sortable"); return !ok || val == "true" }(),
			"order":    *order,
			// Column title translated for the requested language.
			"title":   i18n.T___(langID, field.Tag.Get("title")),
			"display": func() bool { val, ok := field.Tag.Lookup("display"); return !ok || val == "true" }(),
			"hidden":  field.Tag.Get("hidden") == "true",
		}
		*order++
	}
}

View File

@@ -0,0 +1,46 @@
package find
import (
"errors"
"gorm.io/gorm"
)
const (
	// Key under which the result of `SELECT FOUND_ROWS()` is stored in the
	// gorm statement context by FoundRowsCallback.
	FOUND_ROWS_CTX_KEY = "maal:found_rows"
	// Suggested name under which [find.FoundRowsCallback] can be registered.
	// NOTE(review): shares its value with FOUND_ROWS_CTX_KEY — confirm this
	// is intentional.
	FOUND_ROWS_CALLBACK = "maal:found_rows"
)
// FoundRowsCallback searches query clauses for the presence of
// `SQL_CALC_FOUND_ROWS` and runs `SELECT FOUND_ROWS();` right after any query
// containing such a clause. The result is put in the statement context under
// [find.FOUND_ROWS_CTX_KEY]. For the callback to work correctly it must be
// registered and executed before the `gorm:preload` callback.
func FoundRowsCallback(d *gorm.DB) {
	// Only act when the sqlCalcFound marker clause was attached.
	if _, ok := d.Statement.Clauses["SELECT"].AfterNameExpression.(sqlCalcFound); !ok {
		return
	}
	sqlDB, err := d.DB()
	if err != nil {
		_ = d.AddError(err)
		return
	}
	res := sqlDB.QueryRowContext(d.Statement.Context, "SELECT FOUND_ROWS();")
	if res == nil {
		_ = d.AddError(errors.New(`failed to issue SELECT FOUND_ROWS() query`))
		return
	}
	if res.Err() != nil {
		_ = d.AddError(res.Err())
		return
	}
	var count uint64
	if err := res.Scan(&count); err != nil {
		_ = d.AddError(err)
		return
	}
	d.Set(FOUND_ROWS_CTX_KEY, count)
}

View File

@@ -0,0 +1,51 @@
package find
import (
"gorm.io/gorm"
"gorm.io/gorm/clause"
)
// sqlCalcFound is a marker clause that injects `SQL_CALC_FOUND_ROWS` right
// after `SELECT` in the generated SQL.
type sqlCalcFound struct{}

// SqlCalcFound creates a new clause which adds `SQL_CALC_FOUND_ROWS` right
// after `SELECT`. If [find.FoundRowsCallback] is registered, the presence of
// this clause will cause the `FOUND_ROWS()` result to be available in the
// driver context.
func SqlCalcFound() sqlCalcFound {
	return sqlCalcFound{}
}

// Name implements gorm's [clause.Clause].
func (sqlCalcFound) Name() string {
	return "SQL_CALC_FOUND_ROWS"
}

// Build implements gorm's [clause.Clause] by writing the raw keyword.
func (sqlCalcFound) Build(builder clause.Builder) {
	_, _ = builder.WriteString("SQL_CALC_FOUND_ROWS")
}

// MergeClause implements gorm's [clause.Clause]; there is nothing to merge.
func (sqlCalcFound) MergeClause(cl *clause.Clause) {
}
// ModifyStatement implements [gorm.StatementModifier]. It attaches the clause
// as the SELECT clause's AfterNameExpression, chaining after any expression
// that is already present, and never adds itself twice.
func (calc sqlCalcFound) ModifyStatement(stmt *gorm.Statement) {
	selectClause := stmt.Clauses["SELECT"]
	if selectClause.AfterNameExpression == nil {
		selectClause.AfterNameExpression = calc
	} else if _, ok := selectClause.AfterNameExpression.(sqlCalcFound); !ok {
		// Keep the existing expression and append ourselves after it.
		selectClause.AfterNameExpression = exprs{selectClause.AfterNameExpression, calc}
	}
	stmt.Clauses["SELECT"] = selectClause
}

// exprs renders several clause expressions separated by single spaces.
type exprs []clause.Expression

// Build implements [clause.Expression] by building each expression in order.
func (exprs exprs) Build(builder clause.Builder) {
	for idx, expr := range exprs {
		if idx > 0 {
			_ = builder.WriteByte(' ')
		}
		expr.Build(builder)
	}
}

View File

@@ -0,0 +1,43 @@
package query_params
import (
"fmt"
"reflect"
"strings"
mreflect "git.ma-al.com/goc_daniel/b2b/app/utils/reflect"
)
// MapParamsKeyToDbColumn attempts to map the provided key into a unique
// (table-prefixed) column name, using the following priority of sources:
//  1. The optional mapping argument. When a mapping is supplied, the key must
//     resolve through it; mapped values are returned verbatim (so they may
//     already carry a "table." prefix). There is no fallback to struct tags.
//  2. The json tags of the DEFAULT_TABLE_MODEL struct. A "table.field" key
//     supplies the table name explicitly; otherwise the table name is
//     derived from DEFAULT_TABLE_MODEL.
func MapParamsKeyToDbColumn[DEFAULT_TABLE_MODEL any](key string, mapping ...map[string]string) (string, error) {
	// Error message typo fixed ("appropiate" -> "appropriate").
	const ERR = "Failed to find appropriate mapping from form field to database column for key: '%s', and default table name: '%s'"
	if len(mapping) > 0 {
		if field, ok := mapping[0][key]; ok {
			return field, nil
		}
		return "", fmt.Errorf(ERR, key, mreflect.GetTableName[DEFAULT_TABLE_MODEL]())
	}
	var t DEFAULT_TABLE_MODEL
	if table, field, ok := strings.Cut(key, "."); ok {
		// Explicit "table.field" key: keep the given table name.
		if column, err := mreflect.GetGormColumnFromJsonField(field, reflect.TypeOf(t)); err == nil {
			return table + "." + column, nil
		}
		return "", fmt.Errorf(ERR, key, table)
	}
	table := mreflect.GetTableName[DEFAULT_TABLE_MODEL]()
	if column, err := mreflect.GetGormColumnFromJsonField(key, reflect.TypeOf(t)); err == nil {
		return table + "." + column, nil
	}
	return "", fmt.Errorf(ERR, key, table)
}

View File

@@ -0,0 +1,63 @@
package query_params
import (
"strconv"
"git.ma-al.com/goc_daniel/b2b/app/utils/query/filters"
"git.ma-al.com/goc_daniel/b2b/app/utils/query/find"
"github.com/gofiber/fiber/v3"
)
// FunctionalQueryParams lists query-parameter names with special meaning
// that must not be interpreted as column filters.
var FunctionalQueryParams = []string{
	// Used to specify the order of results
	"sort",
	// Used to specify the page of search results
	"p",
	// Used to specify the number of elements on a page
	"elems",
	// Used to specify allowed values of features on products
	"values",
}
// ParseFilters extracts pagination info plus all field/column based filters
// (where scopes and ordering rules) for model T from the request's query
// parameters. formColumnMapping optionally overrides the json-tag based
// mapping from form fields to database columns.
func ParseFilters[T any](c fiber.Ctx, formColumnMapping ...map[string]string) (find.Paging, *filters.FiltersList, error) {
	// Local renamed from "filters", which shadowed the imported filters
	// package; the variadic parameter typo ("Mappimg") is also fixed.
	fieldFilters, err := ParseFieldFilters[T](c, formColumnMapping...)
	if err != nil {
		return find.Paging{}, fieldFilters, err
	}
	pageNum, pageSize := ParsePagination(c)
	return find.Paging{Page: pageNum, Elements: pageSize}, fieldFilters, nil
}
// ParseFieldFilters parses field related filters for model T from the query
// params, producing where clauses and order rules.
func ParseFieldFilters[T any](c fiber.Ctx, formColumnMapping ...map[string]string) (*filters.FiltersList, error) {
	list := filters.NewFiltersList()
	list.Append(ParseWhereScopes[T](c, []string{}, formColumnMapping...)...)
	ord, err := ParseOrdering[T](c, formColumnMapping...)
	if err != nil {
		return &list, err
	}
	// err is necessarily nil past this point, so the previous per-iteration
	// `if err == nil` guard was dead code and has been removed.
	for i := range ord {
		list.Append(filters.Order(ord[i].Column, ord[i].IsDesc))
	}
	return &list, nil
}
// ParsePagination reads the "p" (1-based page number) and "elems" (page
// size) query parameters. Unparsable or non-positive values fall back to the
// defaults (page 1, 30 elements); previously parse errors were silently
// ignored, so "p=0" or garbage produced a zero page and a broken OFFSET.
// TODO: Add some source of defaults for pagination size here
func ParsePagination(c fiber.Ctx) (uint, uint) {
	pageNum, err := strconv.ParseInt(c.Query("p", "1"), 10, 64)
	if err != nil || pageNum < 1 {
		pageNum = 1
	}
	pageSize, err := strconv.ParseInt(c.Query("elems", "30"), 10, 64)
	if err != nil || pageSize < 1 {
		pageSize = 30
	}
	return uint(pageNum), uint(pageSize)
}

View File

@@ -0,0 +1,82 @@
package query_params
import (
"strings"
"github.com/gofiber/fiber/v3"
)
// Ordering is a single parsed sort rule: a fully qualified database column
// and the sort direction.
type Ordering struct {
	Column string
	IsDesc bool
}
// ParseOrdering parses the "sort" query parameter into a list of Ordering
// rules. Rules are separated by ';'; parsing stops at the first invalid
// rule, returning the rules gathered so far together with the error.
func ParseOrdering[T any](c fiber.Ctx, columnMapping ...map[string]string) ([]Ordering, error) {
	raw := c.Query("sort")
	if raw == "" {
		return []Ordering{}, nil
	}
	var parsed []Ordering
	for _, rule := range strings.Split(raw, ";") {
		ord, err := parseOrderingRule[T](rule, columnMapping...)
		if err != nil {
			return parsed, err
		}
		parsed = append(parsed, ord)
	}
	return parsed, nil
}
// parseOrderingRule parses a single "column[,direction]" sort rule. The
// direction defaults to descending; only an explicit "asc" sorts ascending.
// The column key is mapped to a qualified database column via
// MapParamsKeyToDbColumn.
func parseOrderingRule[T any](rule string, columnMapping ...map[string]string) (Ordering, error) {
	// The previous with-comma and without-comma branches were identical
	// except for the direction, so they are folded into one path:
	// strings.Cut yields descStr == "" when there is no comma, and any
	// value other than "asc" (including "", "desc" and unknown strings)
	// selects descending order — exactly the old behavior.
	key, descStr, _ := strings.Cut(rule, ",")
	col, err := MapParamsKeyToDbColumn[T](key, columnMapping...)
	if err != nil {
		return Ordering{}, err
	}
	return Ordering{
		Column: col,
		IsDesc: descStr != "asc",
	}, nil
}
// func addDefaultOrderingIfNeeded[T any](previousOrderings *[]Ordering, model T) {
// newOrderings := new([]Ordering)
// var t T
// if len(*previousOrderings) < 1 {
// if col, err := mreflect.GetGormColumnFromJsonField("id", reflect.TypeOf(t)); err == nil {
// *newOrderings = append(*newOrderings, Ordering{
// Column: mreflect.GetTableName[T]() + "." + col,
// IsDesc: true,
// })
// }
// if col, err := mreflect.GetGormColumnFromJsonField("iso_code", reflect.TypeOf(t)); err == nil {
// *newOrderings = append(*newOrderings, Ordering{
// Column: mreflect.GetTableName[T]() + "." + col,
// IsDesc: false,
// })
// }
// *newOrderings = append(*newOrderings, *previousOrderings...)
// *previousOrderings = *newOrderings
// }
// }

View File

@@ -0,0 +1,75 @@
package query_params
import (
"strings"
"git.ma-al.com/goc_daniel/b2b/app/utils/query/filters"
"github.com/gofiber/fiber/v3"
)
// ParseWhereScopes attempts to create where-scope query filters from url
// query params, skipping any key listed in ignoredKeys. It maps form fields
// to database column names using MapParamsKeyToDbColumn; keys that cannot be
// mapped are silently dropped.
func ParseWhereScopes[T any](c fiber.Ctx, ignoredKeys []string, formColumnMapping ...map[string]string) []filters.Filter {
	isIgnored := func(k string) bool {
		for _, ig := range ignoredKeys {
			if k == ig {
				return true
			}
		}
		return false
	}
	var parsed []filters.Filter
	//nolint
	for key, value := range c.Request().URI().QueryArgs().All() {
		keyStr, valStr := string(key), string(value)
		if isIgnored(keyStr) {
			continue
		}
		baseKey, operator := extractOperator(keyStr)
		col, err := MapParamsKeyToDbColumn[T](baseKey, formColumnMapping...)
		if err != nil {
			continue
		}
		if strings.HasPrefix(valStr, "~") {
			// "~"-prefixed values always become case-insensitive LIKE filters.
			parsed = append(parsed, filters.WhereFromStrings(col, "LIKE", valStr))
			continue
		}
		parsed = append(parsed, filters.WhereFromStrings(col, resolveOperator(operator), valStr))
	}
	return parsed
}
// extractOperator splits a query key such as "price_gte" into the base
// column key ("price") and the operator suffix without its underscore
// ("gte"). Keys without a recognized suffix are returned unchanged with an
// empty operator suffix.
func extractOperator(key string) (base string, operatorSuffix string) {
	for _, suf := range [...]string{"_gt", "_gte", "_lt", "_lte", "_eq", "_neq"} {
		if strings.HasSuffix(key, suf) {
			return key[:len(key)-len(suf)], suf[1:]
		}
	}
	return key, ""
}
// resolveOperator translates an operator suffix (as produced by
// extractOperator) into its SQL comparison operator. Unknown suffixes —
// including the empty one — resolve to LIKE.
func resolveOperator(suffix string) string {
	operators := map[string]string{
		"gt":  ">",
		"gte": ">=",
		"lt":  "<",
		"lte": "<=",
		"neq": "!=",
		"eq":  "=",
	}
	if op, ok := operators[suffix]; ok {
		return op
	}
	return "LIKE"
}

View File

@@ -0,0 +1,37 @@
package queryparser
import (
"regexp"
"strconv"
"strings"
"github.com/gofiber/fiber/v3"
)
// queryRe captures everything after the first '?' of the request URI.
// Compiled once at package level instead of on every ParseQuery call.
var queryRe = regexp.MustCompile(`\?(\w.+)$`)

// ParseQuery parses the raw query string into a map, coercing values that
// parse as integers or floats into int64/float64 and leaving everything else
// as strings. Keys that appear without '=' are stored as boolean true.
//
// Fix: pairs are now split with strings.Cut, so a value containing '='
// (e.g. "a=b=c") keeps its full value; the previous Split-based code mapped
// such keys to true and discarded the value.
//
// NOTE(review): this works on the raw URI, so values are not URL-decoded —
// confirm callers only rely on simple values, or switch to c.Queries().
func ParseQuery(c fiber.Ctx) map[string]interface{} {
	queryParams := map[string]interface{}{}
	match := queryRe.FindStringSubmatch(c.Request().URI().String())
	if len(match) != 2 {
		return queryParams
	}
	for _, pair := range strings.Split(match[1], "&") {
		key, val, hasValue := strings.Cut(pair, "=")
		if !hasValue {
			// A bare key with no '=' is treated as a boolean flag.
			queryParams[key] = true
			continue
		}
		if num, err := strconv.ParseInt(val, 10, 64); err == nil {
			queryParams[key] = num
		} else if f, err := strconv.ParseFloat(val, 64); err == nil {
			queryParams[key] = f
		} else {
			queryParams[key] = val
		}
	}
	return queryParams
}