tags dynamic filters #3

Kuroshini 2019-12-03 22:35:38 +03:00
parent 013cd864c2
commit 9c84842c11


@@ -4,7 +4,7 @@ from django_elasticsearch_dsl_drf.filter_backends import SearchFilterBackend, \
     FacetedSearchFilterBackend, GeoSpatialFilteringFilterBackend
 from search_indexes.utils import OBJECT_FIELD_PROPERTIES
 from six import iteritems
-from functools import reduce
+from tag.models import TagCategory


 class CustomGeoSpatialFilteringFilterBackend(GeoSpatialFilteringFilterBackend):
@@ -92,7 +92,7 @@ class CustomFacetedSearchFilterBackend(FacetedSearchFilterBackend):
                     'global'
                 ).bucket(__field, agg)
             else:
-                if __field != 'tag' or not request.query_params.getlist('tags_id__in'):
+                if __field != 'tag':
                     qs = queryset.__copy__()
                     qs.query = queryset.query._clone()
                     filterer = make_filter(__facet)
@@ -116,9 +116,11 @@ class CustomFacetedSearchFilterBackend(FacetedSearchFilterBackend):
                     view.paginator.facets_computed.update({facet_name: qs.execute().aggregations[facet_name]})
                 else:
                     tag_facets = []
+                    preserve_ids = []
                     facet_name = '_filter_' + __field
-                    for category_tags_ids in request.query_params.getlist('tags_id__in'):
-                        tags_to_remove = category_tags_ids.split('__')
+                    for category in TagCategory.objects.prefetch_related('tags').filter(
+                            public=True, value_type=TagCategory.LIST):
+                        tags_to_remove = list(map(lambda t: t.id, category.tags.all()))
                         qs = queryset.__copy__()
                         qs.query = queryset.query._clone()
                         filterer = make_tags_filter(__facet, tags_to_remove)
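
Note: the rewritten loop derives each category's tag ids from the database instead of the tags_id__in query parameter. Below is a minimal, hypothetical sketch of the model shape that query assumes, with field names read off the diff; the real tag app may differ.

# Hypothetical models, inferred from the query in the diff above.
from django.db import models


class TagCategory(models.Model):
    LIST = 'list'                      # referenced as TagCategory.LIST in the backend
    VALUE_TYPES = ((LIST, 'List'),)

    name = models.CharField(max_length=255)
    public = models.BooleanField(default=True)
    value_type = models.CharField(max_length=32, choices=VALUE_TYPES)


class Tag(models.Model):
    # related_name='tags' is what makes category.tags.all() work
    category = models.ForeignKey(TagCategory, related_name='tags',
                                 on_delete=models.CASCADE)
    name = models.CharField(max_length=255)
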
@@ -140,32 +142,19 @@ class CustomFacetedSearchFilterBackend(FacetedSearchFilterBackend):
                             filter=agg_filter
                         ).bucket(__field, agg)
                         tag_facets.append(qs.execute().aggregations[facet_name])
-                    view.paginator.facets_computed.update({facet_name: self.merge_buckets(tag_facets)})
+                        preserve_ids.append(tags_to_remove)
+                    view.paginator.facets_computed.update({facet_name: self.merge_buckets(tag_facets, preserve_ids)})
         return queryset

     @staticmethod
-    def merge_buckets(buckets: list):
+    def merge_buckets(buckets: list, preserve_ids: list):
         """Reduces all buckets preserving class"""
         result_bucket = buckets[0]
-        for bucket in buckets[1:]:
+        result_bucket.tag.buckets = list(filter(lambda x: x['key'] in preserve_ids[0], result_bucket.tag.buckets._l_))
+        for bucket, ids in list(zip(buckets, preserve_ids))[1:]:
             for tag in bucket.tag.buckets._l_:
-                result_bucket.tag.buckets.append(tag)
-
-        def reducer(prev, cur):
-            """Unique by key"""
-            if not len(list(filter(lambda x: x['key'] == cur['key'], prev))):
-                prev.append(cur)
-            return prev
-
-        buckets_count = len(buckets)
-        result_bucket.tag.buckets = list(filter(lambda t: t is not None, [
-            tag if len(list(filter(lambda t: t['key'] == tag['key'], result_bucket.tag.buckets._l_))) == buckets_count else None
-            for tag in result_bucket.tag.buckets._l_]))  # here we drop tags that are not present in every bucket
-        result_bucket.tag.buckets = list(reduce(
-            reducer,
-            result_bucket.tag.buckets._l_,
-            []
-        ))
+                if tag['key'] in ids:
+                    result_bucket.tag.buckets.append(tag)
         return result_bucket
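
After this change, merge_buckets keeps, for each per-category facet response, only the tag buckets whose keys appear in that category's id list, and concatenates the survivors onto the first response. A rough, self-contained sketch of that merge, with plain lists and dicts standing in for the elasticsearch-dsl aggregation objects (names below are illustrative only, not the project's API):

# Illustrative stand-in: each "response" is a list of tag buckets; each entry
# of preserve_ids is the list of tag ids belonging to that response's category.
def merge_tag_buckets(responses, preserve_ids):
    # keep only the first response's buckets that belong to its own category
    merged = [b for b in responses[0] if b['key'] in preserve_ids[0]]
    # then append the matching buckets from every other response
    for buckets, ids in list(zip(responses, preserve_ids))[1:]:
        merged.extend(b for b in buckets if b['key'] in ids)
    return merged

# Example: two categories with ids {1, 2} and {5}; tag 9 belongs to neither
# category, so it is dropped from both responses.
resp_a = [{'key': 1, 'doc_count': 4}, {'key': 9, 'doc_count': 2}]
resp_b = [{'key': 5, 'doc_count': 7}, {'key': 9, 'doc_count': 1}]
print(merge_tag_buckets([resp_a, resp_b], [[1, 2], [5]]))
# -> [{'key': 1, 'doc_count': 4}, {'key': 5, 'doc_count': 7}]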