Implement api/search
parent 020cccf258 · commit 0a24aa01e1
3 changed files with 70 additions and 41 deletions
app/views.py (72 changed lines)
@@ -1,15 +1,14 @@
 import csv
 import json
-import os
 import time
 
 from django.contrib import messages
 from werkzeug.utils import secure_filename
-from django.views.generic.base import TemplateView
+from django.views.generic.base import TemplateView, View
 from django.core.files.storage import default_storage
 from django.core.files.base import ContentFile
-from django.views.decorators.csrf import csrf_protect
-from django.http import FileResponse, HttpResponsePermanentRedirect, HttpResponse
+from django.core.paginator import Paginator
+from django.http import FileResponse, HttpResponsePermanentRedirect, HttpResponse, JsonResponse
 from django.urls import reverse
 import os.path as ospath
 from shutil import move
@@ -18,6 +17,7 @@ from os import makedirs
 from tempfile import gettempdir
 from .formats import *
 from .convert import reindex_data, refresh_data
+from .models import Person, RangesPeople, TaxaPeople, FieldsPeople, MethodsPeople, Range
 
 # Get temporary file storage
 UPLOAD_PATH = gettempdir()
@@ -161,6 +161,70 @@ class ConfigurationHomePageView(TemplateView):
         return context
 
 
+class SearchView(View):
+
+    def get(self, request):
+        page = int(self.request.GET.get('page', 1))
+        per_page = int(self.request.GET.get('per_page', 10))
+        q = self.request.GET.get('q', '').strip()
+        if not q or len(q) < 3:
+            query_set = Person.objects.all()
+        else:
+            query_set = Person.objects.filter(field_indexer__icontains=q.split(" "))
+
+        q_country = self.request.GET.get('country', '')
+        q_range = self.request.GET.get('range', '')
+        q_field = self.request.GET.get('field', '')
+        q_taxon = self.request.GET.get('taxon', '')
+        if len(q_country) > 2:
+            query_set = query_set.filter(country__icontains=q_country.strip().lower())
+        if len(q_range) > 2:
+            ranges_people = RangesPeople.objects.filter(range__name__icontains=q_range.strip().lower())
+            r_people_ids = [rp.person_id for rp in ranges_people]
+            query_set = query_set.filter(id__in=r_people_ids)
+        if len(q_field) > 2:
+            fields_people = FieldsPeople.objects.filter(field__name__icontains=q_field.strip().lower())
+            f_people_ids = [fp.person_id for fp in fields_people]
+            query_set = query_set.filter(id__in=f_people_ids)
+        if len(q_taxon) > 2:
+            taxa_people = TaxaPeople.objects.filter(taxon__name__icontains=q_taxon.strip().lower())
+            t_people_ids = [tp.person_id for tp in taxa_people]
+            query_set = query_set.filter(id__in=t_people_ids)
+        query_set = query_set.order_by('last_name')
+        paginator = Paginator(query_set, per_page).page(page)
+        filters = {
+            'country': [],
+            'range': [],
+            'field': [],
+            'taxon': [],
+        }
+        for p in paginator.object_list:
+            filters['country'].append(p.country)
+            for r in p.rangespeople_set.all():
+                filters['range'].append(r.name)
+            for r in p.fieldspeople_set.all():
+                filters['field'].append(r.name)
+            for r in p.taxapeople_set.all():
+                filters['taxon'].append(r.name)
+        filters = {
+            'country': sorted(set(filters['country'])),
+            'range': sorted(set(filters['range'])),
+            'field': sorted(set(filters['field'])),
+            'taxon': sorted(set(filters['taxon'])),
+        }
+
+        if len(query_set.all()) > len(paginator.object_list) and paginator.paginator.count == 1:
+            paginator.object_list = query_set.all()
+            paginator.count(len(paginator.object_list))
+        return_data = {
+            'items': [p.dict() for p in paginator.object_list],
+            'filters': filters,
+            'page': page, 'pages': paginator.paginator.num_pages, 'total': paginator.paginator.count,
+            'has_next': paginator.has_next(), 'has_prev': paginator.has_previous()
+        }
+        return JsonResponse(return_data)
+
+
 class UploadView(TemplateView):
     template_name = "app/admin/config.html"
 
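For reference, a minimal sketch of the django.core.paginator pattern the new SearchView builds on. The paginate() helper below is hypothetical and not part of this commit; it only illustrates that Paginator.page() returns a Page object whose object_list, paginator.count, paginator.num_pages, has_next() and has_previous() supply the values the view serialises into its JSON response.

from django.core.paginator import Paginator

def paginate(queryset, page=1, per_page=10):
    # Hypothetical helper: shows the Page attributes SearchView reads.
    page_obj = Paginator(queryset, per_page).page(page)
    return {
        'items': list(page_obj.object_list),    # records on the requested page
        'page': page,
        'pages': page_obj.paginator.num_pages,  # total page count
        'total': page_obj.paginator.count,      # total matching records
        'has_next': page_obj.has_next(),
        'has_prev': page_obj.has_previous(),
    }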
@@ -18,48 +18,12 @@ from django.urls import path
 from django.views.generic import TemplateView
 from app.views import (
     HomePageView, OfflinePageView, send_from_file, DemoPageView, ConfigurationPageView, get_progress, UploadView,
-    ReindexPageView, RefreshPageView, ConfigurationHomePageView
+    ReindexPageView, RefreshPageView, ConfigurationHomePageView, SearchView
 )
 
 
 FILTER_QUERIES = [ 'country', 'range', 'field', 'taxon' ]
 
-
-def get_paginated(query, request):
-    page = int(request.args.get('page', 1))
-    per_page = int(request.args.get('per_page', 10))
-    ppp = query.paginate(page, per_page, error_out=False)
-    filters = {
-        'country': [],
-        'range': [],
-        'field': [],
-        'taxon': [],
-    }
-    for p in ppp.items:
-        filters['country'].append(p.country)
-        for r in p.ranges:
-            filters['range'].append(r.name)
-        for r in p.research_fields:
-            filters['field'].append(r.name)
-        for r in p.research_taxa:
-            filters['taxon'].append(r.name)
-    filters = {
-        'country': sorted(set(filters['country'])),
-        'range': sorted(set(filters['range'])),
-        'field': sorted(set(filters['field'])),
-        'taxon': sorted(set(filters['taxon'])),
-    }
-    if len(query.all()) > len(ppp.items) and ppp.pages == 1:
-        ppp.items = query.all()
-        ppp.total = len(ppp.items)
-    return {
-        'items': [p.dict() for p in ppp.items],
-        'filters': filters,
-        'page': page, 'pages': ppp.pages, 'total': ppp.total,
-        'has_next': ppp.has_next, 'has_prev': ppp.has_prev
-    }
-
-
 urlpatterns = [
     path('', HomePageView.as_view(), name='home'),
     path('offline/', OfflinePageView.as_view(), name='offline'),
@@ -73,6 +37,7 @@ urlpatterns = [
     path('progress', get_progress, name='progress'),
     path('reindex', ReindexPageView.as_view(), name='reindex'),
    path('refresh', RefreshPageView.as_view(), name='refresh'),
+    path('api/search', SearchView.as_view(), name='api-search'),
 
 
     # TODO Implement the following
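A rough example of querying the new endpoint once the route above is deployed. The base URL and parameter values are assumptions; the parameter names (q, page, per_page, country, range, field, taxon) and response keys (items, filters, page, pages, total, has_next, has_prev) come from the SearchView diff.

import requests

# Assumed local development server; adjust the base URL to the actual deployment.
resp = requests.get(
    'http://localhost:8000/api/search',
    params={'q': 'ornithology', 'country': 'kenya', 'page': 1, 'per_page': 10},
)
data = resp.json()
print(data['total'], 'people across', data['pages'], 'pages')
for person in data['items']:
    print(person)
print('available filters:', data['filters'])

Note that, per the view, a q shorter than three characters falls back to all Person records, and each of country, range, field and taxon is only applied when its value is longer than two characters.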