import csv
import json
import time

from django.contrib import messages
from werkzeug.utils import secure_filename
from django.views.generic.base import TemplateView, View
from django.core.files.storage import default_storage
from django.core.files.base import ContentFile
from django.core.paginator import Paginator
from django.http import FileResponse, HttpResponsePermanentRedirect, HttpResponse, JsonResponse
from django.urls import reverse
from django.shortcuts import get_object_or_404

import operator
from django.db.models import Q
from functools import reduce

import os.path as ospath
from shutil import move
from os import makedirs
from tempfile import gettempdir

from .formats import *
from .convert import reindex_data, refresh_data
from .models import Person, PeopleRange, TaxaPeople, FieldsPeople, MethodsPeople, MountainRange, Field, Taxon, Resource

# Get temporary file storage
UPLOAD_PATH = gettempdir()

# Data files live one level above this app's directory
DATA_PATH = ospath.join(ospath.dirname(__file__), '..')
if not ospath.exists(DATA_PATH):
    makedirs(DATA_PATH)


def get_datafile(fmt):
    # Build the full path of the data file described by a DATAFORMATS entry
    return ospath.join(
        DATA_PATH,
        fmt['folder'],
        fmt['filename'] + '.' + fmt['extension']
    )

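# Note (sketch, not the authoritative definition): get_datafile() and the views
# below assume each DATAFORMATS entry from .formats is a dict carrying at least
# the keys used in this module, roughly of the form
#   {'dataformat': 'people', 'folder': 'data', 'filename': 'people', 'extension': 'csv'}
# The concrete entries and values are defined in .formats, not here.
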
# Data update tracking
c_progress = 0
c_filename = ""


class HomePageView(TemplateView):
    template_name = "app/index.html"

    def get_context_data(self, **kwargs):
        # Flag each data format as ready if its file is present on disk.
        # Note: fmts is the shared DATAFORMATS list, so the 'ready' flags are
        # written onto the module-level format definitions themselves.
        fmts = DATAFORMATS
        for f in fmts:
            f['ready'] = ospath.isfile(get_datafile(f))
        context = super().get_context_data(**kwargs)
        return context


class OfflinePageView(TemplateView):
    template_name = "app/offline.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        return context


class DemoPageView(TemplateView):
    template_name = "app/demo.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        return context


class ReindexPageView(TemplateView):
    template_name = ""

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        return context

    def post(self, request, **kwargs):
        # Reset the progress trackers, rebuild the search index, then send
        # the user back to the admin home page.
        global c_progress
        c_progress = 0
        global c_filename
        c_filename = ""
        reindex_data()
        messages.add_message(request, messages.INFO, 'Search engine refresh complete')
        print("Search engine reindexed")
        return HttpResponsePermanentRedirect(reverse('admin:index'))


class RefreshPageView(TemplateView):
    template_name = ""

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        return context

    def post(self, request):
        global c_progress
        c_progress = 0

        def generate():
            # Stream progress as the data files are re-imported, one format at a time
            global c_progress
            global c_filename
            stats = []
            total = 0
            for fmt in DATAFORMATS:
                c_filename = fmt['filename']
                filename = get_datafile(fmt)
                c = 1
                c_counter = 0
                rd = refresh_data(filename, fmt)
                while c is not None:
                    try:
                        c, p = next(rd)
                    except Exception as e:
                        yield 'error: %s' % str(e)
                        # traceback.print_exc()
                        return
                    if isinstance(c, (int, float)):
                        c_counter = c
                        if isinstance(p, (int, float)):
                            c_progress = p
                        yield str(c) + "\n\n"
                    elif isinstance(p, str) and isinstance(c, str):
                        # Error condition
                        yield p + ": " + c + "\n\n"
                        return

                stats.append({'format': fmt['dataformat'], 'count': c_counter})
                print("Refresh: %d counted at %s" % (c_counter, fmt['dataformat']))
                total = total + c_counter

            yield "done: %d objects updated" % total
            print("Refresh: %d objects updated" % total)

            # Reset the trackers once the refresh has finished
            c_progress = 0
            c_filename = ""

        return HttpResponse(generate(), content_type='text/html')

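# Assumption, inferred from how RefreshPageView consumes the generator above:
# refresh_data() yields (count, progress) pairs while importing, a pair of
# strings to report an error, and finally a None count to signal completion.
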

class ConfigurationPageView(TemplateView):
    template_name = "app/admin/config.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        return context

    def post(self, request, **kwargs):
        # Same behaviour as ReindexPageView.post: reset progress, rebuild the index
        global c_progress
        c_progress = 0
        global c_filename
        c_filename = ""
        reindex_data()
        messages.add_message(request, messages.INFO, 'Search engine refresh complete')
        print("Search engine reindexed")
        return HttpResponsePermanentRedirect(reverse('admin:index'))


class ConfigurationHomePageView(TemplateView):
    template_name = "app/admin/index.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        return context


def get_paginated(query_set, page, per_page):
    # Paginate the query set and collect the filter facets (country, range,
    # field, taxon) present on the current page of results.
    paginator = Paginator(query_set, per_page).page(page)
    filters = {
        'country': [],
        'range': [],
        'field': [],
        'taxon': [],
    }
    for p in paginator.object_list:
        filters['country'].append(p.country.short_name)
        for r in p.peoplerange_set.all():
            filters['range'].append(r.range.range_name)
        for r in p.fieldspeople_set.all():
            filters['field'].append(r.field.name)
        for r in p.taxapeople_set.all():
            filters['taxon'].append(r.taxon.name)
    # Deduplicate and sort the collected facet values
    filters = {
        'country': sorted(set(filters['country'])),
        'range': sorted(set(filters['range'])),
        'field': sorted(set(filters['field'])),
        'taxon': sorted(set(filters['taxon'])),
    }

    if len(query_set.all()) > len(paginator.object_list) and paginator.paginator.count == 1:
        # Pagination edge case: fall back to the full query set and correct
        # the reported total accordingly
        paginator.object_list = query_set.all()
        paginator.paginator.count = len(paginator.object_list)
    return_data = {
        'items': [p.dict() for p in paginator.object_list],
        'filters': filters,
        'page': page, 'pages': paginator.paginator.num_pages, 'total': paginator.paginator.count,
        'has_next': paginator.has_next(), 'has_prev': paginator.has_previous()
    }
    return return_data

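# For illustration only, the payload returned by get_paginated() is shaped like:
#   {'items': [...], 'filters': {'country': [...], 'range': [...], 'field': [...], 'taxon': [...]},
#    'page': 1, 'pages': 5, 'total': 42, 'has_next': True, 'has_prev': False}
# (values are made up; the keys mirror the dict built above)
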

class SearchView(View):

    def get(self, request):
        page = int(self.request.GET.get('page', 1))
        per_page = int(self.request.GET.get('per_page', 10))
        q = self.request.GET.get('q', '').strip()
        if not q or len(q) < 3:
            # Queries shorter than three characters return the full list
            query_set = Person.objects.all()
        else:
            # Match any whitespace-separated term against the indexed search field
            query = reduce(operator.or_, (Q(field_indexer__icontains=item) for item in q.split(" ")))
            query_set = Person.objects.filter(query)

        # Optional facet filters
        q_country = self.request.GET.get('country', '')
        q_range = self.request.GET.get('range', '')
        q_field = self.request.GET.get('field', '')
        q_taxon = self.request.GET.get('taxon', '')
        if len(q_country) > 2:
            query_set = query_set.filter(country__short_name__icontains=q_country.strip().lower())
        if len(q_range) > 2:
            ranges_people = PeopleRange.objects.filter(range__name__icontains=q_range.strip().lower())
            r_people_ids = [rp.person_id for rp in ranges_people]
            query_set = query_set.filter(id__in=r_people_ids)
        if len(q_field) > 2:
            fields_people = FieldsPeople.objects.filter(field__name__icontains=q_field.strip().lower())
            f_people_ids = [fp.person_id for fp in fields_people]
            query_set = query_set.filter(id__in=f_people_ids)
        if len(q_taxon) > 2:
            taxa_people = TaxaPeople.objects.filter(taxon__name__icontains=q_taxon.strip().lower())
            t_people_ids = [tp.person_id for tp in taxa_people]
            query_set = query_set.filter(id__in=t_people_ids)
        query_set = query_set.order_by('last_name')
        return JsonResponse(get_paginated(query_set, page, per_page))

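# Illustrative request only (the URL prefix depends on urls.py, which is not shown here):
#   GET /search?q=alpine+flora&country=Switzerland&range=Alps&page=2&per_page=20
# Filter values shorter than three characters are ignored, as is a short free-text query.
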

class PeopleDetailView(View):
    def get(self, request, people_id):
        person = get_object_or_404(Person, id=people_id)
        return_data = {
            'data': person.dict(),
            'resources': [r.resource.dict() for r in person.resourcespeople_set.all()],
            'ranges': [r.range.dict() for r in person.rangespeople_set.all()],
            'fields': [r.field.name for r in person.fieldspeople_set.all()],
            'methods': [r.method.name for r in person.methodspeople_set.all()],
            'scales': [r.scale.name for r in person.scalespeople_set.all()],
            'taxa': [r.taxon.name for r in person.taxapeople_set.all()],
        }
        return JsonResponse(return_data)


class PeopleListView(View):
    def get(self, request):
        page = int(self.request.GET.get('page', 1))
        per_page = int(self.request.GET.get('per_page', 10))
        query_set = Person.objects.all().order_by('last_name')
        return JsonResponse(get_paginated(query_set, page, per_page))


class ResourceListView(View):
    def get(self, request):
        page = int(self.request.GET.get('page', 1))
        per_page = int(self.request.GET.get('per_page', 10))
        query_set = Resource.objects.all().order_by('title')
        return JsonResponse(get_paginated(query_set, page, per_page))

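# Note: get_paginated() reads Person-specific relations (country, peoplerange_set,
# fieldspeople_set, taxapeople_set), so reusing it in ResourceListView above
# assumes Resource objects expose compatible attributes.
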

# Maximum number of objects returned by the filter lookup views below
# when no search term is given
MAX_FILTER_RESULTS = 50


class RangesListView(View):
    def get(self, request):
        q = self.request.GET.get('q', '').strip()
        if not q or len(q) < 3:
            query_set = MountainRange.objects.all().order_by('name')[:MAX_FILTER_RESULTS]
        else:
            query_set = MountainRange.objects.filter(name__icontains=q.strip().lower())
        return JsonResponse(list(query_set.values()), safe=False)


class FieldsListView(View):
    def get(self, request):
        q = self.request.GET.get('q', '').strip()
        if not q or len(q) < 3:
            query_set = Field.objects.all().order_by('name')[:MAX_FILTER_RESULTS]
        else:
            query_set = Field.objects.filter(name__icontains=q.strip().lower())
        return JsonResponse(list(query_set.values()), safe=False)


class TaxaListView(View):
    def get(self, request):
        q = self.request.GET.get('q', '').strip()
        if not q or len(q) < 3:
            query_set = Taxon.objects.all().order_by('name')[:MAX_FILTER_RESULTS]
        else:
            query_set = Taxon.objects.filter(name__icontains=q.strip().lower())
        return JsonResponse(list(query_set.values()), safe=False)


class UploadView(TemplateView):
    template_name = "app/admin/config.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        return context

    def get(self, request):
        messages.add_message(request, messages.ERROR, 'Please select a valid file')
        return HttpResponsePermanentRedirect(reverse('app:config'))

    def post(self, request, **kwargs):
        if 'datafile' in request.FILES:
            # Save the upload to the default storage under a sanitised name
            fs = request.FILES['datafile']
            fs_name = secure_filename(fs.name)
            fs_path = default_storage.save(fs_name, ContentFile(fs.read()))
            print('Uploading: %s' % fs_path)

            # Validation
            fmt = None
            if fs_name.endswith('.csv'):
                with open(fs_path, 'rt', encoding='utf-8', errors='ignore') as csvfile:
                    datareader = csv.DictReader(csvfile)
                    datalist = list(datareader)
                    fmt = detect_dataformat(datalist[0])

            elif fs_name.endswith('.geojson'):
                with open(fs_path, 'rt', encoding='utf-8', errors='ignore') as jsonfile:
                    jsondata = json.load(jsonfile)
                    fmt = detect_dataformat(jsondata['features'][0]['properties'])

            # Loading
            if fmt is not None:
                fs_target = get_datafile(fmt)
                move(fs_path, fs_target)
                messages.add_message(request, message="Uploaded new data file %s" % fmt['filename'],
                                     level=messages.SUCCESS)
            else:
                messages.add_message(request, message="Could not validate data format!", level=messages.ERROR)
        else:
            messages.add_message(request, message="Please select a valid file", level=messages.ERROR)
        return HttpResponsePermanentRedirect(reverse('config'))

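# Upload flow recap: UploadView.post() expects a single 'datafile' form field and
# only recognises .csv and .geojson uploads, which are checked with
# detect_dataformat() before being moved over the matching data file.
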

def get_progress(request):
    # Server-sent events endpoint reporting the current refresh progress
    global c_progress
    global c_filename

    def generate():
        while True:
            p = str(100 * c_progress)
            print("progress = %s" % p)
            yield 'data: { "p":' + p + ',"f":"' + c_filename + '"}\n\n'
            time.sleep(1.0)

    if c_filename == "":
        # No refresh is currently running
        return HttpResponse("{}", content_type='text/event-stream')
    response = HttpResponse(generate(), content_type='text/event-stream')
    # Disable proxy buffering so events are delivered as they are produced
    response['X-Accel-Buffering'] = 'no'
    return response

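# Each event emitted by get_progress() looks roughly like (values illustrative):
#   data: { "p":42.0,"f":"people"}
# i.e. 100 * c_progress plus the name of the file currently being refreshed.
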

def send_from_file(request, filename):
    # Serve a raw data file from either the 'data' or 'geodata' folder,
    # depending on the request path prefix
    request_for = ''
    if request.path.startswith('/geodata'):
        request_for = 'geodata'
    elif request.path.startswith('/data'):
        request_for = 'data'
    file_path = ospath.join(DATA_PATH, request_for, filename)
    file_to_send = open(file_path, 'rb')
    response = FileResponse(file_to_send)
    return response
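# Note: send_from_file() opens the file directly, so a missing file raises
# FileNotFoundError rather than returning a 404.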