crawlerApp/dockerhubCrawler/models.py

from datetime import timedelta

from django.db import models
from django.utils import timezone
# Create your models here.
def default_last_seen():
    # Callable default so the "very old" fallback timestamp is evaluated per
    # row instead of once when the module is imported.
    return timezone.now() - timedelta(days=1000)


class DockerhubCrawler(models.Model):
    url = models.CharField(max_length=300)
    api_url = models.CharField(max_length=1000, null=True, blank=True)
    last_pushed = models.DateTimeField(null=True, blank=True, default=default_last_seen)
    last_updated = models.DateTimeField(null=True, blank=True, default=default_last_seen)

    def __str__(self):
        return self.url

    def save(self, *args, **kwargs):
        # Build the Docker Hub v2 tags endpoint from the last two path
        # segments of the repository URL (namespace and repository name).
        namespace, repo = self.url.rstrip("/").split("/")[-2:]
        self.api_url = (
            "https://hub.docker.com/v2/repositories/"
            f"{namespace}/{repo}/tags/?page=1&page_size=2"
        )
        super().save(*args, **kwargs)

# Research, via Chrome dev tools ;)
# https://hub.docker.com/r/vectorim/element-web/tags
# https://hub.docker.com/v2/repositories/vectorim/element-web/tags/?page=1&page_size=800
# https://hub.docker.com/v2/repositories/library/nextcloud/
# https://hub.docker.com/_/nextcloud?tab=tags
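
# Illustrative sketch (not part of the original file): one way a crawler could
# consume api_url. It assumes the third-party "requests" package and that each
# entry in the v2 tags response carries "last_updated" and "tag_last_pushed"
# fields, as seen in the research responses linked above; field names and
# result ordering are not guaranteed by this file.
def refresh_from_hub(entry: "DockerhubCrawler") -> None:
    import requests  # assumed dependency; imported locally to keep model import light
    from django.utils.dateparse import parse_datetime

    data = requests.get(entry.api_url, timeout=10).json()
    # api_url requests page_size=2, so "results" holds only the first couple
    # of tags returned by Docker Hub; take the first one as the reference tag.
    latest = data["results"][0]
    entry.last_updated = parse_datetime(latest["last_updated"])
    entry.last_pushed = parse_datetime(latest["tag_last_pushed"])
    entry.save()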