Move django-based uncloud to top-level

fnux 2020-05-07 12:12:35 +02:00
commit 95d43f002f
265 changed files with 0 additions and 0 deletions


@@ -0,0 +1,74 @@
import json
import uncloud.secrets as secrets
from xmlrpc.client import ServerProxy as RPCClient
from django.core.management.base import BaseCommand
from xmltodict import parse
from enum import IntEnum
from opennebula.models import VM as VMModel
from uncloud_vm.models import VMHost
from django_auth_ldap.backend import LDAPBackend


class HostStates(IntEnum):
    """
    The following flags are copied from
    https://docs.opennebula.org/5.8/integration/system_interfaces/api.html#schemas-for-host
    """
    INIT = 0                  # Initial state for enabled hosts
    MONITORING_MONITORED = 1  # Monitoring the host (from monitored)
    MONITORED = 2             # The host has been successfully monitored
    ERROR = 3                 # An error occurred while monitoring the host
    DISABLED = 4              # The host is disabled
    MONITORING_ERROR = 5      # Monitoring the host (from error)
    MONITORING_INIT = 6       # Monitoring the host (from init)
    MONITORING_DISABLED = 7   # Monitoring the host (from disabled)
    OFFLINE = 8               # The host is totally offline


class Command(BaseCommand):
    help = 'Synchronize Host information from OpenNebula'

    def add_arguments(self, parser):
        pass

    def handle(self, *args, **options):
        with RPCClient(secrets.OPENNEBULA_URL) as rpc_client:
            success, response, *_ = rpc_client.one.hostpool.info(secrets.OPENNEBULA_USER_PASS)
            if success:
                # xmltodict returns OrderedDicts; the json round trip turns them
                # into plain dicts and lists.
                response = json.loads(json.dumps(parse(response)))
                host_pool = response.get('HOST_POOL', {}).get('HOST', {})
                for host in host_pool:
                    host_share = host.get('HOST_SHARE', {})
                    host_name = host.get('NAME')

                    state = int(host.get('STATE', HostStates.OFFLINE.value))
                    if state == HostStates.MONITORED:
                        status = 'active'
                    elif state == HostStates.DISABLED:
                        status = 'disabled'
                    else:
                        status = 'unusable'

                    usable_cores = host_share.get('TOTAL_CPU')
                    usable_ram_in_kb = int(host_share.get('TOTAL_MEM', 0))
                    usable_ram_in_gb = int(usable_ram_in_kb / 2 ** 20)

                    # vms cannot be created like this -- Nico, 2020-03-17
                    # vms = host.get('VMS', {}) or {}
                    # vms = vms.get('ID', []) or []
                    # vms = ','.join(vms)

                    VMHost.objects.update_or_create(
                        hostname=host_name,
                        defaults={
                            'usable_cores': usable_cores,
                            'usable_ram_in_gb': usable_ram_in_gb,
                            'status': status
                        }
                    )
            else:
                print(response)
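
A minimal sketch of the structure the loop above iterates over, plus a guard that may be needed because xmltodict parses a single repeated element into a dict rather than a list (field names follow the code above; values are purely illustrative):

    # Illustrative shape of the parsed response (assumption based on the
    # documented HOST_POOL schema; values are made up):
    response = {'HOST_POOL': {'HOST': [
        {'NAME': 'server1', 'STATE': '2',
         'HOST_SHARE': {'TOTAL_CPU': '8', 'TOTAL_MEM': '16777216'}},
    ]}}

    host_pool = response.get('HOST_POOL', {}).get('HOST', {})
    if isinstance(host_pool, dict):
        # Only one host in the pool: xmltodict yields a dict, not a list,
        # so wrap it before iterating.
        host_pool = [host_pool]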


@@ -0,0 +1,47 @@
import json
import uncloud.secrets as secrets
from xmlrpc.client import ServerProxy as RPCClient
from django_auth_ldap.backend import LDAPBackend
from django.core.management.base import BaseCommand
from xmltodict import parse
from opennebula.models import VM as VMModel


class Command(BaseCommand):
    help = 'Synchronize VM information from OpenNebula'

    def add_arguments(self, parser):
        pass

    def handle(self, *args, **options):
        with RPCClient(secrets.OPENNEBULA_URL) as rpc_client:
            # Arguments after the session string select which VMs to fetch
            # (filter flag, id range, state filter); see the OpenNebula
            # XML-RPC documentation for one.vmpool.infoextended.
            success, response, *_ = rpc_client.one.vmpool.infoextended(
                secrets.OPENNEBULA_USER_PASS, -2, -1, -1, -1
            )
            if success:
                vms = json.loads(json.dumps(parse(response)))['VM_POOL']['VM']
                unknown_user = set()
                backend = LDAPBackend()

                for vm in vms:
                    vm_id = vm['ID']
                    vm_owner = vm['UNAME']

                    user = backend.populate_user(username=vm_owner)
                    if not user:
                        unknown_user.add(vm_owner)
                    else:
                        VMModel.objects.update_or_create(
                            vmid=vm_id,
                            defaults={'data': vm, 'owner': user}
                        )

                print('Users not found in LDAP:', unknown_user)
            else:
                print(response)
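
The loop above relies on only two keys of each parsed VM entry; a minimal sketch of such an entry, with field names taken from the code and values purely illustrative:

    example_vm = {
        'ID': '1234',      # OpenNebula VM id, stored as VMModel.vmid
        'UNAME': 'alice',  # OpenNebula owner name, resolved via the LDAP backend
        # ... the full dict is stored verbatim in VMModel.data
    }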


@@ -0,0 +1,193 @@
import sys
from datetime import datetime
from django.core.management.base import BaseCommand
from django.utils import timezone
from django.contrib.auth import get_user_model
from opennebula.models import VM as VMModel
from uncloud_vm.models import VMHost, VMProduct, VMNetworkCard, VMDiskImageProduct, VMDiskProduct
from uncloud_pay.models import Order
import logging
log = logging.getLogger(__name__)


def convert_mac_to_int(mac_address: str):
    # Remove octet connecting characters
    mac_address = mac_address.replace(':', '')
    mac_address = mac_address.replace('.', '')
    mac_address = mac_address.replace('-', '')
    mac_address = mac_address.replace(' ', '')

    # Parse the resulting number as hexadecimal
    mac_address = int(mac_address, base=16)

    return mac_address
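

# Worked example for the helper above (illustrative MAC address, not taken
# from any real data):
#   convert_mac_to_int('02:00:c0:a8:01:01')
#   == int('0200c0a80101', base=16) == 2202255491329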


def get_vm_price(core, ram, ssd_size, hdd_size, n_of_ipv4, n_of_ipv6):
    total = 3 * core + 4 * ram + (3.5 * ssd_size/10.) + (1.5 * hdd_size/100.) + 8 * n_of_ipv4 + 0 * n_of_ipv6

    # TODO: find the reason for the following magical subtraction.
    total -= 8

    return total
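

# Worked pricing example from the formula above: a VM with 2 cores, 4 GB RAM,
# 50 GB of SSD, no HDD, one IPv4 and one IPv6 address:
#   3*2 + 4*4 + 3.5*50/10 + 1.5*0/100 + 8*1 + 0*1 - 8 = 39.5 (per month)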


def create_nics(one_vm, vm_product):
    for nic in one_vm.nics:
        mac_address = convert_mac_to_int(nic.get('MAC'))
        ip_address = nic.get('IP', None) or nic.get('IP6_GLOBAL', None)

        VMNetworkCard.objects.update_or_create(
            mac_address=mac_address, vm=vm_product, defaults={'ip_address': ip_address}
        )


def sync_disk_and_image(one_vm, vm_product, disk_owner):
    """
    a) Check all OpenNebula disks and add any that are missing from the uncloud VM.
    b) Check all uncloud disks and remove those that are not in the OpenNebula VM.
    """
    vmdisknum = 0
    one_disks_extra_data = []

    for disk in one_vm.disks:
        vmowner = one_vm.owner
        name = disk.get('image')
        vmdisknum += 1

        log.info("Checking disk {} for VM {}".format(name, one_vm))

        is_os_image, is_public, status = True, False, 'active'

        image_size_in_gb = disk.get('image_size_in_gb')
        disk_size_in_gb = disk.get('size_in_gb')
        storage_class = disk.get('storage_class')
        image_source = disk.get('source')
        image_source_type = disk.get('source_type')

        image, _ = VMDiskImageProduct.objects.update_or_create(
            name=name,
            defaults={
                'owner': disk_owner,
                'is_os_image': is_os_image,
                'is_public': is_public,
                'size_in_gb': image_size_in_gb,
                'storage_class': storage_class,
                'image_source': image_source,
                'image_source_type': image_source_type,
                'status': status
            }
        )

        # identify vmdisk from opennebula - primary mapping key
        extra_data = {
            'opennebula_vm': one_vm.vmid,
            'opennebula_size_in_gb': disk_size_in_gb,
            'opennebula_source': disk.get('opennebula_source'),
            'opennebula_disk_num': vmdisknum
        }

        # Save for comparing later
        one_disks_extra_data.append(extra_data)

        try:
            vm_disk = VMDiskProduct.objects.get(extra_data=extra_data)
        except VMDiskProduct.DoesNotExist:
            vm_disk = VMDiskProduct.objects.create(
                owner=vmowner,
                vm=vm_product,
                image=image,
                size_in_gb=disk_size_in_gb,
                extra_data=extra_data
            )

    # Now remove all disks that are not in the extra_data list built above
    for disk in VMDiskProduct.objects.filter(vm=vm_product):
        extra_data = disk.extra_data

        if extra_data not in one_disks_extra_data:
            log.info("Removing disk {} from VM {}".format(disk, vm_product))
            disk.delete()

    disks = [disk.extra_data for disk in VMDiskProduct.objects.filter(vm=vm_product)]
    log.info("VM {} has disks: {}".format(vm_product, disks))


class Command(BaseCommand):
    help = 'Migrate OpenNebula VMs to regular (uncloud) VMs'

    def add_arguments(self, parser):
        parser.add_argument('--disk-owner', required=True, help="The user who owns the OpenNebula disks")

    def handle(self, *args, **options):
        log.debug("{} {}".format(args, options))

        disk_owner = get_user_model().objects.get(username=options['disk_owner'])

        for one_vm in VMModel.objects.all():
            if not one_vm.last_host:
                log.warning("No VMHost for VM {} - VM might be on hold - skipping".format(one_vm.vmid))
                continue

            try:
                vmhost = VMHost.objects.get(hostname=one_vm.last_host)
            except VMHost.DoesNotExist:
                log.error("VMHost {} does not exist, aborting".format(one_vm.last_host))
                raise

            cores = one_vm.cores
            ram_in_gb = one_vm.ram_in_gb
            owner = one_vm.owner
            status = 'active'

            ssd_size = sum([disk['size_in_gb'] for disk in one_vm.disks if disk['pool_name'] in ['ssd', 'one']])
            hdd_size = sum([disk['size_in_gb'] for disk in one_vm.disks if disk['pool_name'] in ['hdd']])

            # List of IPv4 addresses and global IPv6 addresses
            ipv4, ipv6 = one_vm.ips

            # TODO: insert the actual/real creation_date, starting_date and
            # ending_date instead of the placeholder values used here.
            creation_date = starting_date = datetime.now(tz=timezone.utc)

            # Price calculation based on datacenterlight.ch
            one_time_price = 0
            recurring_period = 'per_month'
            recurring_price = get_vm_price(cores, ram_in_gb,
                                           ssd_size, hdd_size,
                                           len(ipv4), len(ipv6))

            try:
                vm_product = VMProduct.objects.get(extra_data__opennebula_id=one_vm.vmid)
            except VMProduct.DoesNotExist:
                order = Order.objects.create(
                    owner=owner,
                    creation_date=creation_date,
                    starting_date=starting_date
                )
                vm_product = VMProduct(
                    extra_data={'opennebula_id': one_vm.vmid},
                    name=one_vm.uncloud_name,
                    order=order
                )

            # We don't use update_or_create, as filtering by JSON AND setting
            # JSON at the same time does not work.
            vm_product.vmhost = vmhost
            vm_product.owner = owner
            vm_product.cores = cores
            vm_product.ram_in_gb = ram_in_gb
            vm_product.status = status
            vm_product.save()

            # Create VMNetworkCards
            create_nics(one_vm, vm_product)

            # Create VMDiskImageProduct and VMDiskProduct
            sync_disk_and_image(one_vm, vm_product, disk_owner=disk_owner)
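
A hedged usage sketch: Django resolves management command names from their module file names, which this view does not show, so the names below are placeholders only; --disk-owner is the one required argument of the migration command.

    python manage.py <sync-hosts-command>
    python manage.py <sync-vms-command>
    python manage.py <migrate-command> --disk-owner=<username>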