diff --git a/.gitignore b/.gitignore old mode 100644 new mode 100755 index 5929a22..a94f03d --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,6 @@ __pycache__/ venv/ settings.json +ucloud_common +etcd3_wrapper +log.txt diff --git a/.gitmodules b/.gitmodules old mode 100644 new mode 100755 index 316b9ea..e69de29 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +0,0 @@ -[submodule "etcd3_wrapper"] - path = etcd3_wrapper - url = https://code.ungleich.ch/ahmedbilal/etcd3_wrapper diff --git a/Pipfile b/Pipfile old mode 100644 new mode 100755 index daeed8a..88496f8 --- a/Pipfile +++ b/Pipfile @@ -4,6 +4,7 @@ url = "https://pypi.org/simple" verify_ssl = true [dev-packages] +prospector = "*" [packages] pyotp = "*" @@ -11,8 +12,12 @@ python-decouple = "*" requests = "*" flask = "*" flask-restful = "*" +grpcio = "*" etcd3 = "*" gunicorn = "*" +bitmath = "*" +pylint = "*" +transitions = "*" [requires] python_version = "3.7" diff --git a/Pipfile.lock b/Pipfile.lock deleted file mode 100644 index 86ac1a7..0000000 --- a/Pipfile.lock +++ /dev/null @@ -1,252 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "87f5447c7fa8d96dacc7c638075cc31842ef676f6d1c35214e1960572d44e929" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "3.7" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "aniso8601": { - "hashes": [ - "sha256:513d2b6637b7853806ae79ffaca6f3e8754bdd547048f5ccc1420aec4b714f1e", - "sha256:d10a4bf949f619f719b227ef5386e31f49a2b6d453004b21f02661ccc8670c7b" - ], - "version": "==7.0.0" - }, - "certifi": { - "hashes": [ - "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939", - "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695" - ], - "version": "==2019.6.16" - }, - "chardet": { - "hashes": [ - "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", - "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" - ], - "version": "==3.0.4" - }, - "click": { - "hashes": [ - "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", - "sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7" - ], - "version": "==7.0" - }, - "etcd3": { - "hashes": [ - "sha256:25a524b9f032c6631ff0097532907dea81243eaa63c3744510fd1598cc4e0e87" - ], - "index": "pypi", - "version": "==0.10.0" - }, - "flask": { - "hashes": [ - "sha256:ad7c6d841e64296b962296c2c2dabc6543752985727af86a975072dea984b6f3", - "sha256:e7d32475d1de5facaa55e3958bc4ec66d3762076b074296aa50ef8fdc5b9df61" - ], - "index": "pypi", - "version": "==1.0.3" - }, - "flask-restful": { - "hashes": [ - "sha256:ecd620c5cc29f663627f99e04f17d1f16d095c83dc1d618426e2ad68b03092f8", - "sha256:f8240ec12349afe8df1db168ea7c336c4e5b0271a36982bff7394f93275f2ca9" - ], - "index": "pypi", - "version": "==0.3.7" - }, - "grpcio": { - "hashes": [ - "sha256:0232add03144dd3cf9b660e2718244cb8e175370dca4d3855cb4e489a7811b53", - "sha256:0f20e6dcb1b8662cdca033bb97c0a8116a5343e3ebc7f71c5fe7f89039978350", - "sha256:10b07a623d33d4966f45c85d410bc6a79c5ac6341f06c3beda6c22be12cbfe07", - "sha256:10c0476d5a52d21f402fc073745dc43b87cc8e080a1f49bbff4e1059019310fb", - "sha256:289dae0b35c59d191c524e976dd0a6f8c995d2062e72621eb866ad0f4472a635", - "sha256:2be726f16142d358a0df1e81d583d6820ee561a7856a79cca2fbe49989308be7", - "sha256:4338d2a81f5b4ca022e085040b3cfce19419a5ce44aa7e6810ac1df05365bed7", - "sha256:4c535b46f20e66bee3097583231977e721acdfcb1671d1490c99b7be8902ce18", - 
"sha256:557154aef70a0e979700cc9528bc8b606b668084a29a0d57dbc4b06b078a2f1c", - "sha256:5bfdd7e6647498f979dc46583723c852d97b25afe995d55aa1c76a5f9816bc1f", - "sha256:87d8943ae7aa6ca5bbad732867d7f17d2550e4966a0c15b52088e8b579422e47", - "sha256:89d8719d8de4d137678f7caa979e1b0a6fd4026f8096ceef8c2d164bbabefaf2", - "sha256:9c3f4af989ce860710ac1864dc2e867dd87e6cee51a2368df1b253596868e52f", - "sha256:9da52c3c728883aee429bb7c315049f50b2139f680cd86bb1165418e4f93a982", - "sha256:9e9736659987beab42d18525ed10d21f80a1ba8389eac03425fbfd5684e6bbf0", - "sha256:9ebcbb1a054cab362d29d3be571d43d6b9b23302d9fc4b43e5327000da1680a9", - "sha256:a93e08636623e24c939851e2e0c0140b14f524b2980c9cdc4ea52b70a871c7e0", - "sha256:ac322d86d1a079e0a118d544443ee16f320af0062c191b4754c0c6ec2fc79310", - "sha256:b1fb101459868f52df6b61e7bb13375e50badf17a160e39fe1d51ae19e53f461", - "sha256:b39aac96cceac624a23d540473835086a3ffa77c91030189988c073488434493", - "sha256:b65507bc273c6dbf539175a786a344cc0ac78d50e5584f72c6599733f8a3301f", - "sha256:be5bb6e47417e537c884a2e2ff2e1a8b2c064a998fcfdfcc67528d4e63e7ebaf", - "sha256:c92de6a28a909c4f460dc1bbbcb50d676cf0b1f40224b222761f73fdd851b522", - "sha256:c9f5962eb7fa7607b20eb0e4f59ed35829bd600fc0eacb626a6db83229a3e445", - "sha256:d00bdf9c546ed6e649f785c55b05288e8b2dbb6bf2eb74b6c579fa0d591d35bd", - "sha256:da804b1dd8293bd9d61b1e6ea989c887ba042a808a4fbdd80001cfa059aafed2", - "sha256:ead6c5aa3e807345913649c3be395aaca2bbb2d225f18b8f31f37eab225508f6", - "sha256:eb4d81550ce6f826af4ec6e8d98be347fe96291d718bf115c3f254621ae8d98d", - "sha256:ef6a18ec8fd32ec81748fe720544ea2fb2d2dc50fd6d06739d5e2eb8f0626a1c", - "sha256:fad42835656e0b6d3b7ffc900598e776722e30f43b7234a48f2576ca30f31a47", - "sha256:fb98dbfee0d963b49ae5754554028cf62e6bd695f22de16d242ba9d2f0b7339b", - "sha256:fb9cd9bb8d26dc17c2dd715a46bca3a879ec8283879b164e85863110dc6e3b2a" - ], - "version": "==1.21.1" - }, - "gunicorn": { - "hashes": [ - "sha256:aa8e0b40b4157b36a5df5e599f45c9c76d6af43845ba3b3b0efe2c70473c2471", - "sha256:fa2662097c66f920f53f70621c6c58ca4a3c4d3434205e608e121b5b3b71f4f3" - ], - "index": "pypi", - "version": "==19.9.0" - }, - "idna": { - "hashes": [ - "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", - "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" - ], - "version": "==2.8" - }, - "itsdangerous": { - "hashes": [ - "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", - "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749" - ], - "version": "==1.1.0" - }, - "jinja2": { - "hashes": [ - "sha256:065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013", - "sha256:14dd6caf1527abb21f08f86c784eac40853ba93edb79552aa1e4b8aef1b61c7b" - ], - "version": "==2.10.1" - }, - "markupsafe": { - "hashes": [ - "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", - "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", - "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", - "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", - "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", - "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", - "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", - "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", - "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", - 
"sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", - "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", - "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", - "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", - "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", - "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", - "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", - "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", - "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", - "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", - "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", - "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", - "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", - "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", - "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", - "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", - "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", - "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", - "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7" - ], - "version": "==1.1.1" - }, - "protobuf": { - "hashes": [ - "sha256:03f43eac9d5b651f976e91cf46a25b75e5779d98f0f4114b0abfed83376d75f8", - "sha256:0c94b21e6de01362f91a86b372555d22a60b59708599ca9d5032ae9fdf8e3538", - "sha256:2d2a9f30f61f4063fadd7fb68a2510a6939b43c0d6ceeec5c4704f22225da28e", - "sha256:34a0b05fca061e4abb77dd180209f68d8637115ff319f51e28a6a9382d69853a", - "sha256:358710fd0db25372edcf1150fa691f48376a134a6c69ce29f38f185eea7699e6", - "sha256:41e47198b94c27ba05a08b4a95160656105745c462af574e4bcb0807164065c0", - "sha256:8c61cc8a76e9d381c665aecc5105fa0f1878cf7db8b5cd17202603bcb386d0fc", - "sha256:a6eebc4db759e58fdac02efcd3028b811effac881d8a5bad1996e4e8ee6acb47", - "sha256:a9c12f7c98093da0a46ba76ec40ace725daa1ac4038c41e4b1466afb5c45bb01", - "sha256:cb95068492ba0859b8c9e61fa8ba206a83c64e5d0916fb4543700b2e2b214115", - "sha256:cd98476ce7bb4dcd6a7b101f5eecdc073dafea19f311e36eb8fba1a349346277", - "sha256:ce64cfbea18c535176bdaa10ba740c0fc4c6d998a3f511c17bedb0ae4b3b167c", - "sha256:dcbb59eac73fd454e8f2c5fba9e3d3320fd4707ed6a9d3ea3717924a6f0903ea", - "sha256:dd67f34458ae716029e2a71ede998e9092493b62a519236ca52e3c5202096c87", - "sha256:e3c96056eb5b7284a20e256cb0bf783c8f36ad82a4ae5434a7b7cd02384144a7", - "sha256:f612d584d7a27e2f39e7b17878430a959c1bc09a74ba09db096b468558e5e126", - "sha256:f6de8a7d6122297b81566e5bd4df37fd5d62bec14f8f90ebff8ede1c9726cd0a", - "sha256:fa529d9261682b24c2aaa683667253175c9acebe0a31105394b221090da75832" - ], - "version": "==3.8.0" - }, - "pyotp": { - "hashes": [ - "sha256:1e3dc3d16919c4efac528d1dbecc17de1a97c4ecfdacb89d7726ed2c6645adff", - "sha256:be0ffeabddaa5ee53e7204e7740da842d070cf69168247a3d0c08541b84de602" - ], - "index": "pypi", - "version": "==2.2.7" - }, - "python-decouple": { - "hashes": [ - "sha256:1317df14b43efee4337a4aa02914bf004f010cd56d6c4bd894e6474ec8c4fe2d" - ], - "index": "pypi", - "version": "==3.1" - }, - "pytz": { - "hashes": [ - "sha256:303879e36b721603cc54604edcac9d20401bdbe31e1e4fdee5b9f98d5d31dfda", - "sha256:d747dd3d23d77ef44c6a3526e274af6efeb0a6f1afd5a69ba4d5be4098c8e141" - ], - "version": "==2019.1" - }, - 
"requests": { - "hashes": [ - "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4", - "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31" - ], - "index": "pypi", - "version": "==2.22.0" - }, - "six": { - "hashes": [ - "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" - ], - "version": "==1.12.0" - }, - "tenacity": { - "hashes": [ - "sha256:a0c3c5f7ae0c33f5556c775ca059c12d6fd8ab7121613a713e8b7d649908571b", - "sha256:b87c1934daa0b2ccc7db153c37b8bf91d12f165936ade8628e7b962b92dc7705" - ], - "version": "==5.0.4" - }, - "urllib3": { - "hashes": [ - "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1", - "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232" - ], - "version": "==1.25.3" - }, - "werkzeug": { - "hashes": [ - "sha256:865856ebb55c4dcd0630cdd8f3331a1847a819dda7e8c750d3db6f2aa6c0209c", - "sha256:a0b915f0815982fb2a09161cb8f31708052d0951c3ba433ccc5e1aa276507ca6" - ], - "version": "==0.15.4" - } - }, - "develop": {} -} diff --git a/README.md b/README.md old mode 100644 new mode 100755 diff --git a/common_fields.py b/common_fields.py new file mode 100755 index 0000000..9b0950d --- /dev/null +++ b/common_fields.py @@ -0,0 +1,62 @@ +from specs_parser import SpecsParser +from config import etcd_client as client + +specs_parser = SpecsParser(exceptional_devices=["cpu"]) + + +class Field(object): + def __init__(self, _name, _type, _value=None): + self.name = _name + self.value = _value + self.type = _type + self.__errors = [] + + def validation(self): + return True + + def is_valid(self): + if self.value == KeyError: + self.add_error(f"'{self.name}' field is a required field") + else: + if not isinstance(self.value, self.type): + self.add_error(f"Incorrect Type for '{self.name}' field") + else: + self.validation() + + if self.__errors: + return False + return True + + def get_errors(self): + return self.__errors + + def add_error(self, error): + self.__errors.append(error) + + +class VmUUIDField(Field): + def __init__(self, data): + self.uuid = data.get("uuid", KeyError) + + super().__init__("uuid", str, self.uuid) + + self.validation = self.vm_uuid_validation + + def vm_uuid_validation(self): + r = client.get(f"/v1/vm/{self.uuid}") + if not r: + self.add_error(f"VM with uuid {self.uuid} does not exists") + + +class SpecsField(Field): + def __init__(self, data): + self.specs = data.get("specs", KeyError) + + super().__init__("specs", dict, self.specs) + + self.validation = self.specs_validation + + def specs_validation(self): + if not specs_parser.transform_specs(self.specs): + self.add_error("Invalid unit - " + f"Please use following units {specs_parser.get_allowed_units()}") diff --git a/config.py b/config.py new file mode 100644 index 0000000..603501c --- /dev/null +++ b/config.py @@ -0,0 +1,17 @@ +import logging + +from etcd3_wrapper import Etcd3Wrapper +from decouple import config + +logging.basicConfig( + level=logging.DEBUG, + filename="log.txt", + filemode="a", + format="%(asctime)s: %(levelname)s - %(message)s", + datefmt="%d-%b-%y %H:%M:%S", +) + + +WITHOUT_CEPH = config("WITHOUT_CEPH", False, cast=bool) + +etcd_client = Etcd3Wrapper(host=config("ETCD_URL")) diff --git a/create_image_store.py b/create_image_store.py new file mode 100755 index 0000000..04bc01f --- /dev/null +++ b/create_image_store.py @@ -0,0 +1,21 @@ +import json +from uuid import uuid4 +from config import etcd_client as client + + +data 
= { + "is_public": True, + "type": "ceph", + "name": "images", + "description": "first ever public image-store", + "attributes": { + "list": [], + "key": [], + "pool": "images", + } +} + +client.put( + f"/v1/image_store/{uuid4().hex}", + json.dumps(data), +) diff --git a/etcd3_wrapper b/etcd3_wrapper deleted file mode 160000 index cb2a416..0000000 --- a/etcd3_wrapper +++ /dev/null @@ -1 +0,0 @@ -Subproject commit cb2a416a17d6789e613ba3b9957917770f4211e1 diff --git a/helper.py b/helper.py old mode 100644 new mode 100755 index 4b580f1..819ffe6 --- a/helper.py +++ b/helper.py @@ -8,33 +8,21 @@ from pyotp import TOTP def check_otp(name, realm, token): try: data = { - "auth_name": config('AUTH_NAME', ''), - "auth_token": TOTP(config('AUTH_SEED', '')).now(), - "auth_realm": config('AUTH_REALM', ''), + "auth_name": config("AUTH_NAME", ""), + "auth_token": TOTP(config("AUTH_SEED", "")).now(), + "auth_realm": config("AUTH_REALM", ""), "name": name, "realm": realm, - "token": token + "token": token, } except binascii.Error: return 400 response = requests.post( "{OTP_SERVER}{OTP_VERIFY_ENDPOINT}".format( - OTP_SERVER=config('OTP_SERVER', ''), - OTP_VERIFY_ENDPOINT=config('OTP_VERIFY_ENDPOINT', 'verify/') + OTP_SERVER=config("OTP_SERVER", ""), + OTP_VERIFY_ENDPOINT=config("OTP_VERIFY_ENDPOINT", "verify/"), ), - data=data + data=data, ) return response.status_code - - -def add_otp_args(parser): - parser.add_argument("name", required=True) - parser.add_argument("realm", required=True) - parser.add_argument("token", required=True) - return parser - - -def add_vmid_args(parser): - parser.add_argument("vmid", required=True) - return parser diff --git a/main.py b/main.py old mode 100644 new mode 100755 index 8aa1470..7c2a61d --- a/main.py +++ b/main.py @@ -1,151 +1,104 @@ # TODO -# convert etcd3 usage to etcd3_wrapper +# 1. Allow user of realm ungleich-admin to perform any action on +# any user vm. 
-import etcd3 import json +import subprocess +import os -from helper import check_otp, add_otp_args, add_vmid_args -from flask import Flask -from flask_restful import Resource, Api, reqparse -from decouple import config +from flask import Flask, request +from flask_restful import Resource, Api from uuid import uuid4 -from etcd3_wrapper import Etcd3Wrapper +from os.path import join +from config import etcd_client as client +from config import WITHOUT_CEPH, logging + +from ucloud_common.vm import VmPool, VMStatus +from ucloud_common.host import HostPool +from ucloud_common.request import RequestEntry, RequestPool, RequestType +from schemas import (CreateVMSchema, VMStatusSchema, + CreateImageSchema, VmActionSchema, + OTPSchema, CreateHostSchema, + VmMigrationSchema) app = Flask(__name__) api = Api(app) -etcd_client = etcd3.client(host=config("ETCD_HOST"), port=int(config("ETCD_PORT"))) -client = Etcd3Wrapper() - -# CreateVM argparser -createvm_argparser = reqparse.RequestParser() -createvm_argparser.add_argument("specs", type=dict, required=True) -createvm_argparser.add_argument("image_uuid", type=str, required=True) -add_otp_args(createvm_argparser) - -# CreateImage argparser -createimage_argparser = reqparse.RequestParser() -createimage_argparser.add_argument("uuid", type=str, required=True) -createimage_argparser.add_argument("name", type=str, required=True) -createimage_argparser.add_argument("image_store", type=str, required=True) - -# DeleteVM argparser -deletevm_argparser = reqparse.RequestParser() -add_vmid_args(add_otp_args(deletevm_argparser)) - -# VMStatus argparser -vmstatus_argparser = reqparse.RequestParser() -add_vmid_args(vmstatus_argparser) - -# StartVM argparser -startvm_argparser = reqparse.RequestParser() -add_vmid_args(add_otp_args(startvm_argparser)) - -# UserVM argparser -uservm_argparser = reqparse.RequestParser() -add_otp_args(uservm_argparser) - - -def is_image_valid(image_uuid): - images = client.get_prefix("/v1/image/") - return image_uuid in [i.key.split("/")[-1] for i in images] +vm_pool = VmPool(client, "/v1/vm") +host_pool = HostPool(client, "/v1/host") +request_pool = RequestPool(client, "/v1/request") class CreateVM(Resource): - def post(self): - createvm_args = createvm_argparser.parse_args() - name, realm, token, specs, image_uuid = createvm_args.name, createvm_args.realm,\ - createvm_args.token, createvm_args.specs,\ - createvm_args.image_uuid + @staticmethod + def post(): + data = request.json + validator = CreateVMSchema(data) + if validator.is_valid(): + # Create VM Entry under /v1/vm/ + # TODO: !!!Generate Mac Address on creation of VM + vm_uuid = uuid4().hex + vm_key = f"/v1/vm/{vm_uuid}" + vm_entry = { + "owner": data["name"], + "specs": data["specs"], + "hostname": "", + "status": "", + "image_uuid": data["image_uuid"], + "log": [], + "storage_attachment": [] + } + client.put(vm_key, vm_entry, value_in_json=True) - if check_otp(name, realm, token) == 200: - # User is good - if is_image_valid(image_uuid): - vm_key = f"/v1/vm/{uuid4().hex}" - vm_entry = {"owner": name, - "specs": specs, - "hostname": "", - "status": "REQUESTED_NEW", - "image_uuid": image_uuid} + # Create ScheduleVM Request + r = RequestEntry.from_scratch(type=RequestType.ScheduleVM, + uuid=vm_uuid) + request_pool.put(r) - etcd_client.put(vm_key, json.dumps(vm_entry)) - - return {"message": "VM Creation Queued"}, 200 - else: - return {"message": "Image uuid not valid"} + return {"message": "VM Creation Queued"}, 200 else: - return {"message": "Invalid Credentials"}, 400 - - -class 
DeleteVM(Resource): - def post(self): - deletevm_args = deletevm_argparser.parse_args() - name, realm, token, vmid = deletevm_args.name, deletevm_args.realm,\ - deletevm_args.token, deletevm_args.vmid - - if check_otp(name, realm, token) == 200: - # User is good - - vmentry_etcd = etcd_client.get(f"/v1/vm/{vmid}")[0] - if vmentry_etcd: - vmentry_etcd = json.loads(vmentry_etcd) - vmentry_etcd["status"] = "REQUESTED_DELETE" - - etcd_client.put(f"/v1/vm/{vmid}", json.dumps(vmentry_etcd)) - - return {"message": "VM Deletion Queued"}, 200 - else: - return {"message": "Invalid VM ID"} - else: - return {"message": "Invalid Credentials"}, 400 + return validator.get_errors(), 400 class VmStatus(Resource): - def get(self): - args = vmstatus_argparser.parse_args() - r = etcd_client.get(f"/v1/vm/{args.vmid}")[0] - print(r) - if r: - r = dict(json.loads(r.decode("utf-8"))) - return r - return {"Message": "Not Found"} + @staticmethod + def get(): + data = request.json + validator = VMStatusSchema(data) + if validator.is_valid(): + vm = vm_pool.get(f"/v1/vm/{data['uuid']}") + return str(vm) + else: + return validator.get_errors(), 400 class CreateImage(Resource): - def post(self): - image_stores = list(client.get_prefix("/v1/image_store/")) - args = createimage_argparser.parse_args() - image_file_uuid = args.uuid - image_store_name = args.image_store + @staticmethod + def post(): + data = request.json + validator = CreateImageSchema(data) + if validator.is_valid(): + file_entry = client.get(f"/v1/file/{data['uuid']}") + file_entry_value = json.loads(file_entry.value) - file_entry = client.get(f"/v1/files/{image_file_uuid}") - if file_entry is None: - return { - "Message": - f"Image File with uuid '{image_file_uuid}' Not Found"}, 400 + image_entry_json = { + "status": "TO_BE_CREATED", + "owner": file_entry_value["owner"], + "filename": file_entry_value["filename"], + "name": data["name"], + "store_name": data["image_store"], + "visibility": "public", + } + client.put(f"/v1/image/{data['uuid']}", json.dumps(image_entry_json)) - file_entry_value = json.loads(file_entry.value) - - image_store = list(filter(lambda s: json.loads(s.value)["name"] == image_store_name, image_stores)) - if not image_store: - return {"Message": f"Store '{image_store_name}' does not exists"}, 400 - - image_store = image_store[0] - image_entry_json = { - "status": "TO_BE_CREATED", - "owner": file_entry_value["owner"], - "filename": file_entry_value["filename"], - "name": args.name, - "store_name": image_store_name, - "visibility": "public" - } - client.put(f"/v1/image/{image_file_uuid}", json.dumps(image_entry_json)) - - return {"Message": "Image successfully created"} + return {"message": "Image successfully created"} + else: + return validator.get_errors(), 400 class ListPublicImages(Resource): - def get(self): + @staticmethod + def get(): images = client.get_prefix("/v1/image/") r = {} for image in images: @@ -154,91 +107,164 @@ class ListPublicImages(Resource): return r, 200 -class StartVM(Resource): - def post(self): - args = startvm_argparser.parse_args() - name, realm, token, vm_uuid = args.name, args.realm, args.token, args.vmid +class VMAction(Resource): + @staticmethod + def post(): + data = request.json + validator = VmActionSchema(data) - if check_otp(name, realm, token) == 200: - vm = client.get(f"/v1/vm/{vm_uuid}", value_in_json=True) - if vm: - vm.value["status"] = "REQUESTED_START" - client.put(vm.key, json.dumps(vm.value)) - return {"message": f"VM Start Queued"} - else: - return {"message": "No such VM found"} + if 
validator.is_valid(): + vm_entry = vm_pool.get(f"/v1/vm/{data['uuid']}") + action = data["action"] + + if action == "start": + vm_entry.status = VMStatus.requested_start + vm_pool.put(vm_entry) + action = "schedule" + + if action == "delete" and vm_entry.hostname == "": + try: + path_without_protocol = vm_entry.path[vm_entry.path.find(":")+1:] + + if WITHOUT_CEPH: + command_to_delete = ["rm", os.path.join("/var/vm", vm_entry.uuid)] + else: + command_to_delete = ["rbd", "rm", path_without_protocol] + + subprocess.check_output(command_to_delete, stderr=subprocess.PIPE) + except subprocess.CalledProcessError as e: + if "No such file" in e.stderr.decode("utf-8"): + client.client.delete(vm_entry.key) + return {"message": "VM successfully deleted"} + else: + logging.exception(e) + return {"message": "Some error occurred while deleting VM"} + else: + client.client.delete(vm_entry.key) + return {"message": "VM successfully deleted"} + + r = RequestEntry.from_scratch(type=f"{action.title()}VM", + uuid=data['uuid'], + hostname=vm_entry.hostname) + request_pool.put(r) + return {"message": f"VM {action.title()} Queued"}, 200 else: - return {"message": "Invalid Credentials"}, 400 + return validator.get_errors(), 400 -class SuspendVM(Resource): - def post(self): - args = startvm_argparser.parse_args() - name, realm, token, vm_uuid = args.name, args.realm, args.token, args.vmid +class VMMigration(Resource): + @staticmethod + def post(): + data = request.json + validator = VmMigrationSchema(data) - if check_otp(name, realm, token) == 200: - vm = client.get(f"/v1/vm/{vm_uuid}", value_in_json=True) - if vm: - vm.value["status"] = "REQUESTED_SUSPEND" - client.put(vm.key, json.dumps(vm.value)) - return {"message": f"VM Suspension Queued"} - else: - return {"message": "No such VM found"} + if validator.is_valid(): + vm = vm_pool.get(data['uuid']) + + r = RequestEntry.from_scratch(type=RequestType.ScheduleVM, + uuid=vm.uuid, + destination=join("/v1/host", data["destination"]), + migration=True) + request_pool.put(r) + return {"message": f"VM Migration Initialization Queued"}, 200 else: - return {"message": "Invalid Credentials"}, 400 - - -class ResumeVM(Resource): - def post(self): - args = startvm_argparser.parse_args() - name, realm, token, vm_uuid = args.name, args.realm, args.token, args.vmid - - if check_otp(name, realm, token) == 200: - vm = client.get(f"/v1/vm/{vm_uuid}", value_in_json=True) - if vm: - vm.value["status"] = "REQUESTED_RESUME" - client.put(vm.key, json.dumps(vm.value)) - return {"message": f"VM Resume Queued"} - else: - return {"message": "No such VM found"} - else: - return {"message": "Invalid Credentials"}, 400 + return validator.get_errors(), 400 class ListUserVM(Resource): - def get(self): - args = uservm_argparser.parse_args() - name, realm, token = args.name, args.realm, args.token + @staticmethod + def get(): + data = request.json + validator = OTPSchema(data) - if check_otp(name, realm, token) == 200: + if validator.is_valid(): vms = client.get_prefix(f"/v1/vm/", value_in_json=True) if vms: return_vms = [] - user_vms = list(filter(lambda v: v.value["owner"] == name, vms)) + user_vms = list(filter(lambda v: v.value["owner"] == data["name"], vms)) for vm in user_vms: - return_vms.append({ - "vm_uuid": vm.key.split("/")[-1], - "specs": vm.value["specs"], - "status": vm.value["status"] - }) + return_vms.append( + { + "vm_uuid": vm.key.split("/")[-1], + "specs": vm.value["specs"], + "status": vm.value["status"], + "hostname": vm.value["hostname"] + } + ) return {"message": return_vms}, 
200 else: return {"message": "No VM found"}, 404 else: - return {"message": "Invalid Credentials"}, 400 + return validator.get_errors(), 400 + + +class ListUserFiles(Resource): + @staticmethod + def get(): + data = request.json + validator = OTPSchema(data) + + if validator.is_valid(): + files = client.get_prefix(f"/v1/file/", value_in_json=True) + if files: + return_files = [] + user_files = list(filter(lambda f: f.value["owner"] == data["name"], files)) + for file in user_files: + return_files.append( + { + "filename": file.value["filename"], + "uuid": file.key.split("/")[-1], + } + ) + return {"message": return_files}, 200 + else: + return {"message": "No File found"}, 404 + else: + return validator.get_errors(), 400 + + +class CreateHost(Resource): + @staticmethod + def post(): + data = request.json + validator = CreateHostSchema(data) + if validator.is_valid(): + host_key = f"/v1/host/{uuid4().hex}" + host_entry = { + "specs": data["specs"], + "hostname": data["hostname"], + "status": "DEAD", + "last_heartbeat": "", + } + client.put(host_key, host_entry, value_in_json=True) + + return {"message": "Host Created"}, 200 + + return validator.get_errors(), 400 + + +class ListHost(Resource): + @staticmethod + def get(): + hosts = host_pool.hosts + r = {host.key: {"status": host.status, "specs": host.specs, "hostname": host.hostname} for host in hosts} + return r, 200 api.add_resource(CreateVM, "/vm/create") -api.add_resource(DeleteVM, "/vm/delete") api.add_resource(VmStatus, "/vm/status") -api.add_resource(StartVM, "/vm/start") -api.add_resource(SuspendVM, "/vm/suspend") -api.add_resource(ResumeVM, "/vm/resume") + +api.add_resource(VMAction, "/vm/action") +api.add_resource(VMMigration, "/vm/migrate") api.add_resource(CreateImage, "/image/create") api.add_resource(ListPublicImages, "/image/list-public") api.add_resource(ListUserVM, "/user/vms") +api.add_resource(ListUserFiles, "/user/files") + +api.add_resource(CreateHost, "/host/create") +api.add_resource(ListHost, "/host/list") if __name__ == "__main__": app.run(host="::", debug=True) diff --git a/schemas.py b/schemas.py new file mode 100755 index 0000000..90acd50 --- /dev/null +++ b/schemas.py @@ -0,0 +1,193 @@ +import json + +from common_fields import Field, VmUUIDField, SpecsField +from ucloud_common.host import HostPool, HostStatus +from ucloud_common.vm import VmPool, VMStatus +from helper import check_otp +from config import etcd_client as client +from os.path import join + +host_pool = HostPool(client, "/v1/host") +vm_pool = VmPool(client, "/v1/vm") + + +class BaseSchema: + def __init__(self, data, fields=None): + self.__errors = [] + if fields is None: + self.fields = [] + else: + self.fields = fields + + def validation(self): + # custom validation is optional + return True + + def is_valid(self): + for field in self.fields: + field.is_valid() + self.add_field_errors(field) + + for parent in self.__class__.__bases__: + try: + parent.validation(self) + except AttributeError: + pass + if not self.__errors: + self.validation() + + if self.__errors: + return False + return True + + def get_errors(self): + return {"message": self.__errors} + + def add_field_errors(self, field: Field): + self.__errors += field.get_errors() + + def add_error(self, error): + self.__errors.append(error) + + +class OTPSchema(BaseSchema): + def __init__(self, data: dict, fields=None): + self.name = Field("name", str, data.get("name", KeyError)) + self.realm = Field("realm", str, data.get("realm", KeyError)) + self.token = Field("token", str, data.get("token", 
KeyError))
+
+        _fields = [self.name, self.realm, self.token]
+        if fields:
+            _fields += fields
+        super().__init__(data=data, fields=_fields)
+
+    def validation(self):
+        rc = check_otp(self.name.value, self.realm.value, self.token.value)
+        if rc != 200:
+            self.add_error("Wrong Credentials")
+
+
+class CreateVMSchema(OTPSchema):
+    def __init__(self, data):
+        self.specs = SpecsField(data)
+
+        self.image_uuid = Field("image_uuid", str, data.get("image_uuid", KeyError))
+        self.image_uuid.validation = self.image_uuid_validation
+
+        fields = [self.specs, self.image_uuid]
+        super().__init__(data=data, fields=fields)
+
+    def image_uuid_validation(self):
+        images = client.get_prefix("/v1/image/")
+
+        if self.image_uuid.value not in [i.key.split("/")[-1] for i in images]:
+            self.add_error("Image UUID not valid")
+
+
+class VMStatusSchema(BaseSchema):
+    def __init__(self, data):
+        self.uuid = VmUUIDField(data)
+
+        fields = [self.uuid]
+
+        super().__init__(data, fields)
+
+
+class CreateImageSchema(BaseSchema):
+    def __init__(self, data):
+        # Fields
+        self.uuid: Field = Field("uuid", str, data.get("uuid", KeyError))
+        self.name = Field("name", str, data.get("name", KeyError))
+        self.image_store = Field("image_store", str, data.get("image_store", KeyError))
+
+        # Validations
+        self.uuid.validation = self.file_uuid_validation
+        self.image_store.validation = self.image_store_name_validation
+
+        # All Fields
+        fields = [self.uuid, self.name, self.image_store]
+        super().__init__(data, fields)
+
+    def file_uuid_validation(self):
+        file_entry = client.get(f"/v1/file/{self.uuid.value}")
+        if file_entry is None:
+            self.add_error(f"Image File with uuid '{self.uuid.value}' Not Found")
+
+    def image_store_name_validation(self):
+        image_stores = list(client.get_prefix("/v1/image_store/"))
+
+        image_store = next(filter(lambda s: json.loads(s.value)["name"] == self.image_store.value,
+                                  image_stores), None)
+        if not image_store:
+            self.add_error(f"Store '{self.image_store.value}' does not exist")
+
+
+class VmActionSchema(OTPSchema):
+    def __init__(self, data):
+        self.uuid = VmUUIDField(data)
+        self.action = Field("action", str, data.get("action", KeyError))
+
+        self.action.validation = self.action_validation
+
+        _fields = [self.uuid, self.action]
+        super().__init__(data=data, fields=_fields)
+
+    def action_validation(self):
+        allowed_actions = ["start", "stop", "delete"]
+        if self.action.value not in allowed_actions:
+            self.add_error(f"Invalid Action. Allowed Actions are {allowed_actions}")
+
+    def validation(self):
+        vm = vm_pool.get(self.uuid.value)
+        if vm.value["owner"] != self.name.value:
+            self.add_error("Invalid User")
+
+        if self.action.value == "start" and vm.status == VMStatus.running and vm.hostname != "":
+            self.add_error("VM Already Running")
+
+        if self.action.value == "stop" and vm.status == VMStatus.stopped:
+            self.add_error("VM Already Stopped")
+
+
+class VmMigrationSchema(OTPSchema):
+    def __init__(self, data):
+        self.uuid = VmUUIDField(data)
+        self.destination = Field("destination", str, data.get("destination", KeyError))
+
+        self.destination.validation = self.destination_validation
+
+        fields = [self.destination]
+        super().__init__(data=data, fields=fields)
+
+    def destination_validation(self):
+        host_key = self.destination.value
+        host = host_pool.get(host_key)
+        if not host:
+            self.add_error(f"No such host ({self.destination.value}) exists")
+        elif host.status != HostStatus.alive:
+            self.add_error("Destination Host is dead")
+
+    def validation(self):
+        vm = vm_pool.get(self.uuid.value)
+        if vm.owner != self.name.value:
+            self.add_error("Invalid User")
+
+        if vm.status != VMStatus.running:
+            self.add_error("Can't migrate non-running VM")
+
+        if vm.hostname == join("/v1/host", self.destination.value):
+            self.add_error("Destination host can't be the same as the source host")
+
+
+class CreateHostSchema(OTPSchema):
+    def __init__(self, data):
+        self.specs = SpecsField(data)
+        self.hostname = Field("hostname", str, data.get("hostname", KeyError))
+
+        fields = [self.specs, self.hostname]
+
+        super().__init__(data=data, fields=fields)
+
+    def validation(self):
+        if self.realm.value != "ungleich-admin":
+            self.add_error("Invalid Credentials/Insufficient Permission")
diff --git a/specs_parser.py b/specs_parser.py
new file mode 100755
index 0000000..3f05486
--- /dev/null
+++ b/specs_parser.py
@@ -0,0 +1,26 @@
+import bitmath
+
+
+class SpecsParser:
+    def __init__(self, exceptional_devices, allowed_unit=10):
+        self.exceptional_devices = exceptional_devices
+        self.allowed_unit = allowed_unit
+
+    def transform_specs(self, specs):
+        try:
+            for device in filter(lambda x: x not in self.exceptional_devices, specs):
+                parsed = bitmath.parse_string_unsafe(specs[device])
+                if parsed.base != self.allowed_unit:
+                    return False
+                specs[device] = int(parsed.to_Byte())
+            return True
+        except ValueError:
+            return False
+
+    def get_allowed_units(self):
+        if self.allowed_unit == 10:
+            unit_prefix = bitmath.SI_PREFIXES
+        else:
+            unit_prefix = bitmath.NIST_PREFIXES
+
+        return list(map(lambda u: u.upper() + "B", unit_prefix))