# proxmaster/grid.py
# -*- coding: utf-8 -*-
#
# vdc grid
#import site packages
import base64
import json
import re
import datetime
import random
import os
#import local packages
import utils
import plugin
import ioconfig
# Module-wide shortcuts to the shared logger and config parser from ioconfig.
logger = ioconfig.logger
config = ioconfig.parser
def query(json):
    """Return caller-facing fields from a unit's stored metadata.

    NOTE(review): the parameter is named ``json`` and shadows the stdlib
    module within this function; kept unchanged for caller compatibility.

    Args:
        json: dict carrying at least 'type' and 'unit_id' keys.

    Returns:
        ('slave', 'phy_id', 'hostname', 'clientemail') for 'kvm'/'lxc',
        ('slave', 'phy_id', 'clientemail') for 'br',
        or None when the metadata could not be read or the type is unknown.
    """
    data = read(json)
    if data is None:
        # read() already logged the failure; the original would have
        # raised TypeError here when subscripting None.
        return None
    if json['type'] in ('kvm', 'lxc'):
        return data['slave'], data['phy_id'], data['hostname'], data['clientemail']
    if json['type'] == 'br':
        return data['slave'], data['phy_id'], data['clientemail']
    return None
def read(data):
    """Read a unit's metadata file from the db/ directory.

    Args:
        data: dict with 'type' and 'unit_id' keys selecting the file
              'db/<type>.<unit_id>.json'.

    Returns:
        The decoded JSON dict with 'status' set to 'query_success',
        or None when the file is missing/unreadable (logged as critical).
    """
    try:
        dbfile = 'db/{}.{}.json'.format(data['type'], data['unit_id'])
        # Context manager ensures the handle is closed even if json.load
        # raises (the original leaked the handle on a decode error).
        with open(dbfile, 'r') as dbf:
            data = json.load(dbf)
        data['status'] = 'query_success'
        return data
    except Exception as e:
        logger.critical('grid> read error: {}'.format(e))
        return None
def create(data):
    """Write a new metadata file for a unit.

    Args:
        data: dict with 'type' and 'unit_id' keys (plus arbitrary payload);
              serialized as JSON to 'db/<type>.<unit_id>.json'.

    Errors are logged as critical and swallowed (best-effort write, as in
    the original).
    """
    try:
        dbfile = 'db/{}.{}.json'.format(data['type'], data['unit_id'])
        logger.info('{}'.format(data))
        # Context manager closes the file even if json.dump raises
        # (the original leaked the handle in that case).
        with open(dbfile, 'w') as dbf:
            json.dump(data, dbf)
        logger.info('grid> {} successfully written.'.format(dbfile))
    except Exception as e:
        logger.critical('grid> write error: {}'.format(e))
def delete(data):
    """Remove the metadata file for the given unit.

    Args:
        data: dict with 'type' and 'unit_id' keys selecting the file.
    """
    target = 'db/{}.{}.json'.format(data['type'], data['unit_id'])
    #TODO: perhaps just move the datafile to an archive directory
    os.remove(target)
    return None
def phyidgen(slave_name, unit_type):
    """Pick a free physical id for a new unit on *slave_name*.

    Scans all db/ metadata files whose name starts with *unit_type* and
    randomly selects an id not already used on that slave.

    NOTE(review): the ranges below disagree with the original docstring
    ("101 - 150 for deployments"): 'kvm' uses range(101, 150) i.e. 101-149,
    and 'br' (1000-2998) overlaps 'lxc' (2000-2998). Kept as-is to preserve
    behavior — confirm the intended ranges with the author.

    reserved ids:
        100 - FrankenROUTER instance

    Returns:
        A free id (int), or 99999 when no id is available on this slave.
    """
    if str(unit_type) == 'br':
        full_list = list(range(1000, 2999))
    if str(unit_type) == 'lxc':
        full_list = list(range(2000, 2999))
    if str(unit_type) == 'kvm':
        full_list = list(range(101, 150))
    exclude_list = []
    directory = 'db/'
    for dbfile in os.listdir(directory):
        filename = os.fsdecode(dbfile)
        if filename.startswith(str(unit_type)):
            db_fullpath = os.path.join(directory, filename)
            # Context manager closes the handle even on a JSON decode error
            # (the original leaked it in that case).
            with open(db_fullpath, 'r') as dbf:
                data = json.load(dbf)
            if data['slave'] == str(slave_name):
                exclude_list.append(data['phy_id'])
    valid_list = list(set(full_list) - set(exclude_list))
    # BUGFIX: the original tested len(valid_list) > 1, wrongly reporting
    # a full pool when exactly one free id remained. Also dropped a
    # leftover debug print of the exclude list.
    if valid_list:
        choice = random.choice(valid_list)
        logger.info('[{}]> unit type: {} physical id: {}'.format(str(slave_name), str(unit_type), str(choice)))
        return choice
    logger.critical('{}> no free physical ids!'.format(slave_name))
    return 99999
def analyze_happiness(region_id):
    """Analyze grid data for the requested region and propose a slave id.

    "Happiness" means the slave is alive and free.

    NOTE(review): readcache() is neither defined nor imported in this
    module — confirm where it comes from; as written the call raises
    NameError at runtime.

    Args:
        region_id: region key into the cached grid data.

    Returns:
        The selected slave id, or None when no slave is alive.
    """
    grid_data = readcache()
    grid_data = grid_data[str(region_id)]
    all_slaves = []
    for element in grid_data:
        try:
            # A slave record is one whose key matches its own 'id' field.
            if str(element) == grid_data[element]['id']:
                all_slaves.append(element)
        except Exception:
            # Not a slave record (missing 'id' or non-dict); skip it.
            continue
    all_slaves = [int(x) for x in all_slaves]  # convert values from str to int
    alive_slaves = [sid for sid in all_slaves
                    if str(grid_data[str(sid)]['alive']) == 'up']
    logger.info('region[{}]> alive slaves {}'.format(str(region_id), str(alive_slaves)))
    # BUGFIX: happy_slave was unbound when no slave was alive, so the
    # original raised UnboundLocalError on return; now returns None.
    happy_slave = None
    if not alive_slaves:
        logger.error('region[{}]> grid is full. add more slaves'.format(str(region_id)))
    else:
        happy_slave = 0  #TODO: analyze slaves and make informed decision.
        logger.info('region[{}]> {} selected'.format(str(region_id), str(happy_slave)))
    return happy_slave
if __name__ == '__main__':
    # Manual smoke test: allocate a physical id for a kvm unit on 'warrior'.
    #print(query_happiness(0))
    print(phyidgen('warrior', 'kvm'))