"""
Miscellaneous utility functions, wrappers around some subprocess procedures.
"""

import concurrent.futures
import fcntl
import hashlib
import importlib
import inspect
import json
import logging
import logging.config
import os
import random
import shlex
import shutil
import sys
from subprocess import PIPE, STDOUT, Popen
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    Generic,
    Iterable,
    List,
    Optional,
    Tuple,
    Type,
    TypeVar,
    Union,
)

import netaddr

from core.errors import CoreCommandError, CoreError

if TYPE_CHECKING:
    from core.emulator.session import Session
    from core.nodes.base import CoreNode

T = TypeVar("T")

DEVNULL = open(os.devnull, "wb")


def execute_file(
    path: str, exec_globals: Dict[str, str] = None, exec_locals: Dict[str, str] = None
) -> None:
    """
    Provides an alternative way to run execfile, compatible with both
    Python 2 and 3.

    :param path: path of file to execute
    :param exec_globals: global values to pass to execution
    :param exec_locals: local values to pass to execution
    :return: nothing
    """
    if exec_globals is None:
        exec_globals = {}
    exec_globals.update({"__file__": path, "__name__": "__main__"})
    with open(path, "rb") as f:
        data = compile(f.read(), path, "exec")
        exec(data, exec_globals, exec_locals)


def hashkey(value: Union[str, int]) -> int:
    """
    Provide a consistent hash that can be used in place of the builtin
    hash, which no longer behaves consistently across runs in Python 3.

    :param value: value to hash
    :return: hash value
    """
    if isinstance(value, int):
        value = str(value)
    value = value.encode("utf-8")
    return int(hashlib.sha256(value).hexdigest(), 16)
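
# Example usage of hashkey (illustrative sketch; the key string is a
# placeholder): unlike the builtin hash, the value is the same in every
# interpreter run.
#   key = hashkey("n1") % 2 ** 32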


def _detach_init() -> None:
    """
    Fork a child process, have the parent exit, and start a new session in
    the child, detaching it from the controlling terminal.

    :return: nothing
    """
    if os.fork():
        # parent exits
        os._exit(0)
    os.setsid()


def _valid_module(path: str, file_name: str) -> bool:
    """
    Check if file is a valid python module.

    :param path: path to file
    :param file_name: file name to check
    :return: True if a valid python module file, False otherwise
    """
    file_path = os.path.join(path, file_name)
    if not os.path.isfile(file_path):
        return False
    if file_name.startswith("_"):
        return False
    if not file_name.endswith(".py"):
        return False
    return True


def _is_class(module: Any, member: Type, clazz: Type) -> bool:
    """
    Validates if a module member is a class defined within the given module
    and a subclass of the given class.

    :param module: module the member should belong to
    :param member: member to validate as a class
    :param clazz: class type to check against
    :return: True if a valid class, False otherwise
    """
    if not inspect.isclass(member):
        return False
    if not issubclass(member, clazz):
        return False
    if member.__module__ != module.__name__:
        return False
    return True


def close_onexec(fd: int) -> None:
    """
    Set the close-on-exec flag on a file descriptor, so it is closed when a
    new program is executed.

    :param fd: file descriptor to set the flag on
    :return: nothing
    """
    fdflags = fcntl.fcntl(fd, fcntl.F_GETFD)
    fcntl.fcntl(fd, fcntl.F_SETFD, fdflags | fcntl.FD_CLOEXEC)


def which(command: str, required: bool) -> Optional[str]:
    """
    Find location of desired executable within current PATH.

    :param command: command to find location for
    :param required: True to raise an error when the command is not found,
        False to return None
    :return: command location or None
    :raises ValueError: when not found and required
    """
    found_path = shutil.which(command)
    if found_path is None and required:
        raise ValueError(f"failed to find required executable({command}) in path")
    return found_path
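
# Example usage of which (illustrative sketch; "vcmd" is a placeholder
# executable name):
#   vcmd_path = which("vcmd", required=False)
#   if vcmd_path is None:
#       logging.warning("vcmd not found in PATH")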


def make_tuple_fromstr(s: str, value_type: Callable[[str], T]) -> Tuple[T, ...]:
    """
    Create a tuple from a string.

    :param s: string to convert to a tuple
    :param value_type: type of values to be contained within tuple
    :return: tuple from string
    """
    # remove tuple parentheses and strip commas, quotes, and spaces from all
    # values in the tuple string
    values = []
    for x in s.strip("(), ").split(","):
        x = x.strip("' ")
        if x:
            values.append(x)
    return tuple(value_type(i) for i in values)
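
# Example usage of make_tuple_fromstr (illustrative sketch): parse a tuple that
# was previously serialized with str().
#   position = make_tuple_fromstr("(100, 150, 0)", int)
#   assert position == (100, 150, 0)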


def mute_detach(args: str, **kwargs: Any) -> int:
    """
    Run a muted detached process by forking it.

    :param args: arguments for the command
    :param kwargs: keyword arguments for the command
    :return: process id of the command
    """
    args = shlex.split(args)
    kwargs["preexec_fn"] = _detach_init
    kwargs["stdout"] = DEVNULL
    kwargs["stderr"] = STDOUT
    return Popen(args, **kwargs).pid
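
# Example usage of mute_detach (illustrative sketch; the command and interface
# name are placeholders):
#   pid = mute_detach("tcpdump -w /tmp/capture.pcap -i veth0")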


def cmd(
    args: str,
    env: Dict[str, str] = None,
    cwd: str = None,
    wait: bool = True,
    shell: bool = False,
) -> str:
    """
    Execute a command on the host and return its stdout output. stderr is
    captured separately and included in the raised error when the command fails.

    :param args: command arguments
    :param env: environment to run command with
    :param cwd: directory to run command in
    :param wait: True to wait for status, False otherwise
    :param shell: True to use shell, False otherwise
    :return: stdout output from the command
    :raises CoreCommandError: when there is a non-zero exit status or the file to
        execute is not found
    """
    logging.debug("command cwd(%s) wait(%s): %s", cwd, wait, args)
    if shell is False:
        args = shlex.split(args)
    try:
        output = PIPE if wait else DEVNULL
        p = Popen(args, stdout=output, stderr=output, env=env, cwd=cwd, shell=shell)
        if wait:
            stdout, stderr = p.communicate()
            stdout = stdout.decode("utf-8").strip()
            stderr = stderr.decode("utf-8").strip()
            status = p.wait()
            if status != 0:
                raise CoreCommandError(status, args, stdout, stderr)
            return stdout
        else:
            return ""
    except OSError as e:
        logging.error("cmd error: %s", e.strerror)
        raise CoreCommandError(1, args, "", e.strerror)
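
# Example usage of cmd (illustrative sketch; the interface name is a
# placeholder):
#   try:
#       output = cmd("ip link show veth0")
#   except CoreCommandError as e:
#       logging.error("command failed: %s", e)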


def file_munge(pathname: str, header: str, text: str) -> None:
    """
    Insert text at the end of a file, surrounded by header comments.

    :param pathname: file path to add text to
    :param header: header text comments
    :param text: text to append to file
    :return: nothing
    """
    # prevent duplicates
    file_demunge(pathname, header)
    with open(pathname, "a") as append_file:
        append_file.write(f"# BEGIN {header}\n")
        append_file.write(text)
        append_file.write(f"# END {header}\n")


def file_demunge(pathname: str, header: str) -> None:
    """
    Remove text that was inserted in a file surrounded by header comments.

    :param pathname: file path to open for removing a header
    :param header: header text to target for removal
    :return: nothing
    """
    with open(pathname, "r") as read_file:
        lines = read_file.readlines()
    start = None
    end = None
    for i, line in enumerate(lines):
        if line == f"# BEGIN {header}\n":
            start = i
        elif line == f"# END {header}\n":
            end = i + 1
    if start is None or end is None:
        return
    with open(pathname, "w") as write_file:
        lines = lines[:start] + lines[end:]
        write_file.write("".join(lines))
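
# Example usage of file_munge/file_demunge (illustrative sketch; the path and
# hosts entry are placeholders):
#   file_munge("/tmp/hosts", "core session 1", "10.0.0.1 n1\n")
#   file_demunge("/tmp/hosts", "core session 1")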


def expand_corepath(
    pathname: str, session: "Session" = None, node: "CoreNode" = None
) -> str:
    """
    Expand a file path given session information.

    :param pathname: file path to expand
    :param session: core session object to expand path
    :param node: node to expand path with
    :return: expanded path
    """
    if session is not None:
        pathname = pathname.replace("~", f"/home/{session.user}")
        pathname = pathname.replace("%SESSION%", str(session.id))
        pathname = pathname.replace("%SESSION_DIR%", session.session_dir)
        pathname = pathname.replace("%SESSION_USER%", session.user)
    if node is not None:
        pathname = pathname.replace("%NODE%", str(node.id))
        pathname = pathname.replace("%NODENAME%", node.name)
    return pathname
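
# Example usage of expand_corepath (illustrative sketch; assumes existing
# Session and CoreNode objects named session and node):
#   path = expand_corepath("%SESSION_DIR%/%NODENAME%.conf", session, node)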


def sysctl_devname(devname: str) -> Optional[str]:
    """
    Translate a device name to the name used with sysctl.

    :param devname: device name to translate
    :return: translated device name
    """
    if devname is None:
        return None
    return devname.replace(".", "/")


def load_config(filename: str, d: Dict[str, str]) -> None:
    """
    Read key=value pairs from a file, into a dict. Skip comments; strip newline
    characters and spacing.

    :param filename: file to read into a dictionary
    :param d: dictionary to read file into
    :return: nothing
    """
    with open(filename, "r") as f:
        lines = f.readlines()
    for line in lines:
        if line[:1] == "#":
            continue
        try:
            key, value = line.split("=", 1)
            d[key] = value.strip()
        except ValueError:
            logging.exception("error reading file to dict: %s", filename)
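
# Example usage of load_config (illustrative sketch; the file path is a
# placeholder):
#   config = {}
#   load_config("/etc/core/core.conf", config)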


def load_classes(path: str, clazz: Generic[T]) -> T:
    """
    Dynamically load classes for use within CORE.

    :param path: path to load classes from
    :param clazz: class type expected to be inherited from for loading
    :return: list of classes loaded
    """
    # validate path exists
    logging.debug("attempting to load modules from path: %s", path)
    if not os.path.isdir(path):
        logging.warning("invalid custom module directory specified: %s", path)
    # check if path is in sys.path
    parent_path = os.path.dirname(path)
    if parent_path not in sys.path:
        logging.debug("adding parent path to allow imports: %s", parent_path)
        sys.path.append(parent_path)

    # retrieve potential service modules, and filter out invalid modules
    base_module = os.path.basename(path)
    module_names = os.listdir(path)
    module_names = filter(lambda x: _valid_module(path, x), module_names)
    module_names = map(lambda x: x[:-3], module_names)

    # import and add all service modules in the path
    classes = []
    for module_name in module_names:
        import_statement = f"{base_module}.{module_name}"
        logging.debug("importing custom module: %s", import_statement)
        try:
            module = importlib.import_module(import_statement)
            members = inspect.getmembers(module, lambda x: _is_class(module, x, clazz))
            for member in members:
                valid_class = member[1]
                classes.append(valid_class)
        except Exception:
            logging.exception(
                "unexpected error during import, skipping: %s", import_statement
            )
    return classes
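
# Example usage of load_classes (illustrative sketch; the custom service
# directory and the CoreService import path are assumptions):
#   from core.services.coreservices import CoreService
#   custom_services = load_classes("/home/user/.core/myservices", CoreService)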


def load_logging_config(config_path: str) -> None:
    """
    Load CORE logging configuration file.

    :param config_path: path to logging config file
    :return: nothing
    """
    with open(config_path, "r") as log_config_file:
        log_config = json.load(log_config_file)
        logging.config.dictConfig(log_config)


def threadpool(
    funcs: List[Tuple[Callable, Iterable[Any], Dict[Any, Any]]], workers: int = 10
) -> Tuple[List[Any], List[Exception]]:
    """
    Run provided functions, arguments, and keywords within a threadpool,
    collecting results and exceptions.

    :param funcs: iterable that provides a func, args, kwargs
    :param workers: number of workers for the threadpool
    :return: results and exceptions from running functions with args and kwargs
    """
    with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
        futures = []
        for func, args, kwargs in funcs:
            future = executor.submit(func, *args, **kwargs)
            futures.append(future)
        results = []
        exceptions = []
        for future in concurrent.futures.as_completed(futures):
            try:
                result = future.result()
                results.append(result)
            except Exception as e:
                logging.exception("thread pool exception")
                exceptions.append(e)
    return results, exceptions
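
# Example usage of threadpool (illustrative sketch; boot_node and nodes are
# placeholders for any callable and iterable):
#   funcs = [(boot_node, (node,), {}) for node in nodes]
#   results, exceptions = threadpool(funcs)
#   if exceptions:
#       logging.error("thread pool failures: %s", exceptions)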


def random_mac() -> str:
    """
    Create a random mac address using Xen OID 00:16:3E.

    :return: random mac address
    """
    value = random.randint(0, 0xFFFFFF)
    value |= 0x00163E << 24
    mac = netaddr.EUI(value, dialect=netaddr.mac_unix_expanded)
    return str(mac)
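
# Example usage of random_mac (illustrative sketch): generated addresses use
# the Xen OID prefix in unix expanded format.
#   mac = random_mac()
#   assert mac.startswith("00:16:3e")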


def validate_mac(value: str) -> str:
    """
    Validate mac and return unix formatted version.

    :param value: address to validate
    :return: unix formatted mac
    """
    try:
        mac = netaddr.EUI(value, dialect=netaddr.mac_unix_expanded)
        return str(mac)
    except netaddr.AddrFormatError as e:
        raise CoreError(f"invalid mac address {value}: {e}")


def validate_ip(value: str) -> str:
    """
    Validate ip address with prefix and return formatted version.

    :param value: address to validate
    :return: formatted ip address
    """
    try:
        ip = netaddr.IPNetwork(value)
        return str(ip)
    except (ValueError, netaddr.AddrFormatError) as e:
        raise CoreError(f"invalid ip address {value}: {e}")
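
# Example usage of validate_mac and validate_ip (illustrative sketch):
#   mac = validate_mac("00-16-3E-AA-BB-CC")  # "00:16:3e:aa:bb:cc"
#   ip = validate_ip("10.0.0.1/24")  # "10.0.0.1/24"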