2017-04-25 16:45:34 +01:00
|
|
|
"""
|
2013-08-29 15:21:13 +01:00
|
|
|
session.py: defines the Session class used by the core-daemon daemon program
|
|
|
|
that manages a CORE session.
|
2017-04-25 16:45:34 +01:00
|
|
|
"""
|
2013-08-29 15:21:13 +01:00
|
|
|
|
2019-02-16 17:50:19 +00:00
|
|
|
import logging
|
2017-04-25 16:45:34 +01:00
|
|
|
import os
|
2019-04-30 07:31:47 +01:00
|
|
|
import pwd
|
2017-04-25 16:45:34 +01:00
|
|
|
import random
|
|
|
|
import shutil
|
2016-09-05 22:11:10 +01:00
|
|
|
import subprocess
|
2017-04-25 16:45:34 +01:00
|
|
|
import tempfile
|
|
|
|
import threading
|
|
|
|
import time
|
2020-01-14 06:15:44 +00:00
|
|
|
from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Type
|
2013-08-29 15:21:13 +01:00
|
|
|
|
2019-09-28 07:29:15 +01:00
|
|
|
from core import constants, utils
|
2017-04-25 16:45:34 +01:00
|
|
|
from core.emane.emanemanager import EmaneManager
|
2019-09-28 06:31:56 +01:00
|
|
|
from core.emane.nodes import EmaneNet
|
2020-01-11 06:37:19 +00:00
|
|
|
from core.emulator.data import (
|
|
|
|
ConfigData,
|
|
|
|
EventData,
|
|
|
|
ExceptionData,
|
|
|
|
FileData,
|
|
|
|
LinkData,
|
|
|
|
NodeData,
|
|
|
|
)
|
2019-10-17 19:10:59 +01:00
|
|
|
from core.emulator.distributed import DistributedController
|
2019-09-10 22:20:51 +01:00
|
|
|
from core.emulator.emudata import (
|
|
|
|
IdGen,
|
2020-01-11 06:37:19 +00:00
|
|
|
InterfaceData,
|
2019-09-10 22:20:51 +01:00
|
|
|
LinkOptions,
|
|
|
|
NodeOptions,
|
|
|
|
create_interface,
|
|
|
|
link_config,
|
|
|
|
)
|
|
|
|
from core.emulator.enumerations import EventTypes, ExceptionLevels, LinkTypes, NodeTypes
|
2019-10-30 20:27:12 +00:00
|
|
|
from core.emulator.sessionconfig import SessionConfig
|
2019-09-28 07:29:15 +01:00
|
|
|
from core.errors import CoreError
|
2019-04-30 07:31:47 +01:00
|
|
|
from core.location.corelocation import CoreLocation
|
|
|
|
from core.location.event import EventLoop
|
2019-11-21 20:29:33 +00:00
|
|
|
from core.location.mobility import BasicRangeModel, MobilityManager
|
2020-01-11 06:37:19 +00:00
|
|
|
from core.nodes.base import CoreNetworkBase, CoreNode, CoreNodeBase, NodeBase
|
2019-09-26 21:00:12 +01:00
|
|
|
from core.nodes.docker import DockerNode
|
2020-01-11 06:37:19 +00:00
|
|
|
from core.nodes.interface import GreTap
|
2019-09-26 21:00:12 +01:00
|
|
|
from core.nodes.lxd import LxcNode
|
|
|
|
from core.nodes.network import (
|
|
|
|
CtrlNet,
|
|
|
|
GreTapBridge,
|
|
|
|
HubNode,
|
|
|
|
PtpNet,
|
|
|
|
SwitchNode,
|
|
|
|
TunnelNode,
|
|
|
|
WlanNode,
|
|
|
|
)
|
|
|
|
from core.nodes.physical import PhysicalNode, Rj45Node
|
2019-04-30 07:31:47 +01:00
|
|
|
from core.plugins.sdt import Sdt
|
2020-01-11 06:37:19 +00:00
|
|
|
from core.services.coreservices import CoreServices, ServiceBootError
|
2019-09-10 22:20:51 +01:00
|
|
|
from core.xml import corexml, corexmldeployment
|
2019-05-02 07:17:46 +01:00
|
|
|
from core.xml.corexml import CoreXmlReader, CoreXmlWriter
|
2017-04-25 16:45:34 +01:00
|
|
|
|
2019-09-26 21:00:12 +01:00
|
|
|
# maps for converting from API call node type values to classes and vice versa
NODES = {
    NodeTypes.DEFAULT: CoreNode,
    NodeTypes.PHYSICAL: PhysicalNode,
    NodeTypes.SWITCH: SwitchNode,
    NodeTypes.HUB: HubNode,
    NodeTypes.WIRELESS_LAN: WlanNode,
    NodeTypes.RJ45: Rj45Node,
    NodeTypes.TUNNEL: TunnelNode,
    NodeTypes.EMANE: EmaneNet,
    NodeTypes.TAP_BRIDGE: GreTapBridge,
    NodeTypes.PEER_TO_PEER: PtpNet,
    NodeTypes.CONTROL_NET: CtrlNet,
    NodeTypes.DOCKER: DockerNode,
    NodeTypes.LXC: LxcNode,
}
# reverse lookup: node class -> NodeTypes member
NODES_TYPE = {NODES[x]: x for x in NODES}
# id associated with control network nodes — presumably an offset/base for
# CONTROL_NET node ids; TODO confirm against control network creation code
CTRL_NET_ID = 9001
|
2019-09-26 21:00:12 +01:00
|
|
|
|
2018-01-04 16:19:34 +00:00
|
|
|
|
2019-10-23 17:31:07 +01:00
|
|
|
class Session:
|
2017-04-25 16:45:34 +01:00
|
|
|
"""
|
|
|
|
CORE session manager.
|
|
|
|
"""
|
2013-08-29 15:21:13 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
    def __init__(
        self, _id: int, config: Dict[str, str] = None, mkdir: bool = True
    ) -> None:
        """
        Create a Session instance.

        :param _id: session id
        :param config: session configuration key/value overrides applied to
            the default SessionConfig
        :param mkdir: flag to determine if a directory should be made
        """
        # unique session id
        self.id = _id

        # define and create session directory when desired
        self.session_dir = os.path.join(tempfile.gettempdir(), f"pycore.{self.id}")
        if mkdir:
            os.mkdir(self.session_dir)

        # descriptive metadata set later by users/clients
        self.name = None
        self.file_name = None
        self.thumbnail = None
        self.user = None
        # scheduler for time-delayed session events
        self.event_loop = EventLoop()

        # dict of nodes: all nodes and nets
        self.node_id_gen = IdGen()
        self.nodes = {}
        # guards concurrent access to self.nodes
        self._nodes_lock = threading.Lock()

        # TODO: should the default state be definition?
        self.state = EventTypes.NONE.value
        # monotonic clock so state durations are immune to wall-clock changes
        self._state_time = time.monotonic()
        self._state_file = os.path.join(self.session_dir, "state")

        # hooks handlers
        self._hooks = {}
        self._state_hooks = {}
        # run runtime_state_hook whenever the session enters the runtime state
        self.add_state_hook(
            state=EventTypes.RUNTIME_STATE.value, hook=self.runtime_state_hook
        )

        # handlers for broadcasting information
        self.event_handlers = []
        self.exception_handlers = []
        self.node_handlers = []
        self.link_handlers = []
        self.file_handlers = []
        self.config_handlers = []
        self.shutdown_handlers = []

        # session options/metadata
        self.options = SessionConfig()
        if not config:
            config = {}
        for key in config:
            value = config[key]
            self.options.set_config(key, value)
        self.metadata = {}

        # distributed support and logic
        self.distributed = DistributedController(self)

        # initialize session feature helpers
        self.location = CoreLocation()
        self.mobility = MobilityManager(session=self)
        self.services = CoreServices(session=self)
        self.emane = EmaneManager(session=self)
        self.sdt = Sdt(session=self)

        # initialize default node services, keyed by node model name
        self.services.default_services = {
            "mdr": ("zebra", "OSPFv3MDR", "IPForward"),
            "PC": ("DefaultRoute",),
            "prouter": (),
            "router": ("zebra", "OSPFv2", "OSPFv3", "IPForward"),
            "host": ("DefaultRoute", "SSH"),
        }
|
|
|
|
|
2019-09-26 21:00:12 +01:00
|
|
|
@classmethod
|
2020-01-11 06:37:19 +00:00
|
|
|
def get_node_class(cls, _type: NodeTypes) -> Type[NodeBase]:
|
2019-09-26 21:00:12 +01:00
|
|
|
"""
|
|
|
|
Retrieve the class for a given node type.
|
|
|
|
|
2020-01-16 19:00:57 +00:00
|
|
|
:param _type: node type to get class for
|
2019-09-26 21:00:12 +01:00
|
|
|
:return: node class
|
|
|
|
"""
|
|
|
|
node_class = NODES.get(_type)
|
|
|
|
if node_class is None:
|
2019-10-18 18:33:31 +01:00
|
|
|
raise CoreError(f"invalid node type: {_type}")
|
2019-09-26 21:00:12 +01:00
|
|
|
return node_class
|
|
|
|
|
|
|
|
@classmethod
|
2020-01-11 06:37:19 +00:00
|
|
|
def get_node_type(cls, _class: Type[NodeBase]) -> NodeTypes:
|
2019-09-26 21:00:12 +01:00
|
|
|
"""
|
|
|
|
Retrieve node type for a given node class.
|
|
|
|
|
|
|
|
:param _class: node class to get a node type for
|
|
|
|
:return: node type
|
2020-01-16 19:00:57 +00:00
|
|
|
:raises CoreError: when node type does not exist
|
2019-09-26 21:00:12 +01:00
|
|
|
"""
|
|
|
|
node_type = NODES_TYPE.get(_class)
|
|
|
|
if node_type is None:
|
2019-10-18 18:33:31 +01:00
|
|
|
raise CoreError(f"invalid node class: {_class}")
|
2019-09-26 21:00:12 +01:00
|
|
|
return node_type
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
    def _link_nodes(
        self, node_one_id: int, node_two_id: int
    ) -> Tuple[
        CoreNode, CoreNode, CoreNetworkBase, CoreNetworkBase, Tuple[GreTap, GreTap]
    ]:
        """
        Convenience method for retrieving nodes within link data.

        Classifies the two endpoints into node vs network roles and looks up
        any distributed tunnel between them.

        :param node_one_id: node one id
        :param node_two_id: node two id
        :return: nodes, network nodes if present, and tunnel if present
        """
        logging.debug(
            "link message between node1(%s) and node2(%s)", node_one_id, node_two_id
        )

        # values to fill
        net_one = None
        net_two = None

        # retrieve both endpoint nodes by id
        node_one = self.get_node(node_one_id)
        node_two = self.get_node(node_two_id)

        # check for a distributed tunnel between the two node ids
        tunnel = self.distributed.get_tunnel(node_one_id, node_two_id)
        logging.debug("tunnel between nodes: %s", tunnel)
        if isinstance(tunnel, GreTapBridge):
            # bridge tunnel stands in as the network; clear the remote endpoint
            net_one = tunnel
            if tunnel.remotenum == node_one_id:
                node_one = None
            else:
                node_two = None
        # physical node connected via gre tap tunnel
        elif tunnel:
            if tunnel.remotenum == node_one_id:
                node_one = None
            else:
                node_two = None

        # endpoints that are networks get reassigned to the net slots
        if isinstance(node_one, CoreNetworkBase):
            if not net_one:
                net_one = node_one
            else:
                net_two = node_one
            node_one = None

        if isinstance(node_two, CoreNetworkBase):
            if not net_one:
                net_one = node_two
            else:
                net_two = node_two
            node_two = None

        logging.debug(
            "link node types n1(%s) n2(%s) net1(%s) net2(%s) tunnel(%s)",
            node_one,
            node_two,
            net_one,
            net_two,
            tunnel,
        )
        return node_one, node_two, net_one, net_two, tunnel
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def _link_wireless(self, objects: Iterable[CoreNodeBase], connect: bool) -> None:
|
2019-05-02 07:17:46 +01:00
|
|
|
"""
|
|
|
|
Objects to deal with when connecting/disconnecting wireless links.
|
|
|
|
|
2020-01-16 19:00:57 +00:00
|
|
|
:param objects: possible objects to deal with
|
|
|
|
:param connect: link interfaces if True, unlink otherwise
|
2019-05-02 07:17:46 +01:00
|
|
|
:return: nothing
|
2020-01-11 06:37:19 +00:00
|
|
|
:raises core.CoreError: when objects to link is less than 2, or no common
|
|
|
|
networks are found
|
2019-05-02 07:17:46 +01:00
|
|
|
"""
|
|
|
|
objects = [x for x in objects if x]
|
|
|
|
if len(objects) < 2:
|
2019-10-18 18:33:31 +01:00
|
|
|
raise CoreError(f"wireless link failure: {objects}")
|
2019-09-10 23:10:24 +01:00
|
|
|
logging.debug(
|
|
|
|
"handling wireless linking objects(%s) connect(%s)", objects, connect
|
|
|
|
)
|
2019-05-02 07:17:46 +01:00
|
|
|
common_networks = objects[0].commonnets(objects[1])
|
|
|
|
if not common_networks:
|
2019-09-11 21:12:42 +01:00
|
|
|
raise CoreError("no common network found for wireless link/unlink")
|
2019-05-02 07:17:46 +01:00
|
|
|
|
|
|
|
for common_network, interface_one, interface_two in common_networks:
|
2019-09-28 06:31:56 +01:00
|
|
|
if not isinstance(common_network, (WlanNode, EmaneNet)):
|
2019-09-10 23:10:24 +01:00
|
|
|
logging.info(
|
|
|
|
"skipping common network that is not wireless/emane: %s",
|
|
|
|
common_network,
|
|
|
|
)
|
2019-05-02 07:17:46 +01:00
|
|
|
continue
|
|
|
|
|
2019-09-10 23:10:24 +01:00
|
|
|
logging.info(
|
|
|
|
"wireless linking connect(%s): %s - %s",
|
|
|
|
connect,
|
|
|
|
interface_one,
|
|
|
|
interface_two,
|
|
|
|
)
|
2019-05-02 07:17:46 +01:00
|
|
|
if connect:
|
|
|
|
common_network.link(interface_one, interface_two)
|
|
|
|
else:
|
|
|
|
common_network.unlink(interface_one, interface_two)
|
|
|
|
|
2019-09-10 23:10:24 +01:00
|
|
|
    def add_link(
        self,
        node_one_id: int,
        node_two_id: int,
        interface_one: InterfaceData = None,
        interface_two: InterfaceData = None,
        link_options: LinkOptions = None,
    ) -> None:
        """
        Add a link between nodes.

        :param node_one_id: node one id
        :param node_two_id: node two id
        :param interface_one: node one interface data, defaults to none
        :param interface_two: node two interface data, defaults to none
        :param link_options: data for creating link, defaults to no options
        :return: nothing
        """
        if not link_options:
            link_options = LinkOptions()

        # get node objects identified by link data
        node_one, node_two, net_one, net_two, tunnel = self._link_nodes(
            node_one_id, node_two_id
        )

        # hold both node locks for the duration of the link setup
        if node_one:
            node_one.lock.acquire()
        if node_two:
            node_two.lock.acquire()

        try:
            # wireless link
            if link_options.type == LinkTypes.WIRELESS:
                objects = [node_one, node_two, net_one, net_two]
                self._link_wireless(objects, connect=True)
            # wired link
            else:
                # 2 nodes being linked, ptp network
                if all([node_one, node_two]) and not net_one:
                    logging.info(
                        "adding link for peer to peer nodes: %s - %s",
                        node_one.name,
                        node_two.name,
                    )
                    # only start the ptp net once past the definition state
                    start = self.state > EventTypes.DEFINITION_STATE.value
                    net_one = self.create_node(cls=PtpNet, start=start)

                # node to network
                if node_one and net_one:
                    logging.info(
                        "adding link from node to network: %s - %s",
                        node_one.name,
                        net_one.name,
                    )
                    interface = create_interface(node_one, net_one, interface_one)
                    link_config(net_one, interface, link_options)

                # network to node
                if node_two and net_one:
                    logging.info(
                        "adding link from network to node: %s - %s",
                        node_two.name,
                        net_one.name,
                    )
                    interface = create_interface(node_two, net_one, interface_two)
                    # unidirectional links only configure the first direction
                    if not link_options.unidirectional:
                        link_config(net_one, interface, link_options)

                # network to network
                if net_one and net_two:
                    logging.info(
                        "adding link from network to network: %s - %s",
                        net_one.name,
                        net_two.name,
                    )
                    interface = net_one.linknet(net_two)
                    link_config(net_one, interface, link_options)

                    if not link_options.unidirectional:
                        # swap to the upstream params before configuring the
                        # reverse direction, then swap back
                        interface.swapparams("_params_up")
                        link_config(
                            net_two, interface, link_options, devname=interface.name
                        )
                        interface.swapparams("_params_up")

                # a tunnel node was found for the nodes
                addresses = []
                if not node_one and all([net_one, interface_one]):
                    addresses.extend(interface_one.get_addresses())

                if not node_two and all([net_two, interface_two]):
                    addresses.extend(interface_two.get_addresses())

                # tunnel node logic
                key = link_options.key
                if key and isinstance(net_one, TunnelNode):
                    logging.info("setting tunnel key for: %s", net_one.name)
                    net_one.setkey(key)
                    if addresses:
                        net_one.addrconfig(addresses)
                if key and isinstance(net_two, TunnelNode):
                    logging.info("setting tunnel key for: %s", net_two.name)
                    net_two.setkey(key)
                    if addresses:
                        net_two.addrconfig(addresses)

                # physical node connected with tunnel
                if not net_one and not net_two and (node_one or node_two):
                    if node_one and isinstance(node_one, PhysicalNode):
                        logging.info("adding link for physical node: %s", node_one.name)
                        addresses = interface_one.get_addresses()
                        node_one.adoptnetif(
                            tunnel, interface_one.id, interface_one.mac, addresses
                        )
                        link_config(node_one, tunnel, link_options)
                    elif node_two and isinstance(node_two, PhysicalNode):
                        logging.info("adding link for physical node: %s", node_two.name)
                        addresses = interface_two.get_addresses()
                        node_two.adoptnetif(
                            tunnel, interface_two.id, interface_two.mac, addresses
                        )
                        link_config(node_two, tunnel, link_options)
        finally:
            # always release whichever locks were taken above
            if node_one:
                node_one.lock.release()
            if node_two:
                node_two.lock.release()
|
|
|
|
|
2019-09-10 23:10:24 +01:00
|
|
|
    def delete_link(
        self,
        node_one_id: int,
        node_two_id: int,
        interface_one_id: int,
        interface_two_id: int,
        link_type: LinkTypes = LinkTypes.WIRED,
    ) -> None:
        """
        Delete a link between nodes.

        :param node_one_id: node one id
        :param node_two_id: node two id
        :param interface_one_id: interface id for node one
        :param interface_two_id: interface id for node two
        :param link_type: link type to delete
        :return: nothing
        :raises core.CoreError: when no common network is found for link being deleted
        """
        # get node objects identified by link data
        node_one, node_two, net_one, net_two, _tunnel = self._link_nodes(
            node_one_id, node_two_id
        )

        # hold both node locks while tearing the link down
        if node_one:
            node_one.lock.acquire()
        if node_two:
            node_two.lock.acquire()

        try:
            # wireless link
            if link_type == LinkTypes.WIRELESS:
                objects = [node_one, node_two, net_one, net_two]
                self._link_wireless(objects, connect=False)
            # wired link
            else:
                if all([node_one, node_two]):
                    # TODO: fix this for the case where ifindex[1,2] are not specified
                    # a wired unlink event, delete the connecting bridge
                    interface_one = node_one.netif(interface_one_id)
                    interface_two = node_two.netif(interface_two_id)

                    # get interfaces from common network, if no network node
                    # otherwise get interfaces between a node and network
                    if not interface_one and not interface_two:
                        common_networks = node_one.commonnets(node_two)
                        for (
                            network,
                            common_interface_one,
                            common_interface_two,
                        ) in common_networks:
                            # take the first pair, or the pair on net_one if known
                            if (net_one and network == net_one) or not net_one:
                                interface_one = common_interface_one
                                interface_two = common_interface_two
                                break

                    if all([interface_one, interface_two]) and any(
                        [interface_one.net, interface_two.net]
                    ):
                        # interfaces on different live networks share nothing
                        if interface_one.net != interface_two.net and all(
                            [interface_one.up, interface_two.up]
                        ):
                            raise CoreError("no common network found")

                        logging.info(
                            "deleting link node(%s):interface(%s) node(%s):interface(%s)",
                            node_one.name,
                            interface_one.name,
                            node_two.name,
                            interface_two.name,
                        )
                        net_one = interface_one.net
                        interface_one.detachnet()
                        interface_two.detachnet()
                        # remove the connecting network node once empty
                        if net_one.numnetif() == 0:
                            self.delete_node(net_one.id)
                        node_one.delnetif(interface_one.netindex)
                        node_two.delnetif(interface_two.netindex)
                elif node_one and net_one:
                    interface = node_one.netif(interface_one_id)
                    if interface:
                        logging.info(
                            "deleting link node(%s):interface(%s) node(%s)",
                            node_one.name,
                            interface.name,
                            net_one.name,
                        )
                        interface.detachnet()
                        node_one.delnetif(interface.netindex)
                elif node_two and net_one:
                    interface = node_two.netif(interface_two_id)
                    if interface:
                        logging.info(
                            "deleting link node(%s):interface(%s) node(%s)",
                            node_two.name,
                            interface.name,
                            net_one.name,
                        )
                        interface.detachnet()
                        node_two.delnetif(interface.netindex)
        finally:
            # always release whichever locks were taken above
            if node_one:
                node_one.lock.release()
            if node_two:
                node_two.lock.release()
|
|
|
|
|
2019-09-10 23:10:24 +01:00
|
|
|
def update_link(
|
|
|
|
self,
|
2020-01-11 06:37:19 +00:00
|
|
|
node_one_id: int,
|
|
|
|
node_two_id: int,
|
|
|
|
interface_one_id: int = None,
|
|
|
|
interface_two_id: int = None,
|
|
|
|
link_options: LinkOptions = None,
|
|
|
|
) -> None:
|
2019-05-02 07:17:46 +01:00
|
|
|
"""
|
|
|
|
Update link information between nodes.
|
|
|
|
|
2020-01-16 19:00:57 +00:00
|
|
|
:param node_one_id: node one id
|
|
|
|
:param node_two_id: node two id
|
|
|
|
:param interface_one_id: interface id for node one
|
|
|
|
:param interface_two_id: interface id for node two
|
|
|
|
:param link_options: data to update link with
|
2019-05-02 07:17:46 +01:00
|
|
|
:return: nothing
|
2019-09-11 21:12:42 +01:00
|
|
|
:raises core.CoreError: when updating a wireless type link, when there is a unknown
|
|
|
|
link between networks
|
2019-05-02 07:17:46 +01:00
|
|
|
"""
|
2019-06-09 00:56:39 +01:00
|
|
|
if not link_options:
|
|
|
|
link_options = LinkOptions()
|
|
|
|
|
2019-05-02 07:17:46 +01:00
|
|
|
# get node objects identified by link data
|
2019-09-10 23:10:24 +01:00
|
|
|
node_one, node_two, net_one, net_two, _tunnel = self._link_nodes(
|
|
|
|
node_one_id, node_two_id
|
|
|
|
)
|
2019-05-02 07:17:46 +01:00
|
|
|
|
|
|
|
if node_one:
|
|
|
|
node_one.lock.acquire()
|
|
|
|
if node_two:
|
|
|
|
node_two.lock.acquire()
|
|
|
|
|
|
|
|
try:
|
|
|
|
# wireless link
|
|
|
|
if link_options.type == LinkTypes.WIRELESS.value:
|
2019-09-11 21:12:42 +01:00
|
|
|
raise CoreError("cannot update wireless link")
|
2019-05-02 07:17:46 +01:00
|
|
|
else:
|
|
|
|
if not node_one and not node_two:
|
|
|
|
if net_one and net_two:
|
|
|
|
# modify link between nets
|
|
|
|
interface = net_one.getlinknetif(net_two)
|
|
|
|
upstream = False
|
|
|
|
|
|
|
|
if not interface:
|
|
|
|
upstream = True
|
|
|
|
interface = net_two.getlinknetif(net_one)
|
|
|
|
|
|
|
|
if not interface:
|
2019-09-11 21:12:42 +01:00
|
|
|
raise CoreError("modify unknown link between nets")
|
2019-05-02 07:17:46 +01:00
|
|
|
|
|
|
|
if upstream:
|
|
|
|
interface.swapparams("_params_up")
|
2019-09-10 23:10:24 +01:00
|
|
|
link_config(
|
|
|
|
net_one, interface, link_options, devname=interface.name
|
|
|
|
)
|
2019-05-02 07:17:46 +01:00
|
|
|
interface.swapparams("_params_up")
|
|
|
|
else:
|
|
|
|
link_config(net_one, interface, link_options)
|
|
|
|
|
|
|
|
if not link_options.unidirectional:
|
|
|
|
if upstream:
|
|
|
|
link_config(net_two, interface, link_options)
|
|
|
|
else:
|
|
|
|
interface.swapparams("_params_up")
|
2019-09-10 23:10:24 +01:00
|
|
|
link_config(
|
|
|
|
net_two,
|
|
|
|
interface,
|
|
|
|
link_options,
|
|
|
|
devname=interface.name,
|
|
|
|
)
|
2019-05-02 07:17:46 +01:00
|
|
|
interface.swapparams("_params_up")
|
|
|
|
else:
|
2019-09-11 21:12:42 +01:00
|
|
|
raise CoreError("modify link for unknown nodes")
|
2019-05-02 07:17:46 +01:00
|
|
|
elif not node_one:
|
|
|
|
# node1 = layer 2node, node2 = layer3 node
|
2019-10-23 17:15:27 +01:00
|
|
|
interface = node_two.netif(interface_two_id)
|
2019-05-02 07:17:46 +01:00
|
|
|
link_config(net_one, interface, link_options)
|
|
|
|
elif not node_two:
|
|
|
|
# node2 = layer 2node, node1 = layer3 node
|
2019-10-23 17:15:27 +01:00
|
|
|
interface = node_one.netif(interface_one_id)
|
2019-05-02 07:17:46 +01:00
|
|
|
link_config(net_one, interface, link_options)
|
|
|
|
else:
|
|
|
|
common_networks = node_one.commonnets(node_two)
|
|
|
|
if not common_networks:
|
2019-09-11 21:12:42 +01:00
|
|
|
raise CoreError("no common network found")
|
2019-05-02 07:17:46 +01:00
|
|
|
|
|
|
|
for net_one, interface_one, interface_two in common_networks:
|
2019-09-10 23:10:24 +01:00
|
|
|
if (
|
|
|
|
interface_one_id is not None
|
|
|
|
and interface_one_id != node_one.getifindex(interface_one)
|
|
|
|
):
|
2019-05-02 07:17:46 +01:00
|
|
|
continue
|
|
|
|
|
2019-09-10 23:10:24 +01:00
|
|
|
link_config(
|
|
|
|
net_one,
|
|
|
|
interface_one,
|
|
|
|
link_options,
|
|
|
|
interface_two=interface_two,
|
|
|
|
)
|
2019-05-02 07:17:46 +01:00
|
|
|
if not link_options.unidirectional:
|
2019-09-10 23:10:24 +01:00
|
|
|
link_config(
|
|
|
|
net_one,
|
|
|
|
interface_two,
|
|
|
|
link_options,
|
|
|
|
interface_two=interface_one,
|
|
|
|
)
|
2019-05-02 07:17:46 +01:00
|
|
|
finally:
|
|
|
|
if node_one:
|
|
|
|
node_one.lock.release()
|
|
|
|
if node_two:
|
|
|
|
node_two.lock.release()
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
    def add_node(
        self,
        _type: NodeTypes = NodeTypes.DEFAULT,
        _id: int = None,
        options: NodeOptions = None,
        _cls: Type[NodeBase] = None,
    ) -> NodeBase:
        """
        Add a node to the session, based on the provided node data.

        :param _type: type of node to create
        :param _id: id for node, defaults to None for generated id
        :param options: data to create node with
        :param _cls: optional custom class to use for a created node
        :return: created node
        :raises core.CoreError: when an invalid node type is given
        """
        # validate node type, get class, or throw error
        if _cls is None:
            node_class = self.get_node_class(_type)
        else:
            node_class = _cls

        # set node start based on current session state, override and check when rj45
        start = self.state > EventTypes.DEFINITION_STATE.value
        enable_rj45 = self.options.get_config("enablerj45") == "1"
        if _type == NodeTypes.RJ45 and not enable_rj45:
            start = False

        # determine node id, generating one that is not already in use
        if not _id:
            while True:
                _id = self.node_id_gen.next()
                if _id not in self.nodes:
                    break

        # generate name if not provided
        if not options:
            options = NodeOptions()
        name = options.name
        if not name:
            name = f"{node_class.__name__}{_id}"

        # verify distributed server
        server = self.distributed.servers.get(options.server)
        if options.server is not None and server is None:
            raise CoreError(f"invalid distributed server: {options.server}")

        # create node
        logging.info(
            "creating node(%s) id(%s) name(%s) start(%s)",
            node_class.__name__,
            _id,
            name,
            start,
        )
        # container-backed nodes additionally take an image name
        if _type in [NodeTypes.DOCKER, NodeTypes.LXC]:
            node = self.create_node(
                cls=node_class,
                _id=_id,
                name=name,
                start=start,
                image=options.image,
                server=server,
            )
        else:
            node = self.create_node(
                cls=node_class, _id=_id, name=name, start=start, server=server
            )

        # set node attributes
        node.icon = options.icon
        node.canvas = options.canvas
        node.opaque = options.opaque

        # set node position and broadcast it
        self.set_node_position(node, options)

        # add services to needed nodes
        if isinstance(node, (CoreNode, PhysicalNode, DockerNode, LxcNode)):
            node.type = options.model
            logging.debug("set node type: %s", node.type)
            self.services.add_services(node, node.type, options.services)

        # ensure default emane configuration
        if isinstance(node, EmaneNet) and options.emane:
            self.emane.set_model_config(_id, options.emane)
        # set default wlan config if needed
        if isinstance(node, WlanNode):
            self.mobility.set_model_config(_id, BasicRangeModel.name)

        # boot nodes after runtime, CoreNodes, Physical, and RJ45 are all nodes
        is_boot_node = isinstance(node, CoreNodeBase) and not isinstance(node, Rj45Node)
        if self.state == EventTypes.RUNTIME_STATE.value and is_boot_node:
            self.write_nodes()
            self.add_remove_control_interface(node=node, remove=False)
            self.services.boot_services(node)

        return node
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def edit_node(self, node_id: int, options: NodeOptions) -> None:
    """
    Edit an existing node's position and display attributes.

    :param node_id: id of node to update
    :param options: data to update node with
    :return: nothing
    :raises core.CoreError: when node to update does not exist
    """
    node = self.get_node(node_id)
    # update position first, broadcasting the change when geo based
    self.set_node_position(node, options)
    # refresh display related attributes
    node.icon = options.icon
    node.canvas = options.canvas
|
2019-05-02 07:17:46 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def set_node_position(self, node: NodeBase, options: NodeOptions) -> None:
    """
    Set position for a node, deriving x/y from lat/lon/alt when needed.

    :param node: node to set position for
    :param options: data for node
    :return: nothing
    """
    x, y = options.x, options.y
    lat, lon, alt = options.lat, options.lon, options.alt
    # fall back to geo coordinates only when no x/y position was provided
    missing_xy = x is None and y is None
    geo_provided = lat is not None and lon is not None and alt is not None
    using_lat_lon_alt = missing_xy and geo_provided
    if using_lat_lon_alt:
        x, y, _ = self.location.getxyz(lat, lon, alt)
    # apply the position when one is available
    if x is not None and y is not None:
        node.setposition(x, y, None)
    # geo based moves are broadcast so listeners see the computed x/y
    if using_lat_lon_alt:
        self.broadcast_node_location(node)
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def broadcast_node_location(self, node: NodeBase) -> None:
    """
    Broadcast a node's current position to all node handlers.

    :param node: node to broadcast location for
    :return: nothing
    """
    position = node.position
    location_data = NodeData(
        message_type=0,
        id=node.id,
        x_position=position.x,
        y_position=position.y,
    )
    self.broadcast_node(location_data)
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def start_mobility(self, node_ids: List[int] = None) -> None:
    """
    Start mobility for the provided node ids.

    :param node_ids: nodes to start mobility for
    :return: nothing
    """
    self.mobility.startup(node_ids)
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def is_active(self) -> bool:
    """
    Determine if this session is considered to be active, meaning it is
    in either the runtime or data collect state.

    :return: True if active, False otherwise
    """
    active_states = {
        EventTypes.RUNTIME_STATE.value,
        EventTypes.DATACOLLECT_STATE.value,
    }
    result = self.state in active_states
    logging.info("session(%s) checking if active: %s", self.id, result)
    return result
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def open_xml(self, file_name: str, start: bool = False) -> None:
    """
    Import a session from the EmulationScript XML format.

    :param file_name: xml file to load session from
    :param start: instantiate session if true, false otherwise
    :return: nothing
    """
    logging.info("opening xml: %s", file_name)
    # wipe out any existing session data before loading
    self.clear()
    # configuration state when starting, definition state otherwise
    state = EventTypes.CONFIGURATION_STATE if start else EventTypes.DEFINITION_STATE
    self.set_state(state)
    self.name = os.path.basename(file_name)
    self.file_name = file_name
    # read session data in from the xml file
    CoreXmlReader(self).read(file_name)
    # start session when requested
    if start:
        self.instantiate()
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def save_xml(self, file_name: str) -> None:
    """
    Export a session to the EmulationScript XML format.

    :param file_name: file name to write session xml to
    :return: nothing
    """
    writer = CoreXmlWriter(self)
    writer.write(file_name)
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def add_hook(self, state: int, file_name: str, source_name: str, data: str) -> None:
    """
    Store a hook from a received file message.

    :param state: when to run hook
    :param file_name: file name for hook
    :param source_name: source name
    :param data: hook data
    :return: nothing
    """
    # hack to conform with old logic until updated
    hook_type = f":{state}"
    self.set_hook(hook_type, file_name, source_name, data)
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def add_node_file(
    self, node_id: int, source_name: str, file_name: str, data: str
) -> None:
    """
    Add a file to a node.

    :param node_id: node to add file to
    :param source_name: source file name
    :param file_name: file name to add
    :param data: file data
    :return: nothing
    """
    node = self.get_node(node_id)
    # prefer copying from a source file, otherwise write the provided data
    if source_name is not None:
        node.addfile(source_name, file_name)
    elif data is not None:
        node.nodefile(file_name, data)
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def clear(self) -> None:
    """
    Clear all CORE session data. (nodes, hooks, etc)

    :return: nothing
    """
    # shutdown emane before nodes are removed
    self.emane.shutdown()
    self.delete_nodes()
    # tear down distributed session resources
    self.distributed.shutdown()
    self.del_hooks()
    # reset managers back to an unconfigured state
    self.emane.reset()
    self.emane.config_reset()
    self.location.reset()
    self.services.reset()
    self.mobility.config_reset()
|
2019-05-02 07:17:46 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def start_events(self) -> None:
    """
    Start the session event loop.

    :return: nothing
    """
    self.event_loop.run()
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def mobility_event(self, event_data: EventData) -> None:
    """
    Handle a mobility event by forwarding it to the mobility manager.

    :param event_data: event data to handle
    :return: nothing
    """
    self.mobility.handleevent(event_data)
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def set_location(self, lat: float, lon: float, alt: float, scale: float) -> None:
    """
    Set session geospatial location.

    :param lat: latitude
    :param lon: longitude
    :param alt: altitude
    :param scale: reference scale
    :return: nothing
    """
    # reference point is set before the scale, matching prior behavior
    self.location.setrefgeo(lat, lon, alt)
    self.location.refscale = scale
|
2019-05-02 07:17:46 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def shutdown(self) -> None:
    """
    Shutdown all session nodes and remove the session directory.

    :return: nothing
    """
    logging.info("session(%s) shutting down", self.id)
    # move through data collect, then shutdown state, broadcasting both
    self.set_state(EventTypes.DATACOLLECT_STATE, send_event=True)
    self.set_state(EventTypes.SHUTDOWN_STATE, send_event=True)
    # clear out current core session
    self.clear()
    # shutdown sdt
    self.sdt.shutdown()
    # remove this sessions working directory
    preserve = self.options.get_config("preservedir") == "1"
    if not preserve:
        shutil.rmtree(self.session_dir, ignore_errors=True)
    # call session shutdown handlers
    for handler in self.shutdown_handlers:
        handler(self)
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def broadcast_event(self, event_data: EventData) -> None:
    """
    Send event data to every registered event handler.

    :param event_data: event data to send out
    :return: nothing
    """
    for callback in self.event_handlers:
        callback(event_data)
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def broadcast_exception(self, exception_data: ExceptionData) -> None:
    """
    Send exception data to every registered exception handler.

    :param exception_data: exception data to send out
    :return: nothing
    """
    for callback in self.exception_handlers:
        callback(exception_data)
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def broadcast_node(self, node_data: NodeData) -> None:
    """
    Send node data to every registered node handler.

    :param node_data: node data to send out
    :return: nothing
    """
    for callback in self.node_handlers:
        callback(node_data)
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def broadcast_file(self, file_data: FileData) -> None:
    """
    Send file data to every registered file handler.

    :param file_data: file data to send out
    :return: nothing
    """
    for callback in self.file_handlers:
        callback(file_data)
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def broadcast_config(self, config_data: ConfigData) -> None:
    """
    Send config data to every registered config handler.

    :param config_data: config data to send out
    :return: nothing
    """
    for callback in self.config_handlers:
        callback(config_data)
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def broadcast_link(self, link_data: LinkData) -> None:
    """
    Send link data to every registered link handler.

    :param link_data: link data to send out
    :return: nothing
    """
    for callback in self.link_handlers:
        callback(link_data)
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def set_state(self, state: EventTypes, send_event: bool = False) -> None:
    """
    Set the session's current state.

    :param state: state to set to
    :param send_event: if true, generate core API event messages
    :return: nothing
    """
    state_value = state.value
    # nothing to do when already in the requested state
    if self.state == state_value:
        logging.info(
            "session(%s) is already in state: %s, skipping change",
            self.id,
            state.name,
        )
        return
    self.state = state_value
    self._state_time = time.monotonic()
    logging.info("changing session(%s) to state %s", self.id, state.name)
    # persist the state and fire any hooks registered for it
    self.write_state(state_value)
    self.run_hooks(state_value)
    self.run_state_hooks(state_value)
    if send_event:
        event_data = EventData(event_type=state_value, time=str(time.monotonic()))
        self.broadcast_event(event_data)
|
2013-08-29 15:21:13 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def write_state(self, state: int) -> None:
    """
    Write the current state to a state file in the session dir.

    :param state: state to write to file
    :return: nothing
    """
    try:
        # context manager guarantees the file is closed, even if the
        # write itself raises (the previous open/close pair leaked it)
        with open(self._state_file, "w") as state_file:
            state_file.write(f"{state} {EventTypes(self.state).name}\n")
    except IOError:
        logging.exception("error writing state file: %s", state)
|
2017-04-25 16:45:34 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def run_hooks(self, state: int) -> None:
    """
    Run hook scripts upon changing states. If hooks is not specified, run all hooks
    in the given state.

    :param state: state to run hooks for
    :return: nothing
    """
    # single dict lookup replaces the prior membership test + get + truth
    # test; None means the state was never registered, an empty list means
    # it was registered but holds no hooks (logged, as before)
    hooks = self._hooks.get(state)
    if hooks is None:
        return
    if hooks:
        for hook in hooks:
            self.run_hook(hook)
    else:
        logging.info("no state hooks for %s", state)
|
2017-04-25 16:45:34 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def set_hook(
    self, hook_type: str, file_name: str, source_name: str, data: str
) -> None:
    """
    Store a hook from a received file message.

    :param hook_type: hook type
    :param file_name: file name for hook
    :param source_name: source name
    :param data: hook data
    :return: nothing
    """
    logging.info(
        "setting state hook: %s - %s from %s", hook_type, file_name, source_name
    )
    # hook type encodes the state after a colon
    _hook_id, state_text = hook_type.split(":")[:2]
    if not state_text.isdigit():
        logging.error("error setting hook having state '%s'", state_text)
        return
    state_number = int(state_text)
    hook = (file_name, data)
    # append hook to current state hooks
    self._hooks.setdefault(state_number, []).append(hook)
    # immediately run a hook if it is in the current state
    # (this allows hooks in the definition and configuration states)
    if self.state == state_number:
        logging.info("immediately running new state hook")
        self.run_hook(hook)
|
2014-09-23 17:26:22 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def del_hooks(self) -> None:
    """
    Clear all stored hook scripts.
    """
    self._hooks.clear()
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def run_hook(self, hook: Tuple[str, str]) -> None:
    """
    Run a hook: write its data into the session directory, then execute it
    with /bin/sh, logging output to <file_name>.log in the session dir.

    :param hook: hook to run, a (file name, data) tuple
    :return: nothing
    """
    file_name, data = hook
    logging.info("running hook %s", file_name)

    # write data to hook file; with-block closes the file even when the
    # write raises (the previous open/write/close sequence leaked it)
    try:
        hook_path = os.path.join(self.session_dir, file_name)
        with open(hook_path, "w") as hook_file:
            hook_file.write(data)
    except IOError:
        logging.exception("error writing hook '%s'", file_name)

    # setup hook stdout and stderr
    try:
        stdout = open(os.path.join(self.session_dir, file_name + ".log"), "w")
        stderr = subprocess.STDOUT
    except IOError:
        logging.exception("error setting up hook stderr and stdout")
        stdout = None
        stderr = None

    # execute hook file
    try:
        args = ["/bin/sh", file_name]
        subprocess.check_call(
            args,
            stdout=stdout,
            stderr=stderr,
            close_fds=True,
            cwd=self.session_dir,
            env=self.get_environment(),
        )
    except (OSError, subprocess.CalledProcessError):
        logging.exception("error running hook: %s", file_name)
    finally:
        # the original never closed the log handle; close it here
        if stdout is not None:
            stdout.close()
|
2015-02-05 00:15:43 +00:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def run_state_hooks(self, state: int) -> None:
    """
    Run state hooks registered for the given state, reporting failures
    as session exceptions without aborting remaining hooks.

    :param state: state to run hooks for
    :return: nothing
    """
    for state_hook in self._state_hooks.get(state, []):
        try:
            state_hook(state)
        except Exception:
            state_name = EventTypes(self.state).name
            message = (
                f"exception occured when running {state_name} state hook: {state_hook}"
            )
            logging.exception(message)
            self.exception(
                ExceptionLevels.ERROR, "Session.run_state_hooks", None, message
            )
|
2015-02-05 00:15:43 +00:00
|
|
|
|
2020-01-15 19:56:23 +00:00
|
|
|
def add_state_hook(self, state: int, hook: Callable[[int], None]) -> None:
    """
    Add a state hook, invoking it immediately when the session is already
    in the given state.

    :param state: state to add hook for
    :param hook: hook callback for the state
    :return: nothing
    :raises core.CoreError: when the hook is already registered for the state
    """
    state_hooks = self._state_hooks.setdefault(state, [])
    if hook in state_hooks:
        raise CoreError("attempting to add duplicate state hook")
    state_hooks.append(hook)
    # run right away when the session is already in the hooked state
    if self.state == state:
        hook(state)
|
|
|
|
|
2020-01-15 19:56:23 +00:00
|
|
|
def del_state_hook(self, state: int, hook: Callable[[int], None]) -> None:
    """
    Delete a state hook.

    :param state: state to delete hook for
    :param hook: hook to delete
    :return: nothing
    :raises ValueError: when the hook is not registered for the state
    """
    # use get instead of setdefault: deleting a hook should not insert an
    # empty list for a state that was never registered
    hooks = self._state_hooks.get(state, [])
    hooks.remove(hook)
|
2014-09-23 17:26:22 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def runtime_state_hook(self, state: int) -> None:
    """
    State hook run on state changes; once the runtime state is reached,
    finish emane startup and write a deployed xml snapshot.

    :param state: state to check
    :return: nothing
    """
    # guard clause: only the runtime state triggers this hook's work
    if state != EventTypes.RUNTIME_STATE.value:
        return
    self.emane.poststartup()
    # create session deployed xml
    xml_file_name = os.path.join(self.session_dir, "session-deployed.xml")
    xml_writer = corexml.CoreXmlWriter(self)
    corexmldeployment.CoreXmlDeployment(self, xml_writer.scenario)
    xml_writer.write(xml_file_name)
|
2017-04-25 16:45:34 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def get_environment(self, state: bool = True) -> Dict[str, str]:
    """
    Get an environment suitable for a subprocess.Popen call.
    This is the current process environment with some session-specific
    variables.

    :param state: flag to determine if session state should be included
    :return: environment variables
    """
    env = os.environ.copy()
    env.update(
        {
            "SESSION": str(self.id),
            "SESSION_SHORT": self.short_session_id(),
            "SESSION_DIR": self.session_dir,
            "SESSION_NAME": str(self.name),
            "SESSION_FILENAME": str(self.file_name),
            "SESSION_USER": str(self.user),
            "SESSION_NODE_COUNT": str(self.get_node_count()),
        }
    )
    if state:
        env["SESSION_STATE"] = str(self.state)

    # attempt to read and add environment config file
    config_file = os.path.join(constants.CORE_CONF_DIR, "environment")
    try:
        if os.path.isfile(config_file):
            utils.load_config(config_file, env)
    except IOError:
        logging.warning(
            "environment configuration file does not exist: %s",
            config_file,
        )

    # attempt to read and add user environment file
    if self.user:
        user_file = os.path.join("/home", self.user, ".core", "environment")
        try:
            utils.load_config(user_file, env)
        except IOError:
            logging.debug(
                "user core environment settings file not present: %s",
                user_file,
            )

    return env
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def set_thumbnail(self, thumb_file: str) -> None:
    """
    Set the thumbnail filename. Move files from /tmp to session dir.

    :param thumb_file: thumbnail file to set for session
    :return: nothing
    """
    # clear the thumbnail when the given file does not exist
    if not os.path.exists(thumb_file):
        logging.error("thumbnail file to set does not exist: %s", thumb_file)
        self.thumbnail = None
        return
    destination = os.path.join(self.session_dir, os.path.basename(thumb_file))
    shutil.copy(thumb_file, destination)
    self.thumbnail = destination
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def set_user(self, user: str) -> None:
    """
    Set the username for this session. Update the permissions of the
    session dir to allow the user write access.

    :param user: user to give write permissions to for the session directory
    :return: nothing
    """
    if user:
        try:
            uid = pwd.getpwnam(user).pw_uid
            gid = os.stat(self.session_dir).st_gid
            os.chown(self.session_dir, uid, gid)
        except (KeyError, IOError):
            # pwd.getpwnam raises KeyError for an unknown user, which the
            # previous except IOError never caught; stat/chown raise OSError
            logging.exception("failed to set permission on %s", self.session_dir)
    self.user = user
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def get_node_id(self) -> int:
    """
    Return a unique, new node id.
    """
    # draw random candidates until one is not already in use
    with self._nodes_lock:
        while True:
            candidate = random.randint(1, 0xFFFF)
            if candidate not in self.nodes:
                return candidate
|
2017-04-25 16:45:34 +01:00
|
|
|
|
2020-01-14 06:15:44 +00:00
|
|
|
def create_node(self, cls: Type[NodeBase], *args: Any, **kwargs: Any) -> NodeBase:
    """
    Create an emulation node.

    :param cls: node class to create
    :param args: list of arguments for the class to create
    :param kwargs: dictionary of arguments for the class to create
    :return: the created node instance
    :raises core.CoreError: when id of the node to create already exists
    """
    node = cls(self, *args, **kwargs)
    with self._nodes_lock:
        if node.id in self.nodes:
            # tear down the just-created node before rejecting the duplicate
            node.shutdown()
            raise CoreError(f"duplicate node id {node.id} for {node.name}")
        self.nodes[node.id] = node
    return node
|
2013-08-29 15:21:13 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def get_node(self, _id: int) -> NodeBase:
    """
    Get a session node.

    :param _id: node id to retrieve
    :return: node for the given id
    :raises core.CoreError: when node does not exist
    """
    # EAFP: a single lookup, translating the miss into a core error
    try:
        return self.nodes[_id]
    except KeyError:
        raise CoreError(f"unknown node id {_id}")
|
2017-04-25 16:45:34 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def delete_node(self, _id: int) -> bool:
    """
    Delete a node from the session and check if session should shutdown, if no nodes are left.

    :param _id: id of node to delete
    :return: True if node deleted, False otherwise
    """
    logging.info("deleting node(%s)", _id)
    # pop with a default avoids a separate membership check under the lock
    with self._nodes_lock:
        node = self.nodes.pop(_id, None)
    if node is not None:
        node.shutdown()
        self.check_shutdown()
    return node is not None
|
2017-04-25 16:45:34 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def delete_nodes(self) -> None:
    """
    Clear the nodes dictionary, and call shutdown for each node.
    """
    with self._nodes_lock:
        # collect shutdown calls for every node, then run them via the
        # shared thread pool helper
        funcs = [(node.shutdown, [], {}) for node in self.nodes.values()]
        self.nodes.clear()
        utils.threadpool(funcs)
        self.node_id_gen.id = 0
|
2017-04-25 16:45:34 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def write_nodes(self) -> None:
    """
    Write nodes to a 'nodes' file in the session dir.
    The 'nodes' file lists: number, name, api-type, class-type

    :return: nothing
    """
    file_path = os.path.join(self.session_dir, "nodes")
    try:
        with self._nodes_lock:
            with open(file_path, "w") as f:
                # iterate items directly instead of keys() plus a lookup
                for _id, node in self.nodes.items():
                    f.write(f"{_id} {node.name} {node.apitype} {type(node)}\n")
    except IOError:
        logging.exception("error writing nodes file")
|
2017-04-25 16:45:34 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def dump_session(self) -> None:
    """
    Log information about the session in its current state.

    :return: nothing
    """
    logging.info("session id=%s name=%s state=%s", self.id, self.name, self.state)
    # node_count reports nodes counted by get_node_count versus total stored
    logging.info(
        "file=%s thumbnail=%s node_count=%s/%s",
        self.file_name,
        self.thumbnail,
        self.get_node_count(),
        len(self.nodes),
    )
|
2017-04-25 16:45:34 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def exception(
    self, level: ExceptionLevels, source: str, node_id: int, text: str
) -> None:
    """
    Generate and broadcast an exception event.

    :param level: exception level
    :param source: source name
    :param node_id: node related to exception
    :param text: exception message
    :return: nothing
    """
    # timestamp with the current wall-clock time and send to all handlers
    data = ExceptionData(
        node=node_id,
        session=str(self.id),
        level=level,
        source=source,
        date=time.ctime(),
        text=text,
    )
    self.broadcast_exception(data)
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def instantiate(self) -> List[ServiceBootError]:
    """
    We have entered the instantiation state, invoke startup methods
    of various managers and boot the nodes. Validate nodes and check
    for transition to the runtime state.

    NOTE(review): annotated List[ServiceBootError] while boot_nodes() is
    annotated List[Exception] — confirm the intended element type.

    :return: list of service boot errors during startup
    """
    # write current nodes out to session directory file
    self.write_nodes()

    # create control net interfaces and network tunnels
    # which need to exist for emane to sync on location events
    # in distributed scenarios
    self.add_remove_control_net(0, remove=False)

    # initialize distributed tunnels
    self.distributed.start()

    # instantiate will be invoked again upon emane configure
    if self.emane.startup() == self.emane.NOT_READY:
        # emane is not ready yet; bail out with no boot errors
        return []

    # boot node services and then start mobility
    exceptions = self.boot_nodes()
    if not exceptions:
        # only start mobility when every node booted cleanly
        self.mobility.startup()

        # notify listeners that instantiation is complete
        event = EventData(event_type=EventTypes.INSTANTIATION_COMPLETE.value)
        self.broadcast_event(event)

        # assume either all nodes have booted already, or there are some
        # nodes on slave servers that will be booted and those servers will
        # send a node status response message
        self.check_runtime()
    return exceptions
|
2014-09-23 17:26:22 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def get_node_count(self) -> int:
    """
    Returns the number of CoreNodes and CoreNets, except for those
    that are not considered in the GUI's node count.

    :return: created node count
    """

    def _gui_visible(node) -> bool:
        # PtpNet/CtrlNet instances and plain GreTapBridge nodes (but not
        # TunnelNode) are implementation details hidden from the GUI count
        if isinstance(node, (PtpNet, CtrlNet)):
            return False
        if isinstance(node, GreTapBridge) and not isinstance(node, TunnelNode):
            return False
        return True

    with self._nodes_lock:
        return sum(1 for node in self.nodes.values() if _gui_visible(node))
|
2013-08-29 15:21:13 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def check_runtime(self) -> None:
    """
    Check if we have entered the runtime state, that all nodes have been
    started and the emulation is running. Start the event loop once we
    have entered runtime (time=0).

    :return: nothing
    """
    # this is called from instantiate() after receiving an event message
    # for the instantiation state
    state_name = EventTypes(self.state).name
    logging.debug(
        "session(%s) checking if not in runtime state, current state: %s",
        self.id,
        state_name,
    )
    if self.state == EventTypes.RUNTIME_STATE.value:
        logging.info("valid runtime state found, returning")
    else:
        # start event loop and set to runtime
        self.event_loop.run()
        self.set_state(EventTypes.RUNTIME_STATE, send_event=True)
|
2017-04-25 16:45:34 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def data_collect(self) -> None:
    """
    Tear down a running session. Stop the event loop and any running
    nodes, and perform clean-up.

    :return: nothing
    """
    # stop event loop
    self.event_loop.stop()

    # stop node services, in parallel via a thread pool
    with self._nodes_lock:
        funcs = []
        for node_id in self.nodes:
            node = self.nodes[node_id]
            # only container-style nodes run services
            if isinstance(node, CoreNodeBase):
                args = (node,)
                funcs.append((self.services.stop_services, args, {}))
        utils.threadpool(funcs)

    # shutdown emane
    self.emane.shutdown()

    # update control interface hosts, removing this session's /etc/hosts entries
    self.update_control_interface_hosts(remove=True)

    # remove all four possible control networks
    self.add_remove_control_net(0, remove=True)
    self.add_remove_control_net(1, remove=True)
    self.add_remove_control_net(2, remove=True)
    self.add_remove_control_net(3, remove=True)
|
2017-04-25 16:45:34 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def check_shutdown(self) -> bool:
    """
    Check if we have entered the shutdown state, when no running nodes
    and links remain.

    :return: True if should shutdown, False otherwise
    """
    node_count = self.get_node_count()
    logging.debug(
        "session(%s) checking shutdown: %s nodes remaining", self.id, node_count
    )
    if node_count != 0:
        return False
    # nothing left running, transition the session to shutdown
    self.set_state(EventTypes.SHUTDOWN_STATE)
    return True
|
2013-08-29 15:21:13 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def short_session_id(self) -> str:
    """
    Return a shorter version of the session ID, appropriate for
    interface names, where length may be limited.

    :return: short session id
    """
    # fold the high byte into the low byte to keep the hex string short
    low_byte = self.id & 0xFF
    folded = (self.id >> 8) ^ low_byte
    return format(folded, "x")
|
2014-09-23 17:26:22 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def boot_node(self, node: CoreNode) -> None:
    """
    Boot node by adding a control interface when necessary and starting
    node services.

    :param node: node to boot
    :return: nothing
    """
    service_names = [x.name for x in node.services]
    logging.info("booting node(%s): %s", node.name, service_names)
    # the control interface must exist before services are started
    self.add_remove_control_interface(node=node, remove=False)
    self.services.boot_services(node)
    node.start_config_services()
|
2019-10-29 17:25:39 +00:00
|
|
|
|
2020-01-13 18:06:18 +00:00
|
|
|
def boot_nodes(self) -> List[Exception]:
    """
    Invoke the boot() procedure for all nodes and send back node
    messages to the GUI for node messages that had the status
    request flag.

    :return: service boot exceptions
    """
    with self._nodes_lock:
        start = time.monotonic()
        # boot only container nodes; RJ45 nodes are physical pass-throughs
        funcs = [
            (self.boot_node, (node,), {})
            for node in self.nodes.values()
            if isinstance(node, CoreNodeBase) and not isinstance(node, Rj45Node)
        ]
        _, exceptions = utils.threadpool(funcs)
        logging.debug("boot run time: %s", time.monotonic() - start)
        if not exceptions:
            self.update_control_interface_hosts()
        return exceptions
|
2017-04-25 16:45:34 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def get_control_net_prefixes(self) -> List[str]:
    """
    Retrieve control net prefixes.

    :return: control net prefix list
    """
    # legacy "controlnet" option serves as a fallback for controlnet0
    legacy = self.options.get_config("controlnet")
    nets = [self.options.get_config(f"controlnet{i}") for i in range(4)]
    if not nets[0] and legacy:
        nets[0] = legacy
    return nets
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def get_control_net_server_interfaces(self) -> List[str]:
    """
    Retrieve control net server interfaces.

    :return: list of control net server interfaces
    """
    # controlnet0 never maps to a host interface; warn if one was configured
    if self.options.get_config("controlnetif0"):
        logging.error("controlnet0 cannot be assigned with a host interface")
    remaining = [self.options.get_config(f"controlnetif{i}") for i in range(1, 4)]
    return [None] + remaining
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def get_control_net_index(self, dev: str) -> int:
    """
    Retrieve control net index.

    :param dev: device to get control net index for
    :return: control net index, -1 otherwise
    """
    # guard against short ("ctrl") or non-numeric ("ctrlx") names, which
    # previously raised IndexError/ValueError instead of returning -1
    if len(dev) > 4 and dev[:4] == "ctrl" and dev[4].isdigit():
        index = int(dev[4])
        if index == 0:
            # controlnet0 is always valid, configured or not
            return index
        if index < 4 and self.get_control_net_prefixes()[index] is not None:
            return index
    return -1
|
2015-05-22 01:53:43 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def get_control_net(self, net_index: int) -> CtrlNet:
    """
    Retrieve a control net based on index.

    :param net_index: control net index
    :return: control net
    :raises CoreError: when control net is not found
    """
    node = self.get_node(CTRL_NET_ID + net_index)
    if not isinstance(node, CtrlNet):
        # bug fix: CoreError was previously passed printf-style args
        # ("...%s", node.name) that were never interpolated, producing a
        # broken message; format the message explicitly instead
        raise CoreError(f"node is not a valid CtrlNet: {node.name}")
    return node
|
2015-05-22 01:53:43 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def add_remove_control_net(
    self, net_index: int, remove: bool = False, conf_required: bool = True
) -> Optional[CtrlNet]:
    """
    Create a control network bridge as necessary.
    When the remove flag is True, remove the bridge that connects control
    interfaces. The conf_reqd flag, when False, causes a control network
    bridge to be added even if one has not been configured.

    :param net_index: network index
    :param remove: flag to check if it should be removed
    :param conf_required: flag to check if conf is required
    :return: control net node, or None when not configured/removed
    """
    logging.debug(
        "add/remove control net: index(%s) remove(%s) conf_required(%s)",
        net_index,
        remove,
        conf_required,
    )
    prefix_spec_list = self.get_control_net_prefixes()
    prefix_spec = prefix_spec_list[net_index]
    if not prefix_spec:
        if conf_required:
            # no controlnet needed
            return None
        else:
            # unconfigured but required anyway (e.g. EMANE): use default prefix
            prefix_spec = CtrlNet.DEFAULT_PREFIX_LIST[net_index]
    logging.debug("prefix spec: %s", prefix_spec)

    server_interface = self.get_control_net_server_interfaces()[net_index]

    # return any existing controlnet bridge
    try:
        control_net = self.get_control_net(net_index)
        if remove:
            self.delete_node(control_net.id)
            return None
        return control_net
    except CoreError:
        # no existing control net; nothing to remove, fall through to create
        if remove:
            return None

    # build a new controlnet bridge
    _id = CTRL_NET_ID + net_index

    # use the updown script for control net 0 only.
    updown_script = None

    if net_index == 0:
        updown_script = self.options.get_config("controlnet_updown_script")
        if not updown_script:
            logging.debug("controlnet updown script not configured")

    prefixes = prefix_spec.split()
    if len(prefixes) > 1:
        # a list of per-host prefixes is provided
        try:
            # split first (master) entry into server and prefix
            prefix = prefixes[0].split(":", 1)[1]
        except IndexError:
            # no server name. possibly only one server
            prefix = prefixes[0]
    else:
        prefix = prefixes[0]

    logging.info(
        "controlnet(%s) prefix(%s) updown(%s) serverintf(%s)",
        _id,
        prefix,
        updown_script,
        server_interface,
    )
    control_net = self.create_node(
        cls=CtrlNet,
        _id=_id,
        prefix=prefix,
        assign_address=True,
        updown_script=updown_script,
        serverintf=server_interface,
    )
    return control_net
|
|
|
|
|
2019-09-10 23:10:24 +01:00
|
|
|
def add_remove_control_interface(
    self,
    node: CoreNode,
    net_index: int = 0,
    remove: bool = False,
    conf_required: bool = True,
) -> None:
    """
    Add a control interface to a node when a 'controlnet' prefix is
    listed in the config file or session options. Uses
    addremovectrlnet() to build or remove the control bridge.
    If conf_reqd is False, the control network may be built even
    when the user has not configured one (e.g. for EMANE.)

    :param node: node to add or remove control interface
    :param net_index: network index
    :param remove: flag to check if it should be removed
    :param conf_required: flag to check if conf is required
    :return: nothing
    """
    control_net = self.add_remove_control_net(net_index, remove, conf_required)
    if not control_net:
        # no control network configured/created, nothing to attach
        return

    if not node:
        return

    # ctrl# already exists
    if node.netif(control_net.CTRLIF_IDX_BASE + net_index):
        return

    # the node id doubles as the host index within the control net prefix
    control_ip = node.id

    try:
        address = control_net.prefix[control_ip]
        prefix = control_net.prefix.prefixlen
        addrlist = [f"{address}/{prefix}"]
    except ValueError:
        # node id exceeds the addressable range of the configured prefix
        msg = f"Control interface not added to node {node.id}. "
        msg += f"Invalid control network prefix ({control_net.prefix}). "
        msg += "A longer prefix length may be required for this many nodes."
        logging.exception(msg)
        return

    interface1 = node.newnetif(
        net=control_net,
        ifindex=control_net.CTRLIF_IDX_BASE + net_index,
        ifname=f"ctrl{net_index}",
        hwaddr=utils.random_mac(),
        addrlist=addrlist,
    )
    # mark the interface as a control interface so it is excluded elsewhere
    node.netif(interface1).control = True
|
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def update_control_interface_hosts(
    self, net_index: int = 0, remove: bool = False
) -> None:
    """
    Add the IP addresses of control interfaces to the /etc/hosts file.

    :param net_index: network index to update
    :param remove: flag to check if it should be removed
    :return: nothing
    """
    # opt-in feature; disabled by default since it edits a system file
    if not self.options.get_config_bool("update_etc_hosts", default=False):
        return

    try:
        control_net = self.get_control_net(net_index)
    except CoreError:
        logging.exception("error retrieving control net node")
        return

    # header string delimits this session's block inside /etc/hosts
    header = f"CORE session {self.id} host entries"
    if remove:
        logging.info("Removing /etc/hosts file entries.")
        utils.file_demunge("/etc/hosts", header)
        return

    entries = []
    for interface in control_net.netifs():
        name = interface.node.name
        for address in interface.addrlist:
            # strip the /prefixlen suffix, keeping just the address
            address = address.split("/")[0]
            entries.append(f"{address} {name}")

    logging.info("Adding %d /etc/hosts file entries.", len(entries))
    utils.file_munge("/etc/hosts", header, "\n".join(entries) + "\n")
|
2013-08-29 15:21:13 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def runtime(self) -> float:
    """
    Return the current time we have been in the runtime state, or zero
    if not in runtime.
    """
    in_runtime = self.state == EventTypes.RUNTIME_STATE.value
    return time.monotonic() - self._state_time if in_runtime else 0.0
|
2014-09-23 17:26:22 +01:00
|
|
|
|
2020-01-11 06:37:19 +00:00
|
|
|
def add_event(
    self,
    event_time: float,
    node: CoreNode = None,
    name: str = None,
    data: str = None,
) -> None:
    """
    Add an event to the event queue, with a start time relative to the
    start of the runtime state.

    :param event_time: event time
    :param node: node to add event for
    :param name: name of event
    :param data: data for event
    :return: nothing
    """
    event_time = float(event_time)
    current_time = self.runtime()

    # when already in runtime, the requested time is absolute session time:
    # reject events in the past and convert to a delay relative to now
    if current_time > 0:
        if event_time <= current_time:
            logging.warning(
                "could not schedule past event for time %s (run time is now %s)",
                event_time,
                current_time,
            )
            return
        event_time = event_time - current_time

    self.event_loop.add_event(
        event_time, self.run_event, node=node, name=name, data=data
    )

    if not name:
        name = ""
    logging.info(
        "scheduled event %s at time %s data=%s",
        name,
        event_time + current_time,
        data,
    )
|
2013-08-29 15:21:13 +01:00
|
|
|
|
2019-10-12 06:37:33 +01:00
|
|
|
# TODO: if data is None, this blows up, but this ties into how event functions
# are ran, need to clean that up
def run_event(
    self, node_id: int = None, name: str = None, data: str = None
) -> None:
    """
    Run a scheduled event, executing commands in the data string.

    :param node_id: node id to run event
    :param name: event name
    :param data: event data
    :return: nothing
    """
    current = self.runtime()
    label = name if name else ""
    logging.info("running event %s at time %s cmd=%s", label, current, data)
    if node_id:
        # run on the specified node without waiting for completion
        self.get_node(node_id).cmd(data, wait=False)
    else:
        # no node given: detach on the host, discarding output
        utils.mute_detach(data)
|