#!/usr/bin/env python
#
# Copyright (C) 2016 GNS3 Technologies Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import sys
import uuid
import shutil
import asyncio
import random

try:
    import importlib_resources
except ImportError:
    from importlib import resources as importlib_resources

from ..config import Config
from ..utils import parse_version
from ..utils.images import default_images_directory

from .project import Project
from .appliance import Appliance
from .appliance_manager import ApplianceManager
from .compute import Compute, ComputeError
from .notification import Notification
from .symbols import Symbols
from .topology import load_topology
from .gns3vm import GNS3VM
from .gns3vm.gns3_vm_error import GNS3VMError
from .controller_error import ControllerError, ControllerNotFoundError

import logging

log = logging.getLogger(__name__)


class Controller:
    """
    The controller is responsible for managing one or more computes.
    """

    def __init__(self):

        self._computes = {}
        self._projects = {}
        self._ssl_context = None
        self._notification = Notification(self)
        self.gns3vm = GNS3VM(self)
        self.symbols = Symbols()
        self._appliance_manager = ApplianceManager()
        self._iou_license_settings = {"iourc_content": "", "license_check": True}
        self._config_loaded = False

    async def start(self, computes=None):

        log.info("Controller is starting")
        self._install_base_configs()
        self._install_builtin_disks()
        server_config = Config.instance().settings.Server
        Config.instance().listen_for_config_changes(self._update_config)
        name = server_config.name
        host = server_config.host
        port = server_config.port

        # clients will use the IP they use to connect to
        # the controller if console_host is 0.0.0.0
        console_host = host
        if host == "0.0.0.0":
            host = "127.0.0.1"

        self._load_controller_settings()

        if server_config.enable_ssl:
            self._ssl_context = self._create_ssl_context(server_config)

        protocol = server_config.protocol
        if self._ssl_context and protocol != "https":
            log.warning("Protocol changed to 'https' for local compute because SSL is enabled")
            protocol = "https"
        try:
            self._local_server = await self.add_compute(
                compute_id="local",
                name=name,
                protocol=protocol,
                host=host,
                console_host=console_host,
                port=port,
                user=server_config.compute_username,
                password=server_config.compute_password,
                force=True,
                connect=True,
                wait_connection=False,
                ssl_context=self._ssl_context,
            )
        except ControllerError:
            log.fatal(
                f"Cannot access the local server, make sure something else is not running on TCP port {port}"
            )
            sys.exit(1)

        if computes:
            for c in computes:
                try:
                    # FIXME: Task exception was never retrieved
                    await self.add_compute(
                        compute_id=str(c.compute_id),
                        connect=False,
                        **c.dict(exclude_unset=True, exclude={"compute_id", "created_at", "updated_at"}),
                    )
                except (ControllerError, KeyError):
                    pass  # skip computes that are not available at load time

        try:
            await self.gns3vm.auto_start_vm()
        except GNS3VMError as e:
            log.warning(str(e))

        await self.load_projects()
        await self._project_auto_open()

    def _create_ssl_context(self, server_config):

        import ssl
        # PROTOCOL_SSLv23 is a deprecated alias for PROTOCOL_TLS: it negotiates the
        # highest TLS version supported by both sides
        ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        certfile = server_config.certfile
        certkey = server_config.certkey
        try:
            ssl_context.load_cert_chain(certfile, certkey)
        except FileNotFoundError:
            log.critical("Could not find the SSL certfile or certkey")
            raise SystemExit
        except ssl.SSLError as e:
            log.critical(f"SSL error: {e}")
            raise SystemExit
        return ssl_context
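
    # For local testing, a self-signed certificate/key pair accepted by
    # load_cert_chain() can be generated with OpenSSL (illustrative command,
    # paths are hypothetical):
    #
    #   openssl req -x509 -newkey rsa:2048 -nodes -days 365 \
    #       -keyout /path/to/certkey.pem -out /path/to/certfile.pem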

    def ssl_context(self):
        """
        Returns the SSL context for the server.
        """

        return self._ssl_context

    def _update_config(self):
        """
        Call this when the server configuration file changes.
        """

        if self._local_server:
            self._local_server.user = Config.instance().settings.Server.compute_username
            self._local_server.password = Config.instance().settings.Server.compute_password

    async def stop(self):

        log.info("Controller is stopping")
        for project in self._projects.values():
            await project.close()
        for compute in self._computes.values():
            try:
                await compute.close()
            # We don't care if a compute is down at this step
            except (ComputeError, ControllerError, OSError):
                pass
        await self.gns3vm.exit_vm()
        self.save()
        self._computes = {}
        self._projects = {}

    async def reload(self):

        log.info("Controller is reloading")
        self._load_controller_settings()

        # remove all projects deleted from disk
        for project in self._projects.copy().values():
            if not os.path.exists(project.path) or not os.listdir(project.path):
                log.info(f"Project '{project.name}' doesn't exist on the disk anymore, closing...")
                await project.close()
                self.remove_project(project)

        await self.load_projects()

    def save(self):
        """
        Save the controller configuration on disk
        """

        if self._config_loaded is False:
            return

        if self._iou_license_settings["iourc_content"]:

            iou_config = Config.instance().settings.IOU
            server_config = Config.instance().settings.Server

            if iou_config.iourc_path:
                iourc_path = iou_config.iourc_path
            else:
                os.makedirs(server_config.secrets_dir, exist_ok=True)
                iourc_path = os.path.join(server_config.secrets_dir, "gns3_iourc_license")

            try:
                with open(iourc_path, "w+") as f:
                    f.write(self._iou_license_settings["iourc_content"])
                log.info(f"iourc file '{iourc_path}' saved")
            except OSError as e:
                log.error(f"Cannot write IOU license file '{iourc_path}': {e}")

        if self._appliance_manager.appliances_etag:
            etag_directory = os.path.dirname(Config.instance().server_config)
            os.makedirs(etag_directory, exist_ok=True)
            etag_appliances_path = os.path.join(etag_directory, "gns3_appliances_etag")

            try:
                with open(etag_appliances_path, "w+") as f:
                    f.write(self._appliance_manager.appliances_etag)
                log.info(f"etag appliances file '{etag_appliances_path}' saved")
            except OSError as e:
                log.error(f"Cannot write Etag appliance file '{etag_appliances_path}': {e}")

    def _load_controller_settings(self):
        """
        Reload the controller configuration from disk
        """

        # try:
        #     if not os.path.exists(self._config_file):
        #         self._config_loaded = True
        #         self.save()
        #     with open(self._config_file) as f:
        #         controller_settings = json.load(f)
        # except (OSError, ValueError) as e:
        #     log.critical("Cannot load configuration file '{}': {}".format(self._config_file, e))
        #     return []

        # load GNS3 VM settings
        # if "gns3vm" in controller_settings:
        #     gns3_vm_settings = controller_settings["gns3vm"]
        #     if "port" not in gns3_vm_settings:
        #         # port setting was added in version 2.2.8
        #         # the default port was 3080 before this
        #         gns3_vm_settings["port"] = 3080
        #     self.gns3vm.settings = gns3_vm_settings

        # load the IOU license settings
        iou_config = Config.instance().settings.IOU
        server_config = Config.instance().settings.Server

        if iou_config.iourc_path:
            iourc_path = iou_config.iourc_path
        else:
            if not server_config.secrets_dir:
                server_config.secrets_dir = os.path.dirname(Config.instance().server_config)
            iourc_path = os.path.join(server_config.secrets_dir, "gns3_iourc_license")

        if os.path.exists(iourc_path):
            try:
                with open(iourc_path) as f:
                    self._iou_license_settings["iourc_content"] = f.read()
                log.info(f"iourc file '{iourc_path}' loaded")
            except OSError as e:
                log.error(f"Cannot read IOU license file '{iourc_path}': {e}")

        self._iou_license_settings["license_check"] = iou_config.license_check

        etag_directory = os.path.dirname(Config.instance().server_config)
        etag_appliances_path = os.path.join(etag_directory, "gns3_appliances_etag")
        self._appliance_manager.appliances_etag = None
        if os.path.exists(etag_appliances_path):
            try:
                with open(etag_appliances_path) as f:
                    self._appliance_manager.appliances_etag = f.read()
                log.info(f"etag appliances file '{etag_appliances_path}' loaded")
            except OSError as e:
                log.error(f"Cannot read Etag appliance file '{etag_appliances_path}': {e}")

        # FIXME: install builtin appliances only once, need to store "version" somewhere...
        # if parse_version(__version__) > parse_version(controller_settings.get("version", "")):
        #     self._appliance_manager.install_builtin_appliances()
        self._appliance_manager.install_builtin_appliances()
        self._appliance_manager.load_appliances()
        self._config_loaded = True

    async def load_projects(self):
        """
        Preload the list of projects from disk
        """

        server_config = Config.instance().settings.Server
        projects_path = os.path.expanduser(server_config.projects_path)
        os.makedirs(projects_path, exist_ok=True)
        try:
            for project_path in os.listdir(projects_path):
                project_dir = os.path.join(projects_path, project_path)
                if os.path.isdir(project_dir):
                    for file in os.listdir(project_dir):
                        if file.endswith(".gns3"):
                            try:
                                await self.load_project(os.path.join(project_dir, file), load=False)
                            except (ControllerError, NotImplementedError):
                                pass  # skip incompatible projects
        except OSError as e:
            log.error(str(e))

    @staticmethod
    def install_resource_files(dst_path, resource_name):
        """
        Install files from resources to the user's file system
        """

        if hasattr(sys, "frozen") and sys.platform.startswith("win"):
            resource_path = os.path.normpath(os.path.join(os.path.dirname(sys.executable), resource_name))
            for filename in os.listdir(resource_path):
                if not os.path.exists(os.path.join(dst_path, filename)):
                    shutil.copy(os.path.join(resource_path, filename), os.path.join(dst_path, filename))
        else:
            for entry in importlib_resources.files(f'gns3server.{resource_name}').iterdir():
                full_path = os.path.join(dst_path, entry.name)
                if entry.is_file() and not os.path.exists(full_path):
                    log.debug(f'Installing {resource_name} resource file "{entry.name}" to "{full_path}"')
                    shutil.copy(str(entry), os.path.join(dst_path, entry.name))

    def _install_base_configs(self):
        """
        At startup we copy the base configs to the user location so they can be customized
        """

        dst_path = self.configs_path()
        log.info(f"Installing base configs in '{dst_path}'")
        try:
            Controller.install_resource_files(dst_path, "configs")
        except OSError as e:
            log.error(f"Could not install base config files to {dst_path}: {e}")

    def _install_builtin_disks(self):
        """
        At startup we copy the built-in Qemu disks to the user location so they can be used with appliances
        """

        dst_path = self.disks_path()
        log.info(f"Installing built-in disks in '{dst_path}'")
        try:
            Controller.install_resource_files(dst_path, "disks")
        except OSError as e:
            log.error(f"Could not install disk files to {dst_path}: {e}")

    def images_path(self):
        """
        Get the image storage directory
        """

        server_config = Config.instance().settings.Server
        images_path = os.path.expanduser(server_config.images_path)
        os.makedirs(images_path, exist_ok=True)
        return images_path

    def configs_path(self):
        """
        Get the configs storage directory
        """

        server_config = Config.instance().settings.Server
        configs_path = os.path.expanduser(server_config.configs_path)
        os.makedirs(configs_path, exist_ok=True)
        return configs_path

    def disks_path(self, emulator_type="qemu"):
        """
        Get the disks storage directory
        """

        disks_path = default_images_directory(emulator_type)
        os.makedirs(disks_path, exist_ok=True)
        return disks_path

    async def add_compute(self, compute_id=None, name=None, force=False, connect=True, wait_connection=True, **kwargs):
        """
        Add a server to the dictionary of computes controlled by this controller

        :param compute_id: Compute identifier
        :param name: Compute name
        :param force: True to skip the security check
        :param connect: True to connect to the compute immediately
        :param wait_connection: True to wait for the connection before returning
        :param kwargs: See the documentation of Compute
        """

        if compute_id not in self._computes:

            # disallow creating the local and VM computes from the outside
            if (compute_id == "local" or compute_id == "vm") and not force:
                return None

            # a GNS3 VM imported as a remote server can conflict with the GNS3 VM
            # settings, so the server name "gns3vm" is ignored
            if name == "gns3vm":
                return None

            for compute in self._computes.values():
                if name and compute.name == name and not force:
                    raise ControllerError(f'Compute name "{name}" already exists')

            compute = Compute(compute_id=compute_id, controller=self, name=name, **kwargs)
            self._computes[compute.id] = compute
            # self.save()
            if connect:
                if wait_connection:
                    await compute.connect()
                else:
                    # call compute.connect() later to give the controller time to fully start
                    asyncio.get_event_loop().call_later(1, lambda: asyncio.ensure_future(compute.connect()))
            self.notification.controller_emit("compute.created", compute.asdict())
            return compute
        else:
            if connect:
                await self._computes[compute_id].connect()
            self.notification.controller_emit("compute.updated", self._computes[compute_id].asdict())
            return self._computes[compute_id]

    async def close_compute_projects(self, compute):
        """
        Close projects running on a compute
        """

        for project in self._projects.values():
            if compute in project.computes:
                await project.close()

    def compute_has_open_project(self, compute):
        """
        Check if a compute has an opened project.

        :returns: True if a project is open
        """

        for project in self._projects.values():
            if compute in project.computes and project.status == "opened":
                return True
        return False

    async def delete_compute(self, compute_id):
        """
        Delete a compute node. Projects using this compute will be closed.

        :param compute_id: Compute identifier
        """

        try:
            compute = self.get_compute(compute_id)
        except ControllerNotFoundError:
            return
        await self.close_compute_projects(compute)
        await compute.close()
        del self._computes[compute_id]
        # self.save()
        self.notification.controller_emit("compute.deleted", compute.asdict())

    @property
    def notification(self):
        """
        The notification system
        """

        return self._notification

    @property
    def computes(self):
        """
        :returns: The dictionary of computes managed by this controller
        """

        return self._computes

    def get_compute(self, compute_id):
        """
        Returns a compute or raises a 404 error.
        """

        if compute_id is None:
            # get all connected computes
            computes = [compute for compute in self._computes.values() if compute.connected is True]
            if len(computes) == 1:
                # return the only available compute
                return computes[0]
            else:
                # randomly pick a compute until we have proper scalability handling
                # https://github.com/GNS3/gns3-server/issues/1676
                return random.choice(computes)

        try:
            return self._computes[compute_id]
        except KeyError:
            if compute_id == "vm":
                raise ControllerNotFoundError("Cannot use a node on the GNS3 VM server with the GNS3 VM not configured")
            raise ControllerNotFoundError(f"Compute ID {compute_id} doesn't exist")

    def has_compute(self, compute_id):
        """
        Return True if the compute exists in the controller
        """

        return compute_id in self._computes

    async def add_project(self, project_id=None, name=None, path=None, **kwargs):
        """
        Creates a project or returns an existing project

        :param project_id: Project ID
        :param name: Project name
        :param path: Project directory path
        :param kwargs: See the documentation of Project
        """

        if project_id not in self._projects:
            for project in self._projects.values():
                if name and project.name == name:
                    if path and path == project.path:
                        raise ControllerError(f'Project "{name}" already exists in location "{path}"')
                    else:
                        raise ControllerError(f'Project "{name}" already exists')
            project = Project(project_id=project_id, controller=self, name=name, path=path, **kwargs)
            self._projects[project.id] = project
            return self._projects[project.id]
        return self._projects[project_id]

    def get_project(self, project_id):
        """
        Returns a project or raises a 404 error.
        """

        try:
            return self._projects[project_id]
        except KeyError:
            raise ControllerNotFoundError(f"Project ID {project_id} doesn't exist")

    async def get_loaded_project(self, project_id):
        """
        Returns a project or raises a 404 error.

        If the project has not finished loading, wait for it
        """

        project = self.get_project(project_id)
        await project.wait_loaded()
        return project

    def remove_project(self, project):

        if project.id in self._projects:
            del self._projects[project.id]

    async def load_project(self, path, load=True):
        """
        Load a project from a .gns3 file

        :param path: Path of the .gns3 file
        :param load: Load the topology
        """

        if not os.path.exists(path):
            raise ControllerError(f"'{path}' does not exist on the controller")

        topo_data = load_topology(path)
        topo_data.pop("topology")
        topo_data.pop("version")
        topo_data.pop("revision")
        topo_data.pop("type")

        if topo_data["project_id"] in self._projects:
            project = self._projects[topo_data["project_id"]]
        else:
            project = await self.add_project(
                path=os.path.dirname(path),
                status="closed",
                filename=os.path.basename(path),
                **topo_data
            )
        if load or project.auto_open:
            await project.open()
        return project

    async def _project_auto_open(self):
        """
        Automatically open projects with auto open enabled
        """

        for project in self._projects.values():
            if project.auto_open:
                await project.open()

    def get_free_project_name(self, base_name):
        """
        Generate a free project name based on the base name
        """

        names = [p.name for p in self._projects.values()]
        if base_name not in names:
            return base_name
        i = 1

        projects_path = self.projects_directory()

        while True:
            new_name = f"{base_name}-{i}"
            if new_name not in names and not os.path.exists(os.path.join(projects_path, new_name)):
                break
            i += 1
            if i > 1000000:
                raise ControllerError("A project name could not be allocated (node limit reached?)")
        return new_name

    @property
    def projects(self):
        """
        :returns: The dictionary of projects managed by the controller
        """

        return self._projects

    @property
    def appliance_manager(self):
        """
        :returns: Appliance Manager instance
        """

        return self._appliance_manager

    @property
    def iou_license(self):
        """
        :returns: The dictionary of IOU license settings
        """

        return self._iou_license_settings

    def projects_directory(self):

        server_config = Config.instance().settings.Server
        return os.path.expanduser(server_config.projects_path)

    @staticmethod
    def instance():
        """
        Singleton to return only one instance of Controller.

        :returns: instance of Controller
        """

        if not hasattr(Controller, "_instance") or Controller._instance is None:
            Controller._instance = Controller()
        return Controller._instance

    async def autoidlepc(self, compute_id, platform, image, ram):
        """
        Compute an IDLE PC value for an image

        :param compute_id: ID of the compute where the idlepc operation needs to run
        :param platform: Platform type
        :param image: Image to use
        :param ram: amount of RAM to use
        """

        compute = self.get_compute(compute_id)
        for project in list(self._projects.values()):
            if project.name == "AUTOIDLEPC":
                await project.delete()
                self.remove_project(project)
        project = await self.add_project(name="AUTOIDLEPC")
        node = await project.add_node(
            compute, "AUTOIDLEPC", str(uuid.uuid4()), node_type="dynamips", platform=platform, image=image, ram=ram
        )
        res = await node.dynamips_auto_idlepc()
        await project.delete()
        self.remove_project(project)
        return res