Mirror of https://github.com/GNS3/gns3-server.git
Allocate application IDs for IOU nodes on the controller.
An application ID is used by IOU to generate its interface MAC addresses. Application IDs must be unique across all opened projects sharing the same compute to avoid MAC address collisions.
Parent: 6cb87ef25d
Commit: c5412bf970
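Before the diff, here is a small stand-alone sketch of the allocation idea (illustrative only: FakeNode, FakeProject and pick_application_id are hypothetical names, not GNS3 APIs). Every opened project that runs on a given compute contributes its IOU nodes to one shared pool of application IDs, and a new IOU node takes a free ID from that pool.

# Minimal illustrative sketch, not GNS3 code: choose an IOU application ID that is
# unique across all opened projects sharing one compute.
from dataclasses import dataclass, field
from typing import Dict, List


@dataclass
class FakeNode:                      # hypothetical stand-in for a controller Node
    node_type: str
    properties: Dict[str, int]


@dataclass
class FakeProject:                   # hypothetical stand-in for a controller Project
    status: str
    computes: List[str]
    nodes: List[FakeNode] = field(default_factory=list)


def pick_application_id(projects: List[FakeProject], compute: str) -> int:
    """Return a free application ID (1-511) for IOU nodes on the given compute."""
    used = {
        node.properties["application_id"]
        for project in projects
        if project.status == "opened" and compute in project.computes
        for node in project.nodes
        if node.node_type == "iou"
    }
    free = set(range(1, 512)) - used  # same 1-511 pool as the helper in the diff
    if not free:
        raise RuntimeError("no application ID left on compute {}".format(compute))
    return min(free)                  # the real helper pops an arbitrary free member


# Two opened projects on the same compute share one ID space; another compute does not.
p1 = FakeProject("opened", ["compute1"], [FakeNode("iou", {"application_id": 1})])
p2 = FakeProject("opened", ["compute1"], [FakeNode("iou", {"application_id": 2})])
print(pick_application_id([p1, p2], "compute1"))  # 3
print(pick_application_id([p1, p2], "compute2"))  # 1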
@@ -25,7 +25,6 @@ import asyncio
 from ..base_manager import BaseManager
 from .iou_error import IOUError
 from .iou_vm import IOUVM
-from .utils.application_id import get_next_application_id

 import logging
 log = logging.getLogger(__name__)
@@ -48,12 +47,7 @@ class IOU(BaseManager):
         :returns: IOUVM instance
         """

-        async with self._iou_id_lock:
-            # wait for a node to be completely created before adding a new one
-            # this is important otherwise we allocate the same application ID
-            # when creating multiple IOU node at the same time
-            application_id = get_next_application_id(self.nodes)
-            node = await super().create_node(*args, application_id=application_id, **kwargs)
+        node = await super().create_node(*args, **kwargs)
         return node

     @staticmethod
@@ -70,6 +70,10 @@ class IOUVM(BaseNode):

         super().__init__(name, node_id, project, manager, console=console, console_type=console_type)

+        log.info('IOU "{name}" [{id}]: assigned with application ID {application_id}'.format(name=self._name,
+                                                                                              id=self._id,
+                                                                                              application_id=application_id))
+
         self._iou_process = None
         self._telnet_server = None
         self._iou_stdout_file = ""
@@ -481,7 +481,7 @@ class Controller:
     @property
     def projects(self):
         """
-        :returns: The dictionary of projects managed by GNS3
+        :returns: The dictionary of projects managed by the controller
         """

         return self._projects
@@ -38,6 +38,7 @@ from .topology import project_to_topology, load_topology
 from .udp_link import UDPLink
 from ..config import Config
 from ..utils.path import check_path_allowed, get_default_project_directory
+from ..utils.application_id import get_next_application_id
 from ..utils.asyncio.pool import Pool
 from ..utils.asyncio import locking
 from ..utils.asyncio import aiozipstream
@@ -126,6 +127,8 @@ class Project:
         assert self._status != "closed"
         self.dump()

+        self._iou_id_lock = asyncio.Lock()
+
     def emit_notification(self, action, event):
         """
         Emit a notification to all clients using this project.
@@ -516,17 +519,7 @@ class Project:
         node = await self.add_node(compute, name, node_id, node_type=node_type, **template)
         return node

-    @open_required
-    async def add_node(self, compute, name, node_id, dump=True, node_type=None, **kwargs):
-        """
-        Create a node or return an existing node
-
-        :param dump: Dump topology to disk
-        :param kwargs: See the documentation of node
-        """
-
-        if node_id in self._nodes:
-            return self._nodes[node_id]
+    async def _create_node(self, compute, name, node_id, node_type=None, **kwargs):

         node = Node(self, compute, name, node_id=node_id, node_type=node_type, **kwargs)
         if compute not in self._project_created_on_compute:
@@ -547,10 +540,39 @@ class Project:
             data["variables"] = self._variables

         await compute.post("/projects", data=data)
         self._project_created_on_compute.add(compute)

         await node.create()
         self._nodes[node.id] = node
+
+        return node
+
+    @open_required
+    async def add_node(self, compute, name, node_id, dump=True, node_type=None, **kwargs):
+        """
+        Create a node or return an existing node
+
+        :param dump: Dump topology to disk
+        :param kwargs: See the documentation of node
+        """
+
+        if node_id in self._nodes:
+            return self._nodes[node_id]
+
+        if node_type == "iou":
+            async with self._iou_id_lock:
+                # wait for a IOU node to be completely created before adding a new one
+                # this is important otherwise we allocate the same application ID (used
+                # to generate MAC addresses) when creating multiple IOU node at the same time
+                if "properties" in kwargs.keys():
+                    # allocate a new application id for nodes loaded from the project
+                    kwargs.get("properties")["application_id"] = get_next_application_id(self._controller.projects, compute)
+                elif "application_id" not in kwargs.keys() and not kwargs.get("properties"):
+                    # allocate a new application id for nodes added to the project
+                    kwargs["application_id"] = get_next_application_id(self._controller.projects, compute)
+                node = await self._create_node(compute, name, node_id, node_type, **kwargs)
+        else:
+            node = await self._create_node(compute, name, node_id, node_type, **kwargs)
         self.emit_notification("node.created", node.__json__())
         if dump:
             self.dump()
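The async with self._iou_id_lock block above is what keeps concurrent add_node calls from reusing an ID: the free ID is read and then reserved with an await in between, so without the lock two creations could both see the same set of used IDs. A minimal self-contained asyncio sketch of that race (hypothetical names, not GNS3 code):

# Demonstrates why allocation must be serialized: without the lock, both tasks read
# the used set before either has reserved its ID and they collide.
import asyncio

used_ids = set()
id_lock = asyncio.Lock()


async def allocate(locked: bool) -> int:
    async def read_then_reserve() -> int:
        candidate = min(set(range(1, 512)) - used_ids)  # lowest free ID
        await asyncio.sleep(0)  # yield to the event loop, like awaiting the compute
        used_ids.add(candidate)
        return candidate

    if locked:
        async with id_lock:
            return await read_then_reserve()
    return await read_then_reserve()


async def main() -> None:
    used_ids.clear()
    print(await asyncio.gather(allocate(False), allocate(False)))  # [1, 1], a collision
    used_ids.clear()
    print(await asyncio.gather(allocate(True), allocate(True)))    # [1, 2], unique IDs


asyncio.run(main())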
@@ -1102,12 +1124,11 @@ class Project:
         data['z'] = z
         data['locked'] = False  # duplicated node must not be locked
         new_node_uuid = str(uuid.uuid4())
-        new_node = await self.add_node(
-            node.compute,
-            node.name,
-            new_node_uuid,
-            node_type=node_type,
-            **data)
+        new_node = await self.add_node(node.compute,
+                                       node.name,
+                                       new_node_uuid,
+                                       node_type=node_type,
+                                       **data)
         try:
             await node.post("/duplicate", timeout=None, data={
                 "destination_node_id": new_node_uuid
@@ -58,16 +58,17 @@ class IOUHandler:

         iou = IOU.instance()
         vm = await iou.create_node(request.json.pop("name"),
-                                   request.match_info["project_id"],
-                                   request.json.get("node_id"),
-                                   path=request.json.get("path"),
-                                   console=request.json.get("console"),
-                                   console_type=request.json.get("console_type", "telnet"))
+                                   request.match_info["project_id"],
+                                   request.json.get("node_id"),
+                                   application_id=request.json.get("application_id"),
+                                   path=request.json.get("path"),
+                                   console=request.json.get("console"),
+                                   console_type=request.json.get("console_type", "telnet"))

         for name, value in request.json.items():
             if hasattr(vm, name) and getattr(vm, name) != value:
                 if name == "application_id":
-                    continue  # we must ignore this to avoid overwriting the application_id allocated by the IOU manager
+                    continue  # we must ignore this to avoid overwriting the application_id allocated by the controller
                 if name == "startup_config_content" and (vm.startup_config_content and len(vm.startup_config_content) > 0):
                     continue
                 if name == "private_config_content" and (vm.private_config_content and len(vm.private_config_content) > 0):
@@ -96,7 +96,7 @@ IOU_CREATE_SCHEMA = {
         },
     },
     "additionalProperties": False,
-    "required": ["name", "path"]
+    "required": ["application_id", "name", "path"]
}

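With application_id now required by the compute-side schema, every create request for an IOU node has to carry it. Purely as an illustration (the values are arbitrary, and iou.bin echoes the test fixture at the end of this diff), a minimal body satisfying the required list would be:

# Hypothetical minimal payload for the IOU create schema above; the controller
# fills in application_id from its shared per-compute pool before posting.
payload = {
    "name": "IOU1",           # node name, chosen by the user
    "path": "iou.bin",        # IOU image path
    "application_id": 1,      # allocated by the controller
}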
@@ -15,24 +15,32 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

-from ..iou_error import IOUError
+import aiohttp

 import logging
 log = logging.getLogger(__name__)


-def get_next_application_id(nodes):
+def get_next_application_id(projects, compute):
     """
     Calculates free application_id from given nodes

-    :param nodes:
-    :raises IOUError when exceeds number
+    :param projects: all projects managed by controller
+    :param compute: Compute instance
+    :raises HTTPConflict when exceeds number
     :return: integer first free id
     """

-    used = set([n.application_id for n in nodes])
+    nodes = []
+
+    # look for application id for in all nodes across all opened projects that share the same compute
+    for project in projects.values():
+        if project.status == "opened" and compute in project.computes:
+            nodes.extend(list(project.nodes.values()))
+
+    used = set([n.properties["application_id"] for n in nodes if n.node_type == "iou"])
     pool = set(range(1, 512))
     try:
         return (pool - used).pop()
     except KeyError:
-        raise IOUError("Cannot create a new IOU VM (limit of 512 VMs on one host reached)")
+        raise aiohttp.web.HTTPConflict(text="Cannot create a new IOU node (limit of 512 nodes across all opened projects using compute {} reached".format(compute.name))
@@ -1,79 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2015 GNS3 Technologies Inc.
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-
-import pytest
-from unittest.mock import patch
-import uuid
-import sys
-
-pytestmark = pytest.mark.skipif(sys.platform.startswith("win"), reason="Not supported on Windows")
-
-if not sys.platform.startswith("win"):
-    from gns3server.compute.iou import IOU
-    from gns3server.compute.iou.iou_error import IOUError
-
-from gns3server.compute.project_manager import ProjectManager
-
-
-@pytest.fixture(scope="function")
-def iou(port_manager):
-    # Cleanup the IOU object
-    IOU._instance = None
-    iou = IOU.instance()
-    iou.port_manager = port_manager
-    return iou
-
-
-def test_application_id(loop, project, iou):
-    vm1_id = str(uuid.uuid4())
-    vm2_id = str(uuid.uuid4())
-    vm3_id = str(uuid.uuid4())
-    vm1 = loop.run_until_complete(iou.create_node("PC 1", project.id, vm1_id))
-    vm2 = loop.run_until_complete(iou.create_node("PC 2", project.id, vm2_id))
-    assert vm1.application_id == 1
-    assert vm2.application_id == 2
-    loop.run_until_complete(iou.delete_node(vm1_id))
-    vm3 = loop.run_until_complete(iou.create_node("PC 3", project.id, vm3_id))
-    assert vm3.application_id == 1
-
-
-def test_get_application_id_multiple_project(loop, iou):
-    vm1_id = str(uuid.uuid4())
-    vm2_id = str(uuid.uuid4())
-    vm3_id = str(uuid.uuid4())
-    project1 = ProjectManager.instance().create_project(project_id=str(uuid.uuid4()))
-    project2 = ProjectManager.instance().create_project(project_id=str(uuid.uuid4()))
-    vm1 = loop.run_until_complete(iou.create_node("PC 1", project1.id, vm1_id))
-    vm2 = loop.run_until_complete(iou.create_node("PC 2", project1.id, vm2_id))
-    vm3 = loop.run_until_complete(iou.create_node("PC 2", project2.id, vm3_id))
-    assert vm1.application_id == 1
-    assert vm2.application_id == 2
-    assert vm3.application_id == 3
-
-
-def test_get_application_id_no_id_available(loop, project, iou):
-    with pytest.raises(IOUError):
-        for i in range(1, 513):
-            node_id = str(uuid.uuid4())
-            vm = loop.run_until_complete(iou.create_node("PC {}".format(i), project.id, node_id))
-            assert vm.application_id == i
-
-
-def test_get_images_directory(iou, tmpdir):
-    with patch("gns3server.config.Config.get_section_config", return_value={"images_path": str(tmpdir)}):
-        assert iou.get_images_directory() == str(tmpdir / "IOU")
@@ -28,6 +28,7 @@ from uuid import uuid4

 from gns3server.controller.project import Project
 from gns3server.controller.template import Template
+from gns3server.controller.node import Node
 from gns3server.controller.ports.ethernet_port import EthernetPort
 from gns3server.config import Config

@@ -204,6 +205,131 @@ def test_add_node_non_local(async_run, controller):
     project.emit_notification.assert_any_call("node.created", node.__json__())


+def test_add_node_iou(async_run, controller):
+    """
+    Test if an application ID is allocated for IOU nodes
+    """
+    compute = MagicMock()
+    compute.id = "local"
+    project = async_run(controller.add_project(project_id=str(uuid.uuid4()), name="test1"))
+    project.emit_notification = MagicMock()
+
+    response = MagicMock()
+    compute.post = AsyncioMagicMock(return_value=response)
+
+    node1 = async_run(project.add_node(compute, "test1", None, node_type="iou"))
+    node2 = async_run(project.add_node(compute, "test2", None, node_type="iou"))
+    node3 = async_run(project.add_node(compute, "test3", None, node_type="iou"))
+    assert node1.properties["application_id"] == 1
+    assert node2.properties["application_id"] == 2
+    assert node3.properties["application_id"] == 3
+
+
+def test_add_node_iou_with_multiple_projects(async_run, controller):
+    """
+    Test if an application ID is allocated for IOU nodes with different projects already opened
+    """
+    compute = MagicMock()
+    compute.id = "local"
+    project1 = async_run(controller.add_project(project_id=str(uuid.uuid4()), name="test1"))
+    project1.emit_notification = MagicMock()
+    project2 = async_run(controller.add_project(project_id=str(uuid.uuid4()), name="test2"))
+    project2.emit_notification = MagicMock()
+    project3 = async_run(controller.add_project(project_id=str(uuid.uuid4()), name="test3"))
+    project3.emit_notification = MagicMock()
+    response = MagicMock()
+    compute.post = AsyncioMagicMock(return_value=response)
+
+    node1 = async_run(project1.add_node(compute, "test1", None, node_type="iou"))
+    node2 = async_run(project1.add_node(compute, "test2", None, node_type="iou"))
+    node3 = async_run(project1.add_node(compute, "test3", None, node_type="iou"))
+
+    node4 = async_run(project2.add_node(compute, "test4", None, node_type="iou"))
+    node5 = async_run(project2.add_node(compute, "test5", None, node_type="iou"))
+    node6 = async_run(project2.add_node(compute, "test6", None, node_type="iou"))
+
+    node7 = async_run(project3.add_node(compute, "test7", None, node_type="iou"))
+    node8 = async_run(project3.add_node(compute, "test8", None, node_type="iou"))
+    node9 = async_run(project3.add_node(compute, "test9", None, node_type="iou"))
+
+    assert node1.properties["application_id"] == 1
+    assert node2.properties["application_id"] == 2
+    assert node3.properties["application_id"] == 3
+
+    assert node4.properties["application_id"] == 4
+    assert node5.properties["application_id"] == 5
+    assert node6.properties["application_id"] == 6
+
+    assert node7.properties["application_id"] == 7
+    assert node8.properties["application_id"] == 8
+    assert node9.properties["application_id"] == 9
+
+    controller.remove_project(project1)
+    project4 = async_run(controller.add_project(project_id=str(uuid.uuid4()), name="test4"))
+    project4.emit_notification = MagicMock()
+
+    node10 = async_run(project3.add_node(compute, "test10", None, node_type="iou"))
+    node11 = async_run(project3.add_node(compute, "test11", None, node_type="iou"))
+    node12 = async_run(project3.add_node(compute, "test12", None, node_type="iou"))
+
+    assert node10.properties["application_id"] == 1
+    assert node11.properties["application_id"] == 2
+    assert node12.properties["application_id"] == 3
+
+
+def test_add_node_iou_with_multiple_projects_different_computes(async_run, controller):
+    """
+    Test if an application ID is allocated for IOU nodes with different projects already opened
+    """
+    compute1 = MagicMock()
+    compute1.id = "remote1"
+    compute2 = MagicMock()
+    compute2.id = "remote2"
+    project1 = async_run(controller.add_project(project_id=str(uuid.uuid4()), name="test1"))
+    project1.emit_notification = MagicMock()
+    project2 = async_run(controller.add_project(project_id=str(uuid.uuid4()), name="test2"))
+    project2.emit_notification = MagicMock()
+    response = MagicMock()
+    compute1.post = AsyncioMagicMock(return_value=response)
+    compute2.post = AsyncioMagicMock(return_value=response)
+
+    node1 = async_run(project1.add_node(compute1, "test1", None, node_type="iou"))
+    node2 = async_run(project1.add_node(compute1, "test2", None, node_type="iou"))
+
+    node3 = async_run(project2.add_node(compute2, "test3", None, node_type="iou"))
+    node4 = async_run(project2.add_node(compute2, "test4", None, node_type="iou"))
+
+    assert node1.properties["application_id"] == 1
+    assert node2.properties["application_id"] == 2
+
+    assert node3.properties["application_id"] == 1
+    assert node4.properties["application_id"] == 2
+
+    node5 = async_run(project1.add_node(compute2, "test5", None, node_type="iou"))
+    node6 = async_run(project2.add_node(compute1, "test6", None, node_type="iou"))
+
+    assert node5.properties["application_id"] == 3
+    assert node6.properties["application_id"] == 4
+
+
+def test_add_node_iou_no_id_available(async_run, controller):
+    """
+    Test if an application ID is allocated for IOU nodes
+    """
+    compute = MagicMock()
+    compute.id = "local"
+    project = async_run(controller.add_project(project_id=str(uuid.uuid4()), name="test"))
+    project.emit_notification = MagicMock()
+    response = MagicMock()
+    compute.post = AsyncioMagicMock(return_value=response)
+
+    with pytest.raises(aiohttp.web.HTTPConflict):
+        for i in range(1, 513):
+            prop = {"properties": {"application_id": i}}
+            project._nodes[i] = Node(project, compute, "Node{}".format(i), node_id=i, node_type="iou", **prop)
+        async_run(project.add_node(compute, "test1", None, node_type="iou"))
+
+
 def test_add_node_from_template(async_run, controller):
     """
     For a local server we send the project path
@@ -42,7 +42,7 @@ def fake_iou_bin(images_dir):
 @pytest.fixture
 def base_params(tmpdir, fake_iou_bin):
     """Return standard parameters"""
-    return {"name": "PC TEST 1", "path": "iou.bin"}
+    return {"application_id": 42, "name": "PC TEST 1", "path": "iou.bin"}


 @pytest.fixture