Mirror of https://github.com/GNS3/gns3-server.git (synced 2025-02-07 08:43:48 +02:00)

Merge branch '2.1' into filters_iou
Commit: 08d4c1a000
@@ -22,6 +22,13 @@
         "process_priority": "normal"
     },
     "images": [
+        {
+            "filename": "Check_Point_R80.10_T421_Gaia.iso",
+            "version": "80.10",
+            "md5sum": "12d9723fadb89bb722e20ca3f89012ce",
+            "filesize": 3420127232,
+            "download_url": "https://supportcenter.checkpoint.com/supportcenter/portal?eventSubmit_doGoviewsolutiondetails=&solutionid=sk104859"
+        },
         {
             "filename": "Check_Point_R77.30_T204_Install_and_Upgrade.Gaia.iso",
             "version": "77.30",
@@ -29,6 +36,13 @@
             "filesize": 2799271936,
             "download_url": "https://supportcenter.checkpoint.com/supportcenter/portal?eventSubmit_doGoviewsolutiondetails=&solutionid=sk104859"
         },
+        {
+            "filename": "Check_Point_R77.20_T124_Install.Gaia.iso",
+            "version": "77.20",
+            "md5sum": "7552fa2ad3e1f0ac31615b60b736969c",
+            "filesize": 2632974336,
+            "download_url": "https://supportcenter.checkpoint.com/supportcenter/portal?eventSubmit_doGoviewsolutiondetails=&solutionid=sk104859"
+        },
         {
             "filename": "empty8G.qcow2",
             "version": "1.0",
@@ -39,12 +53,27 @@
             }
     ],
     "versions": [
+        {
+            "name": "80.10",
+            "images": {
+                "hda_disk_image": "empty8G.qcow2",
+                "cdrom_image": "Check_Point_R80.10_T421_Gaia.iso"
+            }
+        },
         {
             "name": "77.30",
             "images": {
                 "hda_disk_image": "empty8G.qcow2",
                 "cdrom_image": "Check_Point_R77.30_T204_Install_and_Upgrade.Gaia.iso"
             }
+        },
+        {
+            "name": "77.20",
+            "images": {
+                "hda_disk_image": "empty8G.qcow2",
+                "cdrom_image": "Check_Point_R77.20_T124_Install.Gaia.iso"
+            }
         }
     ]
 }
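The "md5sum" and "filesize" fields in these appliance entries are what lets GNS3 validate a locally supplied image before it is used. As a hedged illustration only (not part of this diff; the helper name and chunk size are invented), a check against the new Check Point R80.10 entry could look like this:

    import hashlib
    import os

    def image_matches(path, md5sum, filesize):
        """Return True if the local file matches the appliance metadata."""
        if os.path.getsize(path) != filesize:
            return False
        digest = hashlib.md5()
        with open(path, "rb") as f:
            # hash in 1 MiB chunks so large ISO/qcow2 images do not load into memory at once
            for chunk in iter(lambda: f.read(1024 * 1024), b""):
                digest.update(chunk)
        return digest.hexdigest() == md5sum

    # e.g. image_matches("Check_Point_R80.10_T421_Gaia.iso",
    #                    "12d9723fadb89bb722e20ca3f89012ce", 3420127232)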
@@ -28,6 +28,12 @@
             "version": "15.1a",
             "md5sum": "9549a20a7391fb849da32caa77a0d254",
             "filesize": 72726092
+        },
+        {
+            "filename": "i86bi-linux-l2-adventerprisek9-15.2d.bin",
+            "version": "15.2d",
+            "md5sum": "f16db44433beb3e8c828db5ddad1de8a",
+            "filesize": 105036380
         }
     ],
     "versions": [
@@ -42,6 +48,12 @@
             "images": {
                 "image": "i86bi-linux-l2-adventerprisek9-15.1a.bin"
             }
+        },
+        {
+            "name": "15.2d",
+            "images": {
+                "image": "i86bi-linux-l2-adventerprisek9-15.2d.bin"
+            }
         }
     ]
 }
@@ -23,6 +23,13 @@
         "kvm": "require"
     },
     "images": [
+        {
+            "filename": "cumulus-linux-3.3.2-vx-amd64.qcow2",
+            "version": "3.3.2",
+            "md5sum": "8364f93cabaa442c13c8c6752a248a5d",
+            "filesize": 980090880,
+            "download_url": "https://cumulusnetworks.com/cumulus-vx/download/"
+        },
         {
             "filename": "cumulus-linux-3.2.1-vx-amd64-1486153138.ac46c24zd00d13e.qcow2",
             "version": "3.2.1",
@@ -88,6 +95,12 @@
         }
     ],
     "versions": [
+        {
+            "name": "3.3.2",
+            "images": {
+                "hda_disk_image": "cumulus-linux-3.3.2-vx-amd64.qcow2"
+            }
+        },
         {
             "name": "3.2.1",
             "images": {
gns3server/appliances/network_automation.gns3a (new file, 18 lines)
@@ -0,0 +1,18 @@
+{
+    "name": "Network Automation",
+    "category": "guest",
+    "description": "This container provides the popular tools used for network automation: Netmiko, NAPALM, Pyntc, and Ansible.",
+    "vendor_name": "GNS3",
+    "vendor_url": "http://www.gns3.com",
+    "product_name": "Network Automation",
+    "registry_version": 3,
+    "status": "stable",
+    "maintainer": "GNS3 Team",
+    "maintainer_email": "developers@gns3.net",
+    "symbol": "linux_guest.svg",
+    "docker": {
+        "adapters": 1,
+        "image": "gns3/network_automation:latest",
+        "console_type": "telnet"
+    }
+}
gns3server/appliances/pan-vm-fw.gns3a (new file, 71 lines)
@@ -0,0 +1,71 @@
+{
+    "name": "PA-VM",
+    "category": "firewall",
+    "description": "The VM-Series combines next-generation firewall security and advanced threat prevention to protect your virtualized environments from advanced cyberthreats. The VM-Series natively analyzes all traffic in a single pass to determine the application identity, the content within, and the user identity.",
+    "vendor_name": "Palo Alto Networks",
+    "vendor_url": "http://www.paloaltonetworks.com/",
+    "documentation_url": "https://www.paloaltonetworks.com/documentation/80/virtualization/virtualization",
+    "product_name": "PAN VM-Series Firewall",
+    "product_url": "https://www.paloaltonetworks.com/products/secure-the-network/virtualized-next-generation-firewall/vm-series",
+    "registry_version": 3,
+    "status": "experimental",
+    "maintainer": "Community",
+    "maintainer_email": "",
+    "usage": "Default Username: admin\r\nDefault Password: admin\r\nPAN-VM goes through several iterations of host prompts during boot. This is normal and expected.\r\nLogin is available when prompt is PA-VM login:\r\n\r\nGetting Started:\r\nTo configure a static IP address at the console enter the following commands:\r\n\r\nconfigure\r\nset deviceconfig system ip-address <Static IP> netmask <Netmask> default-gateway <Gateway IP> type static\r\nset deviceconfig system dns-setting servers primary <DNS Server IP> secondary <DNS Server IP>\r\ncommit\r\n",
+    "symbol": "pan-vm-fw.svg",
+    "first_port_name": "management",
+    "port_name_format": "ethernet1/{port1}",
+    "qemu": {
+        "adapter_type": "virtio-net-pci",
+        "adapters": 25,
+        "ram": 4096,
+        "arch": "x86_64",
+        "console_type": "telnet",
+        "hda_disk_interface": "virtio",
+        "kvm": "require",
+        "options": "-smp 2"
+    },
+    "images": [
+        {
+            "filename": "PA-VM-ESX-6.1.0-disk1.vmdk",
+            "version": "6.1.0 (ESX)",
+            "md5sum": "64b1e81cd54008318235832ea6d71424",
+            "filesize": 2959736832,
+            "download_url": "https://support.paloaltonetworks.com/Updates/SoftwareUpdates/"
+        },
+        {
+            "filename": "PA-VM-KVM-7.1.0.qcow2",
+            "version": "7.1.0",
+            "md5sum": "da300253709740068927408239c2e321",
+            "filesize": 1858797568,
+            "download_url": "https://support.paloaltonetworks.com/Updates/SoftwareUpdates/"
+        },
+        {
+            "filename": "PA-VM-ESX-7.1.0-disk1.vmdk",
+            "version": "7.1.0 (ESX)",
+            "md5sum": "e044dc649b7146ee4f619edb0e5f6675",
+            "filesize": 1871149056,
+            "download_url": "https://support.paloaltonetworks.com/Updates/SoftwareUpdates/"
+        }
+    ],
+    "versions": [
+        {
+            "name": "6.1.0 (ESX)",
+            "images": {
+                "hda_disk_image": "PA-VM-ESX-6.1.0-disk1.vmdk"
+            }
+        },
+        {
+            "name": "7.1.0",
+            "images": {
+                "hda_disk_image": "PA-VM-KVM-7.1.0.qcow2"
+            }
+        },
+        {
+            "name": "7.1.0 (ESX)",
+            "images": {
+                "hda_disk_image": "PA-VM-ESX-7.1.0-disk1.vmdk"
+            }
+        }
+    ]
+}
@@ -64,6 +64,10 @@ class Cloud(BaseNode):
             port_number += 1
         self._ports_mapping = ports
 
+    @property
+    def nios(self):
+        return self._nios
+
     def _interfaces(self):
         return gns3server.utils.interfaces.interfaces()
 
@@ -202,6 +206,7 @@ class Cloud(BaseNode):
                                               rhost=nio.rhost,
                                               rport=nio.rport))
 
+            yield from self._ubridge_apply_filters(bridge_name, nio.filters)
         if port_info["type"] in ("ethernet", "tap"):
 
             if sys.platform.startswith("win"):
@@ -311,6 +316,19 @@ class Cloud(BaseNode):
             self.status = "stopped"
         self._nios[port_number] = nio
 
+    @asyncio.coroutine
+    def update_nio(self, port_number, nio):
+        """
+        Update an nio on this node
+
+        :param nio: NIO instance to add
+        :param port_number: port to allocate for the NIO
+        """
+
+        bridge_name = "{}-{}".format(self._id, port_number)
+        if self._ubridge_hypervisor and self._ubridge_hypervisor.is_running():
+            yield from self._ubridge_apply_filters(bridge_name, nio.filters)
+
     @asyncio.coroutine
     def _delete_ubridge_connection(self, port_number):
         """
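Together with the new nios property above, this gives callers a way to re-apply packet filters on a live bridge. A minimal sketch of how a caller might drive it, assuming a started Cloud node with a NIO already attached to port 0 (the helper name is invented; the filter payload format, {"latency": [10]}, is the one used in the updated tests further down):

    import asyncio

    @asyncio.coroutine
    def apply_latency_filter(cloud, port_number=0):
        nio = cloud.nios[port_number]                   # exposed by the new nios property
        nio.filters = {"latency": [10]}                 # per-type filter dict, as in the tests
        yield from cloud.update_nio(port_number, nio)   # re-applies filters via uBridge if it is running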
@@ -38,6 +38,7 @@ class NIOUDP(NIO):
         self._lport = lport
         self._rhost = rhost
         self._rport = rport
+        assert isinstance(filters, dict)
         self._filters = filters
 
     @property
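The new assertion makes the contract explicit: NIOUDP now takes its filters as a dict (possibly empty) keyed by filter type, each mapping to a list of parameters, instead of a list. A hedged sketch (the import path is assumed, it is not shown in this diff):

    from gns3server.compute.nios.nio_udp import NIOUDP  # import path assumed

    nio = NIOUDP(4242, "127.0.0.1", 4343, {"latency": [10]})   # ok: filters is a dict
    # NIOUDP(4242, "127.0.0.1", 4343, [])                      # would now trip the assert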
@@ -368,8 +368,10 @@ class Link:
         if node["node"].node_type in ('vpcs',
                                       'dynamips',
                                       'qemu',
-                                      'docker',
-                                      'iou'):
+                                      'iou',
+                                      'cloud',
+                                      'nat',
+                                      'docker'):
             return node["node"]
         return None
 
@@ -381,11 +383,11 @@ class Link:
     def __hash__(self):
         return hash(self._id)
 
-    def __json__(self, topology_dump=False):
+    def __json__(self, topology_dump = False):
         """
         :param topology_dump: Filter to keep only properties require for saving on disk
         """
-        res = []
+        res=[]
         for side in self._nodes:
             res.append({
                 "node_id": side["node"].id,
@@ -199,6 +199,33 @@ class CloudHandler:
         response.set_status(201)
         response.json(nio)
 
+    @Route.put(
+        r"/projects/{project_id}/cloud/nodes/{node_id}/adapters/{adapter_number:\d+}/ports/{port_number:\d+}/nio",
+        parameters={
+            "project_id": "Project UUID",
+            "node_id": "Node UUID",
+            "adapter_number": "Network adapter where the nio is located",
+            "port_number": "Port from where the nio should be updated"
+        },
+        status_codes={
+            201: "NIO updated",
+            400: "Invalid request",
+            404: "Instance doesn't exist"
+        },
+        input=NIO_SCHEMA,
+        output=NIO_SCHEMA,
+        description="Update a NIO from a Cloud instance")
+    def update_nio(request, response):
+
+        builtin_manager = Builtin.instance()
+        node = builtin_manager.get_node(request.match_info["node_id"], project_id=request.match_info["project_id"])
+        nio = node.nios[int(request.match_info["adapter_number"])]
+        if "filters" in request.json and nio:
+            nio.filters = request.json["filters"]
+        yield from node.update_nio(int(request.match_info["port_number"]), nio)
+        response.set_status(201)
+        response.json(request.json)
+
     @Route.delete(
         r"/projects/{project_id}/cloud/nodes/{node_id}/adapters/{adapter_number:\d+}/ports/{port_number:\d+}/nio",
         parameters={
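For reference, the body this new PUT route accepts is the same NIO_SCHEMA payload used when the NIO is created, optionally carrying a "filters" object; the compute test below sends an empty dict to clear filters. A hedged example of a non-empty payload (field values taken from the tests in this diff, the filter shape from the base_node test):

    # PUT /projects/{project_id}/cloud/nodes/{node_id}/adapters/0/ports/0/nio
    body = {
        "type": "nio_udp",
        "lport": 4242,
        "rport": 4343,
        "rhost": "127.0.0.1",
        "filters": {"latency": [10]},   # send {} to remove all filters
    }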
@@ -198,6 +198,33 @@ class NatHandler:
         response.set_status(201)
         response.json(nio)
 
+    @Route.put(
+        r"/projects/{project_id}/nat/nodes/{node_id}/adapters/{adapter_number:\d+}/ports/{port_number:\d+}/nio",
+        parameters={
+            "project_id": "Project UUID",
+            "node_id": "Node UUID",
+            "adapter_number": "Network adapter where the nio is located",
+            "port_number": "Port from where the nio should be updated"
+        },
+        status_codes={
+            201: "NIO updated",
+            400: "Invalid request",
+            404: "Instance doesn't exist"
+        },
+        input=NIO_SCHEMA,
+        output=NIO_SCHEMA,
+        description="Update a NIO from a NAT instance")
+    def update_nio(request, response):
+
+        builtin_manager = Builtin.instance()
+        node = builtin_manager.get_node(request.match_info["node_id"], project_id=request.match_info["project_id"])
+        nio = node.nios[int(request.match_info["adapter_number"])]
+        if "filters" in request.json and nio:
+            nio.filters = request.json["filters"]
+        yield from node.update_nio(int(request.match_info["port_number"]), nio)
+        response.set_status(201)
+        response.json(request.json)
+
     @Route.delete(
         r"/projects/{project_id}/nat/nodes/{node_id}/adapters/{adapter_number:\d+}/ports/{port_number:\d+}/nio",
         parameters={
@@ -26,7 +26,7 @@ from tests.utils import asyncio_patch
 
 @pytest.fixture
 def nio():
-    return NIOUDP(4242, "127.0.0.1", 4343, [])
+    return NIOUDP(4242, "127.0.0.1", 4343, {})
 
 
 @pytest.fixture
@@ -159,6 +159,7 @@ def test_linux_ethernet_raw_add_nio(linux_platform, project, async_run, nio):
     ubridge_mock.assert_has_calls([
         call("bridge create {}-0".format(cloud._id)),
         call("bridge add_nio_udp {}-0 4242 127.0.0.1 4343".format(cloud._id)),
+        call('bridge reset_packet_filters {}-0'.format(cloud._id)),
         call("bridge add_nio_linux_raw {}-0 \"eth0\"".format(cloud._id)),
         call("bridge start {}-0".format(cloud._id)),
     ])
@@ -188,6 +189,7 @@ def test_linux_ethernet_raw_add_nio_bridge(linux_platform, project, async_run, nio):
     ubridge_mock.assert_has_calls([
         call("bridge create {}-0".format(cloud._id)),
         call("bridge add_nio_udp {}-0 4242 127.0.0.1 4343".format(cloud._id)),
+        call('bridge reset_packet_filters {}-0'.format(cloud._id)),
         call("bridge add_nio_tap \"{}-0\" \"{}\"".format(cloud._id, tap)),
         call("brctl addif \"bridge0\" \"{}\"".format(tap)),
         call("bridge start {}-0".format(cloud._id)),
@@ -24,9 +24,9 @@ def test_arp_command(async_run):
     node = AsyncioMagicMock()
     node.name = "Test"
     node.nios = {}
-    node.nios[0] = NIOUDP(55, "127.0.0.1", 56, [])
+    node.nios[0] = NIOUDP(55, "127.0.0.1", 56, {})
     node.nios[0].name = "Ethernet0"
-    node.nios[1] = NIOUDP(55, "127.0.0.1", 56, [])
+    node.nios[1] = NIOUDP(55, "127.0.0.1", 56, {})
     node.nios[1].name = "Ethernet1"
     node._hypervisor.send = AsyncioMagicMock(return_value=["0050.7966.6801 1 Ethernet0", "0050.7966.6802 1 Ethernet1"])
     console = EthernetSwitchConsole(node)
@@ -125,12 +125,11 @@ def test_change_aux_port(node, port_manager):
 
 
 def test_update_ubridge_udp_connection(node, async_run):
-    filters = [{
-        "type": "latency",
-        "value": 10
-    }]
+    filters = {
+        "latency": [10]
+    }
 
-    snio = NIOUDP(1245, "localhost", 1246, [])
+    snio = NIOUDP(1245, "localhost", 1246, {})
     dnio = NIOUDP(1245, "localhost", 1244, filters)
     with asyncio_patch("gns3server.compute.base_node.BaseNode._ubridge_apply_filters") as mock:
         async_run(node.update_ubridge_udp_connection('VPCS-10', snio, dnio))
@@ -68,6 +68,24 @@ def test_cloud_nio_create_udp(http_compute, vm):
     assert response.json["type"] == "nio_udp"
 
 
+def test_cloud_nio_update_udp(http_compute, vm):
+    http_compute.post("/projects/{project_id}/cloud/nodes/{node_id}/adapters/0/ports/0/nio".format(project_id=vm["project_id"], node_id=vm["node_id"]), {"type": "nio_udp",
+                                                                                                                                                          "lport": 4242,
+                                                                                                                                                          "rport": 4343,
+                                                                                                                                                          "rhost": "127.0.0.1"})
+    response = http_compute.put("/projects/{project_id}/cloud/nodes/{node_id}/adapters/0/ports/0/nio".format(project_id=vm["project_id"], node_id=vm["node_id"]),
+                                {
+                                    "type": "nio_udp",
+                                    "lport": 4242,
+                                    "rport": 4343,
+                                    "rhost": "127.0.0.1",
+                                    "filters": {}},
+                                example=True)
+    assert response.status == 201, response.body.decode()
+    assert response.route == "/projects/{project_id}/cloud/nodes/{node_id}/adapters/{adapter_number:\d+}/ports/{port_number:\d+}/nio"
+    assert response.json["type"] == "nio_udp"
+
+
 def test_cloud_delete_nio(http_compute, vm):
     http_compute.post("/projects/{project_id}/cloud/nodes/{node_id}/adapters/0/ports/0/nio".format(project_id=vm["project_id"], node_id=vm["node_id"]), {"type": "nio_udp",
                                                                                                                                                           "lport": 4242,
@@ -69,6 +69,24 @@ def test_nat_nio_create_udp(http_compute, vm):
     assert response.json["type"] == "nio_udp"
 
 
+def test_nat_nio_update_udp(http_compute, vm):
+    http_compute.post("/projects/{project_id}/nat/nodes/{node_id}/adapters/0/ports/0/nio".format(project_id=vm["project_id"], node_id=vm["node_id"]), {"type": "nio_udp",
+                                                                                                                                                        "lport": 4242,
+                                                                                                                                                        "rport": 4343,
+                                                                                                                                                        "rhost": "127.0.0.1"})
+    response = http_compute.put("/projects/{project_id}/nat/nodes/{node_id}/adapters/0/ports/0/nio".format(project_id=vm["project_id"], node_id=vm["node_id"]),
+                                {
+                                    "type": "nio_udp",
+                                    "lport": 4242,
+                                    "rport": 4343,
+                                    "rhost": "127.0.0.1",
+                                    "filters": {}},
+                                example=True)
+    assert response.status == 201, response.body.decode()
+    assert response.route == "/projects/{project_id}/nat/nodes/{node_id}/adapters/{adapter_number:\d+}/ports/{port_number:\d+}/nio"
+    assert response.json["type"] == "nio_udp"
+
+
 def test_nat_delete_nio(http_compute, vm):
     with asyncio_patch("gns3server.compute.builtin.nodes.nat.Nat.add_nio"):
         http_compute.post("/projects/{project_id}/nat/nodes/{node_id}/adapters/0/ports/0/nio".format(project_id=vm["project_id"], node_id=vm["node_id"]), {"type": "nio_udp",