#!/usr/bin/env python
#
# Copyright (C) 2016 GNS3 Technologies Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import re
import uuid
import html
import aiohttp

import logging
log = logging.getLogger(__name__)


FILTERS = [
    {
        "type": "frequency_drop",
        "name": "Frequency drop",
        "description": "It will drop everything with a -1 frequency, drop every Nth packet with a positive frequency, or drop nothing",
        "parameters": [
            {
                "name": "Frequency",
                "minimum": -1,
                "maximum": 32767,
                "type": "int",
                "unit": "th packet"
            }
        ]
    },
    {
        "type": "packet_loss",
        "name": "Packet loss",
        "description": "The percentage represents the chance for a packet to be lost",
        "parameters": [
            {
                "name": "Chance",
                "minimum": 0,
                "maximum": 100,
                "type": "int",
                "unit": "%"
            }
        ]
    },
    {
        "type": "delay",
        "name": "Delay",
        "description": "Delay packets in milliseconds. You can add jitter in milliseconds (+/-) to the delay",
        "parameters": [
            {
                "name": "Latency",
                "minimum": 0,
                "maximum": 32767,
                "unit": "ms",
                "type": "int"
            },
            {
                "name": "Jitter (-/+)",
                "minimum": 0,
                "maximum": 32767,
                "unit": "ms",
                "type": "int"
            }
        ]
    },
    {
        "type": "corrupt",
        "name": "Corrupt",
        "description": "The percentage represents the chance for a packet to be corrupted",
        "parameters": [
            {
                "name": "Chance",
                "minimum": 0,
                "maximum": 100,
                "unit": "%",
                "type": "int"
            }
        ]
    },
    {
        "type": "bpf",
        "name": "Berkeley Packet Filter (BPF)",
        "description": "This filter will drop any packet matching a BPF expression. Put one expression per line",
        "parameters": [
            {
                "name": "Filters",
                "type": "text"
            }
        ]
    }
]
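
# For reference, the per-link filter values configured by a client are stored
# in Link._filters as a mapping of filter type to parameter list. A
# representative value (illustrative only, not taken from a real project)
# could look like:
#
#     {"packet_loss": [2], "delay": [10, 5], "bpf": ["icmp"]}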


class Link:
    """
    Base class for links.
    """

    def __init__(self, project, link_id=None):

        if link_id:
            self._id = link_id
        else:
            self._id = str(uuid.uuid4())
        self._nodes = []
        self._project = project
        self._capturing = False
        self._capture_node = None
        self._capture_file_name = None
        self._streaming_pcap = None
        self._created = False
        self._link_type = "ethernet"
        self._suspended = False
        self._filters = {}

    @property
    def filters(self):
        """
        Get the filters applied to this link

        :returns: dict of filter type to list of values
        """
        return self._filters

    @property
    def project(self):
        """
        Get the project this link belongs to

        :returns: Project instance
        """
        return self._project

    @property
    def capture_node(self):
        """
        Get the capturing node

        :returns: Node instance
        """
        return self._capture_node

    @property
    def compute(self):
        """
        Get the compute of the capturing node

        :returns: Compute instance
        """
        assert self.capture_node
        return self.capture_node["node"].compute

    def get_active_filters(self):
        """
        Return the active filters.
        Filters are overridden if the link is suspended.
        """
        if self._suspended:
            # this allows all node types to support suspending a link
            return {"frequency_drop": [-1]}
        return self._filters

    async def update_filters(self, filters):
        """
        Modify the filters list.
        Filters with a value of 0 are dropped because they are not active.
        """
        new_filters = {}
        for (filter, values) in filters.items():
            new_values = []
            for value in values:
                if isinstance(value, str):
                    new_values.append(value.strip("\n"))
                else:
                    new_values.append(int(value))
            values = new_values
            if len(values) != 0 and values[0] != 0 and values[0] != '':
                new_filters[filter] = values

        if new_filters != self.filters:
            self._filters = new_filters
            if self._created:
                await self.update()
                self._project.emit_notification("link.updated", self.__json__())
                self._project.dump()
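
    # A minimal usage sketch (assuming a concrete Link subclass instance named
    # "link", awaited from a running coroutine). Values of 0 or "" disable a
    # filter, so the second call removes the packet loss filter again:
    #
    #     await link.update_filters({"packet_loss": [25], "delay": [10, 5]})
    #     await link.update_filters({"packet_loss": [0], "delay": [10, 5]})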

    async def update_suspend(self, value):
        if value != self._suspended:
            self._suspended = value
            await self.update()
            self._project.emit_notification("link.updated", self.__json__())
            self._project.dump()
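
    # Note that suspending a link does not discard the configured filters:
    # while suspended, get_active_filters() overrides them with
    # {"frequency_drop": [-1]}, which drops every packet (see above).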

    @property
    def created(self):
        """
        :returns: True if the link has been created on the computes
        """
        return self._created

    async def add_node(self, node, adapter_number, port_number, label=None, dump=True):
        """
        Add a node to the link

        :param dump: Dump project on disk
        """

        port = node.get_port(adapter_number, port_number)
        if port is None:
            raise aiohttp.web.HTTPNotFound(text="Port {}/{} for {} not found".format(adapter_number, port_number, node.name))
        if port.link is not None:
            raise aiohttp.web.HTTPConflict(text="Port is already used")

        self._link_type = port.link_type

        for other_node in self._nodes:
            if other_node["node"] == node:
                raise aiohttp.web.HTTPConflict(text="Cannot connect to itself")

            if node.node_type in ["nat", "cloud"]:
                if other_node["node"].node_type in ["nat", "cloud"]:
                    raise aiohttp.web.HTTPConflict(text="Connecting a {} to a {} is not allowed".format(other_node["node"].node_type, node.node_type))

            # Check if user is not connecting serial => ethernet
            other_port = other_node["node"].get_port(other_node["adapter_number"], other_node["port_number"])
            if other_port is None:
                raise aiohttp.web.HTTPNotFound(text="Port {}/{} for {} not found".format(other_node["adapter_number"], other_node["port_number"], other_node["node"].name))
            if port.link_type != other_port.link_type:
                raise aiohttp.web.HTTPConflict(text="Connecting a {} interface to a {} interface is not allowed".format(other_port.link_type, port.link_type))

        if label is None:
            label = {
                "text": html.escape("{}/{}".format(adapter_number, port_number)),
                "style": "font-family: TypeWriter;font-size: 10.0;font-weight: bold;fill: #000000;fill-opacity: 1.0;"
            }

        self._nodes.append({
            "node": node,
            "adapter_number": adapter_number,
            "port_number": port_number,
            "port": port,
            "label": label
        })

        if len(self._nodes) == 2:
            await self.create()
            for n in self._nodes:
                n["node"].add_link(self)
                n["port"].link = self
            self._created = True
            self._project.emit_notification("link.created", self.__json__())

        if dump:
            self._project.dump()
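
    # A minimal usage sketch (assuming two Node instances "node_a" and "node_b"
    # with free Ethernet ports, called from a running coroutine). The link is
    # only created on the computes once both ends are attached:
    #
    #     await link.add_node(node_a, adapter_number=0, port_number=0)
    #     await link.add_node(node_b, adapter_number=0, port_number=0)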

    async def update_nodes(self, nodes):
        for node_data in nodes:
            node = self._project.get_node(node_data["node_id"])
            for port in self._nodes:
                if port["node"] == node:
                    label = node_data.get("label")
                    if label:
                        port["label"] = label
        self._project.emit_notification("link.updated", self.__json__())
        self._project.dump()

    async def create(self):
        """
        Create the link
        """

        raise NotImplementedError

    async def update(self):
        """
        Update a link
        """

        raise NotImplementedError

    async def delete(self):
        """
        Delete the link
        """
        for n in self._nodes:
            # It could be different from self if we roll back an already existing link
            if n["port"].link == self:
                n["port"].link = None
                n["node"].remove_link(self)

    async def reset(self):
        """
        Reset a link
        """
        raise NotImplementedError

    async def start_capture(self, data_link_type="DLT_EN10MB", capture_file_name=None):
        """
        Start capture on the link

        :returns: Capture object
        """

        self._capturing = True
        self._capture_file_name = capture_file_name
        self._project.emit_notification("link.updated", self.__json__())
2018-10-15 13:05:49 +03:00

    async def stop_capture(self):
        """
        Stop capture on the link
        """

        self._capturing = False
        self._project.emit_notification("link.updated", self.__json__())

    def pcap_streaming_url(self):
        """
        Get the PCAP streaming URL on the compute

        :returns: URL
        """

        assert self.capture_node
        compute = self.capture_node["node"].compute
        node_type = self.capture_node["node"].node_type
        node_id = self.capture_node["node"].id
        adapter_number = self.capture_node["adapter_number"]
        port_number = self.capture_node["port_number"]
        url = "/projects/{project_id}/{node_type}/nodes/{node_id}/adapters/{adapter_number}/ports/{port_number}/pcap".format(project_id=self.project.id,
                                                                                                                              node_type=node_type,
                                                                                                                              node_id=node_id,
                                                                                                                              adapter_number=adapter_number,
                                                                                                                              port_number=port_number)
        return compute._getUrl(url)
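
    # For illustration (with placeholder IDs), the path portion of the returned
    # compute URL looks like:
    #
    #     /projects/<project_id>/qemu/nodes/<node_id>/adapters/0/ports/0/pcap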

    async def node_updated(self, node):
        """
        Called when a node member of the link is updated
        """
        raise NotImplementedError

    def default_capture_file_name(self):
        """
        :returns: File name for a capture on this link
        """

        capture_file_name = "{}_{}-{}_to_{}_{}-{}".format(self._nodes[0]["node"].name,
                                                          self._nodes[0]["adapter_number"],
                                                          self._nodes[0]["port_number"],
                                                          self._nodes[1]["node"].name,
                                                          self._nodes[1]["adapter_number"],
                                                          self._nodes[1]["port_number"])
        return re.sub(r"[^0-9A-Za-z_-]", "", capture_file_name) + ".pcap"
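
    # For example, a link between adapter 0/port 0 of "PC1" and adapter 0/port 0
    # of "PC2" (hypothetical node names) yields "PC1_0-0_to_PC2_0-0.pcap".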

    @property
    def id(self):
        return self._id

    @property
    def nodes(self):
        """
        Get the current nodes attached to this link

        :returns: list of Node instances
        """
        return [node['node'] for node in self._nodes]

    @property
    def capturing(self):
        return self._capturing

    @property
    def capture_file_path(self):
        """
        Get the path of the capture
        """

        if self._capture_file_name:
            return os.path.join(self._project.captures_directory, self._capture_file_name)
        else:
            return None

    @property
    def capture_compute_id(self):
        """
        Get the capture compute ID.
        """
        if self._capture_node:
            return self.capture_node["node"].compute.id
        else:
            return None

    def available_filters(self):
        """
        Return the list of filters compatible with this link

        :returns: Array of filters
        """
        filter_node = self._get_filter_node()
        if filter_node:
            return FILTERS
        return []

    def _get_filter_node(self):
        """
        Return the node where the filter will run

        :returns: None if no node supports filtering, else the node
        """
        for node in self._nodes:
            if node["node"].node_type in ('vpcs',
                                          'traceng',
                                          'vmware',
                                          'dynamips',
                                          'qemu',
                                          'iou',
                                          'cloud',
                                          'nat',
                                          'virtualbox',
                                          'docker'):
                return node["node"]
        return None
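
    # For example, a link with a Docker node on one end can apply filters
    # ("docker" appears in the tuple above), so available_filters() returns the
    # full FILTERS list; a link between two unsupported node types returns [].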

    def __eq__(self, other):
        if not isinstance(other, Link):
            return False
        return self.id == other.id

    def __hash__(self):
        return hash(self._id)

    def __json__(self, topology_dump=False):
        """
        :param topology_dump: Filter to keep only properties required for saving on disk
        """
        res = []
        for side in self._nodes:
            res.append({
                "node_id": side["node"].id,
                "adapter_number": side["adapter_number"],
                "port_number": side["port_number"],
                "label": side["label"]
            })
        if topology_dump:
            return {
                "nodes": res,
                "link_id": self._id,
                "filters": self._filters,
                "suspend": self._suspended
            }
        return {
            "nodes": res,
            "link_id": self._id,
            "project_id": self._project.id,
            "capturing": self._capturing,
            "capture_file_name": self._capture_file_name,
            "capture_file_path": self.capture_file_path,
            "capture_compute_id": self.capture_compute_id,
            "link_type": self._link_type,
            "filters": self._filters,
            "suspend": self._suspended
        }
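
    # A sketch of the serialized shape produced for notifications (field values
    # below are illustrative placeholders, not real project data):
    #
    #     {
    #         "nodes": [{"node_id": "...", "adapter_number": 0, "port_number": 0, "label": {...}}],
    #         "link_id": "...",
    #         "project_id": "...",
    #         "capturing": False,
    #         "capture_file_name": None,
    #         "capture_file_path": None,
    #         "capture_compute_id": None,
    #         "link_type": "ethernet",
    #         "filters": {},
    #         "suspend": False
    #     }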