2016-03-11 17:51:35 +02:00
#!/usr/bin/env python
#
# Copyright (C) 2016 GNS3 Technologies Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
2016-04-26 18:10:33 +03:00
import os
2016-04-21 13:14:09 +03:00
import re
2016-03-11 17:51:35 +02:00
import uuid
2017-03-20 20:14:07 +02:00
import html
2016-03-11 17:51:35 +02:00
import asyncio
2016-08-24 00:33:19 +03:00
import aiohttp
2016-03-11 17:51:35 +02:00
2016-04-26 18:10:33 +03:00
import logging
log = logging . getLogger ( __name__ )
2017-06-30 11:22:30 +03:00
# Packet-filter descriptions advertised to clients: each entry documents one
# filter type a link may apply — its machine type, user-facing name, a help
# description and the parameters the GUI should prompt for.
FILTERS = [
    {
        "type": "frequency_drop",
        "name": "Frequency drop",
        "description": "It will drop everything with a -1 frequency, drop every Nth packet with a positive frequency, or drop nothing",
        "parameters": [
            {
                "name": "Frequency",
                "minimum": -1,
                "maximum": 32767,
                "type": "int",
                "unit": "th packet"
            }
        ]
    },
    {
        "type": "packet_loss",
        "name": "Packet loss",
        "description": "The percentage represents the chance for a packet to be lost",
        "parameters": [
            {
                "name": "Chance",
                "minimum": 0,
                "maximum": 100,
                "type": "int",
                "unit": "%"
            }
        ]
    },
    {
        "type": "delay",
        "name": "Delay",
        "description": "Delay packets in milliseconds. You can add jitter in milliseconds (+/-) of the delay",
        "parameters": [
            {
                "name": "Latency",
                "minimum": 0,
                "maximum": 32767,
                "unit": "ms",
                "type": "int"
            },
            {
                "name": "Jitter (-/+)",
                "minimum": 0,
                "maximum": 32767,
                "unit": "ms",
                "type": "int"
            }
        ]
    },
    {
        "type": "corrupt",
        "name": "Corrupt",
        "description": "The percentage represents the chance for a packet to be corrupted",
        "parameters": [
            {
                "name": "Chance",
                "minimum": 0,
                "maximum": 100,
                "unit": "%",
                "type": "int"
            }
        ]
    },
    {
        "type": "bpf",
        "name": "Berkeley Packet Filter (BPF)",
        "description": "This filter will drop any packet matching a BPF expression. Put one expression per line",
        "parameters": [
            {
                "name": "Filters",
                "type": "text"
            }
        ]
    }
]
2016-03-11 17:51:35 +02:00
class Link:
    """
    Base class for links.

    A link connects exactly two node ports. Concrete subclasses implement
    :meth:`create`, :meth:`update`, :meth:`read_pcap_from_source` and
    :meth:`node_updated` for a specific transport (e.g. UDP tunnels).
    """

    def __init__(self, project, link_id=None):
        """
        :param project: Project instance the link belongs to
        :param link_id: Optional UUID string; a fresh UUID4 is generated when omitted
        """
        if link_id:
            self._id = link_id
        else:
            self._id = str(uuid.uuid4())
        self._nodes = []
        self._project = project
        self._capturing = False
        self._capture_file_name = None
        self._streaming_pcap = None
        self._created = False
        self._link_type = "ethernet"
        self._suspend = False
        self._filters = {}

    @property
    def filters(self):
        """
        Get the dictionary of active filters (filter type -> list of values)
        """
        return self._filters

    def get_active_filters(self):
        """
        Return the active filters.

        Filters are overridden if the link is suspended: a suspended link
        behaves as a frequency_drop of -1 (drop everything).
        """
        if self._suspend:
            return {"frequency_drop": [-1]}
        return self._filters

    async def update_filters(self, filters):
        """
        Modify the filters list.

        Filter with value 0 (or empty string) will be dropped because not active
        """
        new_filters = {}
        for (filter_type, values) in filters.items():
            # Normalize values: strings are stripped of newlines, everything
            # else is coerced to int.
            new_values = []
            for value in values:
                if isinstance(value, str):
                    new_values.append(value.strip("\n"))
                else:
                    new_values.append(int(value))
            values = new_values
            # A leading 0 or '' means the filter is disabled
            if len(values) != 0 and values[0] != 0 and values[0] != '':
                new_filters[filter_type] = values

        if new_filters != self.filters:
            self._filters = new_filters
            if self._created:
                await self.update()
                self._project.controller.notification.emit("link.updated", self.__json__())
                self._project.dump()

    async def update_suspend(self, value):
        """
        Suspend or resume the link, propagating the change to the compute.
        """
        if value != self._suspend:
            self._suspend = value
            await self.update()
            self._project.controller.notification.emit("link.updated", self.__json__())
            self._project.dump()

    @property
    def created(self):
        """
        :returns: True if the link has been created on the computes
        """
        return self._created

    async def add_node(self, node, adapter_number, port_number, label=None, dump=True):
        """
        Add a node to the link

        :param node: Node instance to attach
        :param adapter_number: Adapter number on the node
        :param port_number: Port number on the adapter
        :param label: Optional label dict; a default one is generated when omitted
        :param dump: Dump project on disk

        :raises aiohttp.web.HTTPNotFound: if the port does not exist
        :raises aiohttp.web.HTTPConflict: if the port is busy or the topology is invalid
        """

        port = node.get_port(adapter_number, port_number)
        if port is None:
            raise aiohttp.web.HTTPNotFound(text="Port {}/{} for {} not found".format(adapter_number, port_number, node.name))
        if port.link is not None:
            raise aiohttp.web.HTTPConflict(text="Port is already used")

        self._link_type = port.link_type

        for other_node in self._nodes:
            if other_node["node"] == node:
                raise aiohttp.web.HTTPConflict(text="Cannot connect to itself")

            # cloud <-> cloud / nat <-> nat connections make no sense
            if node.node_type in ["nat", "cloud"]:
                if other_node["node"].node_type in ["nat", "cloud"]:
                    raise aiohttp.web.HTTPConflict(text="It's not allowed to connect a {} to a {}".format(other_node["node"].node_type, node.node_type))

            # Check if user is not connecting serial => ethernet
            other_port = other_node["node"].get_port(other_node["adapter_number"], other_node["port_number"])
            if other_port is None:
                raise aiohttp.web.HTTPNotFound(text="Port {}/{} for {} not found".format(other_node["adapter_number"], other_node["port_number"], other_node["node"].name))
            if port.link_type != other_port.link_type:
                raise aiohttp.web.HTTPConflict(text="It's not allowed to connect a {} to a {}".format(other_port.link_type, port.link_type))

        if label is None:
            label = {
                "x": -10,
                "y": -10,
                "rotation": 0,
                "text": html.escape("{}/{}".format(adapter_number, port_number)),
                "style": "font-size: 10; font-style: Verdana"
            }

        self._nodes.append({
            "node": node,
            "adapter_number": adapter_number,
            "port_number": port_number,
            "port": port,
            "label": label
        })

        # A link is materialized on the computes once both ends are attached
        if len(self._nodes) == 2:
            await self.create()
            for n in self._nodes:
                n["node"].add_link(self)
                n["port"].link = self
            self._created = True
            self._project.controller.notification.emit("link.created", self.__json__())

        if dump:
            self._project.dump()

    async def update_nodes(self, nodes):
        """
        Update per-end properties (currently only the label) of attached nodes.
        """
        for node_data in nodes:
            node = self._project.get_node(node_data["node_id"])
            for port in self._nodes:
                if port["node"] == node:
                    label = node_data.get("label")
                    if label:
                        port["label"] = label
        self._project.controller.notification.emit("link.updated", self.__json__())
        self._project.dump()

    async def create(self):
        """
        Create the link

        Implemented by transport-specific subclasses.
        """
        raise NotImplementedError

    async def update(self):
        """
        Update a link

        Implemented by transport-specific subclasses.
        """
        raise NotImplementedError

    async def delete(self):
        """
        Delete the link
        """
        for n in self._nodes:
            # It could be different of self if we rollback an already existing link
            if n["port"].link == self:
                n["port"].link = None
                n["node"].remove_link(self)

    async def start_capture(self, data_link_type="DLT_EN10MB", capture_file_name=None):
        """
        Start capture on the link

        :returns: Capture object
        """
        self._capturing = True
        self._capture_file_name = capture_file_name
        # FIX: asyncio.async() is a SyntaxError on Python 3.7+ (async is a
        # keyword); ensure_future is the supported replacement.
        self._streaming_pcap = asyncio.ensure_future(self._start_streaming_pcap())
        self._project.controller.notification.emit("link.updated", self.__json__())

    async def _start_streaming_pcap(self):
        """
        Dump a pcap file on disk
        """

        if os.path.exists(self.capture_file_path):
            try:
                os.remove(self.capture_file_path)
            except OSError as e:
                raise aiohttp.web.HTTPConflict(text="Could not delete old capture file '{}': {}".format(self.capture_file_path, e))

        try:
            stream_content = await self.read_pcap_from_source()
        except aiohttp.web.HTTPException as e:
            error_msg = "Could not stream PCAP file: error {}: {}".format(e.status, e.text)
            log.error(error_msg)
            self._capturing = False
            self._project.notification.emit("log.error", {"message": error_msg})
            self._project.controller.notification.emit("link.updated", self.__json__())
            # FIX: without this return we would fall through and use the
            # unbound stream_content, raising NameError and hiding the error.
            return

        with stream_content as stream:
            try:
                with open(self.capture_file_path, "wb") as f:
                    while self._capturing:
                        # We read 1 bytes by 1 otherwise the remaining data is not read if the traffic stops
                        data = await stream.read(1)
                        if data:
                            f.write(data)
                            # Flush to disk otherwise the live is not really live
                            f.flush()
                        else:
                            break
            except OSError as e:
                raise aiohttp.web.HTTPConflict(text="Could not write capture file '{}': {}".format(self.capture_file_path, e))

    async def stop_capture(self):
        """
        Stop capture on the link
        """
        self._capturing = False
        self._project.controller.notification.emit("link.updated", self.__json__())

    async def read_pcap_from_source(self):
        """
        Return a FileStream of the Pcap from the compute server

        FIX: renamed from _read_pcap_from_source so it matches the call in
        _start_streaming_pcap (self.read_pcap_from_source()), which otherwise
        raised AttributeError at capture time.
        """
        raise NotImplementedError

    async def node_updated(self, node):
        """
        Called when a node member of the link is updated
        """
        raise NotImplementedError

    def default_capture_file_name(self):
        """
        :returns: File name for a capture on this link
        """
        capture_file_name = "{}_{}-{}_to_{}_{}-{}".format(self._nodes[0]["node"].name,
                                                          self._nodes[0]["adapter_number"],
                                                          self._nodes[0]["port_number"],
                                                          self._nodes[1]["node"].name,
                                                          self._nodes[1]["adapter_number"],
                                                          self._nodes[1]["port_number"])
        # Strip anything that is not filesystem safe
        return re.sub("[^0-9A-Za-z_-]", "", capture_file_name) + ".pcap"

    @property
    def id(self):
        return self._id

    @property
    def nodes(self):
        """
        Get the current nodes attached to this link.

        NOTE: the original file defined this property twice; the first
        definition (returning the raw port dictionaries) was dead code
        shadowed by this one, so it has been removed.
        """
        return [node['node'] for node in self._nodes]

    @property
    def capturing(self):
        return self._capturing

    @property
    def capture_file_path(self):
        """
        Get the path of the capture
        """
        if self._capture_file_name:
            return os.path.join(self._project.captures_directory, self._capture_file_name)
        else:
            return None

    def available_filters(self):
        """
        Return the list of filters compatible with this link

        :returns: Array of filters
        """
        filter_node = self._get_filter_node()
        if filter_node:
            return FILTERS
        return []

    def _get_filter_node(self):
        """
        Return the node where the filter will run

        :returns: None if no node support filtering else the node
        """
        for node in self._nodes:
            if node["node"].node_type in ('vpcs',
                                          'vmware',
                                          'dynamips',
                                          'qemu',
                                          'iou',
                                          'cloud',
                                          'nat',
                                          'virtualbox',
                                          'docker'):
                return node["node"]
        return None

    def __eq__(self, other):
        if not isinstance(other, Link):
            return False
        return self.id == other.id

    def __hash__(self):
        return hash(self._id)

    def __json__(self, topology_dump=False):
        """
        :param topology_dump: Filter to keep only properties require for saving on disk
        """
        res = []
        for side in self._nodes:
            res.append({
                "node_id": side["node"].id,
                "adapter_number": side["adapter_number"],
                "port_number": side["port_number"],
                "label": side["label"]
            })
        if topology_dump:
            return {
                "nodes": res,
                "link_id": self._id,
                "filters": self._filters,
                "suspend": self._suspend
            }
        return {
            "nodes": res,
            "link_id": self._id,
            "project_id": self._project.id,
            "capturing": self._capturing,
            "capture_file_name": self._capture_file_name,
            "capture_file_path": self.capture_file_path,
            "link_type": self._link_type,
            "filters": self._filters,
            "suspend": self._suspend
        }