As a starting point, there is a fairly large Cisco-based network that has been in operation for more than 10 years and consists of:
1,348 buildings and structures;
10,030 production access points of various models, ranging from outright junk like the AIR-LAP1131G and AIR-LAP1041N (about 88% of the total) to quite decent CAP1602I, CAP2602I, CAP1702I and AP1832I units;
WISM1, WISM2 and WLC 8540 controllers.
Over more than ten years of operation, the cable runs had deteriorated as a result of numerous refits and renovations, and a large number of access points and power injectors had begun to fail. Combined with the steady growth in traffic consumption, this led to constant complaints about the quality of the service.
Taking the schedule and the budget into account, it was decided that the first phase of the modernization would cover the networks of 589 buildings; a decision on the remaining sites will be taken after the results of this phase have been analyzed.
The following tasks were formulated:
replace failed and obsolete access points with new, modern ones;
provide coverage for additional buildings and premises;
replace the access switches and power injectors with PoE switches.
Site surveys were carried out and radio planning for all 589 buildings was done in Ekahau.
The planning results were consolidated and analyzed in Qlik; in total, the 589 buildings required 8,014 access points (plus about 1% spare, just in case :).
Which hardware to choose?
A Mikrotik-based option was seriously considered: cAP ac access points plus CRS328-24P-4S+RM PoE switches managed through CAPsMAN, i.e. 589 CAPsMAN instances, a "controller in every building", provisioned with Ansible.
After weighing the options, though, the decision was made to stay with Cisco for the wireless part :)
So, Cisco it is…
The AIR-AP1815I-R was chosen as the new access point; 4,175 of them went into the first phase.
The Ekahau plans, with the location (and planned name) of every access point, were handed over to the installation crews.
For exchanging data with the crews we looked at Nextcloud, Google Sheets, … :) and settled on Google Sheets.
The workflow for the crews is simple: mount the point in the planned location and enter its MAC address in the spreadsheet next to the planned name.
Everything else (renaming the point on the controller, checking that it actually came up, tracking the progress of the work) had to happen automatically, without anyone touching the controllers by hand…
All of it is glued together from open-source pieces :)
The first piece is a bash script that polls the WLCs over SNMP:
#!/bin/bash
# SNMP OID for the AP eth0 speed/duplex, indexed by AP id:
oid_speed=".1.3.6.1.4.1.9.9.513.1.2.2.1.11"
# SNMP OID for the AP name, indexed by AP id:
oid_name="SNMPv2-SMI::enterprises.9.9.513.1.1.1.1.5"
# SNMP OID for the AP IP address:
oid_address=".1.3.6.1.4.1.14179.2.2.1.1.19"
# SNMP OID for the AP MAC address:
oid_mac=".1.3.6.1.4.1.9.9.513.1.1.1.1.2"
# SNMPv2 read community
community="snmp_comunity"
# WLC management IP addresses
address1="10.x.y.z"
# a second controller would be polled with the same four snmpwalk calls
address2="10.x1.y1.z1"
snmpwalk -v 2c -c $community $address1 $oid_name | awk '{print $1 $4}' | sed 's/SNMPv2-SMI::enterprises.9.9.513.1.1.1.1.5./'$address1'wlc/' | sed 's/"/ /' | sed 's/"//' > /opt/rename/snmp_files/ap_name_index.txt &
snmpwalk -v 2c -c $community $address1 $oid_speed | awk '{print substr($1, 1, length($1)-2) " " $4}' | sed 's/SNMPv2-SMI::enterprises.9.9.513.1.2.2.1.11./'$address1'wlc/' | uniq > /opt/rename/snmp_files/ap_speed_index.txt &
snmpwalk -v 2c -c $community $address1 $oid_address | awk '{print $1 " " $4}' | sed 's/SNMPv2-SMI::enterprises.14179.2.2.1.1.19./'$address1'wlc/' > /opt/rename/snmp_files/ap_address_index.txt &
snmpwalk -v 2c -c $community $address1 $oid_mac | awk '{print $1 " " $4"."$5"."$6"."$7"."$8"."$9}' | sed 's/SNMPv2-SMI::enterprises.9.9.513.1.1.1.1.2./'$address1'wlc/' > /opt/rename/snmp_files/ap_mac_index.txt
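For reference, each of the four files ends up with one record per line in the form wlc+index followed by a value. The two lines below are invented examples (names, index and IP are made up) just to show the shape of ap_name_index.txt and ap_speed_index.txt that the loader script expects:
10.x.y.zwlc0.39.13.138.1.0 bld.floor2.a01
10.x.y.zwlc0.39.13.138.1.0 1000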
The four resulting files are then parsed by a Python script and loaded into an SQLite database:
import sqlite3, sys
import logging
logging.basicConfig(filename='/opt/rename/logg/snmp_py.log', filemode='w', format='%(name)s - %(levelname)s - %(message)s')
def processfile(filename):
"""
.
"""
contents = []
cnt = []
print ('*** Reading ['+filename+'] ...')
try:
f = open(filename)
contents = f.read().splitlines()
f.close()
except IOError:
logging.warning ("Error opening file: ", filename)
sys.exit(1)
for line in contents:
s = line.split(' ')
if len(s) == 2:
cnt.append(s)
return dict(cnt)
def createdick(Aps,Macs,Addresses,Speeds):
    # Combine the four SNMP dicts into one record per AP, keyed by SNMP index
    contents = {}
try:
for index in Aps.keys():
contents[index] = [Aps.get(index).lower(),Macs.get(index), Addresses.get(index), Speeds.get(index),index[:12], index[15:]]
    except Exception:
        logging.warning("Error building combined dict, last index: %s", index)
return contents
def create_db():
    # Create the SQLite database and the accesspoints table
    conn = sqlite3.connect("/opt/rename/db/ApsSnmpDatabase.db") # use ":memory:" to keep it in RAM instead
    cursor = conn.cursor()
    # one row per access point
cursor.execute("""CREATE TABLE IF NOT EXISTS accesspoints
(nameap TEXT ,
mac TEXT PRIMARY KEY,
ip TEXT ,
duplex INTEGER,
wlc TEXT,
snmp_index TEXT)
""")
conn.commit()
cursor.close()
conn.close()
def insert_data_db(ApSnmpDb):
    # Insert the collected AP records into the accesspoints table
conn = sqlite3.connect("/opt/rename/db/ApsSnmpDatabase.db")
cursor = conn.cursor()
for line in ApSnmpDb.values():
data = tuple(line)
cursor.execute('INSERT INTO accesspoints VALUES(?, ?, ?, ?, ?, ?)', data)
conn.commit()
cursor.close()
conn.close()
def delete_table():
    # Drop the accesspoints table so it can be rebuilt from scratch on every run
conn = sqlite3.connect("/opt/rename/db/ApsSnmpDatabase.db")
cursor = conn.cursor()
query = "DROP TABLE IF EXISTS accesspoints"
cursor.execute(query)
conn.commit()
cursor.close()
conn.close()
def main():
ap_name_index = '/opt/rename/snmp_files/ap_name_index.txt'
ap_speed_index = '/opt/rename/snmp_files/ap_speed_index.txt'
ap_mac_index = '/opt/rename/snmp_files/ap_mac_index.txt'
ap_address_index = '/opt/rename/snmp_files/ap_address_index.txt'
ApNameIndex = processfile(ap_name_index)
ApSpeedIndex = processfile(ap_speed_index)
ApMacIndex = processfile(ap_mac_index)
ApAddressIndex = processfile(ap_address_index)
ApDb = createdick(ApNameIndex,ApMacIndex,ApAddressIndex,ApSpeedIndex)
delete_table()
create_db()
insert_data_db(ApDb)
if __name__ == '__main__':
main()
The next script pulls the crews' data from Google Sheets into the same SQLite database, builds the rename commands and writes the acceptance date back into the sheet:
import logging
import httplib2
import apiclient.discovery
from oauth2client.service_account import ServiceAccountCredentials
from datetime import datetime, date, time
import sqlite3
import sys
logging.basicConfig(filename='/opt/rename/logg/rename_ap.log', filemode='w', format='%(name)s - %(levelname)s - %(message)s')
def readgooglesheets():
    # service account key file obtained from the Google Developer Console
    CREDENTIALS_FILE = 'creds.json'
    # ID of the Google Sheets spreadsheet (taken from its URL)
    spreadsheet_id = 'id_google_table'
    # the service object is the entry point to the Sheets API
credentials = ServiceAccountCredentials.from_json_keyfile_name(
CREDENTIALS_FILE,
['https://www.googleapis.com/auth/spreadsheets',
'https://www.googleapis.com/auth/drive'])
httpAuth = credentials.authorize(httplib2.Http())
service = apiclient.discovery.build('sheets', 'v4', http = httpAuth)
    # read the three columns filled in by the crews: point MAC, planned name, acceptance date
values = service.spreadsheets().values().get(
spreadsheetId=spreadsheet_id,
range='CI3:CK9584',
majorDimension='COLUMNS'
        # values now holds the three requested columns
).execute()
ValuesList = values["values"]
return(ValuesList)
def create_db():
    # Create the googleAps table that mirrors the spreadsheet data
    conn = sqlite3.connect("/opt/rename/db/ApsSnmpDatabase.db")
    cursor = conn.cursor()
    # one row per sheet entry: point MAC, planned name, acceptance date
cursor.execute("""CREATE TABLE IF NOT EXISTS googleAps
(ApMac TEXT ,
ApName TEXT ,
AccDate TEXT)
""")
print('table googleAps created')
conn.commit()
cursor.close()
conn.close()
def insert_data_db(ApDb):
    # Insert the spreadsheet columns into the googleAps table
conn = sqlite3.connect("/opt/rename/db/ApsSnmpDatabase.db")
cursor = conn.cursor()
for n in range(len(ApDb[0])):
        # crews enter MACs in different formats, so strip separators and normalize case
        str = ApDb[0][n].replace('.', '').replace(':', '').upper()
        # a bare 12-character MAC gets a dot re-inserted after every two characters
        if len(str) == 12:
            # resulting in the aa.bb.cc.dd.ee.ff format that snmpwalk produces
            ApDb[0][n] = '.'.join(a + b for a, b in zip(str[::2], str[1::2]))
        # ApDb columns: 0 - MAC, 1 - planned name, 2 - acceptance date
data = (ApDb[0][n], ApDb[1][n].lower(), ApDb[2][n])
cursor.execute('INSERT INTO googleAps VALUES(?, ?, ?)', data)
print('added data table googleAps ')
conn.commit()
cursor.close()
conn.close()
def delete_table():
    # Drop the googleAps table so it can be rebuilt on every run
conn = sqlite3.connect("/opt/rename/db/ApsSnmpDatabase.db")
cursor = conn.cursor()
query = "DROP TABLE IF EXISTS googleAps"
print('table googleAps deleted ')
cursor.execute(query)
conn.commit()
cursor.close()
conn.close()
def renameap():
    # Build snmpset commands for every AP whose current name differs from the planned one
conn = sqlite3.connect("/opt/rename/db/ApsSnmpDatabase.db")
cursor = conn.cursor()
    # join the controller data with the spreadsheet data on MAC, keeping only mismatched names
query = """SELECT ApName, accesspoints.snmp_index, accesspoints.wlc FROM accesspoints
JOIN googleAps ON accesspoints.mac = googleAps.ApMac and accesspoints.nameap != googleAps.ApName"""
cursor.execute(query)
data = cursor.fetchall()
wlc_cmd = []
command=''
    community = "write_community"
    oid = '.1.3.6.1.4.1.9.9.513.1.1.1.1.5.'
    for n in range(len(data)):
        # build one snmpset command to set the AP name on its controller
        command = 'snmpset -v 2c -c ' + community + ' ' + data[n][2] + ' ' + oid + data[n][1] + ' s ' + data[n][0].lower()
        # collect the commands; a separate wrapper executes them later
wlc_cmd.append(command)
logging.debug(command)
conn.commit()
cursor.close()
conn.close()
return wlc_cmd
def writefile(filename, confs):
""""
"""
print ('*** Writing ['+filename+'] ...')
try:
f = open(filename,'w')
for line in confs :
f.write(line + '\n')
f.close()
    except IOError:
        logging.warning('Error writing file: %s', filename)
        sys.exit(1)
def write_date(LGoogle):
    # Write today's date into the sheet for points that have appeared on the controller
conn = sqlite3.connect("/opt/rename/db/ApsSnmpDatabase.db")
cursor = conn.cursor()
    # select points seen on the controller that have no acceptance date in the sheet yet
query = """SELECT accesspoints.nameap FROM accesspoints
JOIN googleAps ON accesspoints.mac = googleAps.ApMac and googleAps.AccDate = '' """
cursor.execute(query)
data = cursor.fetchall()
conn.commit()
cursor.close()
conn.close()
now = datetime.now()
date_today = now.strftime("%d")+'/'+now.strftime("%m")+'/'+now.strftime("%Y")
DateGoogle = []
i = 0
for n in LGoogle[1]:
for m in data:
if m[0] == n :
LGoogle[2][i] = date_today
DateGoogle.append([LGoogle[2][i]])
i = i+1
CREDENTIALS_FILE = '/opt/rename/creds.json'
spreadsheet_id = 'id_google_table'
credentials = ServiceAccountCredentials.from_json_keyfile_name(
CREDENTIALS_FILE,
['https://www.googleapis.com/auth/spreadsheets',
'https://www.googleapis.com/auth/drive'])
httpAuth = credentials.authorize(httplib2.Http())
service = apiclient.discovery.build('sheets', 'v4', http = httpAuth)
values = service.spreadsheets().values().batchUpdate(
spreadsheetId=spreadsheet_id,
body={
"valueInputOption": "USER_ENTERED",
"data": [
{"range": "CK3:CK9584",
"majorDimension": "ROWS",
"values": DateGoogle }
]
}
).execute()
def main():
wlc_conf = '/opt/rename/logg/wlc_commands.txt'
ListGoogle = readgooglesheets()
delete_table()
create_db()
insert_data_db(ListGoogle)
writefile(wlc_conf,renameap())
write_date(ListGoogle)
if __name__ == '__main__':
main()
The generated commands are then executed by a simple bash wrapper:
#!/bin/bash
file='/opt/rename/logg/wlc_commands.txt'
# run every generated snmpset command, one per line
while read line ;
do
$line
done < $file
That closes the loop: as soon as a crew enters the MAC address of a mounted access point in the spreadsheet, the point gets its planned hostname on the controller on the next run of the scripts, and the acceptance date appears in the sheet.
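For completeness, here is a minimal sketch of how the pieces can be chained from cron. The article does not name the script files, so the paths and filenames below are placeholders, not the original setup:
#!/bin/bash
# hypothetical wrapper: poll the WLCs, rebuild the SQLite tables,
# generate the rename commands and apply them on the controllers
/opt/rename/get_snmp_data.sh                # the snmpwalk script above
python3 /opt/rename/snmp_to_sqlite.py       # load the four files into SQLite
python3 /opt/rename/rename_from_sheets.py   # read Google Sheets, build wlc_commands.txt
/opt/rename/apply_wlc_commands.sh           # run the generated snmpset commands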
But how does a crew know that the MAC it just entered was picked up and that the point is actually working?
To give the crews feedback without handing out access to our systems, a Telegram bot was written. After looking at telegram.bot, the simpler telebot library was chosen:
import telebot
import os
import time
import sqlite3
# The bot answers 'check <floor plan>' requests with the state of the points on that plan.
# The SNMP data is refreshed from crontab by the scripts above, so it is reasonably fresh.
bot = telebot.TeleBot('bot_token')
def read_db(map):
try:
conn = sqlite3.connect("/opt/rename/db/ApsSnmpDatabase.db")
cursor = conn.cursor()
query = """ SELECT Apname, googleAps.APmac, accesspoints.duplex FROM googleAps
LEFT JOIN accesspoints ON accesspoints.mac = googleAps.ApMac
WHERE ApName LIKE '{}___'
ORDER BY ApName ASC """.format(map)
cursor.execute(query)
data = cursor.fetchall()
conn.commit()
cursor.close()
conn.close()
return data
    except Exception:
        # no chat context is available here; signal the failure to the caller
        return None
def create_message(AP_MAP):
try:
n = 0
msg = ''
for AP in AP_MAP:
msg+='|{:<17s}|{:<19s}|{:>5s} Mbps|\n'.format(AP[0], AP[1], str(AP[2]))
        # '<' left-aligns and '>' right-aligns the value inside a fixed-width field
n = n + 1
        if n > 0:
            msg = 'Access points on this plan: {} \n<pre>'.format(n)+msg+'</pre>'
        else:
            msg = 'Nothing found for this plan'
return msg
    except Exception:
        # also covers read_db returning None; the handler below sends this text to the user
        return 'Something went wrong, please try again in 5 minutes'
@bot.message_handler(content_types=['text'])
def send_text(message):
try:
        # requests start with 'check'; strip the keyword and the following space (6 characters)
        if message.text[:5].lower() == 'check':
            message.text = message.text.replace(message.text[:6],'') # leaves only the floor plan code
print(message)
Ap_on_Map = read_db(message.text)
msg = create_message(Ap_on_Map)
bot.send_message(message.chat.id, msg, parse_mode='HTML')
    except Exception:
        bot.send_message(message.chat.id, 'Something went wrong, please try again in 5 minutes')
        time.sleep(1)
bot.polling()
The speed column in the bot's reply is read as follows:
none - the point is not visible on the controller: either it is not connected yet, or the MAC was entered incorrectly;
100 - the point is up, but eth0 negotiated only 100 Mbps;
1000 - the point is up and eth0 negotiated 1000 Mbps.
An installation is accepted only when the point is powered from the new PoE switch and shows eth0 speed = 1000 Mbps.
Of course, it also happens that the MAC of point 1 is entered against point 2 and vice versa, so the two points simply swap names. The "procedure" for that case is simple: the crew corrects the MACs in the spreadsheet, and on the next run the scripts rename both points properly.
With renaming solved, the next problem was the quality of the cable lines.
It turned out that a brand-new access point on a new PoE switch is no guarantee: on a bad line the eth0 speed flaps between 100 and 1000, and the PoE switch logs detected poe-out status: current_too_low. Such lines have to be rebuilt.
How do we spot them across thousands of points? We push the eth0 speed of every access point into Graphite. The default retention did not suit us, so storage-schemas.conf was adjusted:
nano /path/to/graphite/configs/storage-schemas.conf

[carbon]
pattern = ^carbon.
retentions = 60s:90d

[default1minfor_1day]
pattern = .*
retentions = 1m:7d,5m:30d,30m:90d,24h:1y
Keep in mind that the new retention only applies to newly created metrics; existing .wsp files have to be resized by hand:
sudo find ./ -type f -name '*.wsp' -exec whisper-resize --nobackup {} 1m:7d 5m:30d 30m:90d 24h:1y \;
The eth0 speed of every point is sent to Graphite every 5 minutes by this script:
import graphyte
import sqlite3, sys
import logging
logging.basicConfig(filename='/opt/rename/logg/graphite.log', filemode='w', format='%(name)s - %(levelname)s - %(message)s')
graphyte.init('ip_address_graphite', prefix='aero')
def read_db():
    # Read point names and their eth0 speed from the SQLite database
conn = sqlite3.connect("/opt/rename/db/ApsSnmpDatabase.db")
cursor = conn.cursor()
    # only points that have an acceptance date and a name
query = """ SELECT googleAps.ApName, accesspoints.duplex FROM googleAps
LEFT JOIN accesspoints ON accesspoints.nameap = googleAps.ApName
WHERE googleAps.AccDate != '' AND googleAps.ApName != '' """
cursor.execute(query)
data = cursor.fetchall()
conn.commit()
cursor.close()
conn.close()
return data
def create_map():
data = read_db()
ApS = []
    # points that did not answer SNMP have no duplex value; report them as 0
for line in data:
if line[1] == None:
duplex = 0
else:
duplex = line[1]
        # build the metric path building.floor_plan.ap_name; dots inside the name would create extra levels in Graphite, so they are replaced with '_'
map = line[0][:3]+'.'+line[0].replace(".", "_")[:len(line[0])-3]+'.'+line[0].replace(".", "_")
ApS.append([map, int(duplex)])
return ApS
def sendgraphite(map):
    # push every metric to Graphite
for data in map:
#print(data[0],data[1])
graphyte.send(data[0],data[1])
def main():
Map = create_map()
sendgraphite(Map)
if __name__ == '__main__':
main()
Graphite now has a speed graph for every access point, so problem lines are easy to spot. Once a day, this script pulls the last week of data from Graphite and calculates the average speed per point:
#!/bin/bash
# start with an empty results file
cat /dev/null > /opt/rename/average/average.txt
# maps_graphite_curl.txt contains the Graphite metric path of every access point
cat /opt/rename/graphite/maps_graphite_curl.txt | while read line
do
# pull the last 7 days (10080 minutes) of this metric as CSV; the third column is the value
curl 'http://10.10.10.10:8080/render/?from=-10080minutes&target=aliasByMetric(buiding.'$line')&format=csv' | awk -F "," '{print $3}' > /opt/rename/tmp/tmp.txt
# keep only non-empty samples, then sum and count them
a=$(cat /opt/rename/tmp/tmp.txt | grep 0.0 | numsum)
b=$(cat /opt/rename/tmp/tmp.txt | grep 0.0 | wc -l)
# human-readable point name: third field of the metric path, underscores turned back into dots
map=$(echo $line | awk -F "." '{print $3}' | sed 's/_/./')
echo $line
echo $b
if [[ $b -ne 0 ]]; then
# average speed and the number of full days of data (288 five-minute samples per day)
echo $map $(($a/$b)) $(($b/288)) >> /opt/rename/average/average.txt;
fi
done
Finally, the averages are uploaded back to the Google Sheet:
import sys
import httplib2
import apiclient.discovery
from oauth2client.service_account import ServiceAccountCredentials
def opengoogledocs(data):
CREDENTIALS_FILE = '/opt/rename/creds.json'
spreadsheet_id = 'id_'
credentials = ServiceAccountCredentials.from_json_keyfile_name(
CREDENTIALS_FILE,
['https://www.googleapis.com/auth/spreadsheets',
'https://www.googleapis.com/auth/drive'])
httpAuth = credentials.authorize(httplib2.Http())
service = apiclient.discovery.build('sheets', 'v4', http = httpAuth)
values = service.spreadsheets().values().batchUpdate(
spreadsheetId=spreadsheet_id,
body={
"valueInputOption": "USER_ENTERED",
"data": [
# {"range": "B3:C4",
# "majorDimension": "ROWS",
# "values": [["This is B3", "This is C3"], ["This is B4", "This is C4"]]},
{"range": "a1:d9584",
"majorDimension": "ROWS",
"values": data }
]
}
).execute()
def processfile(filename):
contents = []
cnt = []
print ('*** Reading ['+filename+'] ...')
try:
f = open(filename)
contents = f.read().splitlines()
f.close()
except IOError:
print ("Error opening file: ", filename)
sys.exit(1)
for line in contents:
cnt.append(line.replace('_','.').split(' '))
return cnt
def main():
file = '/opt/rename/average/average.txt'
AverageData = processfile(file)
opengoogledocs(AverageData)
if __name__ == '__main__':
main()
From observing the behavior of the access points already installed (more than 7,500 of them at the time of writing), we assumed that cable runs on which a point shows an average eth0 speed below 970 need rework, starting with re-terminating the RJ45 connectors and, if necessary, ending with replacing the cable. At the time of writing, such problem lines accounted for more than 10% of the monitored access points.
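Just to illustrate how that 970 cut-off can be applied to the collected averages, here is a small sketch that picks the problem lines out of average.txt. The output path is an arbitrary choice; the file format (name, average speed, days of data) follows the averaging script above:
#!/bin/bash
# list access points whose weekly average eth0 speed is below 970
# average.txt lines look like: <ap_name> <average_speed> <days_of_data>
awk '$2 < 970 {print $1, $2}' /opt/rename/average/average.txt | sort -k2 -n > /opt/rename/average/problem_lines.txt
wc -l < /opt/rename/average/problem_lines.txt   # how many lines need rework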
The task set for the installation crews before the new year is to bring 100% of the cable lines up to an average eth0 speed of at least 970.
I hope the article proves useful; feel free to ask questions in the comments…