from bs4 import BeautifulSoup
import requests
import re
from urllib3 import exceptions as url_ex
import logging
from sys import stdout
from socket import timeout
from multiprocessing.dummy import Pool as ThreadPool
from multiprocessing import Queue
# --- logging configuration -------------------------------------------------
# One shared formatter; every record goes both to a log file and to stdout.
formatter = logging.Formatter(
    '%(asctime)s %(levelname)9s %(message)s', "%Y-%m-%d %H:%M:%S"
)

# Module logger: INFO and above propagate to the handlers below.
logger = logging.getLogger('term_info')
logger.setLevel(logging.INFO)

# File handler: appends UTF-8 records to terminfo.log (DEBUG and above).
handler = logging.FileHandler('terminfo.log', 'a', encoding='utf-8')
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logger.addHandler(handler)

# Console handler: mirrors records to stdout with the same format.
console = logging.StreamHandler(stdout)
console.setFormatter(formatter)
logger.addHandler(console)
def from_http(url):
    """Build the terminal status URL for a bare host/IP string.

    Leading/trailing whitespace (e.g. a trailing newline from file input)
    is stripped before the URL is assembled.
    """
    host = url.strip()
    return f'http://{host}/ss30'
# Collected results: (ip, soft, ver, sn) tuples for terminals that answered.
soft_term = []
# IPs whose status page did not parse — presumed password-protected.
with_passdw = []
# Queue of (soft, ver, sn) tuples filled by worker threads, drained at the end.
q = Queue()
def read_info(ip):
    """Fetch http://{ip}/ss30 and parse its first line into (ip, soft, ver, sn).

    On success the tuple is appended to ``soft_term`` and (soft, ver, sn)
    is put on the result queue ``q``.  A page that does not split into the
    three expected fields is treated as password-protected and the IP is
    collected in ``with_passdw``.

    :param ip: terminal IP address (string)
    :return: True on success, False on parse failure or connection problems
    """
    try:
        url = f'http://{ip}/ss30'
        r = requests.get(url, timeout=5)
        # Parse the decoded page text directly; the old r.text.encode() forced
        # BeautifulSoup to re-guess the charset of already-decoded text.
        page_lines = BeautifulSoup(r.text, features="lxml").text.split('\n')
        # Strip everything up to and including the last '!' on the first line.
        mask = re.sub(r'^[\W\w\s_-]+!', '', page_lines[0])
        try:
            soft, ver, sn = map(str.strip, re.split(r'\s[A-Za-z]*:\s', mask.strip()))
        except ValueError:
            # The line did not split into exactly three fields: most likely a
            # password prompt instead of the status banner.
            logger.info('терминал с ip %s запаролен', ip)
            with_passdw.append(ip)
            return False
        soft_term.append((ip, soft, ver, sn))
        logger.info('%s Get data', ip)
        q.put((soft, ver, sn))
        return True
    except (requests.exceptions.ConnectTimeout, requests.exceptions.ReadTimeout,
            url_ex.ReadTimeoutError, timeout, requests.exceptions.ConnectionError):
        logger.debug('%-15sConnection timeout', ip)
        # Explicit False: the original implicitly returned None on this path,
        # inconsistent with the boolean returns above.
        return False
# make the Pool of workers (threads, not processes: the work is network-bound)
pool = ThreadPool(processes=80)
with open('ip.txt', 'r') as f:
    # Stream the file line by line instead of materialising it with
    # readlines(); pool.map() consumes the generator before the file closes.
    pool.map(read_info, (line.strip() for line in f))
# close the pool and wait for the work to finish
pool.close()
pool.join()
# Drain collected results.  Queue.qsize() is unreliable and raises
# NotImplementedError on some platforms (e.g. macOS); after join() no
# producers remain, so testing emptiness is safe here.
while not q.empty():
    print(q.get())