Multithreaded information collection - 2

from bs4 import BeautifulSoup
import requests
import re
from urllib3 import exceptions as url_ex
import logging
from sys import stdout
from socket import timeout
from multiprocessing.dummy import Pool as ThreadPool
# the pool uses threads, so a plain thread-safe queue is enough
# (unlike multiprocessing.Queue, its qsize() works on every platform)
from queue import Queue

# format the log records
formatter = logging.Formatter('%(asctime)s %(levelname)9s %(message)s', "%Y-%m-%d %H:%M:%S")
# create a logger named after the module; it must pass DEBUG records through,
# otherwise logger.debug(...) would never reach the file handler
logger = logging.getLogger('term_info')
logger.setLevel(logging.DEBUG)
# handler that writes the log (including DEBUG records) to a file
handler = logging.FileHandler('terminfo.log', 'a', encoding='utf-8')
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logger.addHandler(handler)
# handler that mirrors INFO and above to the console
console = logging.StreamHandler(stdout)
console.setFormatter(formatter)
console.setLevel(logging.INFO)
logger.addHandler(console)


def from_http(url):
    # build the target URL from a bare IP address
    return 'http://' + url.strip() + '/ss30'


soft_term = []
with_passdw = []
q = Queue()


def read_info(ip):
    '''
    Read the first line of the response from http://{ip}/ss30
    and reduce it to the tuple (ip, soft, ver, sn).
    :param ip:
    :return:
    '''
    try:
        r = requests.get(from_http(ip), timeout=5)
        # r.text is already decoded, no need to round-trip through bytes
        soup = BeautifulSoup(r.text, features="lxml").text.split('\n')
        # drop everything up to and including the '!' that ends the banner
        mask = re.sub(r'^[\W\w\s_-]+!', '', soup[0])
        try:
            # split the remainder on labelled fields such as ' Ver: ' / ' SN: '
            soft, ver, sn = map(str.strip, re.split(r'\s[A-Za-z]*:\s', mask.strip()))
        except ValueError:
            logger.info(f'terminal with ip {ip} is password-protected')
            with_passdw.append(ip)
            return False
        soft_term.append((ip, soft, ver, sn))
        logger.info(f'{ip} Get data')
        q.put((soft, ver, sn))
        return True
    except (requests.exceptions.ConnectTimeout, requests.exceptions.ReadTimeout,
            url_ex.ReadTimeoutError, timeout, requests.exceptions.ConnectionError):
        logger.debug(f'{ip:15s}Connection timeout')


# make the Pool of workers
pool = ThreadPool(processes=80)
with open('ip.txt', 'r') as f:
    ip = map(str.strip, f.readlines())
    pool.map(read_info, ip)
# close the pool and wait for the work to finish
pool.close()
pool.join()

for _ in range(q.qsize()):
    print(q.get())
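The two regular expressions encode an assumption about the shape of the first line of the /ss30 response. The sample below is hypothetical (the real banner depends on the terminal firmware), but it shows the two parsing steps in isolation: re.sub drops the banner up to the '!', and re.split then cuts the remainder on labelled fields.

import re

# hypothetical banner line -- the actual format depends on the firmware
line = 'TERMINAL SS30 ready! PSS-30 Ver: 1.5.4 SN: 00123456'

mask = re.sub(r'^[\W\w\s_-]+!', '', line)    # ' PSS-30 Ver: 1.5.4 SN: 00123456'
soft, ver, sn = map(str.strip, re.split(r'\s[A-Za-z]*:\s', mask.strip()))
print(soft, ver, sn)                         # PSS-30 1.5.4 00123456

If the line does not split into exactly three fields (for example, when a password-protected terminal answers with something other than the banner), the unpacking raises ValueError and the address is collected in with_passdw. ip.txt is expected to hold one address per line; addresses that time out are recorded only in terminfo.log, since the console handler shows INFO and above.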