áááºá¹ááá¬áá±á·áááºáááºážáá«á áá±ážáá¬ážáᬠá ááŸá
áºááŸáááŒá®á
Habr áááá¹áá°ááᯠááá¯áá»ááºáá±á¬á¡áá«á á á¬áá±ážáá°á á¡ááŒá±á¬ááºážá¡áá¬á¡á¬ážáá¯á¶ážááᯠáá±áá¬áá±á·á áºááœáẠááááºážáááºážááá·áº parser áá áºáá¯áá±ážááẠáá¯á¶ážááŒááºáá²á·áááºá áááºááᯠá¡ááŸá¬ážá¡ááœááºážááœá± ááŒá¯á¶áá²á·áááá² ááá¯áá¬ááᯠá¡á±á¬ááºááŸá¬ áááºááŸá¯ááá¯ááºáá«áááºá
TLDR-
parser áááááá¬ážááŸááºážá ááŒáá¯ážáá áºáá»á±á¬ááºážá ááŒá¿áá¬áá»á¬ážá áœá¬
á¡á á¡á¬ážááŒáá·áºá áá±á¬ááºážáá«ážááᯠááœá²ááŒááºážá áááºááŒá¬ááŒá®áž áá±á«ááºážáá¯ááºááœá²ááŒá®ážáááºááŸáá·áº áá»ááºáá»ááºážááá¯áááᯠáá±áá¬áá±á·á áºááœáẠáá¬ážááŸáááá·áº script prototype áá áºáá¯ááᯠááŒá¯áá¯ááºááẠáá¯á¶ážááŒááºáá²á·áááºá ááŸá áºáá«áá ááºážá á¬ážáá² sqlite3 ááá¯áá¯á¶ážáááºá áááºážááẠáá¯ááºáá¬ážá¡ááºá¡á¬ážáááºážáááº- ááŒááºááœááºážáá¬áá¬áá áºáá¯ááŸáááẠáááá¯á¡ááºáá²á áááºáá®ážáá¬ážááá·áºáá¯á¶ááá¹áá¬ááºááŒáá·áº áá»ááºáá¬ážááŒá®áž ááá¯áá²á·ááá¯á·áá±á¬á¡áá¬áá»á¬áž ááŸáááẠáááá¯á¡ááºáá«á
one_thread.py
from bs4 import BeautifulSoup
import sqlite3
import requests
from datetime import datetime
def main(min, max):
    """Scrape Habr posts with ids in [min, max) into the `habr` SQLite table.

    Single-threaded prototype: one HTTP GET per post id, parsed with
    BeautifulSoup, all rows inserted inside a single transaction.

    NOTE: parameter names `min`/`max` shadow builtins; kept for interface
    compatibility with existing callers.
    """
    conn = sqlite3.connect('habr.db')
    c = conn.cursor()
    c.execute('PRAGMA encoding = "UTF-8"')
    c.execute("CREATE TABLE IF NOT EXISTS habr("
              "id INT, author VARCHAR(255), title VARCHAR(255), "
              "content TEXT, tags TEXT)")
    start_time = datetime.now()
    c.execute("begin")
    for i in range(min, max):
        url = "https://m.habr.com/post/{}".format(i)
        try:
            r = requests.get(url)
        except requests.RequestException:
            # BUG FIX: original opened the log file in the default read mode
            # and passed an int to write(); append a text line instead.
            with open("req_errors.txt", "a") as file:
                file.write("{}\n".format(i))
            continue
        if r.status_code != 200:
            print("{} - {}".format(i, r.status_code))
            continue
        soup = BeautifulSoup(r.text, 'html.parser')
        try:
            author = soup.find(class_="tm-user-info__username").get_text()
            content = str(soup.find(id="post-content-body"))
            title = soup.find(class_="tm-article-title__text").get_text()
            tags = soup.find(class_="tm-article__tags").get_text()
            tags = tags[5:]  # drop the leading label prefix
        except AttributeError:
            # find() returned None -> the page layout did not match.
            author, title, tags = "Error", "Error {}".format(r.status_code), "Error"
            content = "ÐÑО паÑÑОМге ÑÑПй ÑÑÑаМОÑе пÑПОзПÑла ПÑОбка."
        c.execute('INSERT INTO habr VALUES (?, ?, ?, ?, ?)',
                  (i, author, title, content, tags))
        print(i)
    c.execute("commit")
    conn.close()  # BUG FIX: original leaked the connection
    print(datetime.now() - start_time)


main(1, 490406)
á¡áá¬á¡á¬ážáá¯á¶ážááẠááá¹ááááºááŒá áºááẠ- áá»áœááºá¯ááºááá¯á·ááẠááŸááá±á¬áááºážáááºááᯠá¡áá¯á¶ážááŒá¯áááºá áá±á¬ááºážááá¯ááŸá¯áá»á¬ážááŸáá·áº á¡ááŒááºáá¯á¶á á¶áá°ááẠá¡áááºááá·áºááŒá áºáá±áá«ááŒá®á áá«áá²âŠ
-
á á¬áá»ááºááŸá¬áá±á«ááºážáá¯ááºááẠá á¬ááœá²áá áºáá¯ááœááºááŒá áºáááºá
-
á¡áááºá áááºááẠscript ááá¯ááºáá±á¬ááºááŸá¯ááᯠááŸá±á¬áá·áºááŸááºáá«áá áá±áá¬áá±á·á áºáá áºáá¯áá¯á¶ážááẠáááºááá·áºáá±áá¬ááœááºá០áááŸááá±á¬á·áá«á á¡á¬ážáá¯á¶ážááᯠááœá²ááŒááºážá áááºááŒá¬ááŒá®ážááŸáᬠáááááááºááᯠáá¯ááºáá±á¬ááºáááºá
ááá·áºááœááºážááŸá¯áá áºáá¯á á®ááŒá®ážáá±á¬áẠáá±áá¬áá±á·á áºááá¯á· á¡ááŒá±á¬ááºážá¡áá²áá»á¬ážááᯠáááºáá¯ááºáá±á¬ááºááá¯ááºáááºá ááá¯á·áá±á¬áº ááá¯á·áá±á¬áẠscript á¡áá±á¬ááºá¡áááºáá±á¬áºáá»áááºááẠáááááá¬áá¬ááá¯ážáá¬áááºááŒá áºáááºá -
ááááá¯á¶áž áá±á¬ááºážáá«áž 100 ááᯠááá¯ááºážááŒá¬ážááẠ000 áá¬áá® ááŒá¬áá«áááºá
áá±á¬ááºáá
áºáá¯ááá±á¬á· á¡áá¯á¶ážááŒá¯áá°áá²á· áá±á¬ááºážáá«ážááᯠááŸá¬áá«áááºá
- Multithreading ááá¯á¡áá¯á¶ážááŒá¯ááŒááºážááẠáá áºáá«áá áºáá¶ááœáẠáá±á«ááºážáá¯ááºááœá²ááŒááºážááᯠááŒááºáááºá á±áááºá
- habr á áá¬ážááŸááºážá¡ááŒáá·áºá¡áá¯á¶ááá¯ááºáá±á¬áºáááºáž áááºážáááá¯ááá¯ááºážáá¬ážááŸááºážááᯠáááºáááá¯ááºáááºá
á¥ááá¬á¡á¬ážááŒáá·áºá áááºá áºáá±á¬á·áá¬ážááŸááºážááœáẠáá±á«ááºážá ááºáá¬ážáá±á¬ áá±á¬ááºážáá«ážáá áºáá¯ááºááẠ378 KB á¡áá±ážáá»áááºááŸááá»áŸáẠááá¯ááá¯ááºážáá¬ážááŸááºážááœáẠáááºážááẠ126 KB ááŸáááŒá®ážááŒá áºáááºá
áá¯ááááá¬ážááŸááºážá á á¬ááœá²áá»á¬ážá áœá¬á Habr á០áá¬áá®ááááºáááºáá¬ážáááºá
python ááœáẠmultithreading áá±á«ááºážá ááºááŒáá·áºá¡ááºáá¬áááºááá¯áá±á·áá¬ááŒáá·áºáá±á¬á¡áá« multiprocessing.dummy ááŒáá·áºá¡ááá¯ážááŸááºážáá¯á¶ážááœá±ážáá»ááºááŸá¯ááá¯ááœá±ážáá»ááºáá²á·ááŒá®áž multithreading ááŸáá·áºá¡áá°ááŒá¿áá¬áá»á¬ážáá±á«áºáá¬áááºááá¯áááááŒá¯áááá²á·áááºá
SQLite3 ááẠthread áááºáá¯áááºááá¯á á¡áá¯ááºááá¯ááºááá¯áá«á.
áááºááŸááºáá¬ážáá²á· check_same_thread=False
áá«áá±ááá·áº áá® error á áá
áºáá¯áááºáž ááá¯ááºáá«áá°ážá database áá²ááᯠááá·áºááá¯á· ááŒáá¯ážá
á¬ážáá²á·á¡áá« ááá«ááᶠáá»áœááºáá±á¬áº áááŒá±ááŸááºážááá¯ááºáá²á· errors ááœá± ááŒá
áºáá±á«áºáá¬áá«áááºá
ááá¯á·ááŒá±á¬áá·áºá áá±áá¬áá±á·á áºáá²ááá¯á· áá±á¬ááºážáá«ážáá»á¬áž ááá¯ááºááá¯ááºááá·áºááœááºážááŒááºážááᯠá áœáá·áºááœáŸááºááẠáá¯á¶ážááŒááºááŒá®áž áá±á«ááºážá ááºáá±á«ááºážá ááºááŒá±ááŸááºážáá»ááºááᯠáááááᬠááá¯ááºáá áºáá¯ááá¯á· ááŒáá¯ážáááºážá á¯á¶áá±ážááŒááºážááœáẠááŒá¿áá¬áááŸááá±á¬ááŒá±á¬áá·áº ááá¯ááºáá»á¬ážááᯠá¡áá¯á¶ážááŒá¯ááẠáá¯á¶ážááŒááºáá²á·áááºá
Habr ááẠthread áá¯á¶ážáá¯áááºááá¯áá¡áá¯á¶ážááŒá¯ááŒááºážá¡ááœáẠáá¬ážááŒááºááŒááºážá áááºáááºá.
á¡áá°ážáááŒáá·áº Habr ááá¯ááŒááºáá»á±á¬áºááẠáááºáááºáá±á¬ááŒáá¯ážáááºážááŸá¯áá»á¬ážááẠip ááááºáááºááŸá¯ááŒáá·áº áá¬áá®á¡áááºážáááºááŒá¬á¡á±á¬áẠá¡áá¯á¶ážáááºááá¯ááºáááºá ááá¯á·ááŒá±á¬áá·áº áá±á¬ááºážáá«áž 3 áá»á±á¬áºááᯠ100 ááá¹ááá·áºá០26 ááá¹ááá·áºá¡áá áá»áŸá±á¬á·áá»ááá¯ááºáá±á¬ááŒá±á¬áá·áº áááºááẠáá¬ááœá² 12 áá¯ááá¯áᬠá¡áá¯á¶ážááŒá¯ááẠááá¯á¡ááºáá±á¬áºáááºáž áááºážááẠáá±á¬ááºážááœááºáá±ááŒá®ááŒááºáááºá
á€áá¬ážááŸááºážááẠááááºáááŒáááºááŒá áºááŒá®áž áá±á¬ááºážáá«ážá¡áá»á¬ážá¡ááŒá¬ážááœáẠá¡áá«á¡á¬ážáá»á±á¬áºá áœá¬ áá±á«ááºážáá¯ááºáá»á¬áž áá»áááºážááœá¬ážáááºááᯠáááááŒá¯ááá·áºáááºá
async_v1.py
from bs4 import BeautifulSoup
import requests
import os, sys
import json
from multiprocessing.dummy import Pool as ThreadPool
from datetime import datetime
import logging
def worker(i):
    """Download Habr post *i*, parse it, and dump the fields to files\\<i>.json.

    Returns 1 if the file already exists, 2 on a request failure, the HTTP
    status code for non-200 responses, and None on success.
    """
    # NOTE(review): Windows-style path literal kept as-is so already-fetched
    # files are still found; on POSIX this is a flat filename with a backslash.
    currentFile = "files\{}.json".format(i)
    if os.path.isfile(currentFile):
        logging.info("{} - File exists".format(i))
        return 1
    url = "https://m.habr.com/post/{}".format(i)
    try:
        r = requests.get(url)
    except requests.RequestException:
        # BUG FIX: original opened the log read-only and wrote an int;
        # append a text line instead.
        with open("req_errors.txt", "a") as file:
            file.write("{}\n".format(i))
        return 2
    # Log requests the server blocked/throttled.
    if r.status_code == 503:
        with open("Error503.txt", "a") as write_file:
            write_file.write("{}\n".format(i))  # BUG FIX: was "n", not a newline
        logging.warning('{} / 503 Error'.format(i))
    # Post does not exist or was hidden.
    if r.status_code != 200:
        logging.info("{} / {} Code".format(i, r.status_code))
        return r.status_code
    soup = BeautifulSoup(r.text, 'html5lib')
    try:
        author = soup.find(class_="tm-user-info__username").get_text()
        timestamp = soup.find(class_='tm-user-meta__date')['title']
        content = str(soup.find(id="post-content-body"))
        title = soup.find(class_="tm-article-title__text").get_text()
        tags = soup.find(class_="tm-article__tags").get_text()
        tags = tags[5:]  # drop the leading label prefix
        # Badge telling whether the post is a translation or a tutorial.
        tm_tag = soup.find(class_="tm-tags tm-tags_post").get_text()
        rating = soup.find(class_="tm-votes-score").get_text()
    except (AttributeError, TypeError, KeyError):
        # find() returned None or the expected attribute was missing.
        author = title = tags = timestamp = tm_tag = rating = "Error"
        content = "ÐÑО паÑÑОМге ÑÑПй ÑÑÑаМОÑе пÑПОзПÑла ПÑОбка."
        logging.warning("Error parsing - {}".format(i))
        with open("Errors.txt", "a") as write_file:
            write_file.write("{}\n".format(i))  # BUG FIX: was "n", not a newline
    # Dump the article fields to JSON.
    try:
        article = [i, timestamp, author, title, content, tm_tag, rating, tags]
        with open(currentFile, "w") as write_file:
            json.dump(article, write_file)
    except Exception:
        # Surface the failing id before re-raising.
        print(i)
        raise
if __name__ == '__main__':
    if len(sys.argv) < 3:
        # BUG FIX: the original usage string was mojibake split across
        # physical lines; reconstructed as a clear English message.
        print("Required parameters: min and max. Usage: async_v1.py 1 100")
        sys.exit(1)
    # Renamed from min/max to avoid shadowing the builtins.
    min_id = int(sys.argv[1])
    max_id = int(sys.argv[2])
    # More than 3 threads gets the client IP temporarily banned by Habr.
    pool = ThreadPool(3)
    # Start the timer, then fan the id range out over the workers.
    start_time = datetime.now()
    results = pool.map(worker, range(min_id, max_id))
    # Wait for all threads to finish, then print the elapsed time.
    pool.close()
    pool.join()
    print(datetime.now() - start_time)
áááááá¬ážááŸááºážá áá±á¬ááºáá¯á¶áž
áá¯ááááá¬ážááŸááºážááᯠá¡ááŸá¬ážááŸá¬ááŒááºáá±á ááºá Habr ááẠááá¯ááºáááá¯ááá¯ááºážáá¬ážááŸááºážááá¯á·áááºáá±á¬ááºááá·áº API áá áºáá¯áá«ááŸááááºááᯠáá¯ááºáááẠááœá±á·ááŸááá²á·áááºá ááœá²ááŒááºážá áááºááŒá¬áááºáááºáááá¯á¡ááºáá±á¬ json áá»áŸáá¬ááŒá áºáá±á¬ááŒá±á¬áá·áºáááºážáááºááá¯ááá¯ááºážáá¬ážááŸááºážáááºááá¯ááá¯ááŒááºáááºáááºá áá±á¬ááºáá¯á¶ážáá±á¬á· áá»áœááºáá±á¬á·áº áá¬ááºááœáŸááºážááᯠááŒááºáá±ážááá¯á· áá¯á¶ážááŒááºááá¯ááºáááºá
áá«ááᯠááœá±á·ááŒá®áá±á«á·á
async_v2.py
import requests
import os, sys
import json
from multiprocessing.dummy import Pool as ThreadPool
from datetime import datetime
import logging
def worker(i):
    """Fetch Habr post *i* via the mobile JSON API and dump it to files\\<i>.json.

    Returns 1 if the file already exists, 503 when throttled, 2 on a request
    failure, 3 on an unparseable body, and None otherwise.
    """
    # NOTE(review): Windows-style path literal kept as-is so already-fetched
    # files are still found; on POSIX this is a flat filename with a backslash.
    currentFile = "files\{}.json".format(i)
    if os.path.isfile(currentFile):
        logging.info("{} - File exists".format(i))
        return 1
    url = "https://m.habr.com/kek/v1/articles/{}/?fl=ru%2Cen&hl=ru".format(i)
    try:
        r = requests.get(url)
        if r.status_code == 503:
            logging.critical("503 Error")
            return 503
    except requests.RequestException:
        # BUG FIX: original opened the log read-only and wrote an int;
        # append a text line instead.
        with open("req_errors.txt", "a") as file:
            file.write("{}\n".format(i))
        return 2
    try:
        data = json.loads(r.text)
    except ValueError:
        # ROBUSTNESS: non-JSON body (error page, truncated response) no
        # longer crashes the worker thread.
        logging.warning("{} - invalid JSON response".format(i))
        return 3
    if data.get('success'):  # .get avoids KeyError on unexpected payloads
        article = data['data']['article']
        # Renamed from `id` to avoid shadowing the builtin; tuple order must
        # match the unpacking in parser.py.
        record = (article['id'],
                  article['is_tutorial'],
                  article['time_published'],
                  article['title'],
                  article['text_html'],
                  article['comments_count'],
                  article['lang'],
                  article['tags_string'],
                  article['reading_count'],
                  article['author']['login'],
                  article['voting']['score'])
        with open(currentFile, "w") as write_file:
            json.dump(record, write_file)
if __name__ == '__main__':
    if len(sys.argv) < 3:
        # BUG FIX: the original usage string was mojibake split across
        # physical lines; reconstructed as a clear English message.
        print("Required parameters: min and max. Usage: async_v2.py 1 100")
        sys.exit(1)
    # Renamed from min/max to avoid shadowing the builtins.
    min_id = int(sys.argv[1])
    max_id = int(sys.argv[2])
    # More than 3 threads gets the client IP temporarily banned by Habr.
    pool = ThreadPool(3)
    # Start the timer, then fan the id range out over the workers.
    start_time = datetime.now()
    results = pool.map(worker, range(min_id, max_id))
    # Wait for all threads to finish, then print the elapsed time.
    pool.close()
    pool.join()
    print(datetime.now() - start_time)
áááºážááœáẠáá±á¬ááºážáá«ážááá¯ááºááŸáá¯ááºááŸáá·áº áááºážááá¯áá±ážáá²á·áá±á¬ á á¬áá±ážááá¬ááŸáá·áº áááºáááºááá·áº ááœááºáááºáá»á¬áž áá«ááŸááááºá
API.png
áá±á¬ááºážáá«ážáá áºáá¯á á®á json á¡ááŒáá·áºá¡á á¯á¶ááᯠáá»áœááºá¯áẠáá áœáá·áºáá áºáá²á·áá² áá»áœááºá¯ááºááá¯á¡ááºáá±á¬ ááœááºáááºáá»á¬ážááá¯áᬠááááºážáááºážáá²á·áááº-
- id
- áá»á°ááá¯áá®áááº
- á¡áá»áááº_áá¯ááºáá±áá²á·áááºá
- áá±á«ááºážá á¥áº
- á¡ááŒá±á¬ááºážá¡áá¬
- ááŸááºáá»ááº_á¡áá±á¡ááœááº
- lang ááẠáá±á¬ááºážáá«ážáá±ážáá±á¬ áá¬áá¬á áá¬ážááŒá áºáááºá ááá¯á¡áá»áááºá¡áá áááºážááœáẠen ááŸáá·áº ru áá¬ááŸááááºá
- tags_string - ááá¯á·á áºá០áááºáá»á¬ážá¡á¬ážáá¯á¶áž
- á á¬áááºááŒááºáž_á¡áá±á¡ááœááº
- á á¬áá±ážáá°
- áááŸáẠ- áá±á¬ááºážáá«ážá¡ááá·áºáááºááŸááºáá»ááºá
ááá¯á·ááŒá±á¬áá·áº API ááá¯á¡áá¯á¶ážááŒá¯á url 8 áá áºáá¯áá»áŸáẠscript áááºáááºáá»áááºááᯠ100 á áá¹ááá·áºááá¯á· áá»áŸá±á¬á·áá»ááá¯ááºáá«áááºá
áá»áœááºá¯ááºááá¯á·ááá¯á¡ááºáá±á¬áá±áá¬ááᯠáá±á«ááºážáá¯ááºáá¯ááºááŒá®ážáá«á áááºážááá¯áá¯ááºáá±á¬ááºááŒá®áž áá±áá¬áá±á·á áºáá²ááá¯á·ááá·áºááẠááá¯á¡ááºáá«áááºá áá«ááá¯áááºáž áá»áœááºáááŸá¬ áá¬ááŒá¿áá¬á០áááŸááá«áá°ážá
parser.py
import json
import sqlite3
import logging
from datetime import datetime
def parser(min, max):
    """Load files\\<i>.json for ids in [min, max) into `articles` in habr.db.

    Missing files are skipped; everything read is committed in a single
    transaction even if the loop aborts.

    NOTE: parameter names `min`/`max` shadow builtins; kept for interface
    compatibility with existing callers.
    """
    conn = sqlite3.connect('habr.db')
    c = conn.cursor()
    c.execute('PRAGMA encoding = "UTF-8"')
    # Disable per-write fsync confirmation: bulk-insert speed goes up severalfold.
    c.execute('PRAGMA synchronous = 0')
    # BUG FIX: the CREATE TABLE literal was broken across physical lines
    # without continuation; reassembled via implicit string concatenation.
    c.execute("CREATE TABLE IF NOT EXISTS articles("
              "id INTEGER, time_published TEXT, author TEXT, title TEXT, "
              "content TEXT, lang TEXT, comments_count INTEGER, "
              "reading_count INTEGER, score INTEGER, is_tutorial INTEGER, "
              "tags_string TEXT)")
    try:
        for i in range(min, max):
            filename = "files\{}.json".format(i)
            try:
                # BUG FIX: `with` guarantees the file is closed even when
                # json.load raises (original leaked the descriptor).
                with open(filename) as f:
                    data = json.load(f)
            except IOError:
                logging.info('FileNotExists')
                continue
            # Renamed `id` -> `post_id` to avoid shadowing the builtin.
            (post_id, is_tutorial, time_published, title, content, comments_count,
             lang, tags_string, reading_count, author, score) = data
            c.execute('INSERT INTO articles VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
                      (post_id, time_published, author, title, content, lang,
                       comments_count, reading_count, score, is_tutorial,
                       tags_string))
    finally:
        conn.commit()
        conn.close()  # BUG FIX: original leaked the connection


start_time = datetime.now()
parser(490000, 490918)
print(datetime.now() - start_time)
á á¬áááºážá¡ááºáž
áá±á¬ááºážááŒá®á á¡á ááºá¡áá¬á¡á¬ážááŒáá·áºá áá±á¬ááºáá¯á¶ážááœááºá áááºááẠá¡áá»ááºá¡áááºá¡áá»áá¯á·á០ááááºážááááºážá¡áá»ááºá¡áááºáá»á¬ážááᯠáá¯ááºáá°ááá¯ááºáááº-
- áá±á«ááºážáá¯ááºáá¯ááºááẠáá»áŸá±á¬áºááŸááºážáá¬ážááá·áº ááá,ááá á¡áááºá áá±á¬ááºážáá«áž ááá,á áá áᬠáá±á«ááºážáá¯ááºáá¯ááºáá²á·áááºá Habré áá±á«áºááŸá áá±á¬ááºážáá«ážáá»á¬ážá áááºáááºáá»á±á¬áº (490) ááᯠááŸááºáá¬ážááẠááá¯á·ááá¯áẠáá»ááºááá¯ááºááŒá±á¬ááºáž ááœá±á·ááŸáááááºá
- áá±á¬ááºážáá«ážáá±á«ááºáž áá áºáááºážááœá²áá®ážáá«áž áá«áááºáá±á¬ áá±áá¬áá±á·á áºáá áºáá¯áá¯á¶ážááẠ2.95 GB á¡áá±ážáá»áááºááŸááááºá áá»á¯á¶á·áá¯á¶á ᶠ- 495 MB á
- á á¯á á¯áá±á«ááºáž ááááá áŠážááẠHabre áá á¬áá±ážááá¬áá»á¬ážááŒá áºáááºá á€á á¬áááºážááá¬ážáá»á¬ážááẠááá¯ááºááá¯ááºááœáŸáá·áºáááºááŸá¯áá»á¬ážááŸáá¬ááŒá áºááŒá±á¬ááºáž ááá·áºá¡á¬áž áá»áœááºá¯ááºááááá±ážáá«áááºá
- Habre ááœáẠá¡áá»áá¯ážá¡ááŸááá¯á¶áž á
á¬áá±ážááá¬-
á¡áá®áᬠáá±á¬ááºážáá«áž - ááááá ááááºáááºážá¡ááá·áºáááºááŸááºáá¬ážáá±á¬ áá±á¬ááºážáá«áž 1448 plusesá¡áá»á¬ážáá¯á¶ážáááºáá±á¬áá±á¬ááºážáá«áž - ááŒáá·áºááŸá¯ááŸá¯ 1660841ááœá±ážááœá±ážááŸá¯á¡áá»á¬ážáá¯á¶ážáá±á¬ááºážáá«áž ááŸááºáá»áẠ- áááá
áá±á¬ááºážááŒá®, ááááºááá¯á¶á
á¶ááááºáááºážá
á¬áá±ážááᬠáá
áŠáž
á¡ááá·áºáááºááŸááºáá»ááºá¡á ááááºáááºáž 15
ááááºáááºáž 15 áááº
ááááºáááºáž 15 áᯠááœá±ážááœá±ážáá²á·áááºá
source: www.habr.com