-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathfind-sitemaps.py
53 lines (39 loc) · 1.52 KB
/
find-sitemaps.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
from usp.tree import sitemap_tree_for_homepage
from termcolor import colored
import sys
import threading, os
from tld import get_tld
def url_to_domain(url):
    """Derive a filesystem-friendly identifier from *url*.

    Extracts the first-level domain (e.g. ``example.com``) via the ``tld``
    package and replaces every dot with an underscore so the result can be
    safely embedded in an output filename.
    """
    domain_info = get_tld(url, as_object=True)
    return domain_info.fld.replace('.', '_')
def run_sitemap(url_web):
    """Discover a website's sitemaps and dump the results to text files.

    Writes two files under the ``datas/`` directory (created on demand):

    * ``list_sitemap_<domain>.txt`` -- URL of every sitemap found (including
      nested sub-sitemaps), excluding the synthetic robots.txt index entry.
    * ``list_urls_<domain>.txt``    -- every page URL listed by the sitemaps,
      de-duplicated.

    Parameters
    ----------
    url_web : str
        Homepage URL of the site to inspect. Leading/trailing whitespace
        (e.g. the newline kept by file iteration) is stripped first.
    """
    # Lines read from urls.txt keep their trailing '\n'; without this strip
    # the newline ends up inside both the homepage URL and the filenames.
    url_web = url_web.strip()
    if not url_web:
        return

    tree = sitemap_tree_for_homepage(url_web)

    urls_sitemaps = []
    for item in tree.sub_sitemaps:
        # Skip the synthetic robots.txt wrapper itself, but still descend
        # into the real sitemaps it references.
        if type(item).__name__ != 'IndexRobotsTxtSitemap':
            urls_sitemaps.append(item.url)
        for sitemap in item.sub_sitemaps:
            if hasattr(sitemap, 'sub_sitemaps') and sitemap.sub_sitemaps:
                # NOTE: renamed from the original's inner `data`, which
                # shadowed the outer iterable of the same name.
                for nested in sitemap.sub_sitemaps:
                    urls_sitemaps.append(nested.url)
            urls_sitemaps.append(sitemap.url)

    # Ensure the output directory exists before opening files for writing.
    os.makedirs("datas", exist_ok=True)

    domain = url_to_domain(url_web)
    with open(f"datas/list_sitemap_{domain}.txt", "w") as list_sitemap:
        list_sitemap.write("\n".join(urls_sitemaps))

    # all_pages() returns an Iterator; a set comprehension de-duplicates.
    urls = list({page.url for page in tree.all_pages()})
    with open(f"datas/list_urls_{domain}.txt", "w") as list_urls:
        list_urls.write("\n".join(urls))
# --- Entry point: spawn one worker thread per homepage listed in urls.txt ---
threads = []
with open("urls.txt") as urls_website:
    for url_web in urls_website:
        url_web = url_web.strip()
        # Skip blank lines so we never start a thread on an empty URL.
        if not url_web:
            continue
        thread = threading.Thread(target=run_sitemap, args=(url_web,))
        threads.append(thread)
        thread.start()

# Wait for every crawl to finish before reporting success.
for t in threads:
    t.join()

print(colored('Liste de sitemap créé avec succés', 'green'))
print(colored('Liste des urls de tous les sitemaps créé avec succés', 'green'))