External_Internal.py
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 25 00:15:26 2019
@author: Jesse Amamgbu, Kontrol, HAKS
"""
from urllib.parse import urlparse
from bs4 import BeautifulSoup
import csv
import re
import requests

def getInternalLinks(bs, includeUrl):
    """Return all links on the page that point back to the same site, as absolute URLs."""
    includeUrl = '{}://{}'.format(urlparse(includeUrl).scheme, urlparse(includeUrl).netloc)
    internalLinks = []
    # Finds all links that begin with "/" or that already contain the site's own domain
    for link in bs.find_all('a', href=re.compile('^(/|.*' + re.escape(includeUrl) + ')')):
        if link.attrs['href'] is not None:
            if link.attrs['href'] not in internalLinks:
                if link.attrs['href'].startswith('/'):
                    internalLinks.append(includeUrl + link.attrs['href'])
                else:
                    internalLinks.append(link.attrs['href'])
    return internalLinks
#------------------------------------------------------------------------------------------------#
# Collects a list of all internal URLs found on the site
allIntLinks = set()

def getAllInternalLinks(siteUrl):
    """Fetch siteUrl and return the internal links found on it (empty list on failure)."""
    try:
        # A browser-like User-Agent header is needed for sites that answer plain scripts with 403 Forbidden
        headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'}
        page_object = requests.get(siteUrl, headers=headers)
        html = page_object.content
        domain = '{}://{}'.format(urlparse(siteUrl).scheme, urlparse(siteUrl).netloc)
        bs = BeautifulSoup(html, 'html.parser')
        return getInternalLinks(bs, domain)
    except Exception as e:
        print(e)
        print('Error while getting internal links from {}'.format(siteUrl))
        return []
#-----------------------------------------------------------------------------------------------#
# To test the script, use the code below
def save_csv(internalLinks):
    """Append each collected link to internal2.csv (tab-delimited) and record it in allIntLinks."""
    print(internalLinks)
    with open('internal2.csv', 'a+') as f1:
        writer = csv.writer(f1, delimiter='\t', lineterminator='\n')
        for link in internalLinks:
            allIntLinks.add(link)
            print(link)
            writer.writerow([link])

#getAllExternalLinks(desiredlink)
#getAllInternalLinks(desiredlink)
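
# A minimal usage sketch (an assumption, not part of the original script): it treats
# 'http://example.com' as a hypothetical stand-in for the site you want to crawl,
# calls getAllInternalLinks() defined above, and writes whatever it returns with save_csv().
if __name__ == '__main__':
    desiredlink = 'http://example.com'  # hypothetical target site; replace with a real URL
    links = getAllInternalLinks(desiredlink)
    if links:
        save_csv(links)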