-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy pathgithub.py
More file actions
64 lines (54 loc) · 1.89 KB
/
github.py
File metadata and controls
64 lines (54 loc) · 1.89 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
from bs4 import BeautifulSoup
import requests
code_prefix = "https://raw.githubusercontent.com"


def get_pylinks(directory_links):
    """Recursively collect raw-content URLs for every .py file reachable.

    Parameters:
        directory_links: iterable of anchor-like objects (BeautifulSoup
            tags from find_all(class_='js-directory-link')) exposing
            .get('href') and .text.

    Returns:
        list of raw.githubusercontent.com URLs for each Python file,
        descending into sub-directories recursively. Empty input yields
        an empty list.
    """
    python_file_links = []
    for directory_link in directory_links:
        link = directory_link.get('href')
        name_parts = directory_link.text.split('.')
        if len(name_parts) == 1:
            # No file extension: usually a directory. GitHub directory
            # hrefs look like '/owner/repo/tree/branch/path', so the 4th
            # path segment is 'tree'. Guard the index for short hrefs.
            split_link = link.split('/')
            if len(split_link) > 3 and split_link[3] == 'tree':
                # NOTE(security): verify=False disables TLS certificate
                # checking — kept for parity with the rest of the script,
                # but should be removed once certificates are configured.
                soup = BeautifulSoup(
                    requests.get(prefix + link, verify=False).text,
                    'html.parser')  # explicit parser silences bs4 warning
                new_directory_links = soup.find_all(class_='js-directory-link')
                # BUG FIX: the original did
                #   python_file_links += python_file_links + get_pylinks(...)
                # which re-appended every link collected so far on each
                # recursive descent, duplicating results exponentially.
                # Extend with only the newly found links.
                python_file_links += get_pylinks(new_directory_links)
        elif name_parts[-1] == 'py':
            # Convert blob URL to raw-content URL by dropping '/blob'.
            python_file_links.append(code_prefix + link.replace('/blob', ''))
        # Any other extension is ignored.
    return python_file_links
# URL to fetch top python projects (GitHub search, sorted by stars).
URL = "https://github.com/search?l=Python&q=stars%3A%3E1&s=stars&type=Repositories"
prefix = "https://github.com"

# NOTE(security): verify=False disables TLS certificate verification —
# acceptable only for a throwaway scraper; remove once certs work locally.
soup = BeautifulSoup(requests.get(URL, verify=False).text, 'html.parser')
h3s = soup.find_all(class_='repo-list-name')

repo_links = []  # absolute repository URLs
repos = []       # second path component of each href
for h3 in h3s:
    href = h3.find('a').get('href')
    repo_links.append(prefix + href)
    # Keep the original index [1]; for an href like '/owner/repo' this is
    # the owner, not the repo name — TODO confirm against live markup.
    parts = href.split('/')
    repos.append(parts[1])
    # Just for testing
    print(parts[1])

for repo_link in repo_links:
    print(repo_link)

# Fetch the root directory listing of each repo and collect all .py links.
python_file_links = []
for repo in repo_links:
    # BUG FIX: the original reassigned `repo = repo_links[2]` inside this
    # loop (leftover debugging), so every iteration scraped the SAME
    # repository. Iterate over each repo as intended.
    soup = BeautifulSoup(requests.get(repo, verify=False).text, 'html.parser')
    directory_links = soup.find_all(class_='js-directory-link')
    python_file_links += get_pylinks(directory_links)

# Persist the collected raw-file URLs, one per line.
with open('links.txt', 'w') as py_links:
    for each in python_file_links:
        py_links.write(each + "\n")

print(len(python_file_links))