-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathgetkb.py
More file actions
145 lines (102 loc) · 3.62 KB
/
getkb.py
File metadata and controls
145 lines (102 loc) · 3.62 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
import sys
import hashlib
import re
import json
import requests
from bs4 import BeautifulSoup
from bs4 import Tag
import base64
def _downloadFile(url, digest):
    """Download *url* into the current directory and verify its SHA-1 digest.

    The destination file name is the last path segment of the URL. *digest*
    is the expected SHA-1 digest, base64-encoded (as scraped from the
    catalog's download dialog). A mismatch is only reported, not raised,
    so one bad file does not abort a batch of downloads.
    """
    fileName = url.split("/")[-1]
    expectedDigest = base64.b64decode(digest)
    print("\tDownloading to file {}".format(fileName))
    with requests.get(url, stream=True) as r:
        r.raise_for_status()
        sha1 = hashlib.sha1()
        with open(fileName, "wb") as f:
            # Stream in ~100 KB chunks so large .msu files are never
            # held in memory whole; hash while writing.
            for chunk in r.iter_content(chunk_size=100000):
                if chunk:
                    sha1.update(chunk)
                    f.write(chunk)
    if sha1.digest() == expectedDigest:
        print("\tDigest matches.")
    else:
        print("\tDigest does not match.")
def _getDownloadLink(updateId):
    """Resolve a catalog update id to its .msu download URL and SHA-1 digest.

    Posts the id to the catalog's DownloadDialog endpoint and scrapes the
    response text with regexes, because the URL and digest only appear in
    inline JavaScript, not in the HTML itself. Returns a ``(url, digest)``
    tuple; either element is None when its pattern is not found.
    """
    dialogUrl = "http://www.catalog.update.microsoft.com/DownloadDialog.aspx"
    updateInfo = {
        "size": 0,
        "languages": "",
        "uidInfo": updateId,
        "updateID": updateId,
    }
    data = {
        "updateIDs": "[" + json.dumps(updateInfo) + "]",
    }
    downloadDialog = requests.post(dialogUrl, data)
    downloadDialog.raise_for_status()  # fail loudly instead of scraping an error page
    # The JavaScript being scraped looks like:
    #   downloadInformation[0].files[0].url = 'http://download.windowsupdate.com/...msu';
    #   downloadInformation[0].files[0].digest = 'Ei29Ltg8nNIyDNnGk4NDJenmfpI=';
    urlStr = None
    urlMatch = re.search(r"(http://.*msu)", downloadDialog.text)
    if urlMatch:
        urlStr = urlMatch.group(0)
    digestStr = None
    digestMatch = re.search(r"digest = '(.*)';", downloadDialog.text)
    if digestMatch:
        digestStr = digestMatch.group(1)
    return (urlStr, digestStr)
def downloadUpdate(updateName, isX64 = True, windowsVersion="Windows 7"):
    """Search the Microsoft Update Catalog for KB<updateName> and download it.

    Scrapes the catalog search page, keeps only the result rows matching the
    requested architecture (x64 by default) and Windows version, resolves
    each row's download URL via _getDownloadLink(), then downloads and
    verifies each file with _downloadFile().
    """
    if updateName == "":
        return
    link = "http://www.catalog.update.microsoft.com/Search.aspx?q=KB{}".format(updateName)
    print("Searching for update '{}'".format(updateName))
    r = requests.get(link)
    # Do not shadow the builtins `str`/`id` as the previous version did.
    page = BeautifulSoup(r.text, features="html.parser")
    table = page.findAll(attrs={"id": "tableContainer", "class": "resultsBackGround"})
    if not table:
        print("\tSearch dialog format changed. Aborting")
        return
    headerRows = page.select("#headerRow")
    foundIDs = []
    # Each Tag sibling of the header row is one search result:
    # contents[2] is the architecture column, contents[3] the OS column.
    for line in headerRows[0].next_siblings:
        if not isinstance(line, Tag):
            continue
        archMatch = True
        if isX64 and "x64" not in line.contents[2].text:
            archMatch = False
        osMatch = windowsVersion in line.contents[3].text
        if archMatch and osMatch:
            buttons = line.findAll('input', attrs={'type': 'button', 'value': 'Download'})
            if buttons:
                updateId = buttons[0]["id"]
                print("\tFound id '{}'".format(updateId))
                foundIDs.append(updateId)
    links = []
    for updateId in foundIDs:
        link, digest = _getDownloadLink(updateId)
        print("\tFor id '{}' \n\t\tfound link '{}',\n\t\tdigest '{}'".format(updateId, link, digest))
        links.append((link, digest))
    for ld in links:
        _downloadFile(*ld)
def downloadUpdates(updatesListFile):
    """Download every update listed in *updatesListFile*, one KB per line.

    Each line is normalized (lowercased, 'kb' prefix removed, whitespace
    stripped) *before* de-duplication, so 'KB3207752', 'kb3207752' and
    '3207752' all collapse to a single download. The original version
    de-duplicated the raw lines, which let differently-formatted entries
    for the same KB number download twice.
    """
    with open(updatesListFile, "rt") as f:
        lines = f.readlines()
    updates = {line.lower().replace("kb", "").strip() for line in lines}
    for update in updates:
        downloadUpdate(update)
def main(argv):
    """CLI entry point.

    Expects one positional argument: the path of a file listing KB updates.
    Prints a usage hint and exits when no argument is given.
    """
    if not argv:
        print("Usage getkb.py <file with updates list>")
        return
    downloadUpdates(argv[0])
# Script entry point: forward the command-line arguments (minus the
# program name) to main().
if __name__ == "__main__":
    main(sys.argv[1:])