whosyouragent.whosyouragent

import json
import random
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path

import requests
from bs4 import BeautifulSoup


class VersionUpdater:
    def __init__(self):
        self.versions_path = Path(__file__).parent / "browserVersions.json"
        if not self.versions_path.exists():
            self.versions_path.write_text(json.dumps({}))
        # Default to empty strings so a failed scrape leaves a value that
        # fails the numeric check in update_all() and gets dropped in favor
        # of the previously cached number.
        self.firefox = self.chrome = self.safari = ""
        self.edge = self.vivaldi = self.opera = ""

    def update_firefox(self):
        try:
            url = "https://www.mozilla.org/en-US/firefox/releases/"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            release_list = soup.find("ol", class_="c-release-list")
            version = release_list.ol.li.a.text
            self.firefox = version
        except Exception as e:
            print(e)
            raise Exception("Error updating firefox")

    def update_chrome(self):
        try:
            url = "https://en.wikipedia.org/wiki/Google_Chrome"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            # Trim the infobox text at the earlier of the footnote marker
            # "[" and the "/" separator.
            version = info_boxes[8].text[
                : min([info_boxes[8].text.find("["), info_boxes[8].text.find("/")])
            ]
            self.chrome = version
        except Exception as e:
            print(e)
            raise Exception("Error updating chrome")

    def update_safari(self):
        try:
            url = "https://en.wikipedia.org/wiki/Safari_(web_browser)"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[2].text[: info_boxes[2].text.find("[")]
            self.safari = version
        except Exception as e:
            print(e)
            raise Exception("Error updating safari")

    def update_edge(self):
        try:
            url = "https://www.techspot.com/downloads/7158-microsoft-edge.html"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            version = soup.find("span", class_="subver").text
            self.edge = version
        except Exception as e:
            print(e)
            raise Exception("Error updating edge")

    def update_vivaldi(self):
        try:
            url = "https://en.wikipedia.org/wiki/Vivaldi_(web_browser)"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[5].text[: info_boxes[5].text.find(" ")]
            self.vivaldi = version
        except Exception as e:
            print(e)
            raise Exception("Error updating vivaldi")

    def update_opera(self):
        try:
            url = "https://en.wikipedia.org/wiki/Opera_(web_browser)"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[2].div.text[: info_boxes[2].div.text.find("[")]
            self.opera = version
        except Exception as e:
            print(e)
            raise Exception("Error updating Opera")

    def update_all(self):
        updaters = [
            self.update_firefox,
            self.update_chrome,
            self.update_safari,
            self.update_edge,
            self.update_vivaldi,
            self.update_opera,
        ]
        with ThreadPoolExecutor(6) as executor:
            for updater in updaters:
                executor.submit(updater)
        versions = {
            "Firefox": self.firefox,
            "Chrome": self.chrome,
            "Edg": self.edge,
            "Vivaldi": self.vivaldi,
            "OPR": self.opera,
            "Safari": self.safari,
        }
        # Remove any keys that failed to update and keep previous version number
        poppers = [
            version
            for version in versions
            if not ((versions[version]).replace(".", "")).isnumeric()
        ]
        for popper in poppers:
            versions.pop(popper)
        previous_versions = json.loads(self.versions_path.read_text())
        versions = previous_versions | versions
        self.versions_path.write_text(json.dumps(versions))


# Platform segments used when assembling user agent strings.
platforms = [
    "(Windows NT 10.0; Win64; x64)",
    "(x11; Ubuntu; Linux x86_64)",
    "(Windows NT 11.0; Win64; x64)",
    "(Macintosh; Intel Mac OS X 13_0_0)",
]


def randomize_version_number(version: str) -> str:
    """Randomize a version number so that it's in between
    the previous major version and the current one."""
    parts = [int(part) for part in version.split(".")]
    parts[0] = random.randint(parts[0] - 1, parts[0])
    for i, part in enumerate(parts[1:]):
        parts[i + 1] = random.randint(0, part)
    return ".".join(str(part) for part in parts)


def get_agent() -> str:
    """Build and return a user agent string."""
    browsers = json.loads((Path(__file__).parent / "browserVersions.json").read_text())
    for browser in browsers:
        browsers[browser] = randomize_version_number(browsers[browser])
    browser = random.choice(list(browsers.keys()))
    if browser == "Safari":
        platform = platforms[-1]
        useragent = f'Mozilla/5.0 {platform} AppleWebKit/605.1.15 (KHTML, like Gecko) Version/{browsers["Safari"]} Safari/605.1.15'
    else:
        platform = random.choice(platforms)
        if browser == "Firefox":
            platform = platform[: platform.rfind(")")] + f"; rv:{browsers[browser]})"
            useragent = (
                f"Mozilla/5.0 {platform} Gecko/20100101 Firefox/{browsers[browser]}"
            )
        else:
            # Edge, Opera and Vivaldi reuse the Chrome token and append their own.
            useragent = f'Mozilla/5.0 {platform} AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{browsers["Chrome"]} Safari/537.36'
            if browser == "Edg":
                useragent += f' Edg/{browsers["Edg"]}'
            elif browser == "OPR":
                useragent += f' OPR/{browsers["OPR"]}'
            elif browser == "Vivaldi":
                useragent += f' Vivaldi/{browsers["Vivaldi"]}'
    return useragent
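
For orientation, here is a minimal usage sketch (not part of the module source): it builds a randomized User-Agent header with get_agent() and sends it with requests. The https://httpbin.org/user-agent URL is only an illustrative target, and the sketch assumes the bundled browserVersions.json already contains version numbers (run VersionUpdater().update_all() first if it does not).

    import requests

    from whosyouragent.whosyouragent import get_agent

    # get_agent() draws version numbers from the cached browserVersions.json
    # and assembles a browser/platform combination at random.
    headers = {"User-Agent": get_agent()}

    # httpbin echoes back the user agent it received, which makes it easy to verify.
    response = requests.get("https://httpbin.org/user-agent", headers=headers)
    print(response.json())
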
class VersionUpdater:
Scrapes the current release versions of Firefox, Chrome, Safari, Edge, Vivaldi, and Opera and caches them in browserVersions.json alongside the module.

def update_firefox(self):
Scrape the latest Firefox version from the Mozilla releases page and store it on self.firefox.

def update_chrome(self):
Scrape the current Chrome version from the Google Chrome Wikipedia infobox and store it on self.chrome.

def update_safari(self):
Scrape the current Safari version from the Safari Wikipedia infobox and store it on self.safari.

def update_edge(self):
Scrape the current Edge version from TechSpot's Microsoft Edge download page and store it on self.edge.

def update_vivaldi(self):
Scrape the current Vivaldi version from the Vivaldi Wikipedia infobox and store it on self.vivaldi.

def update_opera(self):
Scrape the current Opera version from the Opera Wikipedia infobox and store it on self.opera.

def update_all(self):
Run all of the update methods concurrently, drop any versions that failed to scrape cleanly, and merge the rest over the previous contents of browserVersions.json.
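
A short sketch of the cache workflow, assuming network access and that browserVersions.json sits next to the module (which is where versions_path points): update_all() scrapes all six browsers in parallel, drops anything that failed the numeric check, and merges the rest over the previous cache, so a failed scrape keeps its old value.

    import json

    from whosyouragent.whosyouragent import VersionUpdater

    updater = VersionUpdater()
    updater.update_all()  # hits mozilla.org, Wikipedia, and TechSpot

    # The merged cache keys match the user agent tokens used by get_agent().
    cache = json.loads(updater.versions_path.read_text())
    for token, version in cache.items():
        print(f"{token}: {version}")
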
def randomize_version_number(version: str) -> str:
Randomize a version number so that it's in between the previous major version and the current one.
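
A small illustrative sketch (the version string below is made up): the major component is drawn from either the previous or the current major version, and every later component is drawn from 0 up to its original value.

    from whosyouragent.whosyouragent import randomize_version_number

    # Each call re-rolls the components, e.g. "108.0.3127.56" or "109.0.0.102".
    for _ in range(3):
        print(randomize_version_number("109.0.5414.120"))
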
def get_agent() -> str:
Build and return a user agent string.
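
Because the browser, platform, and version components are re-randomized on every call, calling get_agent() repeatedly rotates the agent string on its own. A tiny sketch, again assuming a populated browserVersions.json:

    from whosyouragent.whosyouragent import get_agent

    # Successive calls mix Firefox-, Chromium-, and Safari-style agent strings.
    for _ in range(5):
        print(get_agent())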