Skip to content

Commit 3d92088

Browse files
Add shit ton of sites, Add preview gif, Fix Evaluate site
feat: fully working bayfiles feat: fully working filechan feat: fully working hotfile feat: fully working letsupload feat: fully working lolabits feat: fully working megaupload feat: somewhat working mixdrop feat: fully working myfile feat: fully working openload feat: fully working oshi feat: fully working rapidshare feat: fully working shareonline feat: fully working upvid feat: fully working vshare refactor: add timeout to availability checker fix: logger printing half of the line on new line
1 parent 3a98222 commit 3d92088

25 files changed

+784
-45
lines changed

Build Release.bat

-1
This file was deleted.

Build Releases.bat

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
".\.env\Scripts\activate" && pyinstaller --onedir --icon "icon.ico" --console --name "Mul-Tor" --upx-dir "Z:\Projects\Python\### UPX ###" --add-data="./.env/Lib/site-packages/grapheme/data/*;grapheme/data/" --hidden-import "plyer.platforms.win.filechooser" main.py && rmdir /s /q .\build && rmdir /s /q .\__pycache__ && del ".\Mul-Tor.spec" && pyinstaller --onefile --icon "icon.ico" --console --name "Mul-Tor" --upx-dir "Z:\Projects\Python\### UPX ###" --add-data="./.env/Lib/site-packages/grapheme/data/*;grapheme/data/" --hidden-import "plyer.platforms.win.filechooser" main.py && rmdir /s /q .\build && rmdir /s /q .\__pycache__ && del ".\Mul-Tor.spec"

README.md

+34-18
Original file line numberDiff line numberDiff line change
@@ -24,44 +24,42 @@ Random User Agent | Low | Finished
2424
Check Website Availability | High | Finished
2525
Progress Bar | High | Finished*¹
2626
Auto Updater | High | Finished*³
27-
*Auto Compress to Max Size* | *Low* | *Planned*
28-
*Webhook Sender* | *Medium* | *Planned*
2927

3028
<br />
3129

3230
### Currently supported sites:
3331
Site | API | Account Required | Max File Size
3432
--- | --- | --- | ---
35-
[PixelDrain][1] | [Yes][2] | No | 20 GB
3633
[GoFile][7] | [Yes][8] | No | ∞
34+
[PixelDrain][1] | [Yes][2] | No | 20 GB
3735
[Anonfiles][3] | [Yes][4] | No | 20 GB
36+
[Bayfiles][5] | [Yes][6] | No | 20 GB
37+
[OpenLoad][49] | [Yes][50] | No | 20 GB
38+
[HotFile][51] | [Yes][52] | No | 20 GB
39+
[LolaBits][53] | [Yes][54] | No | 20 GB
40+
[RapidShare][55] | [Yes][56] | No | 20 GB
41+
[UpVid][57] | [Yes][58] | No | 20 GB
42+
[vShare][59] | [Yes][60] | No | 20 GB
43+
[LetsUpload][15] | [Yes][16] | No | 20 GB
44+
[ShareOnline][61] | [Yes][62] | No | 20 GB
45+
[MegaUpload][63] | [Yes][64] | No | 20 GB
46+
[MyFile][65] | [Yes][66] | No | 20 GB
47+
[FileChan][47] | [Yes][48] | No | 20 GB
48+
[Oshi][9] | No | No | 5 GB
49+
[HexUpload][23] | [Yes][24] | [Yes][25] | 2 GB
3850

3951
<br />
4052

4153
### Planned Sites:
4254
Site | API | Account Required | Max File Size
4355
--- | --- | --- | ---
44-
[Bayfiles][5] | [Yes][6] | No | 20 GB
45-
[Oshi][9] | No | No | 5 GB
46-
[LetsUpload][15] | [Yes][16] | No | 20 GB
4756
[BowFile][17] | [Yes][18] | [Yes][19] | 5 GB
4857
[1CloudFile][20] | [Yes][21] | [Yes][22] | 5 GB
49-
[HexUpload][23] | [Yes][24] | [Yes][25] | 2 GB
5058
[SendSpace][26] | [Yes][27] | [Yes][28] | 300 MB
51-
[Mixdrop][29] | [Yes][30] | [Yes][30] | ∞
5259
[Uptobox][33] | [Yes][34] | [Yes][35] | 200 GB
5360
[Doodrive][36] | [Yes][37] | [Yes][38] | 5 GB
5461
[Filemail][42] | [Yes][43] | [No/Yes*²][44] | 5 GB
55-
[FileChan][47] | [Yes][48] | No | 20 GB
56-
57-
<br />
58-
59-
### Issue Sites:
60-
Site | API | Account Required | Reason
61-
--- | --- | --- | ---
62-
[1Fichier][31] | [Yes][32] | Yes | No Free API Access
63-
[TransferNow][39] | [Yes][40] | [Yes][41] | No Free API Access
64-
[Files.fm][45] | [Yes][46] | Yes | No Free API Access
62+
[Mixdrop][29] | [Yes][30] | [Yes][30] | ∞
6563

6664
<br />
6765

@@ -132,6 +130,24 @@ Site | API | Account Required | Reason
132130
[46]: https://files.fm/api.php
133131
[47]: https://filechan.org/
134132
[48]: https://filechan.org/docs/api
133+
[49]: https://openload.cc/
134+
[50]: https://openload.cc/docs/api
135+
[51]: https://hotfile.io/
136+
[52]: https://hotfile.io/docs/api
137+
[53]: https://lolabits.se/
138+
[54]: https://lolabits.se/docs/api
139+
[55]: https://rapidshare.nu/
140+
[56]: https://rapidshare.nu/docs/api
141+
[57]: https://upvid.cc/
142+
[58]: https://upvid.cc/docs/api
143+
[59]: https://vshare.is/
144+
[60]: https://vshare.is/docs/api
145+
[61]: https://share-online.is/
146+
[62]: https://share-online.is/docs/api
147+
[63]: https://megaupload.nz/
148+
[64]: https://megaupload.nz/docs/api
149+
[65]: https://myfile.is/
150+
[66]: https://myfile.is/docs/api
135151

136152
<br />
137153

main.py

+38-4
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010

1111
from modules import *
1212

13-
version = "1.0.0"
13+
version = "1.1.0"
1414
owd = os.getcwd()
1515
platform = sys.platform
1616

@@ -42,16 +42,20 @@ def startup():
4242
Config_Manager.Writer()
4343
config = Config_Manager.Reader()
4444

45+
proxies_enabled = config.get("useProxies", False)
46+
random_ua_enabled = config.get("randomUserAgent", False)
47+
check_for_updates_enabled = config.get("checkForUpdates", False)
48+
4549
# Check if the user wants to use proxies and get them
46-
if config["useProxies"] == True:
50+
if proxies_enabled == True:
4751
print(colored("Fetching Fresh Proxies...", "yellow"), end='\r')
4852
proxy_list = ProxyScraper.Scraper()
4953
print(colored(f"Fetched {len(proxy_list)} Proxies. ", "green"))
5054
print("")
5155
else:
5256
proxy_list = []
5357

54-
if config["randomUserAgent"] == True:
58+
if random_ua_enabled == True:
5559
if os.path.exists("user_agents.json"):
5660
ua_list = UserAgentManager.Reader()
5761
else:
@@ -61,7 +65,7 @@ def startup():
6165

6266
available = Availability_Checker.Evaluate(config, proxy_list, ua_list)
6367

64-
if config["checkForUpdates"] == True:
68+
if check_for_updates_enabled == True:
6569
AutoUpdate.Checker(proxy_list, ua_list)
6670

6771
return config, available, proxy_list, ua_list
@@ -115,6 +119,35 @@ def selection(config, available, user_agents_list, proxy_list=""):
115119
output = GoFile.Uploader(file, proxy_list, user_agents_list)
116120
if site == "AnonFiles":
117121
output = AnonFiles.Uploader(file, proxy_list, user_agents_list)
122+
if site == "BayFiles":
123+
output = BayFiles.Uploader(file, proxy_list, user_agents_list)
124+
if site == "OpenLoad":
125+
output = OpenLoad.Uploader(file, proxy_list, user_agents_list)
126+
if site == "HotFile":
127+
output = HotFile.Uploader(file, proxy_list, user_agents_list)
128+
if site == "LolaBits":
129+
output = LolaBits.Uploader(file, proxy_list, user_agents_list)
130+
if site == "RapidShare":
131+
output = RapidShare.Uploader(file, proxy_list, user_agents_list)
132+
if site == "UpVid":
133+
output = UpVid.Uploader(file, proxy_list, user_agents_list)
134+
if site == "vShare":
135+
output = vShare.Uploader(file, proxy_list, user_agents_list)
136+
if site == "LetsUpload":
137+
output = LetsUpload.Uploader(file, proxy_list, user_agents_list)
138+
if site == "ShareOnline":
139+
output = ShareOnline.Uploader(file, proxy_list, user_agents_list)
140+
if site == "MegaUpload":
141+
output = MegaUpload.Uploader(file, proxy_list, user_agents_list)
142+
if site == "MyFile":
143+
output = MyFile.Uploader(file, proxy_list, user_agents_list)
144+
if site == "FileChan":
145+
output = FileChan.Uploader(file, proxy_list, user_agents_list)
146+
if site == "Oshi":
147+
output = Oshi.Uploader(file, proxy_list, user_agents_list)
148+
149+
if site == "MixDrop":
150+
output = MixDrop.Uploader(file, proxy_list, user_agents_list, config)
118151

119152
status = output.get("status", "")
120153
file_site = output.get("site", "")
@@ -167,6 +200,7 @@ def selection(config, available, user_agents_list, proxy_list=""):
167200
# TODO: Multiply time and space by 12 then divide by 25 for accurate quantum physics inside of VS Code
168201
# TODO: add a working progress bar to each upload. Possible solution https://stackoverflow.com/questions/13909900/progress-of-python-requests-post
169202
# TODO: Find a way to change the colors for the selection windows
203+
# TODO: Finish this so i can start learning Rust
170204
171205
"""
172206

media/preview.gif

283 KB
Loading

modules/__init__.py

+19-2
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,28 @@
1-
from .pixeldrain import PixelDrain
21
from .user_agents import UserAgentManager
32
from .pretty_print import *
43
from .availability_checker import Availability_Checker
54
from .site_data import Site_Data_CLSS, sites_data_dict
65
from .proxy_scraper import ProxyScraper
76
from .config_manager import Config_Manager
87
from .logger import Logger
8+
from .auto_update import AutoUpdate
9+
10+
11+
# Here are all modules for the sites that are supported
12+
from .pixeldrain import PixelDrain
913
from .gofile import GoFile
1014
from .anonfiles import AnonFiles
11-
from .auto_update import AutoUpdate
15+
from .bayfiles import BayFiles
16+
from .openload import OpenLoad
17+
from .lolabits import LolaBits
18+
from .vshare import vShare
19+
from .hotfile import HotFile
20+
from .rapidshare import RapidShare
21+
from .upvid import UpVid
22+
from .letsupload import LetsUpload
23+
from .shareonline import ShareOnline
24+
from .megaupload import MegaUpload
25+
from .myfile import MyFile
26+
from .filechan import FileChan
27+
from .oshi import Oshi
28+
from .mixdrop import MixDrop

modules/availability_checker.py

+19-6
Original file line numberDiff line numberDiff line change
@@ -14,35 +14,44 @@
1414
class Availability_Checker:
1515

1616
def Evaluate(config, proxy_list, ua_list):
17+
blacklist = []
18+
for blacklisted_site in config["blacklist"]:
19+
blacklist.append(blacklisted_site.lower())
1720
for site in sites_data_dict:
18-
if sites_data_dict[site]["apiKey"] == False and not site in config["blacklist"]:
21+
# TODO: does not get api keys so fix shit shit cunt
22+
if sites_data_dict[site]["apiKey"] == False and not site.lower() in blacklist:
1923
ping_sites.append(site)
20-
elif sites_data_dict[site]["apiKey"] == True and config["api_keys"][site]["apiKey"] != "" and not site in config["blacklist"]:
24+
elif sites_data_dict[site]["apiKey"] == True and config.get("api_keys", {}).get(site.lower(), {}).get("apiKey", "") != "" and not site.lower() in blacklist:
2125
ping_sites.append(site)
2226
else:
2327
pass
28+
print(colored("Checking available sites...", "green"), end='\r')
2429

2530
for site in ping_sites:
2631
try:
2732
ua = random.choice(ua_list)
2833
url = sites_data_dict[site]["api_url"]
2934

3035
if proxy_list == []:
31-
ping = requests.get(url, headers={"User-Agent": ua})
36+
ping = requests.get(url, headers={"User-Agent": ua}, timeout=5)
3237
else:
3338
proxy = random.choice(proxy_list)
34-
ping = requests.get(url, headers={"User-Agent": ua}, proxies=proxy)
35-
39+
ping = requests.get(url, headers={"User-Agent": ua}, proxies=proxy, timeout=5)
3640

3741
if ping.status_code == 200:
3842
available_sites.append(site)
3943
else:
4044
# Construct and save low level error
4145
error_str = f"Site ping for {site} Failed! Error Code {ping.status_code}"
4246
Logger.log_event(error_str, extra=str(ping))
47+
except (requests.exceptions.Timeout, requests.exceptions.ConnectionError) as e:
48+
# Construct the error
49+
error_str = f"An error occured while checking the sites! Please report this. Exception: {e}"
50+
Logger.log_event(error_str, extra=str(ping))
51+
sleep(5)
4352
except Exception as e:
4453
# Construct and print the error
45-
error_str = f"An error occured while checking the sites! Please report this. Exception: {e}"
54+
error_str = f"An error occured while checking the {site}! Please report this. Exception: {e}"
4655
print(colored(f"{error} {error_str}"))
4756
Logger.log_event(error_str, extra=str(ping))
4857
sleep(5)
@@ -54,4 +63,8 @@ def Evaluate(config, proxy_list, ua_list):
5463
Logger.log_event(error_str)
5564
sleep(5)
5665

66+
print(f"{colored(len(available_sites), 'yellow')} {colored('Available Sites ', 'green')}")
67+
68+
print("")
69+
5770
return available_sites

modules/bayfiles.py

+38
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
import requests
import os
import random

from .site_data import Site_Data_CLSS, sites_data_dict
from .pretty_print import *

site = "BayFiles"

class BayFiles:

    def Uploader(file, proxy_list, user_agents):
        """Upload a file to BayFiles and return a result dict.

        Parameters:
            file: path of the file to upload (Windows-style path; the display
                name is taken from the part after the last backslash).
            proxy_list: list of requests-style proxy mappings; an empty list
                means upload directly without a proxy.
            user_agents: list of User-Agent strings; one is chosen at random.

        Returns a dict with "status" set to "ok", "size_error" or "error",
        plus the file name, site name and (on success) the short file URL.
        """
        # Pre-bind the names used by the except handler so a failure before
        # they are assigned (e.g. os.stat on a missing file) cannot raise a
        # NameError that masks the original exception.
        file_name = ""
        req = None
        try:
            ua = random.choice(user_agents)
            upload_url = sites_data_dict[site]["url"]
            size_limit = sites_data_dict[site]["size_limit_human"]
            size_unit = sites_data_dict[site]["size_unit"]

            file_size = os.stat(file).st_size
            file_name = file.rsplit("\\")[-1]
            # Changed from 255 to 240 as an additional safety net.
            file_name = (file_name[:240] + '..') if len(file_name) > 240 else file_name

            calc_size = Site_Data_CLSS.size_unit_calc(site, file_size)

            if calc_size == "OK":
                # Context manager guarantees the handle is closed; the previous
                # version leaked an open file descriptor on every upload.
                with open(str(file), 'rb') as file_handle:
                    files_data = {'file': (os.path.basename(file), file_handle, 'multipart/form-data')}

                    if proxy_list == []:
                        req = requests.post(url=upload_url, files=files_data, headers={"User-Agent": ua}).json()
                    else:
                        req = requests.post(url=upload_url, files=files_data, headers={"User-Agent": ua}, proxies=random.choice(proxy_list)).json()
                return {"status": "ok", "file_name": file_name, "file_url": req.get("data").get("file").get("url").get("short"), "site": site}
            else:
                return {"status": "size_error", "file_name": file_name, "site": site, "exception": "SIZE_ERROR", "size_limit": f"{str(size_limit)} {size_unit}"}

        except Exception as e:
            # req is None when the request never ran; callers log it as-is.
            return {"status": "error", "file_name": file_name, "site": site, "exception": str(e), "extra": req}

modules/config_manager.py

+2-7
Original file line numberDiff line numberDiff line change
@@ -15,21 +15,16 @@ def Reader():
1515
def Writer():
1616
try:
1717
template = {
18-
"READ_THIS_LINE": "If you are unsure on what to do here visit the wiki for further explanation for each setting. https://github.com/Official-Husko/mul-tor/wiki",
1918
"checkForUpdates": True,
2019
"useProxies": False,
21-
"useWebhook": False,
2220
"saveLinksToFile": True,
2321
"randomUserAgent": True,
2422
"api_keys": {
2523
"example": {
26-
"apiKey": ""
24+
"apiKey": "",
25+
"email": ""
2726
}
2827
},
29-
"webhook_data": {
30-
"webhook_url": "",
31-
"webhook_body": {}
32-
},
3328
"blacklist": ["SomeSiteName", "CheapGoFileCopy", "HotSinglesInYourArea"]
3429
}
3530
with open("config.json", "w") as cfg_file:

modules/filechan.py

+38
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
import requests
import os
import random

from .site_data import Site_Data_CLSS, sites_data_dict
from .pretty_print import *

site = "FileChan"

class FileChan:

    def Uploader(file, proxy_list, user_agents):
        """Upload a file to FileChan and return a result dict.

        Parameters:
            file: path of the file to upload (Windows-style path; the display
                name is taken from the part after the last backslash).
            proxy_list: list of requests-style proxy mappings; an empty list
                means upload directly without a proxy.
            user_agents: list of User-Agent strings; one is chosen at random.

        Returns a dict with "status" set to "ok", "size_error" or "error",
        plus the file name, site name and (on success) the short file URL.
        """
        # Pre-bind the names used by the except handler so a failure before
        # they are assigned (e.g. os.stat on a missing file) cannot raise a
        # NameError that masks the original exception.
        file_name = ""
        req = None
        try:
            ua = random.choice(user_agents)
            upload_url = sites_data_dict[site]["url"]
            size_limit = sites_data_dict[site]["size_limit_human"]
            size_unit = sites_data_dict[site]["size_unit"]

            file_size = os.stat(file).st_size
            file_name = file.rsplit("\\")[-1]
            # Changed from 255 to 240 as an additional safety net.
            file_name = (file_name[:240] + '..') if len(file_name) > 240 else file_name

            calc_size = Site_Data_CLSS.size_unit_calc(site, file_size)

            if calc_size == "OK":
                # Context manager guarantees the handle is closed; the previous
                # version leaked an open file descriptor on every upload.
                with open(str(file), 'rb') as file_handle:
                    files_data = {'file': (os.path.basename(file), file_handle, 'multipart/form-data')}

                    if proxy_list == []:
                        req = requests.post(url=upload_url, files=files_data, headers={"User-Agent": ua}).json()
                    else:
                        req = requests.post(url=upload_url, files=files_data, headers={"User-Agent": ua}, proxies=random.choice(proxy_list)).json()
                return {"status": "ok", "file_name": file_name, "file_url": req.get("data").get("file").get("url").get("short"), "site": site}
            else:
                return {"status": "size_error", "file_name": file_name, "site": site, "exception": "SIZE_ERROR", "size_limit": f"{str(size_limit)} {size_unit}"}

        except Exception as e:
            # req is None when the request never ran; callers log it as-is.
            return {"status": "error", "file_name": file_name, "site": site, "exception": str(e), "extra": req}

0 commit comments

Comments
 (0)