Skip to content

Commit

Permalink
Add AutoUpdater (because I update so often); update modules
Browse files Browse the repository at this point in the history
  • Loading branch information
InvalidAccount committed Nov 26, 2022
1 parent bbe6fbb commit 981352a
Show file tree
Hide file tree
Showing 2 changed files with 123 additions and 40 deletions.
152 changes: 117 additions & 35 deletions generator.py
Original file line number Diff line number Diff line change
@@ -1,27 +1,107 @@
import asyncio
import sys
import socket as dsocket
from asyncio import sleep, new_event_loop, run, gather, create_task, set_event_loop
from contextlib import suppress
from dataclasses import dataclass
from json import load
from random import choice
from os import path, getcwd, listdir, chdir, remove, system, mkdir
from os.path import exists
from random import choice, randrange
from re import findall
import os
from shutil import copytree, rmtree, copyfile
from ssl import create_default_context
from sys import exit
from typing import IO
from urllib.parse import urlparse
from zipfile import ZipFile

import aiohttp
import colorama
from colorama import Fore
from aioconsole import aprint, ainput
from aiofiles import open as openfile
from aiohttp import ClientSession
from bs4 import BeautifulSoup
from colorama import Fore, init
from requests import get

colorama.init()
init()


class AutoUpdater:
    """Self-updater for the script.

    Compares the running version against the newest GitHub tag of
    MatrixTM/MultiAccountGenerator and, when they differ, downloads the
    release zipball into ``temp/``, unpacks it, and copies it over the
    current installation.
    """

    def __init__(self, version):
        # version: the running script's version string (e.g. "v1.1").
        self.version = version
        # NOTE(review): network call in the constructor — construction
        # fails with an exception when offline.
        self.latest = self.get_latest()
        # Installation directory the update is copied back into.
        self.this = getcwd()
        # Where the downloaded release archive is saved.
        self.file = "temp/latest.zip"
        # Random suffix avoids clashing with leftovers of a previous run.
        self.folder = f"temp/latest_{randrange(1_000_000, 999_999_999)}"

    @dataclass
    class latest_data:
        # Newest tag name (e.g. "v1.2") and the redirect-resolved URL of
        # its source zipball.
        version: str
        zip_url: str

    def get_latest(self):
        """Return latest_data describing the newest tag on GitHub.

        The second ``get(...)`` follows GitHub's redirect so ``zip_url``
        holds the final (codeload) URL.
        NOTE(review): that request appears to download the whole zipball
        just to learn the final URL — confirm this is intended.
        """
        rjson = get("https://api.github.com/repos/MatrixTM/MultiAccountGenerator/tags").json()
        return self.latest_data(version=rjson[0]["name"], zip_url=get(rjson[0]["zipball_url"]).url)

    @staticmethod
    def download(host, dPath, filename):
        """Fetch ``https://<host><dPath>`` over a raw TLS socket into *filename*.

        Hand-rolled HTTP/1.1 GET: reads the response one byte at a time
        until the blank line that ends the headers, parses Content-Length,
        then reads exactly that many body bytes and writes them to disk.
        """
        with dsocket.socket(dsocket.AF_INET, dsocket.SOCK_STREAM) as sock:
            context = create_default_context()
            # SNI / certificate hostname is pinned to api.github.com even
            # though the zipball is served from another host.
            # NOTE(review): confirm certificate validation passes for the
            # redirected (codeload) host — presumably a *.github.com cert.
            with context.wrap_socket(sock, server_hostname="api.github.com") as wrapped_socket:
                wrapped_socket.connect((dsocket.gethostbyname(host), 443))
                wrapped_socket.send(
                    f"GET {dPath} HTTP/1.1\r\nHost:{host}\r\nAccept: text/html,application/xhtml+xml,application/xml;q=0.9,file/avif,file/webp,file/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9\r\n\r\n".encode())

                resp = b""
                # Stops once the tail of resp is the header-terminating
                # CRLFCRLF (only the first three of the last four bytes are
                # compared; the fourth can only be the trailing "\n").
                while resp[-4:-1] != b"\r\n\r":
                    resp += wrapped_socket.recv(1)
                else:
                    # while/else always runs here (no break in the loop):
                    # decode the headers and pull out Content-Length.
                    resp = resp.decode()
                    content_length = int(
                        "".join([tag.split(" ")[1] for tag in resp.split("\r\n") if "content-length" in tag.lower()]))
                _file = b""
                # Read exactly Content-Length bytes of body.
                while content_length > 0:
                    data = wrapped_socket.recv(2048)
                    if not data:
                        # Peer closed early: save what we have.
                        print("EOF")
                        break
                    _file += data
                    content_length -= len(data)
                with open(filename, "wb") as file:
                    file.write(_file)

    def update(self):
        """Download and install the latest release if a newer tag exists.

        Replaces the files of the current installation in-place, removes
        the temp working area, and exits so the user re-runs the new code.
        """
        if not self.version == self.latest.version:
            # Start from a clean temp dir (conditional-expression used
            # purely for its side effect).
            rmtree("temp") if exists("temp") else ""
            mkdir("temp")
            print("Updating Script...")
            parsed = urlparse(self.latest.zip_url)
            self.download(parsed.hostname, parsed.path, self.file)
            ZipFile(self.file).extractall(self.folder)
            # Debug output.
            print(exists(self.folder))
            # NOTE(review): this checks the entry name relative to the CWD,
            # not relative to self.folder — likely always False; verify.
            print(exists(listdir(self.folder)[0]))
            # GitHub zipballs contain a single top-level directory; step
            # into it so listdir() below sees the repo contents.
            chdir("{}/{}".format(self.folder, listdir(self.folder)[0]))
            for files in listdir():
                if path.isdir(files):
                    with suppress(FileNotFoundError):
                        rmtree("{}/{}".format(self.this, files))
                    copytree(files, "{}/{}".format(self.this, files))
                else:
                    with suppress(FileNotFoundError):
                        remove("{}/{}".format(self.this, files))
                    copyfile(files, "{}/{}".format(self.this, files))
            # NOTE(review): relative path assumes CWD is exactly three
            # levels below the install dir (temp/<folder>/<repo-root>) —
            # fragile; an absolute path based on self.this would be safer.
            rmtree("../../../temp")
            exit("Run Script Again!")
            return
        print("Script is up to date!")


class Generator:
def __init__(self):
self.version = "v1.1"
AutoUpdater(self.version).update()
self.config: dict = load(open('config.json'))
self.output = open(self.config["output"], "a+")
self.tasks = []
self.useless_value = 0
self.colors = [
self.output: IO = open(self.config["output"], "a+")
self.tasks: list = []
self.colors: list = [
Fore.LIGHTGREEN_EX,
Fore.LIGHTBLACK_EX,
Fore.LIGHTMAGENTA_EX,
Expand All @@ -37,8 +117,8 @@ async def make_beautiful(self, text: str, reset=True) -> str:

return "".join(tmp) if not reset else "".join(tmp) + Fore.RESET

async def banner(self):
os.system('cls||clear')
async def banner(self) -> None:
system('cls||clear')
print(await self.make_beautiful("""
__ __ _ _ _ _____
| \/ | | | | (_)/ ____|
Expand All @@ -48,46 +128,48 @@ async def banner(self):
|_| |_|\__,_|\__|_|_|\_____|\___|_| |_|
#MahsaAmini\n\n""", False))

async def run(self):
async def run(self) -> None:
await self.banner()
for i in range(len(self.config["services"])):
await aprint(f"{i} - {self.config['services'][i]}")
await aprint("69 - exit\n")
inp = await ainput("Select service to scrape url >> ")
sys.exit(1) if inp == "69" else inp # Exit if exit selected
exit(1) if inp == "69" else inp # Exit if exit selected
if int(inp) <= len(self.config['services']):
inp = self.config['services'][int(inp)] # Change number to Name
await self.banner()
else:
await ainput("Select valid Item\nPress Enter to Exit")
sys.exit(1)
exit(1)
await aprint("Creating tasks")
for _ in range(self.config["thread"]):
for i in range(self.config["thread"]):
self.tasks.append(
asyncio.create_task(self.generate(self.config['url'][inp], self.config['selector'][inp] or inp)))
self.useless_value += 1
await aprint("%s Task Created!" % self.useless_value, end="\r")
await asyncio.sleep(.1)
create_task(self.generate(i, self.config['url'][inp], self.config['selector'][inp] or inp)))
await aprint("%s Task Created!" % i, end="\r")
await sleep(.1)
print()

await asyncio.gather(*self.tasks)
await gather(*self.tasks)

async def generate(self, url: str, selector: str) -> None:
async def generate(self, worker, url: str, selector: str) -> None:
while True:
with suppress(Exception):
with open(self.config["output"], "a+") as file:
session = aiohttp.ClientSession()
request = await session.post(choice(url), data={"gen": ""}, timeout=self.config["request-timeout"] or 5)
await session.close()
outUrl = \
findall("http://.*", str(BeautifulSoup(await request.text(), "html.parser").select(selector)))[0]
await aprint(await self.make_beautiful(outUrl))
file.write("%s" % outUrl)
async with openfile(self.config["output"], "a+") as file:
async with ClientSession() as session:
request = await session.post(choice(url), data={"gen": ""},
timeout=self.config["request-timeout"] or 5)
outUrl = \
findall("http://.*",
str(BeautifulSoup(await request.text(), "html.parser").select(selector)))[
0]
await aprint("%s[%s] " % (Fore.LIGHTCYAN_EX, worker) + await self.make_beautiful(outUrl))
await file.write("%s" % outUrl)


if __name__ == '__main__':
    # asyncio.run() creates, runs, and closes its own event loop, so the
    # previous manual new_event_loop()/set_event_loop() pair was dead code.
    try:
        run(Generator().run())
    except KeyboardInterrupt:
        # Quiet exit on Ctrl-C.
        pass
11 changes: 6 additions & 5 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
colorama==0.4.6
bs4~=0.0.1
beautifulsoup4~=4.11.1
httpx~=0.23.0
asyncio~=3.4.3
aioconsole~=0.5.1
aiofiles~=22.1.0
aiohttp~=3.8.3
aioconsole==0.5.1
bs4~=0.0.1
beautifulsoup4~=4.11.1
colorama~=0.4.6
requests~=2.28.1

0 comments on commit 981352a

Please sign in to comment.