"""Import profiles from file to db."""

from getpass import getpass
from os import chdir
from os.path import abspath, dirname
from random import randint
from time import sleep

import base64
import json
import requests
from sshtunnel import open_tunnel
import paramiko

# Make fb-scraper the working directory
chdir(dirname(dirname(abspath(__file__))))

from arangodb import arango_connect
import config
from helpers import now, nowstamp

def mullvad_servers_to_db(db):
    """Read the file with servers and export it to the db. Only used if the servers at Mullvad have been updated."""
    with open("data/servers.txt") as f:
        for line in f.readlines():
            if "@" in line:
                line = line.strip()
                city = line[: line.find("@")].strip()

            if "WireGuard" in line:  # "au", "ca"  # for later, when needed
                line = line.strip()
                country_short = line[:2]
                server_id = line[: line.find("-")]
                city_short = city[city.find("(") + 1 : city.find(")")]
                server_name = [country_short, city_short, server_id + "-wireguard"]
                server = {
                    "_key": server_id,
                    "country": country_short,
                    "city": city,
                    "id": server_id,
                    "server": server_id + "-wg.socks5.mullvad.net:1080",
                    "server_connect": server_name,
                }
                db.insert_document("servers", server)
                sleep(0.1)
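
# Usage sketch (assumptions noted): the expected layout of data/servers.txt is inferred from
# the slicing above -- city lines containing "@" with the short city code in parentheses, and
# WireGuard lines whose first two characters are the country code and whose text before the
# first "-" is the server id. A hypothetical one-off refresh would be:
#
#     db = arango_connect(pwd, username="Accs")
#     mullvad_servers_to_db(db)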


def servers_to_db(db, server_collection, socks="socks5"):
    """Read the file with servers and export it to the db."""
    with open("socks5free.csv") as f:
        for line in f.readlines():
            server = line.split(";")[1]
            db.insert_document(
                server_collection,
                {
                    "_key": server,
                    "proxies": {
                        "https": f"{socks}://{server}",
                        "http": f"{socks}://{server}",
                    },
                    "country": False,
                },
            )
            sleep(0.1)
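
# Assumed layout of socks5free.csv (inferred from split(";")[1] above): semicolon-separated
# rows with the proxy "host:port" in the second column, e.g. a made-up row
# "42;203.0.113.7:1080;us" would insert a document keyed "203.0.113.7:1080".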


def used_emails(db, collection):
    """Return the email addresses already stored in a collection."""
    cursor = db.aql.execute(
        """
        FOR doc IN @@col
            RETURN doc.email
        """,
        bind_vars={"@col": collection},
    )
    return [email for email in cursor]


def to_accs(db, data, info, profiles, vendor, accs="accs"):
    """Take profiles from a purchased file and put them into accs."""
    used_accs = used_emails(db, accs)
    used_profiles = used_emails(db, profiles)

    for profile in data:
        if len(profile) < 3:
            print("\nDone.\n")
            break

        doc = {}
        doc["vendor"] = vendor
        doc["created"] = now()
        if "email" in info:
            doc["email"] = profile[info.index("email")]
        elif "login" in info:
            doc["email"] = profile[info.index("login")]
        if doc["email"] in used_accs or doc["email"] in used_profiles:
            continue
        doc["name"] = doc["email"]
        # The vendor specs in __main__ call this column "password"; "pwd" is kept as a fallback
        if "password" in info:
            doc["pwd"] = profile[info.index("password")]
        elif "pwd" in info:
            doc["pwd"] = profile[info.index("pwd")]
        else:
            doc["pwd"] = ""
        if "url" in info:
            doc["id"] = profile[info.index("url")].replace("https;", "https:")
        if "useragent" in info:
            doc["useragent"] = profile[info.index("useragent")].strip()
        else:
            doc["useragent"] = config.user_agent
        if "cookie" in info:
            cookies = profile[info.index("cookie")]

            if "c_user=" in cookies:
                # Plain "name=value; name=value" cookie string
                cookie = {}
                for c in cookies.split(";"):
                    cookie[c[: c.find("=")].strip()] = c[c.find("=") + 1 :].strip()
            else:
                # Base64-encoded JSON with cookie objects
                cookies_base64 = cookies.strip()  # .strip('=')
                # print()
                # print(cookies_base64)
                # print()
                cookies64_bytes = cookies_base64.encode("ascii")
                cookies_bytes = base64.b64decode(cookies64_bytes)
                # exit()
                cookies_str = (
                    cookies_bytes.decode("ascii")
                    .replace("'", '"')
                    .replace("False", "false")
                    .replace("True", "true")
                )
                try:
                    cookies = json.loads(cookies_str)
                except json.JSONDecodeError:
                    for i in profile:
                        print(i)
                    exit()
                cookie = {}
                if vendor == "159":
                    for c in cookies["cookies"]:
                        cookie[c["name"]] = c["value"]
                else:
                    for c in cookies:
                        name = c["name"]
                        del c["name"]
                        cookie[name] = c["value"]
            doc["cookie"] = cookie
        else:
            cookie = {}
        if "birthday" in info:
            doc["birthday"] = profile[info.index("birthday")]
        if "token" in info:
            doc["token"] = profile[info.index("token")].strip()
        db.insert_document(accs, doc)
        sleep(0.1)

    # db.collection("from_market").remove  # no-op as written; the from_market doc is deleted in __main__
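
# Worked example (made-up data): with info = "login:password:cookie:token".split(":") and a
# row "user@example.com|hunter2|<base64 cookie blob>|EAAB...", to_accs builds a doc whose
# email/name is "user@example.com", pwd is "hunter2", cookie is the decoded cookie dict and
# token is "EAAB...", then inserts it into the accs collection.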


def used_servers(profiles="profiles"):
    """Return the _key of every document in the profiles collection (uses the module-level db)."""
    cursor = db.aql.execute(
        """
        FOR doc IN @@col
            RETURN doc._key
        """,
        bind_vars={"@col": profiles},
    )
    servers = [doc for doc in cursor]
    return servers


def webshare_proxies():
    """Get list of webshare-proxies."""
    proxielist = requests.get(
        "https://proxy.webshare.io/proxy/list/download/sompzungcbajbeqmxoopddsthzarqlewjgraicog/-/http/port/direct/"
    )

    proxies = {}
    count = 0

    for line in proxielist.text.split("\n"):
        count += 1
        proxie = line.replace("\r", "").split(":")
        if proxie != [""]:
            server = proxie[0]
            port = proxie[1]
            proxies[count] = {"server": server, "port": port}

    for _, proxie in proxies.items():
        # requests-style proxy mapping for this server (renamed from "proxies"
        # to avoid shadowing the dict being iterated)
        proxy_urls = {
            "https": "socks5://xigpxrzr:ezgjcwr8lonj@{server}:{port}".format(
                server=proxie["server"], port=proxie["port"]
            ),
            "http": "socks5://xigpxrzr:ezgjcwr8lonj@{server}:{port}".format(
                server=proxie["server"], port=proxie["port"]
            ),
        }

        try:
            db.insert_document(
                "servers_webshare",
                {"_key": proxie["server"], "proxies": proxy_urls, "country": "us"},
            )
        except Exception:
            # Most likely the server is already in the collection
            pass
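
# Assumption (inferred from the split(":") above): the webshare download URL returns one
# "host:port" pair per line, e.g. "198.51.100.23:8080" (made-up address). Calling
# webshare_proxies() after the db connection exists refreshes the servers_webshare collection.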


if __name__ == "__main__":

    vendors = {
        "1152": {"info": "login:password:cookie:token", "sep": "|"},
        "1091": {
            "info": "login:password:birthday:email:email_password:useragent:cookie",
            "sep": ":",
        },
        "1113": {
            "info": "login:password:mail:password:birthday:country:useragent:token:cookie",
            "sep": "|",
        },
        "926": {
            "info": "login:password:email:email password:birthday:token:cookie",
            "sep": "|",
        },
        # '1113': {'info': 'login:mail:password:emailpassword:birthday:useragent:token:cookie', 'sep': '|'},
        "159": {"info": "login:password:mail:email password:birthday:id", "sep": ":"},
        # '159': {'info': 'login:password:birthday:id:cookie', 'sep': ':'},
    }

    pwd = "concert-hangar-mirth-salk-DECAL"
    db = arango_connect(pwd, username="Accs")
    # Passphrase for the SSH private key used by the tunnel below
    # (prompting for it via getpass is an assumption)
    pwd_key = getpass("SSH key password: ")

    ###############################
    ### Variables to configure ####
    country = "us"
    profiles = "profiles_webshare"  # e.g. profiles_free
    servers = "servers_webshare"
    ###############################

    accs = "accs"
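
    # Each from_market document is assumed (from the i["vendor"] / i["data"] accesses below)
    # to look like {"vendor": "1152", "data": "<raw purchased rows, one profile per line>"};
    # it is parsed, pushed into accs, and then deleted.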

    for i in db.collection("from_market").all():
        vendor = i["vendor"].upper()
        sep = vendors[vendor]["sep"]
        info = vendors[vendor]["info"].split(":")
        rows = i["data"].split("\n")

        data = []
        for row in rows:
            row = row.replace("https:", "https;")
            data.append(row.split(sep))

        # Put them into accs
        with open_tunnel(
            ("studio-garda.asuscomm.com", 2200),
            ssh_username="Lasse",
            ssh_pkey=paramiko.RSAKey.from_private_key_file(
                "/Users/Lasse/.ssh/id_rsa", password=pwd_key
            ),
            ssh_private_key_password=pwd_key,
            remote_bind_address=("127.0.0.1", 8529),
        ) as server:
            # server.start()
            port_arango = server.local_bind_port
            to_accs(db, data, info, profiles, vendor)

        # Remove the document with the data
        db.collection("from_market").delete(
            i["_key"], silent=True, ignore_missing=True
        )

    # webshare_proxies()  # Run this to update the servers from webshare
    # db.collection('from_market').delete(i['_key'], silent=True, ignore_missing=True)