Split accs into two

master
Lasse Edfast 5 years ago
parent 3772bf3ccc
commit bbe31e2951
  1. facebook/accs_to_db.py (+228)
  2. facebook/accs_to_profiles.py (+141)

facebook/accs_to_db.py
@@ -0,0 +1,228 @@
""" Import profiles from file to db"""
from os import chdir, fdatasync
from getpass import getpass
from os.path import abspath, dirname
from random import randint
from time import sleep
import base64
import json
import requests
# Gör fb-scraper till arbetsmapp
chdir(dirname(dirname(abspath(__file__))))
from arangodb import arango_connect
import config
from helpers import now, nowstamp


def mullvad_servers_to_db(db):
    """Read the file with servers and export it to the db.
    Only needed if the server list at Mullvad is updated."""
    with open("data/servers.txt") as f:
        for line in f.readlines():
            if "@" in line:
                line = line.strip()
                city = line[: line.find("@")].strip()
            if "WireGuard" in line:  # "au", "ca"  # for later, when needed
                line = line.strip()
                country_short = line[:2]
                server_id = line[: line.find("-")]
                # Check that the server is not already used by a profile in the
                # database, or already present in the list being built now.
                city_short = city[city.find("(") + 1 : city.find(")")]
                server_name = [country_short, city_short, server_id + "-wireguard"]
                server = {
                    '_key': server_id,
                    'country': country_short,
                    "city": city,
                    "id": server_id,
                    "server": server_id + "-wg.socks5.mullvad.net:1080",
                    "server_connect": server_name,
                }
                db.insert_document('servers', server)
                sleep(0.1)


def servers_to_db(db, server_collection, socks='socks5'):
    """Read the file with free socks5 servers and export it to the db."""
    with open("socks5free.csv") as f:
        for line in f.readlines():
            # The proxy host:port is the second ';'-separated column.
            server = line.split(';')[1]
            db.insert_document(server_collection, {
                '_key': server,
                'proxies': {"https": f"{socks}://{server}",
                            "http": f"{socks}://{server}"},
                'country': False,
            })
            sleep(0.1)


def used_emails(db, collection):
    """Return the email addresses already present in the collection."""
    cursor = db.aql.execute(
        """
        FOR doc IN @@col
            RETURN doc.email
        """,
        bind_vars={'@col': collection})
    return [email for email in cursor]


def to_accs(db, data, info, profiles, vendor, accs='accs'):
    """
    Take the profiles from a purchased file and put them into accs.
    """
    used_accs = used_emails(db, accs)
    used_profiles = used_emails(db, profiles)
    for profile in data:
        # A row with fewer fields than expected marks the end of the data.
        if len(profile) <= 3:
            print('\nDone.\n')
            break
        doc = {}
        doc["vendor"] = vendor
        doc["created"] = now()
        if 'email' in info:
            doc['email'] = profile[info.index('email')]
        elif 'login' in info:
            doc['email'] = profile[info.index('login')]
        # Skip accounts that are already imported.
        if doc['email'] in used_accs or doc['email'] in used_profiles:
            continue
        doc['name'] = doc['email']
        if 'pwd' in info:
            doc["pwd"] = profile[info.index('pwd')]
        else:
            doc["pwd"] = ''
        if 'url' in info:
            doc['id'] = profile[info.index('url')].replace("https;", "https:")
        if "useragent" in info:
            doc["useragent"] = profile[info.index('useragent')].strip()
        else:
            doc["useragent"] = config.user_agent
        if 'cookie' in info:
            cookies = profile[info.index('cookie')]
            if 'c_user=' in cookies:
                # Plain cookie string: "name=value; name=value; ..."
                cookie = {}
                for c in cookies.split(';'):
                    cookie[c[:c.find('=')].strip()] = c[c.find('=') + 1:].strip()
            else:
                # Cookies delivered as base64-encoded JSON.
                cookies_base64 = cookies.strip()
                cookies64_bytes = cookies_base64.encode("ascii")
                cookies_bytes = base64.b64decode(cookies64_bytes)
                cookies_str = (
                    cookies_bytes.decode("ascii")
                    .replace("'", '"')
                    .replace("False", "false")
                    .replace("True", "true")
                )
                cookies = json.loads(cookies_str)
                cookie = {}
                if vendor == '159':
                    for c in cookies['cookies']:
                        cookie[c['name']] = c['value']
                else:
                    for c in cookies:
                        name = c["name"]
                        del c["name"]
                        cookie[name] = c["value"]
            doc['cookie'] = cookie
        else:
            cookie = {}
        if 'birthday' in info:
            doc["birthday"] = profile[info.index('birthday')]
        if 'token' in info:
            doc['token'] = profile[info.index('token')].strip()
        db.insert_document(accs, doc)
        sleep(0.1)
    # The processed 'from_market' document is deleted by the caller in __main__.


def used_servers(profiles='profiles'):
    """Return the _key of every server that is already bound to a profile."""
    cursor = db.aql.execute(
        """
        FOR doc IN @@col
            RETURN doc._key
        """,
        bind_vars={'@col': profiles}
    )
    servers = [doc for doc in cursor]
    return servers


def webshare_proxies():
    """Get the list of webshare proxies and store them in the db."""
    proxielist = requests.get('https://proxy.webshare.io/proxy/list/download/sompzungcbajbeqmxoopddsthzarqlewjgraicog/-/http/port/direct/')
    proxies = {}
    count = 0
    for line in proxielist.text.split('\n'):
        count += 1
        proxie = line.replace('\r', '').split(':')
        if proxie != ['']:
            server = proxie[0]
            port = proxie[1]
            proxies[count] = {'server': server, 'port': port}
    for _, proxie in proxies.items():
        proxy_conf = {
            'https': 'socks5://xigpxrzr:ezgjcwr8lonj@{server}:{port}'.format(server=proxie['server'], port=proxie['port']),
            'http': 'socks5://xigpxrzr:ezgjcwr8lonj@{server}:{port}'.format(server=proxie['server'], port=proxie['port']),
        }
        try:
            db.insert_document('servers_webshare', {'_key': proxie['server'], 'proxies': proxy_conf, 'country': 'us'})
        except Exception:
            # Skip servers that could not be inserted (e.g. duplicate _key).
            pass


if __name__ == "__main__":
    # Field layout and separator for each vendor's export format.
    vendors = {
        '1152': {'info': 'login:password:cookie:token', 'sep': '|'},
        '1091': {'info': 'login:password:birthday:url:cookie', 'sep': ':'},
        '1113': {'info': 'login:password:mail:password:birthday:country:useragent:token:cookie', 'sep': '|'},
        '926': {'info': 'login:password:email:email password:birthday:token:cookie', 'sep': '|'},
        # '1113': {'info': 'login:mail:password:emailpassword:birthday:useragent:token:cookie', 'sep': '|'},
        '159': {'info': 'login:password:birthday:id:cookie', 'sep': ':'},
    }
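    # Illustration (hypothetical row, not real data): with vendor '1152' the
    # fields are info = ['login', 'password', 'cookie', 'token'] and sep = '|',
    # so a line such as
    #     user@example.com|secret|c_user=123; xs=abc|EAAB...
    # is split into a list whose items are looked up via info.index(...) in
    # to_accs(), yielding an accs document with email/name, the cookie dict
    # {'c_user': '123', 'xs': 'abc'} and the token.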
    pwd = 'concert-hangar-mirth-salk-DECAL'
    db = arango_connect(pwd, username='Accs')
    ###############################
    ###   Settings to adjust   ###
    country = 'us'
    profiles = 'profiles_webshare'  # e.g. profiles_free
    servers = 'servers_webshare'
    ###############################
    accs = 'accs'
    for i in db.collection('from_market').all():
        vendor = i['vendor'].upper()
        sep = vendors[vendor]['sep']
        info = vendors[vendor]['info'].split(':')
        rows = i['data'].split('\n')
        data = []
        for row in rows:
            # Protect "https:" from being split when the separator is ':'.
            row = row.replace("https:", "https;")
            data.append(row.split(sep))
        # Put the rows into accs
        to_accs(db, data, info, profiles, vendor)
        # Delete the document with the raw data
        db.collection('from_market').delete(
            i['_key'], silent=True, ignore_missing=True
        )
    # webshare_proxies()  # Run this to update the servers from webshare

facebook/accs_to_profiles.py
@@ -0,0 +1,141 @@
""" Import profiles from accs"""
from os import chdir
from getpass import getpass
from os.path import abspath, dirname
from random import randint
from time import sleep
import base64
import json
import requests
# Gör fb-scraper till arbetsmapp
chdir(dirname(dirname(abspath(__file__))))
from arangodb import arango_connect
import config
from helpers import now, nowstamp


def used_servers(profiles='profiles'):
    """
    Return the _key of every server that is already in use by a profile.
    """
    cursor = db.aql.execute(
        """
        FOR doc IN @@col
            RETURN doc._key
        """,
        bind_vars={'@col': profiles}
    )
    servers = [doc for doc in cursor]
    return servers
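
# Note: to_profiles() stores each profile under the chosen server's _key, so
# the profile _keys double as the list of proxy servers already in use;
# get_server() filters against exactly that list.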


def get_server(db, country, servers='servers', profiles='profiles'):
    """ Fetch a free server for the profile. Returns None if none is available. """
    if country == 'any':
        cursor = db.aql.execute(
            """
            FOR doc IN @@col
                FILTER doc.country not in ['us', 'se']
                RETURN {proxies: doc.proxies, country: doc.country, id: doc.id, _key: doc._key, city: doc.city, server: doc.server, server_connect: doc.server_connect}
            """, bind_vars={'@col': servers})
    else:
        cursor = db.aql.execute(
            """
            FOR doc IN @@servers
                FILTER doc.country == @country
                RETURN {proxies: doc.proxies, country: doc.country, id: doc.id, _key: doc._key, city: doc.city, server: doc.server, server_connect: doc.server_connect}
            """,
            bind_vars={'country': country, '@servers': servers}
        )
    free = []
    used = used_servers(profiles)
    for server in cursor:
        if server['_key'] not in used:
            free.append(server)
    if free != []:
        # Pick a random free server.
        return free[randint(0, len(free) - 1)]
    return None


def used_emails(db, collection):
    """Return the email addresses already present in the collection."""
    cursor = db.aql.execute(
        """
        FOR doc IN @@col
            RETURN doc.email
        """,
        bind_vars={'@col': collection})
    return [email for email in cursor]


def to_profiles(db, profiles, servers, country='us', accs='accs', continuous=True):
    """
    Take a profile from accs and a server from servers and insert it into profiles.

    Args:
        db: Arango database.
        profiles (str): Collection to upload the profiles to.
        servers (str): Collection with proxy servers to use.
        country (str, optional): Country for the profiles. Defaults to 'us'.
        accs (str, optional): Collection with the accounts. Defaults to 'accs'.
        continuous (bool, optional): If True, the run is repeated every 24 hours. Defaults to True.
    """
    while True:
        count = 0
        for profile in [i for i in db.collection(accs).all()]:
            count += 1
            if profile['email'] in used_emails(db, profiles):
                continue
            id_acc = profile['_id']
            del profile['_id']
            del profile['_rev']
            server = get_server(db, country, servers, profiles)
            if server is None:
                print(f'Profiles added: {count}\nNo more free servers.')
                break
            profile["server"] = server["server"]
            profile["server_connect"] = server["server_connect"]
            # The server's _key becomes the profile's _key.
            profile["_key"] = server["_key"]
            profile["in_use"] = nowstamp()
            profile['proxies'] = server['proxies']
            db.insert_document(profiles, profile)
            sleep(0.1)
            db.delete_document(id_acc)
            print('Profiles added:', count, end='\r')
        if not continuous:
            break
        sleep(86400)
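

# One-off example (hypothetical invocation, using collection names mentioned
# elsewhere in this commit): fill the Mullvad-backed collection a single time
# instead of looping daily:
#     to_profiles(db, profiles='profiles_mullvad', servers='servers', country='us', continuous=False)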


if __name__ == "__main__":
    # Connect to arango.
    pwd = 'concert-hangar-mirth-salk-DECAL'
    db = arango_connect(pwd, username='Accs')
    ###############################
    ###   Settings to adjust   ###
    country = 'us'
    profiles = 'profiles_webshare'  # profiles_webshare, profiles_mullvad
    servers = 'servers_webshare'
    ###############################
    to_profiles(db, profiles=profiles, country=country, servers=servers, continuous=True)