Merge pull request #5 from lasseedfast/swarm-mode

Swarm mode
master
Lasse Edfast 5 years ago committed by GitHub
commit 57dc486798
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 8
      .gitignore
  2. 8
      Dockerfile
  3. 19
      docker/free/Dockerfile
  4. 14
      docker/mrkoll/Dockerfile
  5. 414
      facebook/__main__.py
  6. 324
      facebook/arangodb.py
  7. 229
      facebook/classes.py
  8. 13
      facebook/config.py
  9. BIN
      facebook/face_enc
  10. 110
      facebook/faces.py
  11. 185
      facebook/gephi (kopia).py
  12. 213
      facebook/gephi.py
  13. 115
      facebook/helpers.py
  14. 70
      facebook/images.py
  15. 169
      facebook/images_pi.py
  16. 45
      facebook/modemtest.py
  17. 123
      facebook/profile_create.py
  18. 171
      facebook/scrapers.py
  19. 557
      facebook/socks5free.csv
  20. 23
      facebook/start_database.py
  21. 74
      facebook/stats.py
  22. 131
      facebook/testclass.py
  23. 14
      fb-webbapp/main.py
  24. 15
      integrity.py
  25. 3
      requirements.txt
  26. 25
      workspace.code-workspace

8
.gitignore vendored

@ -12,4 +12,10 @@ workspace.code-workspace
password_arango.txt password_arango.txt
*.txt *.txt
~requirements.txt ~requirements.txt
*.gexf *.gexf
facebook/mrkoll.py
*.pyc
facebook/tortest.py
facebook/phone.py
facebook/accs.py
facebook/gephi.py

@ -3,11 +3,13 @@ FROM python:3.8
WORKDIR / WORKDIR /
COPY data/ .
COPY requirements.txt . COPY requirements.txt .
RUN pip install -r requirements.txt RUN pip install -r requirements.txt
ADD . . COPY facebook/accs.py facebook/classes.py facebook/config.py facebook/helpers.py facebook/__main__.py facebook/arangodb.py facebook/scrapers.py /facebook/
ENTRYPOINT [ "python", "facebook/__main__.py" ] ENTRYPOINT [ "python", "facebook/__main__.py" ]
@ -15,7 +17,5 @@ CMD ["",""]
# BUILD: # BUILD:
# docker buildx create --use # docker buildx create --use
#docker buildx build --platform linux/arm -t l3224/fb-reactions:pi --push . #docker buildx build --platform linux/arm,linux/arm64 -t l3224/fb-scraper:pi --push .
# START
# docker run -it --name fb1 -v vol1:/data l3224/fb-reactions:latest [-s -u user1,user2]

@ -0,0 +1,19 @@
FROM python:3.8
WORKDIR /
COPY requirements.txt .
RUN pip install -r requirements.txt
ADD . .
ENTRYPOINT [ "python", "facebook/__main__.py", "-p free" ]
CMD ["",""]
# BUILD:
# docker buildx create --use
#docker buildx build --file docker/free/Dockerfile --platform linux/arm -t l3224/fb-scraper:free --push .

@ -0,0 +1,14 @@
FROM python:3.8
WORKDIR /
COPY requirements.txt .
RUN pip install -r requirements.txt
ADD . .
ENTRYPOINT [ "python", "facebook/mrkoll.py" ]
# docker buildx build --file docker/mrkoll/Dockerfile --platform linux/arm -t l3224/fb-scraper:mrkoll --push .

@ -1,221 +1,227 @@
import random import random
import traceback import traceback
from getopt import GetoptError, getopt from getopt import getopt
from sys import argv, exit from sys import argv
from time import sleep from time import sleep
from subprocess import check_output from datetime import datetime
from re import split from config import set_pwd
from socket import gethostname from random import randint
from arangodb import db, write_report, backup, report_blocked, get_profile, remove_profile, checked_members, friends_of_user
from classes import Profile, User
from helpers import sleep_, write_error, _print
from scrapers import profile_picture_reactions
def finish():
""" Avslutar: skriver rapport och gör profilerna oanvända """
for profile in profiles:
profile.unused()
write_report(users, list(all_pictures.difference(all_pictures_start)))
exit()
if __name__ == "__main__": if __name__ == "__main__":
print() print()
proxieservers = 'mullvad'
if gethostname() not in ['macbook.local']: # Lägg till för studiodatorn
# Hämta namn för containern där skriptet körs
try:
containers = check_output(['docker', 'container', 'ls']).decode()
container = split('\W\W+', containers.split('\n')[1])[-1]
except FileNotFoundError:
pass
else:
container_name = 'macbook'
# Argument och alternativ # Argument och alternativ
# Variabler som kan ändras
url_other_pictures = [] # Fylls eventuellt på
test = False
write = True
mode = 'all'
pwd = None
proxieservers = 'mullvad'
argv = argv[1:] argv = argv[1:]
try:
opts, args = getopt(argv, "bm:u:o:", ['backup=',"mode=", "users=", "other="]) opts, args = getopt(argv, "bim:u:o:p:wl:", ["backup", "images", "mode=", "user=", "other=", "profiles=", "write", "password="])
for o, a in opts:
# mode_nr används för hur ofta profile ska roteras for o, a in opts:
if o in ["-m", "--mode"]: print(o)
mode = a if o in ['-l', "--password"]:
if mode == 'single': pwd = a.strip()
mode_nr = 1.7
elif mode == 'few': set_pwd(pwd)
mode_nr = 1.4
elif mode == 'force': # Importera andra moduler
mode_nr = 1 from config import url_bas
else: from arangodb import (
mode = 'all' blocked_profile,
new_profile,
backup,
get_user,
check_for_user,
friends_of_user,
)
from classes import Profile, User
from helpers import sleep_, write_error, _print, check_profile_status, update_cookie
from scrapers import profile_picture_reactions
for o, a in opts:
# Bestäm vilka profiler/proxies som ska användas
if o in ['-p', '--profiles']:
proxieservers = a.strip()
print(f'Proxieservers: {proxieservers}')
# Bestäm mode
if o in ["-m", "--mode"]:
mode = a.strip()
if mode == "single":
mode_nr = 1.7
elif mode == "few":
mode_nr = 1.4
elif mode == "solo":
mode_nr = 1.4
elif mode == "force":
mode_nr = 1 mode_nr = 1
for o, a in opts: # Bestäm user
if o in ["-u", "--user"]: if o in ["-u", "--user"]:
try: if a == 'test': # För att testa profiler i profiles_test
users = [ test = True
User(str(i).strip(), mode) container = str(a.strip())
for i in [(str(i).strip()) for i in a.split(",")] if all([a.strip()[:4] == "leak", len(a) < 7]) or a == 'test':
] sleep(randint(0, 40)/10) # För att docker service inte ska gå vidare exakt samtidigt
except StopIteration: lookups = "leak_lookups"
raise Exception userdoc = get_user(collection=lookups)
if o in ["-o", "--other"]: elif a.strip()[:7] == "lookups":
url_other_picture = a lookups = "lookups"
if o in ['-b', '--backup']: userdoc = get_user(collection=lookups)
while True: if 'other' in userdoc:
backup(db) url_other_pictures = userdoc['other']
sleep(21600) else:
url_other_pictures = []
elif a == 'test':
if "users" not in globals(): lookups = "leak_lookups"
users = [ userdoc = get_user(collection=lookups)
User(str(i).strip(), mode) else:
for i in input("Vem/vilka vill du kolla bilder för? ").split(",") lookups = "lookups"
] userdoc = {'_key': a}
except GetoptError:
users = [
User(str(i).strip(), mode)
for i in input("Vem/vilka vill du kolla bilder för? ").split(",")
]
mode = input("Söka efter alla, första/sida eller första? (all, few, single)? ").lower().strip()
if mode == '':
mode = 'all'
if "url_other_picture" in globals(): if o in ["-o", "--other"]:
users[0].url_other_picture = url_other_picture[url_other_picture.find('facebook.com') + 12:] url_other_pictures = a.split(",")
if o in ["-b", "--backup"]:
backup(db)
if o in ['-w', "--write"]:
write = False
if 'userdoc' not in globals():
lookups = "lookups"
userdoc = {'_key': str(input("Vem/vilka vill du kolla bilder för? ")).strip()}
print("Kollar profilbilder för:")
for user in users:
print("-", user.username)
print()
if 'container' not in globals(): print('Mode:', mode)
usernames = [user.username for user in users] print('Write:', write)
if len(usernames) == 1:
container = usernames[0] # Hämta en användare att kolla upp
else: user = User(str(userdoc['_key']).strip(), mode, other_pictures=url_other_pictures)
container = '-'.join(usernames)
if "url_other_pictures" in globals():
# Skapa tre olika profiler att besöka Facebook med l = []
profiles = [] for url in url_other_pictures:
for i in range(0, 3): l.append(url[url.find("facebook.com") + 12 :])
doc = get_profile() user.url_other_pictures = l
profile = Profile(doc, container)
profile.browser.open("https://api.ipify.org") # Hämta profil
print(f"Profil {profile.name} använder IP-adress {profile.viewing().text}." profile = new_profile(container, proxieservers)
) profile.write = write
if profile.logged_in == False:
profile.accept_cookies() update_cookie(profile.browser.session.cookies, profile)
sleep_(2) sleep(3)
profile.login()
profiles.append(profile)
print()
sleep(3)
profile_nr = 1
profile = profiles[profile_nr]
_print(profile.container, user.username, f"Börjar med profilen {profile.name}")
# Gå igenom de användare som efterfrågats # Gå igenom de användare som efterfrågats
try:
while True: if lookups == "leak_lookups":
for user in users: id = user.username
# Set för kollade bilder och kollade medlemmar check_profile_status(profile, user)
all_pictures = set([doc["_key"] for doc in db.collection("pictures").all()]) if profile.blocked:
all_pictures_start = all_pictures.copy() profile = blocked_profile(profile, proxieservers=proxieservers)
members_checked = checked_members() profile.open(url_bas + "/" + user.username)
profile.container = user.username url = profile.browser.state.url.strip("/").strip("?_rdr")
if "php?" not in url:
# Hämta reaktioner för den första användaren user = User(str(url[url.rfind("/") + 1 :]).strip(), mode)
if any([user.username not in members_checked, mode == 'force']): user.id = id
try: sleep_(4)
profile_picture_reactions(profile, user, all_pictures, first_user=True, mode=mode) container = str(user.username)
except: profile.container = container
_print(profile.container, user.username, traceback.format_exc())
if len(users) == 1: if "container" not in globals():
for profile in profiles: container = str(user.username)
profile.unused() profile.container = container
friends = friends_of_user(user.username)
friends_unchecked = list(set(friends) - set(members_checked)) profile.users_checked += 1
_print(profile.container, user.username, f"\nKlar med, {user.username}\n") # Hämta reaktioner för den första användaren
_print(profile.container, user.username, f"Vänner som reagerat: {len(friends)}") if any([not check_for_user(user.username, mode=mode), mode == "force"]):
_print(profile.container, user.username, "\nVänner att kolla:") try:
while True:
for friend in friends_unchecked: # Uppdatera in_use
print(friend) profile.update_time()
_print(profile.container, user.username, ', '.join([friend for friend in friends_unchecked]), silent=True) profile = profile_picture_reactions(
print() profile, user, first_user=True, mode=mode
)
# Hämta reaktioner för users vänner (som reagerat) if profile.blocked:
profile = blocked_profile(profile, proxieservers=proxieservers)
else:
break
except:
_print(profile, user, traceback.format_exc())
if mode == 'solo':
exit()
friends = friends_of_user(user.username)
_print(profile, user, f"\nKlar med, {user.username}\n")
_print(profile, user, f"Vänner som reagerat: {len(friends)}")
_print(profile, user, "\nVänner att kolla:")
friends_unchecked = []
for friend in friends:
if not check_for_user(friend):
print(friend)
friends_unchecked.append(friend)
_print(profile, user, [friends_unchecked], silent=True)
_print(profile, user, f'Totalt: {len(friends_unchecked)}')
print()
# Hämta reaktioner för users vänner (som reagerat)
count_friends = 0
for friend in friends_unchecked:
if datetime.now().strftime("%H") == '03' and int(datetime.now().strftime("%M")) < 30: # Sov för att kunna säkerhetskopieraa
sleep(1800)
count_friends += 1
user = User(str(friend), mode, other_pictures=[])
sleep_(2)
# Uppdatera in_use
profile.update_time()
try:
if not check_for_user(user.username):
p = profile_picture_reactions(profile, user, mode=mode)
if isinstance(p, Profile):
profile = p
except Exception as e: # Fel4
write_error(
4,
profile,
e=e,
user=user,
traceback=traceback.format_exc(),
soup=profile.viewing(),
)
_print(profile, user, f"\nFel: {str(user.username)}\n")
sleep_(15)
if not profile.blocked:
_print(profile, user, f"Klar med {user.username} \n")
# Rotera fb-profiler
if count_friends > 2 * mode_nr:
if random.randrange(0, 2, 1) == 1:
profile = new_profile(container, proxieservers=proxieservers)
count_friends = 0
_print(profile, user, f"Växlar till {profile.name}")
elif count_friends > 4 * mode_nr:
profile = new_profile(container, proxieservers=proxieservers)
count_friends = 0 count_friends = 0
for friend in friends_unchecked: _print(profile, user, f"Växlar till {profile.name}")
count_friends += 1
user = User(str(friend), mode) elif profile.blocked:
sleep_(2) profile = blocked_profile(profile, proxieservers=proxieservers)
try: _print(profile, None, f"Klar med alla vänner.")
profile_picture_reactions(
profile, user, all_pictures, mode=mode
)
except Exception as e: # Fel4
write_error(
4,
e=e,
user=user.username,
profile=profile.container,
traceback=traceback.format_exc(),
soup=profile.viewing(),
)
_print(profile.container, user.username, f"\nFel: {str(user.username)}\n")
sleep_(15)
if profile.blocked == False:
_print(profile.container, user.username, f"Klar med {user.username} \n")
# Rotera fb-profiler
if count_friends > 5 * mode_nr:
if random.randrange(0, 2, 1) == 1:
profile_nr += 1
count_friends = 0
_print(profile.container, user.username, f"Växlar till {profiles[profile_nr].name}")
elif count_friends > 9 * mode_nr:
profile_nr += 1
count_friends = 0
_print(profile.container, user.username, f"Växlar till {profiles[profile_nr].name}")
if profile_nr > len(profiles) - 1:
profile_nr = 0
elif profile.blocked == True:
# Ta bort profilen ur databasen
report_blocked(profile, users)
remove_profile(profile.doc)
# Ta bort från listan på fb-profiler som används
profiles.remove(profile)
# Försök lägga till en ny fb-profil (om det finns en skapad och ledig i databasen)
try:
doc = get_profile()
profiles[profile_nr] = Profile(doc, container)
_print(profile.container, user.username, f"Laddat ny profil: {profiles[profile_nr].name}")
sleep(3)
except e:
_print(profile.container, user.username, "Det behövs nya profiler...")
if len(profiles) == 0:
break
for s in range(0, 1600 / len(profiles)):
print(user, f"Sover {600-s} sekunder till... ", end="\r")
profile_nr += 1
_print(profile.container, user.username, f"Försöker med {profiles[profile_nr].name}.")
profile = profiles[profile_nr]
except:
finish()

@ -1,95 +1,101 @@
from getpass import getpass from getpass import getpass
from random import randint
from time import sleep from time import sleep
import json import json
from datetime import datetime
from json2html import json2html
import nacl.secret
import nacl.utils
from arango import ArangoClient from arango import ArangoClient
from config import * from config import *
# Starta koppling till arangodb
# Avkryptera lösen till arango
try:
# Om scriptet körs på Macbook finns löseordet i en fil
with open('password_arango.txt') as f:
pwd = f.readline()
except FileNotFoundError:
for i in range(0, 6, 1):
if i == 5:
exit()
try:
key = "sssladnnklja" + getpass()
pwd = (
nacl.secret.SecretBox(key.encode())
.decrypt(pwd_arango, encoder=nacl.encoding.HexEncoder)
.decode("utf-8")
)
break
except:
print("Fel lösenord.")
sleep(1)
db = ArangoClient(hosts=host_arango).db(db_arango, username=user_arango, password=pwd) for i in range(0, 6, 1):
if i == 5:
exit()
try:
# Om scriptet körs på Macbook finns lösenordet i en fil
with open("../password_arango.txt") as f:
pwd = f.readline()
except FileNotFoundError:
if 'pwd' not in globals():
pwd = getpass(f'Lösenord för {user_arango}: ')
try:
db = ArangoClient(hosts=host_arango).db(db_arango, username=user_arango, password=pwd)
db.collection('members').random() # För att testa löseordet/kopplingen.
break
except:
print("Fel lösenord.")
sleep(1)
from helpers import now, _print from helpers import now, _print, nowstamp, sleep_
from classes import Profile
def checked_members(): def checked_members():
cursor = db.aql.execute( cursor = db.aql.execute(
""" """
FOR doc IN members FOR doc IN members
FILTER doc.checked == true FILTER doc.checked == true
RETURN doc._key RETURN doc._key
""" """
) )
members_checked = set([doc for doc in cursor]) members_checked = set([doc for doc in cursor])
return members_checked return members_checked
def update_inuse(profile):
db.collection("profiles").update(profile["doc"]["id"])
def count_docs(col): def count_docs(col):
cursor = db.aql.execute( cursor = db.aql.execute(
""" """
FOR doc IN @@col FOR doc IN @@col
COLLECT WITH COUNT INTO length COLLECT WITH COUNT INTO length
RETURN length RETURN length
""", """,
bind_vars={"@col": col} bind_vars={"@col": col},
) )
return cursor.next() return cursor.next()
def report_blocked(profile, users):
db.insert_document({
'_id':f'reports/{now()}',
'profile': profile.name,
'users': [user.username for user in users],
})
def write_report(users, pictures): def report_blocked(profile):
db.insert_document({ try:
'_id':f'reports/{now()}', db.insert_document(
'users': [user.username for user in users], "reports",
'members': count_docs('members'), {
'total_picture_reactions':count_docs('picture_reactions'), "_key": str(profile.name).replace(' ', ''),
'pictures':count_docs('pictures'), "profile": profile.__dict__
'new_pictures': pictures },
}) overwrite=True,
)
except:
_print(profile, profile.container, f'Kunde inte rapportera blockerad: {profile.name}.')
def get_profile(created=True): def get_profile(db=db, collection='mullvad'):
""" Hämtar profil från profiles """ """ Hämtar profil från profiles """
cursor = db.aql.execute(
while True:
cursor = db.aql.execute(
""" """
FOR doc IN profiles FOR doc IN @@col
FILTER doc.in_use == false FILTER doc.in_use < @inuse
FILTER doc.created == @created RETURN doc
RETURN doc """,
""", bind_vars={"inuse": nowstamp() - 1200, '@col': f'profiles_{collection}'}
bind_vars={'created': created} )
)
return cursor.next()
profiles = [profile for profile in cursor]
if profiles == []:
sleep(180)
else:
profile = profiles[randint(0, len(profiles) - 1)]
return profile
def friends_of_user(user): def friends_of_user(user):
"""Returnernar användare som reagerat på user:s bilder""" """Returnernar användare som reagerat på user:s bilder"""
@ -104,28 +110,206 @@ def friends_of_user(user):
return [doc[8:] for doc in cursor] return [doc[8:] for doc in cursor]
def remove_profile(profile): def remove_profile(profile, proxieservers='mullvad'):
db.collection("profiles").delete(profile['_key'], silent=True, ignore_missing=True) """ Tar bort en blockerad profil från databasen. """
_print(profile.container, f'{profile.name} blockerad och borttagen {now()}.' _print(profile, None, f"Tar bort {profile.name}.")
db.collection(f'profiles_{proxieservers}').delete(
profile.doc["_key"], silent=True, ignore_missing=True
) )
_print(profile, profile.container, f"{profile.name} blockerad och borttagen {now()}.")
# TODO #2 Bättre funktion för backup av databasen # TODO #2 Bättre funktion för backup av databasen
def arango_connect(pwd):
return ArangoClient(hosts=host_arango).db(
db_arango, username=user_arango, password=pwd
)
def check_for_user(username, mode=''):
""" Checks if a user exist in db and if it's checked """
checked = False
if db.collection("members").has(username):
member = db.collection('members').get(username)
if 'checked' in member:
if member['checked'] == True:
checked = True
if mode == 'all':
if 'mode' in member:
if member['mode'] in ['few', 'solo']:
checked = False
return checked
def check_for_picture(id):
""" Checks if a picture exist in db """
return db.collection("pictures").has(id)
def get_user(collection="lookups"):
""" Hämtar användare att kolla upp från lookups """
if collection == "leak_lookups":
doc = db.collection("leak_lookups").random()
doc["other"] = []
db.collection(collection).delete(doc["_key"])
else:
cursor = db.aql.execute(
"""
FOR doc IN @@col
RETURN doc
""",
bind_vars={"@col": collection},
)
try:
doc = cursor.next()
if "other" not in doc:
doc["other"] = []
db.collection(collection).delete(doc["_key"])
except StopIteration:
doc = None
return doc
def backup(db): def backup(db):
"""Skapar en json-backup för specificerade collections. """Skapar en json-backup och statistik för specificerade collections.
Args: Args:
db: databaskoppling till aktuell databas db: databaskoppling till aktuell databas
""" """
d = {} while True:
for col in ['members', 'pictures', 'picture_reactions', 'profiles']: if not datetime.now().strftime("%H") == '03' and int(datetime.now().strftime("%M")) < 10:
l = [] sleep(120)
for doc in db.collection(col).all(): continue
l.append(doc) collections = ["members", "pictures", "picture_reactions", "profiles", "stats"]
d[col] = l for col in collections:
with open('data/backup.json', 'w') as f: l = []
json.dump(d, f) count = 0
print(f'Senaste backup: {now()}') icount = 0
for doc in db.collection(col).all():
count += 1
l.append(doc)
if count == 1000000:
icount += 1
count = 0
with open(f"data/backup_{col}_{icount}.json", "w") as f:
json.dump(l, f)
l = []
icount += 1
with open(f"data/backup_{col}_{icount}.json", "w") as f:
json.dump(l, f)
l = []
print(f"Senaste backup: {now()}")
write_stats()
sleep(82800)
def write_stats(continuous=False):
while True:
d = {}
for col in db.collections():
if not col['system']:
d[col['name']] = db.collection(col['name']).count()
del d['stats']
#d['time'] = now()
cursor = db.aql.execute(
"""
FOR doc IN members
FILTER doc.checked == true
COLLECT WITH COUNT INTO length
RETURN length
"""
)
d['checked_members'] = cursor.next()
# Hur många konton per säljare som finns kvar
cursor = db.aql.execute(
'''
for doc in profiles
filter has(doc, "vendor")
COLLECT vendor = doc.vendor WITH COUNT INTO length
RETURN {
"vendor" : vendor,
"active" : length
}
''')
d['active_vendors'] = [doc for doc in cursor]
d['_key'] = now()[:13]
db.insert_document( "stats", d, overwrite=True)
# Skriv en html-fil
with open('webbapp/templates/stats.html', 'a+') as html:
html.truncate(0)
html.write('<!DOCTYPE html> <br>')
html.write(json2html.convert(json = d))
# Sov för att fortsätta senare
if continuous:
sleep(86400)
else:
break
def blocked_profile(profile, proxieservers):
""" Tar bort profilen som blivit blockad och returnerar en ny. """
_print(profile, None, f'Rapporterar att {profile.name} blockats.')
report_blocked(profile)
_print(profile, None, f'Tar bort {profile.name} från databasen.')
remove_profile(profile, proxieservers)
_print(profile, None, f'Hämtar en ny profil.')
profile = new_profile(profile.container, proxieservers)
return profile
def new_profile(container, proxieservers):
""" Hämtar en ny profil. """
profile = Profile(get_profile(proxieservers=proxieservers), container, proxieservers)
_print(profile, None, f'Hämtade profilen {profile.name}. Login = {profile.logged_in}.')
if profile.logged_in == False:
profile.accept_cookies()
sleep_(2)
profile.login()
sleep_(2)
try:
profile.open(url_bas)
if "accept all" in profile.viewing().text.lower():
_print(profile, None, f'Accepterar cookies {profile.name}.')
profile.accept_cookies()
sleep_(3)
except:
pass
return profile
def find_id():
"https://mbasic.facebook.com/leif.jonsson.98499/about?lst=100064897389168%3A100000134933241%3A1615858816"
cursor = db.aql.execute(
"""
for doc in members
filter has(doc, "about")
filter doc.facebook_id == ''
filter doc.about != false
return doc
""",
)
n = 0
for doc in cursor:
about = doc['about']
try:
doc['facebook_id'] = about[about.find('%')+3: about.rfind('%')]
db.update_document(doc, silent=True, check_rev=False)
#sleep(0.01)
n += 1
print(n, end = '\r')
except AttributeError:
pass
db = ArangoClient(hosts=host_arango).db(db_arango, username=user_arango, password=pwd) db = ArangoClient(hosts=host_arango).db(db_arango, username=user_arango, password=pwd)

@ -1,6 +1,6 @@
import pickle
import random import random
from datetime import datetime from datetime import datetime
from time import sleep
import requests import requests
import werkzeug import werkzeug
@ -10,26 +10,31 @@ werkzeug.cached_property = werkzeug.utils.cached_property
from robobrowser import RoboBrowser from robobrowser import RoboBrowser
from arangodb import db from arangodb import db
from config import * from config import url_bas
from helpers import sleep_, update_cookie from helpers import sleep_, update_cookie, write_error, nowstamp, _print
class User: class User:
def __init__(self, username, mode): def __init__(self, username, mode, other_pictures=[]):
self.collection = "members" self.collection = "members"
self.username = str(username) self.username = str(username)
self.mode = mode self.mode = mode
self.fetched = datetime.now().strftime("%Y%m%d") self.fetched = datetime.now().strftime("%Y%m%d")
self.url_coverphotos = '' self.url_coverphotos = ""
self.id = '' self.id = ""
self.url_likes = '' self.url_likes = ""
self.url_about = '' self.url_about = ""
self.url_timeline = '' self.url_timeline = ""
self.profile_pictures = '' self.url_profilepictures = ""
self.url = '' self.profile_pictures = 0
self.name = '' self.pictures = []
self.url_other_picture = '' self.url_friends = ""
self.url = ""
self.name = ""
self.url_other_pictures = other_pictures
self.reactions = 0 self.reactions = 0
self.profile_pictures = 0
self.checked_pictures = []
def add_to_db(self): def add_to_db(self):
# Lägg till profilen till arrango # Lägg till profilen till arrango
@ -47,34 +52,38 @@ class User:
"cover photos": self.url_coverphotos, "cover photos": self.url_coverphotos,
"fetched": self.fetched, "fetched": self.fetched,
"reactions": self.reactions, "reactions": self.reactions,
'mode': self.mode "mode": self.mode,
"pictures": self.pictures,
}, },
overwrite_mode="update", overwrite_mode="update",
silent=True, silent=True,
keep_none=False keep_none=False,
) )
def checked(self): def checked(self):
db.update_document( db.update_document(
{ {
"_id": "members/" + str(self.username), "_id": "members/" + str(self.username),
"checked": True, "checked": True,
"pictures_checked": self.profile_pictures, "pictures_checked": len(self.checked_pictures),
"reaction": self.reactions "checked_pictures": self.checked_pictures,
}) "reactions": self.reactions,
}
)
class Picture: class Picture:
def __init__(self, user): def __init__(self, user):
self.collection = "pictures" self.collection = "pictures"
self.user = user self.user = user
self.id = '' self.id = ""
self.url_full = '' self.url_full = ""
self.date = '' self.date = ""
self.url = '' self.url = ""
self.no_reactions = '' self.no_reactions = ""
self.reactions = [] self.reactions = []
self.src = ""
def add_to_db(self): def add_to_db(self):
db.insert_document( db.insert_document(
@ -86,80 +95,128 @@ class Picture:
"url": self.url, "url": self.url,
"no_reactions": self.no_reactions, "no_reactions": self.no_reactions,
"user": self.user, "user": self.user,
"src": self.src,
}, },
overwrite_mode="update", overwrite_mode="update",
silent=True, silent=True,
keep_none=False keep_none=False,
) )
class Profile: class Profile:
def __init__(self, profile, container): def __init__(self, profile, container, proxieservers):
""" Creates a new profile to do searches with. """Creates a new profile to do searches with.
Args: Args:
profile (dict): Document fetched from database. profile (dict): Document fetched from database.
container (string): Container name. container (str): Docker container that runs the script.
""" """
# Uppdatera dokumentet i arango
self.doc = profile self.doc = profile
self.doc['in_use'] = True
db.update_document(self.doc, check_rev=False)
# Användaruppgifter # Användaruppgifter
self.name = self.doc["name"].strip() self.name = self.doc["name"].strip()
self.email = self.doc["email"] self.email = self.doc["email"]
self.pwd = self.doc["pwd"] self.pwd = self.doc["pwd"]
self.server = self.doc["server"] self.server = self.doc["server"]
self.cookie = self.doc["cookie"]
self.useragent = self.doc["useragent"]
self.proxieservers = proxieservers
self.blocked = False self.blocked = False
self.container = container self.container = str(container)
self.users_checked = 0
self.write = True
# Ange proxies # Ange proxies
session = requests.Session() session = requests.Session()
session.proxies = { session.proxies = self.doc["proxies"]
"https": "socks5://'8155249667566524'@{}".format(self.server),
"http": "socks5://'8155249667566524'@{}".format(self.server),
}
# Starta browser # Starta browser
user_agent = "Mozilla/5.0 (iPhone; CPU iPhone OS 10_3 like Mac OS X) AppleWebKit/602.1.50 (KHTML, like Gecko) CriOS/56.0.2924.75 Mobile/14E5239e Safari/602.1" user_agent = self.useragent
self.browser = RoboBrowser( self.browser = RoboBrowser(
session=session, user_agent=user_agent, history=False, parser="lxml" session=session, user_agent=user_agent, history=False, parser="lxml"
) )
# TODO Ta bort gamla metoden om nya (hämta från doc) fungerar
# try:
# # Försök hämta cookie från fil
# self.browser.session.cookies = pickle.load(
# open("data/cookie_{}.pkl".format(self.name), "rb")
# )
# self.logged_in = True
try: try:
self.browser.session.cookies = pickle.load( self.browser.session.cookies.update(self.cookie)
open("data/cookie_{}.pkl".format(self.name), "rb")
)
self.logged_in = True self.logged_in = True
except: except:
self.logged_in = False self.logged_in = False
def update_time(self):
"""Uppdatera dokumentet i arango."""
self.doc["in_use"] = nowstamp()
db.update_document(self.doc, check_rev=False)
def viewing(self): def viewing(self):
""" Returnerar browser i html-format """ """Returnerar browser i html-format"""
return self.browser.parsed return self.browser.parsed
def open(self, url):
n = 0
while True:
n += 1
sleep(1)
try:
# Försök öppna url, om det misslyckas så vänta lite och försök sen igen
self.browser.open(url)
if "/a/nux/wizard/nav.php?step=phone&amp;skip" in self.viewing():
self.browser.open(
url_bas + "/a/nux/wizard/nav.php?step=phone&amp;skip"
)
break
except Exception as e:
print(e)
print(n)
_print(self, None, f"Kunde inte öppna url {url}")
if n == 5:
if "Connection refused" in e:
self.doc["e"] = e
db.insert_document("blocked_profiles", self.doc)
n = 0
from arangodb import get_profile, remove_profile
# Ta bort den gamla profilen från databasen och ersätt profile med nytt innehåll från ny profil
remove_profile(self)
self.__init__(get_profile(self.proxieservers), self.container)
_print(self, None, f"Ny profil hämtad {self.email}")
self.update_time()
else:
sleep(40)
def accept_cookies(self): def accept_cookies(self):
""" Accepterar cookies """ """Accepterar cookies"""
self.browser.open("https://mbasic.facebook.com") self.browser.open("https://mbasic.facebook.com")
soup = BeautifulSoup(str(self.browser.parsed), "lxml") soup = BeautifulSoup(str(self.browser.parsed), "lxml")
if 'accept all' not in soup.text.lower(): if "accept all" not in soup.text.lower():
sleep_(2) sleep_(2)
cookie_accept_url = "https://mbasic.facebook.com/cookie/consent-page" cookie_accept_url = "https://mbasic.facebook.com/cookie/consent-page"
self.browser.open(cookie_accept_url) self.browser.open(cookie_accept_url)
sleep_(2) sleep_(2)
try: try:
form = self.browser.get_form() form = self.browser.get_form()
self.browser.submit_form(form) self.browser.submit_form(form)
print(f"Accepterade cookies för {self.name}") _print(self, None, f"Accepterade cookies för {self.name}")
sleep_(2) sleep_(2)
update_cookie(self.browser.session.cookies, self.name) update_cookie(self.browser.session.cookies, self)
except Exception as e: except:
print(f"Accepterade inte cookies för {self.name}") try:
write_error(12, self, soup=self.browser.parsed)
except:
pass
_print(self, None, f"Accepterade inte cookies för {self.name}")
def login(self): def login(self):
""" Loggar in på Facebook. """ """Loggar in på Facebook."""
print("Loggar in {}".format(self.name)) print("Loggar in {}".format(self.name))
@ -168,49 +225,57 @@ class Profile:
# Kolla om browser redan är inloggad # Kolla om browser redan är inloggad
soup = BeautifulSoup(str(self.browser.parsed), "lxml") soup = BeautifulSoup(str(self.browser.parsed), "lxml")
if 'log out' in soup.text.lower(): if "log out" in soup.text.lower():
print("Redan inloggad.") print("Redan inloggad.")
try:
# Hitta och fyll i formulär # Hitta och fyll i formulär
form = self.browser.get_form(id="login_form") form = self.browser.get_form(id="login_form")
form["email"].value = self.email form["email"].value = self.email
form["pass"].value = self.pwd form["pass"].value = self.pwd
self.browser.submit_form(form, submit=form["login"]) self.browser.submit_form(form, submit=form["login"])
# Vänta lite och uppdatera cookie # Vänta lite och uppdatera cookie
print("Loggade in.") print("Loggade in.")
sleep_(2) sleep_(2)
self.open(url_bas)
sleep_(2)
except TypeError:
try:
write_error(11, self, soup=soup, profile=self.name)
except:
pass
def update_cookie(self, cookie):
self.cookie = cookie
db.update_document({"_id": self.doc["_id"], "cookie": cookie}, check_rev=False)
def unused(self):
""" Sätter user till False för valda profiler """
self.doc["in_use"] = False
db.update_document(self.doc, silent=True)
class Proxies: class Proxies:
def __init__(self): def __init__(self):
self.proxies = [ self.proxies = [
'gb25-wg.socks5.mullvad.net:1080', "gb25-wg.socks5.mullvad.net:1080",
'gb26-wg.socks5.mullvad.net:1080', "gb26-wg.socks5.mullvad.net:1080",
'gb27-wg.socks5.mullvad.net:1080', "gb27-wg.socks5.mullvad.net:1080",
'gb28-wg.socks5.mullvad.net:1080', "gb28-wg.socks5.mullvad.net:1080",
'gb29-wg.socks5.mullvad.net:1080' "gb29-wg.socks5.mullvad.net:1080",
] ]
def get_proxie(self): def get_proxie(self):
return self.proxies.pop(random.randrange(0, len(self.proxies), 1)) return self.proxies.pop(random.randrange(0, len(self.proxies), 1))
class Friend: class Friend:
def __init__(self, user): def __init__(self, user):
self.collection = "members" self.collection = "members"
self.user = user # The friends friend self.user = user # The friends friend
self.username = '' self.username = ""
self.url = '' self.url = ""
self.name = '' self.name = ""
self.single = ''
def add_to_db(self): def add_to_db(self):
db.insert_document( db.insert_document(
self.collection, self.collection,
{ {
"_key": self.username, "_key": str(self.username),
"url": url_bas + self.url, "url": url_bas + self.url,
"name": self.name, "name": self.name,
}, },

@ -1,9 +1,18 @@
from getpass import getpass
def set_pwd(_pwd=None):
    """Set the module-level Arango password.

    Args:
        _pwd (str, optional): Password to use. When None the user is
            prompted interactively (without echo) via getpass.
            Defaults to None.
    """
    global pwd
    if _pwd is None:  # idiom fix: compare to None with `is`, not `==`
        # Prompt text: "Password for the Arango user:"
        _pwd = getpass('Lösenord för Arango-användaren:')
    pwd = _pwd
# Info för arangodb # Info för arangodb
user_arango = "Lasse" user_arango = "Lasse"
pwd_arango = "4c071768bedc259288361c07aafd8535fca546086fada4e7b5de4e2bb26b0e70fa8d348c998b90d032a5b8f3fdbae1881b843021e3475198e6fb45f58d8dc450bd52f77d" pwd_arango = "4c071768bedc259288361c07aafd8535fca546086fada4e7b5de4e2bb26b0e70fa8d348c998b90d032a5b8f3fdbae1881b843021e3475198e6fb45f58d8dc450bd52f77d"
db_arango = "facebook" db_arango = "facebook"
host_arango = "http://arango.lasseedfast.se" host_arango = 'http://192.168.0.4:8529'
#host_arango = "http://arango.lasseedfast.se"
# Andra uppgifter # Andra uppgifter
url_bas = "https://mbasic.facebook.com" url_bas = "https://mbasic.facebook.com"
user_agent = "Mozilla/5.0 (iPhone; CPU iPhone OS 10_3 like Mac OS X) AppleWebKit/602.1.50 (KHTML, like Gecko) CriOS/56.0.2924.75 Mobile/14E5239e Safari/602.1"
mullvad = '8155249667566524'

Binary file not shown.

@ -0,0 +1,110 @@
import os
import pickle
import time
import cv2
import face_recognition
def build_data():
    """Build the "face_enc" pickle with known face encodings.

    Walks ../profile_pictures/<member>/*.jpg, computes a face embedding
    for every face found, and pickles {"encodings": [...], "names": [...]}
    to "face_enc" so identify_face() can match against it later.
    """
    knownEncodings = []
    knownNames = []
    # Each sub-folder of profile_pictures is one person; its jpg files
    # are that person's training images.
    members = os.listdir('../profile_pictures')
    for member in members:
        if '.DS_Store' in member:  # macOS metadata entry, not a member folder
            continue
        imagePaths = []
        for path in os.listdir(f'../profile_pictures/{member}'):
            if '.jpg' in path:
                imagePaths.append(f'../profile_pictures/{member}/{path}')
        # loop over the image paths
        for imagePath in imagePaths:
            print(imagePath)
            # load the input image and convert it from BGR (OpenCV ordering)
            # to dlib ordering (RGB)
            image = cv2.imread(imagePath)
            rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
            # Use face_recognition to locate faces
            boxes = face_recognition.face_locations(rgb, number_of_times_to_upsample=2)  # ,model='hog'
            # BUGFIX: encode from the RGB image the boxes were located in,
            # not the raw BGR `image` — face_recognition expects RGB.
            encodings = face_recognition.face_encodings(rgb, boxes)
            # loop over the encodings
            for encoding in encodings:
                knownEncodings.append(encoding)
                knownNames.append(member)
    # Save encodings along with their names for later use.
    data = {"encodings": knownEncodings, "names": knownNames}
    with open("face_enc", "wb") as f:
        pickle.dump(data, f)  # `with` closes the file; no manual close needed
def identify_face(imagePath):
    """Detect faces in the image at *imagePath*, name them using the
    "face_enc" pickle produced by build_data(), and show the annotated
    image in an OpenCV window (blocks until a key is pressed).
    """
    # Find the path of the xml file containing the haarcascade data
    # that ships with OpenCV.
    cascPathface = os.path.dirname(
        cv2.__file__) + "/data/haarcascade_frontalface_alt2.xml"
    # load the haarcascade into the cascade classifier
    faceCascade = cv2.CascadeClassifier(cascPathface)
    # load the known faces and embeddings saved by build_data()
    data = pickle.loads(open('face_enc', "rb").read())
    # Load the image to identify faces in.
    image = cv2.imread(imagePath)
    rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    # convert image to greyscale for the haarcascade detector
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    faces = faceCascade.detectMultiScale(gray,
                                         scaleFactor=1.1,
                                         minNeighbors=5,
                                         minSize=(60, 60),
                                         flags=cv2.CASCADE_SCALE_IMAGE)
    # the facial embeddings for faces in the input image
    encodings = face_recognition.face_encodings(rgb)
    names = []
    # loop over the facial embeddings, in case there are
    # multiple embeddings for multiple faces
    for encoding in encodings:
        # Compare this encoding with the encodings in data["encodings"].
        # `matches` is a list of booleans: True for embeddings that match
        # closely, False for the rest.
        matches = face_recognition.compare_faces(data["encodings"],
                                                 encoding)
        # set name = "Unknown" if no encoding matches
        name = "Unknown"
        # check to see if we have found a match
        if True in matches:
            # Find the positions at which we got True and store them.
            matchedIdxs = [i for (i, b) in enumerate(matches) if b]
            counts = {}
            # Loop over the matched indexes and maintain a count for
            # each recognized face.
            for i in matchedIdxs:
                # Check the names at the indexes stored in matchedIdxs.
                name = data["names"][i]
                # increase the count for the name we got
                counts[name] = counts.get(name, 0) + 1
            # pick the name which has the highest count
            name = max(counts, key=counts.get)
            print(counts)
        # update the list of names
        names.append(name)
    # NOTE(review): `faces` comes from the Haar cascade and `names` from
    # face_recognition's own detector; zip() assumes both find the same
    # faces in the same order — confirm this pairing is reliable.
    for ((x, y, w, h), name) in zip(faces, names):
        # draw the predicted face name on the image
        cv2.rectangle(image, (x, y), (x + w, y + h), (0, 255, 0), 2)
        cv2.putText(image, name, (x, y), cv2.FONT_HERSHEY_SIMPLEX,
                    0.75, (0, 255, 0), 2)
    cv2.imshow("Frame", image)
    cv2.waitKey(0)
identify_face('/Users/Lasse/Datorgemensamt/Programmeringsprojekt/Facebook/fb-scraper/profile_pictures/millington.jiang/4138068259557849.jpg')

@ -0,0 +1,185 @@
import locale
import re
from datetime import datetime
import networkx as nx
import pandas as pd
from numpy.core.numeric import NaN
locale.setlocale(locale.LC_TIME, "en_US")
from arangodb import db
def nodes_from_list(
    nodes, collection="members", return_fields="{'_key': doc._key, 'name': doc.name}"
):
    """Fetch the documents in *collection* whose _id appears in *nodes*.

    *return_fields* is spliced verbatim into the AQL RETURN clause, so it
    controls the shape of each returned document.
    """
    query = f"""
    FOR doc IN @@ecollection
    FILTER doc._id IN @nodes
    RETURN {return_fields}
    """
    bound = {"@ecollection": collection, "nodes": nodes}
    return list(db.aql.execute(query, bind_vars=bound))
def edges_from_nodes(
    nodes, edge_collections=("picture_reactions",), simple=True, mode="or"
):
    """Return the edge documents touching the given nodes.

    Args:
        nodes (list): Node _ids the edges must reference.
        edge_collections (iterable, optional): Edge collections to query.
            Defaults to ("picture_reactions",). (Changed from a mutable
            list default to a tuple; it is only iterated.)
        simple (bool, optional): If True return only _to/_from/_id/_key,
            otherwise the full document. Defaults to True.
        mode (str, optional): "or" keeps edges touching any listed node,
            "and" requires both endpoints in *nodes*. Spliced directly
            into the AQL filter.

    Returns:
        list: Matching edge documents.
    """
    if simple:
        return_fields = (
            "{'_to': doc._to, '_from': doc._from, '_id':doc._id, '_key':doc._key}"
        )
    else:
        return_fields = "doc"
    edges = []
    for collection in edge_collections:
        aql = f"""
        FOR doc IN @@edge_collection
        FILTER doc._from IN @nodes {mode} doc._to IN @nodes
        RETURN {return_fields}
        """
        cursor = db.aql.execute(
            aql,
            bind_vars={
                "@edge_collection": collection,
                "nodes": nodes,
            },
        )
        edges.extend(cursor)
    return edges
def convert_date(date):
    """Parse a Facebook-style English date string into ISO "YYYY-MM-DD".

    Tries several English formats in order; a date without a year
    (e.g. "12 Mar") is assumed to be from 2021. Returns "" when nothing
    matches.

    Args:
        date (str): Date string such as "12 Mar 2021" or "Mar 12, 2021".

    Returns:
        str: "YYYY-MM-DD", or "" if the string could not be parsed.
    """
    # BUGFIX: the original used strftime("%Y-%d-%d"), which printed the
    # day twice and dropped the month entirely.
    formats = ("%d %b %Y", "%d %B %Y", "%b %d, %Y", "%B %d, %Y")
    for fmt in formats:
        try:
            return datetime.strptime(date, fmt).strftime("%Y-%m-%d")
        except ValueError:
            continue
    try:
        # Dates scraped without a year ("12 Mar") are assumed to be 2021.
        return datetime.strptime(date + " 2021", "%d %b %Y").strftime("%Y-%m-%d")
    except ValueError:
        return ""
def export_network(members, n=2):
    """Export a .gexf reaction network for a list of member usernames.

    Starts from *members*, expands to everyone they interacted with, keeps
    second-degree accounts that reacted with at least *n* first-degree
    contacts (or are first-degree themselves), and writes the resulting
    graph to a data/ file.

    Args:
        members (list): Member usernames (_keys in "members").
        n (int, optional): Minimum number of first-degree contacts a
            second-degree account must share an edge with. Defaults to 2.
    """
    # BUGFIX: the original was f"data/-.join({members}).-old.gexf", which
    # never called join and produced a literal "-.join([...])" filename.
    filename = f"data/{'-'.join(members)}-old.gexf"
    ids = [f"members/{member}" for member in members]

    # First-degree network: everyone sharing an edge with the seed members.
    friends = set()
    edges = edges_from_nodes(ids)
    for edge in edges:
        friends.add(edge["_from"])
        friends.add(edge["_to"])

    # Second-degree edges; record who has interacted with whom.
    edges = edges_from_nodes(list(friends))
    d = {}
    for i in edges:
        d.setdefault(i["_to"], set()).add(i["_from"])
        d.setdefault(i["_from"], set()).add(i["_to"])

    # Keep accounts that reacted with >= n first-degree contacts, plus the
    # first-degree contacts themselves.
    members = [
        key for key, value in d.items() if len(value & friends) >= n or key in friends
    ]

    # Edge table restricted to kept accounts (both endpoints must be kept).
    edges = pd.DataFrame(
        edges_from_nodes(members, mode="and", simple=False),
        columns=["_key", "_to", "_from", "reaction", "picture"],
    )
    edges.set_index("_key", inplace=True)

    # Only users that actually appear in the final edges become nodes.
    members = list(set(edges["_from"].unique()) | set(edges["_to"].unique()))
    nodes = [(i["_key"], i) for i in nodes_from_list(members)]

    # Strip the "members/" collection prefix from both endpoints.
    edges._from = edges._from.apply(lambda x: x[8:])
    edges._to = edges._to.apply(lambda x: x[8:])
    # Some pictures got a url as id; keep only the numeric part.
    edges.picture = edges.picture.apply(lambda x: re.search(r"\d+", x).group())

    # Look up each picture once to attach its date to every edge.
    p = ["pictures/" + i for i in edges.picture.unique().tolist()]
    d = {}
    pictures = nodes_from_list(
        p, collection="pictures", return_fields="{'id': doc._key, 'date':doc.date}"
    )
    for picture in pictures:
        d[picture["id"]] = convert_date(picture["date"])
    edges["date"] = edges.picture.apply(lambda x: d[x])

    # Build the directed multigraph from the edge table.
    G = nx.from_pandas_edgelist(
        edges,
        source="_from",
        target="_to",
        edge_attr=["reaction", "date"],  # , "now"
        create_using=nx.MultiDiGraph,
    )
    # Add the node attributes and export.
    G.add_nodes_from(nodes)
    nx.write_gexf(G, filename)
if __name__ == "__main__":
    # Sample export for a single member; swap in the commented line
    # below for interactive use.
    export_network(["linda.kakuli"])
    # export_network(input('Member: '))

@ -1,6 +1,7 @@
import locale import locale
import re import re
from datetime import datetime from datetime import datetime
from sys import argv
import networkx as nx import networkx as nx
import pandas as pd import pandas as pd
@ -25,42 +26,36 @@ def nodes_from_list(
return [doc for doc in cursor] return [doc for doc in cursor]
def edges_from_nodes( def edges_from_nodes(
nodes, edge_collections=["picture_reactions"], simple=True, mode="or" nodes, edge_collections=["picture_reactions"], mode="or"
): ):
""" """
Returnerar en df med relationer för valda noder och relationtabeller. Returnerar en dict med relationer för valda noder och relationtabeller.
Args: Args:
nodes (list): Noder som ska ingå i relationerna nodes (list): Noder som ska ingå i relationerna
edge_collections (list, optional): Relationtabeller att hämta relationer från. Defaults to ['messages']. edge_collections (list, optional): Relationtabeller att hämta relationer från. Defaults to ['messages'].
simple (bool, optional): Simple ger bara _from, _to och _key. Defaults to True.
Returns: Returns:
pd.DataFrame: DataFrame. dict: Dict med relationer
""" """
if simple:
return_fields = (
"{'_to': doc._to, '_from': doc._from, '_id':doc._id, '_key':doc._key}"
)
else:
return_fields = "doc"
edges = [] edges = []
for collection in edge_collections: for collection in edge_collections:
aql_edges = f""" aql_edges = f"""
FOR doc IN @@edge_collection FOR doc IN @@edge_collection
FILTER doc._from IN @nodes {mode} doc._to IN @nodes FILTER doc._from IN @nodes {mode} doc._to IN @nodes
RETURN {return_fields} RETURN doc
""" """
cursor = db.aql.execute( cursor = db.aql.execute(
aql_edges, aql_edges,
bind_vars={ bind_vars={
"@edge_collection": collection, "@edge_collection": collection,
"nodes": nodes, "nodes": nodes,
}, }, stream=True
) )
edges = edges + [doc for doc in cursor] edges = edges + [doc for doc in cursor]
@ -89,17 +84,32 @@ def convert_date(date):
# return f'{new_date.date().year}-{new_date.date().month}-{new_date.date().day}' # return f'{new_date.date().year}-{new_date.date().month}-{new_date.date().day}'
def export_network(member, n=2): def get_edges(member, n=2, lookups=[], common=True):
""" Exporterar en gexf-fil med noder utifrån en medlem. """ """ Returnerar en df med edges för vald member.
Args:
member (str): Username for member.
lookups (list): Användare att hitta gemensamt nätverk för
noncommon (bool): Om den ena användarens förstakontakter ska räknas till den andra användarens nätverk
Returns:
df: Dataframe with edges
"""
member = f"members/{member}" member = f"members/{member}"
lookups = [f"members/{i}" for i in lookups]
member_friends = set() member_friends = set()
# Hämta relationer kopplade till member från databasen # Hämta relationer kopplade till member från databasen
for edge in edges_from_nodes([member]): for edge in edges_from_nodes([member]):
member_friends.add(edge["_from"]) member_friends.add(edge["_from"])
member_friends.add(edge["_to"]) member_friends.add(edge["_to"])
edges = edges_from_nodes(list(member_friends)) member_friends = list(member_friends)
if not common:
# Ta bort de andra i lookups så inte de får kompisars kompisar
member_friends = [friend for friend in member_friends if friend not in lookups] # ! Ska den här vara kvar?
for member in lookups:
member_friends.append(member)
edges = edges_from_nodes(member_friends)
# Skapa en dict där det syns vem som har interagerat med hur många # Skapa en dict där det syns vem som har interagerat med hur många
d = {} d = {}
@ -120,34 +130,51 @@ def export_network(member, n=2):
member_friends = set(member_friends) member_friends = set(member_friends)
members = [] members = []
for key, value in d.items(): for key, value in d.items():
if len(value & member_friends) >= n: if len(value & member_friends) >= n or key in member_friends:
members.append(key) members.append(key)
# Skapa df med edges # Skapa df med edges
edges = pd.DataFrame( edges = pd.DataFrame(
edges_from_nodes(members, mode="and", simple=False), edges_from_nodes(members, mode="and"),
columns=["_key", "_to", "_from", "reaction", "picture"], columns=["_key", "_to", "_from", "reaction", "picture"],
) )
edges.set_index("_key", inplace=True) edges.set_index("_key", inplace=True)
# En lista på användare att ta med till nätverket return edges
members = list(set(edges["_from"].unique()) | set(edges["_to"].unique()))
# Skapa noder till nätverket def members_from_edges(edges):
nodes = nodes_from_list( """ En lista på användare att ta med till nätverket.
members
) # , return_fields="{'id':doc._key, 'label':doc.name")
nodes = [(i["_key"], i) for i in nodes]
Args:
edges (df): Dataframe with edges.
Returns:
list: List of unique members in edges (to and from).
"""
return list(set(edges["_from"].unique()) | set(edges["_to"].unique()))
def edges_for_network(edges):
""" Prepare edges for the network
Args:
edges (df): Dataframe with edges
Returns:
df: Dataframe with edges prepared for network.
"""
# Lägg till några kolumner i edges-tabellen # Lägg till några kolumner i edges-tabellen
edges._from = edges._from.apply(lambda x: x[8:]) edges._from = edges._from.apply(lambda x: x[8:]) # Ta bort "members/"
edges._to = edges._to.apply(lambda x: x[8:]) edges._to = edges._to.apply(lambda x: x[8:]) # Ta bort "members/"
edges.picture = edges.picture.apply( edges.picture = edges.picture.apply(
lambda x: re.search("\d+", x).group() lambda x: re.search("\d+", x).group()
) # Rensa bort url-info i de fall bilden har fått fel id ) # Rensa bort url-info i de fall bilden har fått fel id
# Hämta bilder för att kunna lägga datum till edges # Hämta bilder för att kunna lägga datum till edges
p = ["pictures/" + i for i in edges.picture.unique().tolist()] p = ["pictures/" + i for i in edges.picture.unique().tolist()]
d = {} d = {}
pictures = nodes_from_list( pictures = nodes_from_list(
p, collection="pictures", return_fields="{'id': doc._key, 'date':doc.date}" p, collection="pictures", return_fields="{'id': doc._key, 'date':doc.date}"
@ -157,25 +184,145 @@ def export_network(member, n=2):
edges["date"] = edges.picture.apply(lambda x: d[x]) edges["date"] = edges.picture.apply(lambda x: d[x])
return edges
def export_network(member):
""" Exporterar en gexf-fil med noder utifrån en medlem. """
filename = f"data/{member}_.gexf"
edges = get_edges(member, n=3)
members = members_from_edges(edges)
# Skapa graf utifrån relationer # Skapa graf utifrån relationer
G = nx.from_pandas_edgelist( G = nx.from_pandas_edgelist(
edges, edges_for_network(edges),
source="_from", source="_from",
target="_to", target="_to",
edge_attr=["reaction", "date", "now"], edge_attr=["reaction", "date"], #, "now"
create_using=nx.MultiDiGraph, create_using=nx.MultiDiGraph,
) )
## Skapa noder till nätverket
nodes = nodes_from_list(
members
) # , return_fields="{'id':doc._key, 'label':doc.name")
# Lägg till noderna i grafen # Lägg till noderna i grafen
G.add_nodes_from(nodes) G.add_nodes_from([(i["_key"], i) for i in nodes])
# Exportera till filer # Exportera till filer
nx.write_gexf( nx.write_gexf(
G, G,
f"data/network_test.gexf", filename
) )
def common_friends(d, n=2):
""" Filtrera ut gemensamma vänner """
common_friends = {}
for _, value in d.items():
for friend in set(value):
if friend not in common_friends:
common_friends[friend] = 1
else:
common_friends[friend] += 1
l = []
for key, value in common_friends.items():
if value >= n:
l.append(key)
if l == []:
print('Inga gemensamma i nätverken.')
exit()
return l
if __name__ == "__main__": if __name__ == "__main__":
export_network("maria.hansson.botin")
lookups = [
'katherine.zimmerman.754',
'boogiesaman.bakhtiari',
'lena.tidestromsagstrom',
'bibi.rodoo',
'mina.benaissa',
'henrik.johnsson.73',
'fabian.asserback',
'100005696055822',
'fia.wiren',
'daniel.kjellander.5'
]
print('Samlar data för:')
for i in lookups:
print(i)
print(f'({len(lookups)} stycken\n')
# Hur många vänners vänners ska känna
if len(lookups) == 1:
n = 1
elif len(argv) > 1:
n = int(argv[1])
else:
#from math import sqrt
n = round(len(lookups)/2.2 + 1)
print(f'n = {n}')
if len(lookups) <= 3:
filename = f"../data/{'-'.join(lookups).replace('.','')}.gexf"
else:
from datetime import datetime
filename = f"../data/{datetime.now()}.gexf"
if len (lookups) == 1:
export_network(lookups[0])
exit()
d = {}
for member in lookups:
edges = get_edges(member, lookups=lookups, common = False, n=n)
friends = members_from_edges(edges)
d[member] = friends
print(member, len(friends))
# Filtrera gemensamma vänner
common = common_friends(d)
print('Common friends: ', len(common))
edges = pd.DataFrame(edges_from_nodes(common, mode='and')) # and om båda noderna ska vara med i common friends, annars or
members = members_from_edges(edges)
edges = edges_for_network(edges)
# Skapa graf utifrån relationer
G = nx.from_pandas_edgelist(
edges,
source="_from",
target="_to",
edge_attr=["reaction", "date"], #, "now"
create_using=nx.MultiDiGraph,
)
## Skapa noder till nätverket
nodes = nodes_from_list(
members
) # , return_fields="{'id':doc._key, 'label':doc.name")
# Lägg till noderna i grafen
G.add_nodes_from([(i["_key"], i) for i in nodes])
# Exportera till filer
nx.write_gexf(
G,
filename
)
#export_network("asifasghar")
# export_network(input('Member: ')) # export_network(input('Member: '))

@ -2,8 +2,9 @@ import pickle
import random import random
from datetime import datetime from datetime import datetime
from time import sleep from time import sleep
import json
from arangodb import db from config import url_bas
def sleep_(t): def sleep_(t):
@ -12,7 +13,7 @@ def sleep_(t):
""" """
variation = 4 # Testa olika sovlängder för att inte få användaren blockerad variation = 4 # Testa olika sovlängder för att inte få användaren blockerad
sleep(t * variation * random.randrange(85, 115, 1) / 100) sleep(t * variation * random.randrange(85, 115, 1) / 100)
if random.randrange(0, 60, 1) == 1: if random.randrange(0, 50, 1) == 1:
longsleep = random.randrange(200, 300) longsleep = random.randrange(200, 300)
print('') print('')
for s in range(0, longsleep): for s in range(0, longsleep):
@ -21,14 +22,17 @@ def sleep_(t):
print() print()
sleep(random.randrange(0, 10, 1) / 4) sleep(random.randrange(0, 10, 1) / 4)
# TODO #1 spara cookies till db # TODO #1 spara cookies till db
def update_cookie(cookies, profile_name): def update_cookie(cookies, profile):
""" Uppdaterar cookie för browser """ """ Uppdaterar cookie för browser """
with open("data/cookie_{}.pkl".format(profile_name), "wb") as f: # with open("data/cookie_{}.pkl".format(profile.name), "wb") as f:
pickle.dump(cookies, f) # pickle.dump(cookies, f)
# cookies_dict = json.dumps(dict(cookies))
profile.update_cookie(cookies.get_dict())
def write_error(nr, e="", traceback="", soup="", user="", url="", url_name="", profile=""): def write_error(nr, profile, e=" ", traceback="", soup="", user="", url="", url_name=""):
"""Skriver info efter error till arango """Skriver info efter error till arango
Args: Args:
@ -36,19 +40,27 @@ def write_error(nr, e="", traceback="", soup="", user="", url="", url_name="", p
e (str, optional): error. Defaults to "". e (str, optional): error. Defaults to "".
traceback (str, optional): The traceback from traceback.format_exc(). Defaults to "". traceback (str, optional): The traceback from traceback.format_exc(). Defaults to "".
soup (str, optional): Soup. Defaults to "". soup (str, optional): Soup. Defaults to "".
user (str, optional): The user. Defaults to "". user (class, optional): The user. Defaults to "".
url (str, optional): Url, if any. Defaults to "". url (str, optional): Url, if any. Defaults to "".
count (int, optional): Count, if any. Defaults to 0. count (int, optional): Count, if any. Defaults to 0.
url_name (str, optional): The description of the url, if any. Defaults to "". url_name (str, optional): The description of the url, if any. Defaults to "".
profile (user, optional): The profile.
""" """
if url == "": if url == "":
url = "ingen url" url = "ingen url"
url_name = "ingen url" url_name = "ingen url"
# try:
# BARA VID FELSÖKNING # # BARA VID FELSÖKNING
_print(profile.container, e) # print(profile, user, e)
_print(profile.container, traceback) # print(profile, user, traceback.format_exc())
# _print(profile, user, e)
# _print(profile, user, traceback.format_exc())
# except Exception as e:
# print('Kunde inte skriva error print till databasen.')
# print(e)
if "e" not in locals():
e = 'Unknown error'
doc = { doc = {
"_key": f"{now()}_{profile.container})", "_key": f"{now()}_{profile.container})",
"number": nr, "number": nr,
@ -58,9 +70,9 @@ def write_error(nr, e="", traceback="", soup="", user="", url="", url_name="", p
"url": str(url), "url": str(url),
"url_name": url_name, "url_name": url_name,
"soup": str(soup), "soup": str(soup),
"traceback": str(traceback), "traceback": str(traceback).split('\n'),
} }
try: try:
db.insert_document( db.insert_document(
"errors", "errors",
@ -69,14 +81,17 @@ def write_error(nr, e="", traceback="", soup="", user="", url="", url_name="", p
silent=True, silent=True,
) )
except Exception as e: except Exception as e:
_print(profile.container, user, e) _print(profile, user, e)
def now(): def now():
""" Returns current date and time as string""" """ Returns current date and time as string"""
return datetime.now().strftime("%Y-%m-%d_%H:%M:%S") return datetime.now().strftime("%Y-%m-%d_%H:%M:%S")
def _print(container, user, text, end='\n', silent=False): def nowstamp():
""" Returns current date and time as timestamp"""
return int(datetime.now().timestamp())
def _print(profile, user, text, end='\n', silent=False, sleeptime=0):
""" Write a "print" to the database (and prints in in the terminal) """ Write a "print" to the database (and prints in in the terminal)
Args: Args:
@ -86,15 +101,69 @@ def _print(container, user, text, end='\n', silent=False):
end (str, optional): The end value for print. Defaults to '\n'. end (str, optional): The end value for print. Defaults to '\n'.
silent (bool, optional): If a print should be done in the terminal. Defaults to False. silent (bool, optional): If a print should be done in the terminal. Defaults to False.
""" """
if silent == False: if silent == False:
print(text, end=end) print(text, end=end)
if user != '':
user = f"{user} - " if profile.write == False:
return None
if profile.container[:4] == 'leak' and len(profile.container) < 7:
_key = f'{profile.container}_{now()[2:10]}'
elif profile.container[:7] == 'lookups':
_key = f'{profile.container}_{now()[2:10]}'
else:
_key = profile.container
try:
if isinstance(text, list):
to_print = {user.username: text}
else:
to_print = f"{user.username} - {text.strip()}"
except:
if isinstance(text, list):
to_print = {user: text}
else:
to_print = f"{text.strip()}"
db.insert_document( db.insert_document(
"prints", "prints",
{'_key': container, 'print':{now(): f"{user}{text.strip()}"}}, {'_key': _key, 'print':{now(): to_print}},
overwrite_mode="update", overwrite_mode="update",
silent=True, silent=True,
merge=True )
) sleep(sleeptime)
def check_profile_status(profile, user):
if profile.browser._cursor == -1: # Om ingen sida har öppnats än.
profile.open(url_bas)
if any(
[
"It looks like you were misusing this feature by going too fast."
in profile.viewing().text,
"Access Denied" in profile.viewing().text,
"Your Account Has Been Disabled" in profile.viewing().text
]
):
_print(profile, user, f"{profile.name} blocked\n".upper(), sleeptime=1)
_print(profile, user, profile.viewing().text, sleeptime=1)
profile.blocked = True # Nu tar jag bort dem, kan göras på annat sätt kanske?
elif "accept all" in profile.viewing().text.lower():
profile.accept_cookies()
sleep_(3)
profile.open(user.url_photos)
elif (
profile.viewing().find("title").text.strip() == "Log in to Facebook | Facebook"
):
sleep_(5)
profile.login()
sleep_(5)
profile.open(user.url_photos)
return profile
from arangodb import db

@ -0,0 +1,70 @@
import requests
import os
from datetime import date, datetime, timedelta
from time import sleep
from arangodb import db
def download_image(url, user, id):
    """Fetch one picture and store it as ../profile_pictures/<user>/<id>.jpg.

    Exits the whole process when the signed url has expired or access is
    denied (HTTP 403).
    """
    # Make sure the per-user folder exists before writing into it.
    if not os.path.isdir(f'../profile_pictures/{user}'):
        os.mkdir(f'../profile_pictures/{user}')
    # Download the picture.
    response = requests.get(url)
    if response.text == 'URL signature expired':
        print('För gammal länk.')
        exit()
    elif response.status_code == 403:
        exit()
    with open(f'../profile_pictures/{user}/{id}.jpg', 'wb') as handler:
        handler.write(response.content)
def get_pictures(day):
    """Download checked profile pictures for every member fetched on *day*
    (a "YYYYMMDD" string), then flag each member as downloaded.
    """
    # Members fetched on `day` that have checked_pictures but have not had
    # their pictures downloaded yet.
    cursor = db.aql.execute(
        """
        for doc in members
        filter doc.fetched == @date
        filter has(doc, "checked_pictures")
        filter not has(doc, "pictures_downloaded")
        return {'member': doc._key, 'pictures':doc.checked_pictures}
        """,
        bind_vars={'date': day}
    )
    for doc in cursor:
        pictures = []
        for picture in doc['pictures']:
            # checked_pictures holds urls; keep only the fbid part as key.
            pictures.append(picture[picture.find('fbid=')+5:])
        # NOTE(review): rebinding `cursor` here does not break the outer
        # loop (it holds its own iterator) but is confusing; also the
        # `limit 10` fetches at most 10 pictures even though the member is
        # then marked pictures_downloaded — confirm that is intended.
        cursor = db.aql.execute(
            """
            for doc in pictures
            filter doc._key in @list
            limit 10
            return {'_key': doc._key, 'user':doc.user, 'url': doc.src}
            """,
            bind_vars={"list": pictures},
        )
        for picture in cursor:
            download_image(picture['url'], picture['user'], picture['_key'])
            print(picture['_key'])
            sleep(2)  # pace the downloads
        db.update_document({'_id': 'members/' + str(doc['member']), 'pictures_downloaded': True}, silent=True, check_rev=False)
def old_pics():
    """Backfill: download checked pictures for each of the previous 59 days."""
    # Make sure the target folder exists before anything is downloaded.
    if not os.path.isdir('../profile_pictures'):
        os.mkdir('../profile_pictures')
    today = date.today()
    for offset in range(1, 60):
        day = today - timedelta(days=offset)
        get_pictures(day.strftime('%Y%m%d'))

@ -0,0 +1,169 @@
import os
from datetime import date, datetime, timedelta
from getpass import getpass
from time import sleep
import random
import requests
import urllib3
urllib3.disable_warnings()
from arango import ArangoClient
def download_image(url, user, id):
    """Download one picture through a random Mullvad SOCKS5 proxy and save
    it as /ssd/profile_pictures/<user>/<id>.jpg.

    Exits the whole process when the signed url has expired or access is
    denied (HTTP 403).
    """
    # Download the picture, retrying through another random proxy on
    # connection errors.
    while True:
        try:
            server = servers_mullvad[random.randint(0, len(servers_mullvad)-1)]
            # NOTE(review): the account number is wrapped in literal single
            # quotes inside the proxy url ("socks5://'815...'@host") — this
            # looks unintended; confirm the quotes belong in the username.
            proxies = {
                "https": "socks5://'8155249667566524'@{}".format(server),
                "http": "socks5://'8155249667566524'@{}".format(server),
            }
            r = requests.get(url, proxies=proxies)
            break
        except requests.exceptions.ConnectionError:
            sleep(300)  # back off five minutes before trying another proxy
    if r.text == "URL signature expired":
        print("För gammal länk.")
        exit()
    elif r.status_code == 403:
        exit()
    image_name = f"/ssd/profile_pictures/{user}/{id}.jpg"
    img_data = r.content
    with open(image_name, "wb") as handler:
        handler.write(img_data)
    # Upload to NextCloud is currently disabled:
    #nc_path = f"https://nc.lasseedfast.se/remote.php/dav/files/Lasse/profile_pictures/{user}/{id}.jpg"
    # headers = {"Content-type": "image/jpeg", "Slug": "heart"}
    # while True:
    #     try:
    #         r = requests.put(
    #             nc_path, data=open(image_name, "rb"), headers=headers, auth=auth, verify=False
    #         )
    #         break
    #     except:
    #         print('Kunde inte ladda upp', nc_path)
    #         sleep(5)
    print(f"{user}\t{id}\t{r.status_code}")
def get_pictures(day):
    """Download checked profile pictures for every member fetched on *day*
    (a "YYYYMMDD" string), then flag each member as downloaded.
    """
    cursor = db.aql.execute(
        """
        for doc in members
        filter doc.fetched == @date
        filter has(doc, "checked_pictures")
        filter not has(doc, "pictures_downloaded")
        return {'member': doc._key, 'pictures':doc.checked_pictures}
        """,
        bind_vars={"date": str(day)},
    )
    # Materialise the member cursor into a list before issuing the
    # per-member picture queries below.
    images = []
    for doc in cursor:
        images.append(doc)
    for doc in images:
        user = doc["member"]
        # Creating the per-user folder on NextCloud is currently disabled:
        # nc_path = f"https://nc.lasseedfast.se/remote.php/dav/files/Lasse/profile_pictures/{user}"
        # while True:
        #     try:
        #         requests.request("MKCOL", nc_path, verify=False, auth=auth)
        #         break
        #     except:
        #         print('Kunde inte skapa', nc_path)
        #         sleep(5)
        # ...create it on the local disk instead (for backup).
        if not os.path.isdir(f"/ssd/profile_pictures/{user}"):
            os.mkdir(f"/ssd/profile_pictures/{user}")
        pictures = []
        for picture in doc["pictures"]:
            # checked_pictures holds urls; keep only the fbid part as key.
            pictures.append(picture[picture.find("fbid=") + 5 :])
        cursor = db.aql.execute(
            """
            for doc in pictures
            filter doc._key in @list
            limit 10
            return {'_key': doc._key, 'user':doc.user, 'url': doc.src}
            """,
            bind_vars={"list": pictures},
        )
        for picture in cursor:
            # NOTE(review): `while True: ...; break` runs exactly once —
            # presumably a leftover retry loop; confirm retries are wanted.
            while True:
                download_image(picture["url"], picture["user"], picture["_key"])
                sleep(1)
                break
        db.update_document(
            {"_id": "members/" + str(doc["member"]), "pictures_downloaded": True},
            silent=True,
            check_rev=False,
        )
# def old_pics():
# if not os.path.isdir(f'profile_pictures'):
# os.mkdir(f'profile_pictures')
# start = date.today()
# for i in range(1,60):
# d = start - timedelta(days=i)
# get_pictures(d.strftime('%Y%m%d'))
if __name__ == '__main__':
    # ArangoDB connection info.
    user_arango = "Lasse"
    db_arango = "facebook"
    host_arango = "http://192.168.0.4:8529"

    # Connect to ArangoDB; the password is asked for interactively.
    pwd = getpass(f"Arangolösenord för {user_arango}: ")
    db = ArangoClient(hosts=host_arango).db(db_arango, username=user_arango, password=pwd)

    # NOTE(review): hard-coded NextCloud credentials checked into source —
    # move to an environment variable / secret store and rotate them.
    auth = ("Lasse", "affix-sip-jejune-epigraph-ENTROPY-stupefy1")

    # Mullvad SOCKS5 exits used by download_image() above.
    servers_mullvad = [
        "se15-wg.socks5.mullvad.net:1080",
        "se17-wg.socks5.mullvad.net:1080",
        "se18-wg.socks5.mullvad.net:1080",
        "se19-wg.socks5.mullvad.net:1080",
        "se21-wg.socks5.mullvad.net:1080",
        "se22-wg.socks5.mullvad.net:1080",
        "se23-wg.socks5.mullvad.net:1080",
        "se3-wg.socks5.mullvad.net:1080",
        "se5-wg.socks5.mullvad.net:1080",
        "se9-wg.socks5.mullvad.net:1080",
        "se10-wg.socks5.mullvad.net:1080",
        "se2-wg.socks5.mullvad.net:1080",
        "se6-wg.socks5.mullvad.net:1080",
        "se7-wg.socks5.mullvad.net:1080",
        "se8-wg.socks5.mullvad.net:1080",
        "se13-wg.socks5.mullvad.net:1080",
        "se14-wg.socks5.mullvad.net:1080",
        "se26-wg.socks5.mullvad.net:1080",
        "se27-wg.socks5.mullvad.net:1080",
        "se28-wg.socks5.mullvad.net:1080",
    ]

    # Poll forever: process today's and yesterday's batches every 5 minutes.
    while True:
        today = date.today().strftime('%Y%m%d')
        get_pictures(today)
        yesterday = date.today() - timedelta(days=1)
        get_pictures(yesterday.strftime('%Y%m%d'))
        sleep(300)

@ -0,0 +1,45 @@
"""Wait until the Pi has rejoined the 4G router's WiFi, then reboot the
modem through its web admin UI so it comes back with a fresh WAN IP."""
import subprocess
import requests
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from time import sleep


def _wait_for_wifi(ssid, poll_seconds=20):
    """Poll `iwgetid` until the current WiFi SSID contains *ssid*."""
    while True:
        proc = subprocess.Popen(['iwgetid'], stdout=subprocess.PIPE)
        current = proc.communicate()[0].decode()
        if ssid in current:
            print('Success')  # fixed typo: was 'Sucess'
            return
        sleep(poll_seconds)


def _reboot_modem():
    """Log in to the modem's web UI and click through the reboot flow.

    The browser is always closed, even if a locator fails mid-flow
    (the original leaked the Chrome process on any exception).
    """
    options = Options()
    options.headless = True
    browser = webdriver.Chrome(options=options)
    try:
        browser.get('http://192.168.100.1/cellweb/login.asp')
        sleep(3)
        # NOTE(review): admin credentials are hard-coded; acceptable for a
        # LAN-only modem, but consider reading them from the environment.
        browser.find_element_by_id("user_name").send_keys("admin")
        browser.find_element_by_id("user_password").send_keys("1340asde")
        # Click through: Login -> More -> Reboot -> Accept -> confirm alert.
        browser.find_element_by_xpath("/html/body/section/form/button").click()  # Login
        sleep(1)
        browser.find_element_by_xpath("/html/body/section/div[2]/div[6]/a").click()  # More
        sleep(1)
        browser.find_element_by_xpath("/html/body/section[2]/div/div[2]/div/a").click()  # Reboot
        sleep(1)
        browser.find_element_by_xpath("/html/body/div[4]/div/div/div[2]/div[2]").click()  # Accept
        sleep(1)
        browser.switch_to_alert().accept()  # Confirm the JS alert
    finally:
        browser.close()


_wait_for_wifi('4G-UFI-5671')
print('Nuvarande ip:', requests.get('https://api.ipify.org').text)
_reboot_modem()
sleep(120)  # give the modem time to come back up with a new WAN IP
print('Ny ip:', requests.get('https://api.ipify.org').text)

@ -1,16 +1,21 @@
import random import random
import subprocess import subprocess
from getopt import getopt from getopt import getopt
from os import chdir
from os.path import abspath, dirname
from sys import argv from sys import argv
from time import sleep from time import sleep
from os import chdir
from os.path import dirname
from arangodb import db, get_profile, remove_profile # Gör fb-scraper till arbetsmapp
chdir(dirname(dirname(abspath(__file__))))
from arangodb import arango_connect, used_servers
from config import *
from helpers import now from helpers import now
def profile_generator(db, n):
def profile_generator(db, n=1, bulk=False):
cursor = db.aql.execute( cursor = db.aql.execute(
""" """
@ -33,29 +38,36 @@ def profile_generator(db, n):
with open("data/passwords.txt") as f: with open("data/passwords.txt") as f:
words = [word for word in f.readlines()] words = [word for word in f.readlines()]
servers_used = used_servers()
with open("data/servers.txt") as f: with open("data/servers.txt") as f:
servers = [] servers = []
for line in f.readlines(): while len(servers) < n:
if "@" in line: for line in f.readlines():
line = line.strip() if "@" in line:
city = line[: line.find("@")].strip() line = line.strip()
city = line[: line.find("@")].strip()
if "WireGuard" in line and line.strip()[:2] in [
"gb", if "WireGuard" in line and line.strip()[:2] in [
"us", "gb",
]: # "au", "ca" #För senare när det behövs "us",
line = line.strip() ]: # "au", "ca" #För senare när det behövs
country_short = line[:2] line = line.strip()
server = line[: line.find("-")] country_short = line[:2]
city_short = city[city.find("(") + 1 : city.find(")")] server = line[: line.find("-")]
server_name = [country_short, city_short, server + "-wireguard"]
servers.append( # Kolla så att servern inte redan används av profil i databasen
{ # eller finns i listan som skapas nu.
"server_city": city, if server not in servers_used and server not in servers:
"server": server + "-wg.socks5.mullvad.net:1080", city_short = city[city.find("(") + 1 : city.find(")")]
"server_connect": server_name, server_name = [country_short, city_short, server + "-wireguard"]
} servers.append(
) {
"server_city": city,
"server": server + "-wg.socks5.mullvad.net:1080",
"server_connect": server_name,
}
)
count = 0 count = 0
for i in range(0, n - 1): for i in range(0, n - 1):
@ -79,6 +91,8 @@ def profile_generator(db, n):
server = server_info["server"].strip() server = server_info["server"].strip()
birthday = f"{year}-{random.randrange(1, 13)}-{random.randrange(1, 30)}" birthday = f"{year}-{random.randrange(1, 13)}-{random.randrange(1, 30)}"
_key = server[: server.find("-")] _key = server[: server.find("-")]
doc = { doc = {
"_id": "profiles/" + _key, "_id": "profiles/" + _key,
"_key": _key, "_key": _key,
@ -92,17 +106,23 @@ def profile_generator(db, n):
"in_use": False, "in_use": False,
} }
# Skriv till databasen (skriver inte profiler med servarar som redan används) # Skriv till databasen (skriver inte profiler med servarar som redan används)
try:
db.insert_document("profiles", doc) if bulk == True:
count += 1 try:
except: db.insert_document("profiles", doc)
pass count += 1
except:
pass
print(f"Skrev {count} profiler till databasen. ")
print(f"Skrev {count} profiler till databasen. ") else:
return doc
def mullvad(server): def mullvad(server):
""" Anslut till Mullvad-server. """ """ Anslut till Mullvad-server. """
sleep(2)
subprocess.run( subprocess.run(
[ [
"mullvad", "mullvad",
@ -118,6 +138,9 @@ def mullvad(server):
connect_to_mullvad.wait() connect_to_mullvad.wait()
sleep(3) sleep(3)
def close_browser():
subprocess.run(["osascript", "-e", 'quit app "Brave Browser"'])
subprocess.run(["osascript", "-e", 'quit app "Opera"'])
def create_profile(): def create_profile():
""" Supports during the creation of a profile """ """ Supports during the creation of a profile """
@ -125,9 +148,10 @@ def create_profile():
while True: while True:
mullvad(arango_server) mullvad(arango_server)
sleep(2)
# Hämta profil # Hämta profil
profile = get_profile(created=False) db = arango_connect(pwd)
profile = profile_generator(db)
# Asnlut till profilens VPN-server # Asnlut till profilens VPN-server
mullvad(profile["server_connect"]) mullvad(profile["server_connect"])
@ -142,7 +166,6 @@ def create_profile():
print(profile["birthday"]) print(profile["birthday"])
print(profile["name"]) print(profile["name"])
print()
print() print()
print(profile["name"]) print(profile["name"])
print(profile["email"]) print(profile["email"])
@ -153,38 +176,45 @@ def create_profile():
user_input = user_input.lower() user_input = user_input.lower()
if user_input in ["done", "d", ""]: if user_input in ["done", "d", ""]:
subprocess.run(["osascript", "-e", 'quit app "Brave Browser"']) close_browser()
sleep(1) sleep(1)
profile["created"] = True profile["created"] = True
mullvad(arango_server) mullvad(arango_server)
sleep(3) sleep(3)
db.update_document(profile) db = arango_connect(pwd)
db.update_document('profiles', profile)
elif user_input in ["delete", "d"]: elif user_input in ["delete", "d"]:
subprocess.run(["osascript", "-e", 'quit app "Brave Browser"']) close_browser()
sleep(1)
mullvad(arango_server) mullvad(arango_server)
blocked = remove_profile(profile) db = arango_connect(pwd)
blocked = db.collection("profiles").delete(profile['_key'])
blocked['_id'] = 'block_created/' + now() blocked['_id'] = 'block_created/' + now()
blocked['key'] = now() blocked['key'] = now()
sleep(2)
db = arango_connect(pwd)
db.insert_document("block_created", blocked) db.insert_document("block_created", blocked)
elif user_input in ["quit", "q"]: elif user_input in ["quit", "q"]:
subprocess.run(["osascript", "-e", 'quit app "Brave Browser"']) close_browser()
sleep(1) close_browser()
mullvad(arango_server) mullvad(arango_server)
sleep(3)
exit() exit()
else: else:
sleep(3)
db = arango_connect(pwd)
continue continue
if __name__ == "__main__": if __name__ == "__main__":
print(__file__) print(__file__)
# Säkerställ att arbetsmappen är samma som den där scriptet ligger # Det här ska köras lokalt så löseordet finns i fil
chdir(dirname(__file__)) with open('password_arango.txt') as f:
subprocess.run(['cd', '..']) pwd = f.readline()
db = arango_connect(pwd)
argv = argv[1:] argv = argv[1:]
opts, args = getopt(argv, "cg:", ["create", "generate"]) opts, args = getopt(argv, "cg:", ["create", "generate"])
@ -193,3 +223,6 @@ if __name__ == "__main__":
profile_generator(db, a) profile_generator(db, a)
if o in ['-c', '--create']: if o in ['-c', '--create']:
create_profile() create_profile()
[{'domain': '.facebook.com', 'httpOnly': False, 'name': 'x-referer', 'path': '/', 'sameSite': 'None', 'secure': True, 'value': 'eyJyIjoiL2NvbmZpcm1lbWFpbC5waHA%2Fc29mdD1oamsiLCJoIjoiL2NvbmZpcm1lbWFpbC5waHA%2Fc29mdD1oamsiLCJzIjoibSJ9'}, {'domain': '.facebook.com', 'expiry': 1649728634, 'httpOnly': True, 'name': 'xs', 'path': '/', 'secure': True, 'value': '2%3AZrCj3xPTmzApJw%3A2%3A1618192633%3A-1%3A-1'}, {'domain': '.facebook.com', 'expiry': 1649728634, 'httpOnly': False, 'name': 'c_user', 'path': '/', 'secure': True, 'value': '100066462633263'}, {'domain': '.facebook.com', 'expiry': 1618797592, 'httpOnly': False, 'name': 'wd', 'path': '/', 'sameSite': 'None', 'secure': True, 'value': '994x534'}, {'domain': '.facebook.com', 'httpOnly': False, 'name': 'm_pixel_ratio', 'path': '/', 'secure': True, 'value': '1.25'}, {'domain': '.facebook.com', 'expiry': 1625968625, 'httpOnly': True, 'name': 'fr', 'path': '/', 'secure': True, 'value': '16Qhs4a4NEktNwlhZ.AWXIpZOVbupyu5pAidanfvTaWIc.Bgc6jD.C2.AAA.0.0.Bgc6jy.AWXCdEVJ7k4'}, {'domain': '.facebook.com', 'expiry': 1681264643, 'httpOnly': True, 'name': 'sb', 'path': '/', 'secure': True, 'value': 'w6hzYAXTtE1avdx0LoFIrHox'}, {'domain': '.facebook.com', 'expiry': 1681264587, 'httpOnly': True, 'name': 'datr', 'path': '/', 'secure': True, 'value': 'w6hzYONZsuS635di6pHBZV7D'}]

@ -1,41 +1,34 @@
import re import re
import traceback import traceback
import requests
from arangodb import db from arangodb import db, check_for_picture
from classes import Friend, Picture, Reaction from classes import Friend, Picture, Reaction
from config import * from config import *
from helpers import sleep_, update_cookie, write_error, _print from helpers import sleep_, update_cookie, write_error, _print, check_profile_status
def profile_picture_reactions( def profile_picture_reactions(profile, user, first_user=False, mode="all"):
profile, user, all_pictures, first_user=False, mode="all" # try:
):
# Fixa url:er osv # Fixa url:er osv
if user.username.isnumeric(): if user.username.isnumeric():
user.url = url_bas + "/profile.php?id=" + str(user.username) user.url = url_bas + "/profile.php?id=" + str(user.username)
user.url_photos = user.url + "&v=photos" user.url_photos = user.url + "&v=photos"
user.id = user.username
else: else:
user.username = user.username.replace("/", "") user.username = user.username.replace("/", "")
user.url = url_bas + "/" + user.username user.url = url_bas + "/" + user.username
user.url_photos = user.url + "/photos" user.url_photos = user.url + "/photos"
# Gå till sidan för profilbilder # Gå till sidan för profilbilder
profile.browser.open(user.url_photos) profile.open(user.url_photos)
# print(profile.viewing())
sleep_(4) sleep_(4)
if ( profile = check_profile_status(profile, user)
"""You can't use Facebook because your account, or activity on it, doesn't follow our Community Standards."""
in profile.viewing().text
):
_print(profile.container, user.username, f"{profile.name} blocked\n".upper())
profile.blocked = True
return "blocked"
elif "accept all" in profile.viewing().text.lower():
profile.accept_cookies()
profile.browser.open(user.url_photos)
user.name = user.username # Om inte namnet hittas senare user.name = user.username # Om inte namnet hittas senare
try: try:
@ -47,23 +40,27 @@ def profile_picture_reactions(
except Exception as e: except Exception as e:
write_error( write_error(
6, 6,
profile,
e=e, e=e,
traceback=traceback.format_exc(), traceback=traceback.format_exc(),
profile=profile.container,
soup=profile.viewing(), soup=profile.viewing(),
user=user, user=user,
url=user.url_photos, url=user.url_photos,
) )
if first_user == True: if first_user == True:
_print(profile.container, user.username, profile.viewing().prettify()) _print(profile, user, profile.viewing().prettify())
exit() exit()
_print(profile.container, user.username, f"Hämtar reaktioner på profilbilder för {user.name} ({user.username})") _print(
profile,
user,
f"Hämtar reaktioner på profilbilder för {user.name} ({user.username})",
)
# Hitta länk till olika saker hos användarem, inkl facebook-id # Hitta länk till olika saker hos användarem, inkl facebook-id
for a in profile.viewing().find_all("a", href=True): for a in profile.viewing().find_all("a", href=True):
if "Profile pictures" in a.text: if "Profile pictures" in a.text:
user.url_album = url_bas + a["href"] # Länk till album för profilbulder user.url_album = url_bas + a["href"] # Länk till album för profilbilder
if "profile_id" in a["href"]: if "profile_id" in a["href"]:
l = a["href"] l = a["href"]
try: try:
@ -75,10 +72,13 @@ def profile_picture_reactions(
user.url_likes = url_bas + a["href"] user.url_likes = url_bas + a["href"]
if "About" in a.text: if "About" in a.text:
user.url_about = url_bas + a["href"] user.url_about = url_bas + a["href"]
user.id = user.url_about[user.url_about.find('%')+3: user.url_about.rfind('%')]
if "Timeline" in a.text: if "Timeline" in a.text:
user.url_timeline = url_bas + a["href"] user.url_timeline = url_bas + a["href"]
if "Cover photos" in a.text: if "Cover photos" in a.text:
user.url_coverphotos = url_bas + a["href"] user.url_coverphotos = url_bas + a["href"]
if a.text == "Friends":
user.url_friends = url_bas + a["href"]
# Om det inte finns något profilalbum # Om det inte finns något profilalbum
# Testa ta bort mellanrum och små bokstäver # Testa ta bort mellanrum och små bokstäver
@ -90,78 +90,84 @@ def profile_picture_reactions(
user.add_to_db() user.add_to_db()
# Gå till profilbilden (den första som kommer upp när man går till profilen) # Gå till profilbilden (den första som kommer upp när man går till profilen)
if not hasattr(user, "url_album"): # Om profilen inte har profilalbum # Om profilen inte har profilalbum
write_error(9, soup=profile.viewing(), user=user, profile=profile.container) if not hasattr(user, "url_album"):
if user.url_other_picture != "": write_error(9, profile, soup=profile.viewing(), user=user)
# Använd eventuell extrabild och ta bort den från användaren if user.url_other_pictures != []:
url_pics = [user.url_other_picture] # Använd eventuella extrabilder och ta bort den från användaren
user.url_other_picture = "" url_pics = user.url_other_pictures
user.url_other_pictures = []
else: else:
# Spara ner profilen till databasen och avsluta sökningen på användaren # Spara ner profilen till databasen och avsluta sökningen på användaren
user.url_album = False user.url_album = False
if first_user == False: if first_user == False:
user.checked() user.checked()
user.add_to_db() user.add_to_db()
_print(profile.container, user.username, "Hittar inget album för profilbilder.") _print(profile, user, "Hittar inget album för profilbilder.")
write_error(#fel7 write_error( # fel7
7, 7,
profile,
soup=profile.viewing(), soup=profile.viewing(),
profile=profile.container,
user=user, user=user,
url=user.url_album, url=user.url_album,
url_name="user.url_album", url_name="user.url_album",
) )
return None return profile
# ATT GÖRA Här kan andra bilder väljas istället # ATT GÖRA Här kan andra bilder väljas istället
else: # Normalfallet där användaren har profilbildsalbum # Normalfallet där användaren har profilbildsalbum
profile.browser.open(user.url_album) else:
profile.open(user.url_album)
# Samla alla profilbilder i en lista # Samla alla profilbilder i en lista
url_pics = [] url_pics = user.url_other_pictures
pics = profile.viewing().find("div", {"id": "thumbnail_area"}) pics = profile.viewing().find("div", {"id": "thumbnail_area"})
for i in pics.find_all("a"): for i in pics.find_all("a"):
a = i["href"] a = i["href"]
url_pics.append(a[: a.find("&id")]) url_pics.append(a[: a.find("&id")])
if user.url_other_picture != "":
# Lägg till eventuell extrabild och ta bort den från användaren
url_pics.append(user.url_other_picture)
user.url_other_picture = ""
try: try:
user.profile_pictures = len(url_pics) user.profile_pictures = len(url_pics)
user.pictures = url_pics
except: except:
_print(profile, user, "Hittade inga profilbilder".upper())
user.profile_pictures = 0 user.profile_pictures = 0
user.pictures = url_pics
user.checked() user.checked()
user.add_to_db() user.add_to_db()
return return profile
# Lägg till profilen till arrango. # Lägg till profilen till arrango.
user.add_to_db() user.add_to_db()
# Välj vilja bilder som ska kollas.
if first_user == False:
if mode == "single":
url_pics = url_pics[:1]
elif mode == "few" and len(url_pics) > 1:
url_pics = url_pics[:1] + url_pics[-1:]
# Gå igenom valda bilder. # Gå igenom valda bilder.
for pic in url_pics: for pic in url_pics:
if check_for_picture(pic[pic.find("fbid=") + 5 :]):
continue
# Skriv ut vilken bild som behandlas. # Skriv ut vilken bild som behandlas.
_print(profile.container, user.username, _print(
profile,
user,
f"Bild {url_pics.index(pic) + 1} av {user.profile_pictures}", f"Bild {url_pics.index(pic) + 1} av {user.profile_pictures}",
end="\r", end="\r",
) )
check_picture(url_bas + pic, user, profile) check_picture(url_bas + pic, user, profile)
user.checked_pictures.append(url_bas + pic)
# Välj vilja bilder som ska kollas. # Välj vilja bilder som ska kollas.
if first_user == False: if first_user == False:
if mode == "single" and user.reactions > 30: if mode == "single" and user.reactions > 30:
break break
elif all([mode == "few", user.reactions > 50, pic != url_pics[-1]]): elif all([any[mode == "few", mode == "solo"], user.reactions > 80, pic != url_pics[-1]]):
# Kolla den sista bilder # Kolla den sista bilder
check_picture(url_bas + url_pics[-1], user, profile) check_picture(url_bas + url_pics[-1], user, profile)
user.checked_pictures.append(url_bas + pic)
break break
user.checked() user.checked()
return profile
# except Exception as e:
# _print(None, str(e))
# return profile
def check_picture(url_picture, user, profile): def check_picture(url_picture, user, profile):
@ -179,12 +185,12 @@ def check_picture(url_picture, user, profile):
sleep_(5) sleep_(5)
try: try:
profile.browser.open(picture.url) profile.open(picture.url)
except Exception as e: # Fel3 except Exception as e: # Fel3
write_error( write_error(
3, 3,
profile,
e=e, e=e,
profile=profile.container,
soup=profile.viewing(), soup=profile.viewing(),
user=user, user=user,
url=picture.url, url=picture.url,
@ -192,7 +198,7 @@ def check_picture(url_picture, user, profile):
traceback=traceback.format_exc(), traceback=traceback.format_exc(),
) )
update_cookie(profile.browser.session.cookies, profile.name) update_cookie(profile.browser.session.cookies, profile)
# Hitta info om bilden # Hitta info om bilden
try: try:
@ -202,16 +208,23 @@ def check_picture(url_picture, user, profile):
8, 8,
e=e, e=e,
soup=profile.viewing(), soup=profile.viewing(),
profile=profile.container, profile=profile,
url=picture.url, url=picture.url,
url_name="picture url", url_name="picture url",
user=user, user=user,
traceback=traceback.format_exc(), traceback=traceback.format_exc(),
) )
# TODO #3 lägg till fler bilder som kan gås igenom om det är få profilbilder.
# Hämta länkar för bilden att anvrända sen try:
# _print(profile.container, user.username, profile.viewing().prettify()) for img in profile.viewing().find_all('img'):
if 'https://scontent' in img['src']:
picture.src = img['src']
except Exception as e:
pass
# Hämta länkar för bilden att använda sen
# _print(profile, user, profile.viewing().prettify())
for a in profile.viewing().find_all("a", href=True): for a in profile.viewing().find_all("a", href=True):
if all( if all(
[ [
@ -240,9 +253,9 @@ def check_picture(url_picture, user, profile):
# Hämta reaktioner för bilden # Hämta reaktioner för bilden
sleep_(3) sleep_(3)
profile.browser.open(url_reactions) profile.open(url_reactions)
update_cookie(profile.browser.session.cookies, profile.name) update_cookie(profile.browser.session.cookies, profile)
try: try:
for a in profile.viewing().find_all("a", {"class": "z ba"}, href=True): for a in profile.viewing().find_all("a", {"class": "z ba"}, href=True):
@ -255,15 +268,15 @@ def check_picture(url_picture, user, profile):
except UnboundLocalError: # fel9 except UnboundLocalError: # fel9
write_error( write_error(
9, 9,
profile,
user=user, user=user,
profile=profile.container,
soup=profile.viewing(), soup=profile.viewing(),
traceback=traceback.format_exc(), traceback=traceback.format_exc(),
url=url_reactions, url=url_reactions,
url_name="url_reactions", url_name="url_reactions",
) )
# Bilder med väldigt många likes går inte att visa så här? # Bilder med väldigt många likes går inte att visa så här?
return None return profile
# Addera bilden till arrango # Addera bilden till arrango
picture.add_to_db() picture.add_to_db()
@ -274,9 +287,9 @@ def check_picture(url_picture, user, profile):
try: try:
sleep_(4) sleep_(4)
profile.browser.open(url_limit) profile.open(url_limit)
url_limit = "" url_limit = ""
update_cookie(profile.browser.session.cookies, profile.name) update_cookie(profile.browser.session.cookies, profile)
# Gå igenom alla som reagerat och för in i arango # Gå igenom alla som reagerat och för in i arango
for li in profile.viewing().find_all("li"): for li in profile.viewing().find_all("li"):
@ -288,9 +301,19 @@ def check_picture(url_picture, user, profile):
friend.name = friend_html.text friend.name = friend_html.text
friend.url = friend_html["href"] friend.url = friend_html["href"]
if "profile.php" in friend.url: if "profile.php" in friend.url:
friend.username = friend.url[friend.url.find("id=") + 3 :] if "&paipv" in friend.url:
friend.username = friend.url[
friend.url.find("=") + 1 : friend.url.find("&")
]
else:
friend.username = friend.url[friend.url.find("id=") + 3 :]
else: else:
friend.username = friend.url[friend.url.find("/") + 1 :] if "?" in friend.url:
friend.username = friend.url[
friend.url.find("/") + 1 : friend.url.find("?")
]
else:
friend.username = friend.url[friend.url.find("/") + 1 :]
reaction = Reaction(user.username, friend.username, picture.id) reaction = Reaction(user.username, friend.username, picture.id)
for type in ["Love", "Wow", "Like", "Care", "Sad", "Angry", "Haha"]: for type in ["Love", "Wow", "Like", "Care", "Sad", "Angry", "Haha"]:
@ -298,19 +321,22 @@ def check_picture(url_picture, user, profile):
reaction.type = type reaction.type = type
picture.reactions.append(reaction.get_dict()) picture.reactions.append(reaction.get_dict())
# Lägg till vännens profil till arrango # Lägg till vännens profil till arrango
friend.add_to_db() try:
friend.add_to_db()
except:
_print(profile, user, f"Kunde inte lägga till vän {friend.url}")
except AttributeError as e: # Fel1 except AttributeError as e: # Fel1
write_error( write_error(
1, 1,
profile,
e=e, e=e,
soup=str(li), soup=str(li),
user=user, user=user,
profile=profile.container,
traceback=traceback.format_exc(), traceback=traceback.format_exc(),
) )
pass pass
# Lägg till reaktioner till databasen # Lägg till reaktioner till databasen
db.collection("picture_reactions").insert_many( db.collection("picture_reactions").insert_many(
picture.reactions, silent=True, overwrite=True picture.reactions, silent=True, overwrite=True
@ -318,18 +344,19 @@ def check_picture(url_picture, user, profile):
db.collection("picture_reactions").insert_many( db.collection("picture_reactions").insert_many(
picture.reactions, silent=True, overwrite=True picture.reactions, silent=True, overwrite=True
) )
# Uppdatera antalet reaktioner användaren fått # Uppdatera antalet reaktioner användaren fått
user.reactions += len(picture.reactions) user.reactions += len(picture.reactions)
except Exception as e: # Fel2 except Exception as e: # Fel2
write_error( write_error(
2, 2,
profile,
e=e, e=e,
soup=profile.viewing(), soup=profile.viewing(),
profile=profile.container,
user=user, user=user,
url=url_limit, url=url_limit,
url_name="url_limit", url_name="url_limit",
traceback=traceback.format_exc(), traceback=traceback.format_exc(),
) )
pass pass

@ -0,0 +1,557 @@
DE;91.198.137.31:3552;FAIL;
FR;54.36.4.70:61432;188.165.211.29;0:00:00.601284
DE;138.68.82.88:1080;138.68.82.88;0:00:05.222863
DE;172.104.142.154:35819;172.104.142.154;0:00:02.972221
MD;185.14.31.113:443;FAIL;
NL;146.185.132.87:31284;146.185.132.87;0:00:00.541678
UA;46.151.197.254:8080;46.151.197.254;0:00:02.210311
FI;135.181.184.170:54048;135.181.184.170;0:00:00.720659
FR;151.106.34.139:1080;FAIL;
NL;88.202.177.242:1090;FAIL;
UA;95.67.99.99:33871;FAIL;
DE;138.201.5.46:1080;138.201.5.46;0:00:07.487491
DE;159.69.106.103:1080;159.69.106.103;0:00:00.630095
NL;51.15.78.50:1080;51.15.78.50;0:00:00.564266
NL;88.202.177.242:1080;FAIL;
SG;113.77.85.215:1081;149.129.48.241;0:00:03.688375
RU;95.107.37.109:1105;FAIL;
KZ;109.229.161.151:1225;FAIL;
RU;84.22.137.26:9025;84.22.137.26;0:00:09.468929
US;149.28.126.83:1081;149.28.126.83;0:00:01.023434
AR;181.3.58.168:1080;FAIL;
US;67.227.193.162:34496;67.227.193.162;0:00:01.318698
NL;146.185.132.87:44795;146.185.132.87;0:00:02.952634
US;198.58.119.187:50398;69.164.194.35;0:00:01.449008
AR;186.126.79.171:1080;FAIL;
CA;192.252.211.197:14921;FAIL;
CA;192.252.209.155:14455;FAIL;
CZ;89.187.144.153:1080;89.187.144.153;0:00:01.096993
US;209.141.53.246:1080;FAIL;
US;192.111.137.37:18762;FAIL;
CA;192.252.208.67:14287;FAIL;
US;67.55.185.240:1888;FAIL;
NL;142.93.137.235:54866;142.93.137.235;0:00:04.162599
US;192.111.135.18:18301;FAIL;
US;192.111.138.29:4145;FAIL;
SG;45.77.36.30:24574;45.77.36.30;0:00:02.664875
US;70.185.68.133:4145;FAIL;
FR;51.68.134.242:25623;51.68.134.240;0:00:05.998615
FR;193.70.45.126:32821;193.70.45.126;0:00:03.586748
US;98.162.25.7:31653;FAIL;
US;72.223.168.86:57481;FAIL;
DE;213.136.89.190:18461;213.136.89.190;0:00:03.407266
DE;101.53.158.48:9051;FAIL;
PL;5.226.69.12:50477;5.226.69.12;0:00:08.327345
US;98.162.25.29:31679;FAIL;
IN;103.209.64.19:6667;FAIL;
AR;186.126.42.65:1080;200.73.130.62;0:00:11.137412
US;72.221.196.157:35904;FAIL;
US;72.206.181.105:64935;FAIL;
SG;113.77.86.73:1081;149.129.55.120;0:00:02.697133
CA;192.252.214.20:15864;FAIL;
RU;109.72.231.37:1080;109.72.231.37;0:00:01.095943
TR;188.132.179.124:60088;188.132.179.124;0:00:01.228944
AR;181.3.72.8:1080;200.73.132.176;0:00:17.562909
GB;157.245.34.127:32215;157.245.34.127;0:00:06.500380
RU;31.7.232.178:1080;31.7.232.178;0:00:08.192440
US;72.223.168.73:57494;FAIL;
AR;186.126.135.164:1080;200.73.130.62;0:00:14.713391
IN;165.22.220.151:36362;165.22.220.151;0:00:05.533314
US;192.111.137.35:4145;FAIL;
BR;186.126.143.88:1080;FAIL;
BR;181.3.51.12:1080;209.14.2.12;0:00:08.158021
US;104.238.215.49:1080;104.238.215.49;0:00:04.300450
AR;186.126.163.43:1080;FAIL;
BR;181.6.94.90:1080;FAIL;
AR;181.3.23.13:1080;FAIL;
VN;113.160.188.21:1080;113.160.188.21;0:00:05.010119
FI;135.181.184.170:22497;135.181.184.170;0:00:09.929478
FR;51.68.134.247:30204;FAIL;
AR;181.101.2.92:1080;200.73.132.106;0:00:03.141000
BR;181.101.26.136:1080;FAIL;
BR;181.3.71.184:1080;191.252.103.251;0:00:03.371414
AR;181.102.21.228:1080;FAIL;
AR;181.3.37.59:1080;FAIL;
AR;186.126.177.123:1080;FAIL;
AR;186.126.151.29:1080;45.235.98.221;0:00:05.725761
AR;181.3.61.217:1080;FAIL;
AR;181.5.222.133:1080;FAIL;
FR;51.68.134.241:30204;FAIL;
HK;1.65.196.134:1080;1.65.196.134;0:00:04.107338
BR;181.3.8.101:1080;54.232.66.92;0:00:25.527846
UA;91.229.123.191:1080;94.153.23.177;0:00:07.271681
AR;181.3.62.188:1080;FAIL;
KR;222.99.47.68:8888;FAIL;
KR;119.28.73.113:22225;158.247.225.109;0:00:02.975846
AR;181.102.5.177:1080;45.235.99.87;0:00:04.846713
AR;181.101.16.232:1080;FAIL;
AR;181.101.12.108:1080;FAIL;
AR;181.101.38.248:1080;45.235.99.87;0:00:12.370835
AR;181.3.59.102:1080;FAIL;
FR;195.154.178.247:20152;FAIL;
DE;46.101.218.6:24040;46.101.218.6;0:00:02.524995
US;173.236.188.154:7595;173.236.184.102;0:00:07.522997
AR;181.3.4.18:1080;200.69.236.22;0:00:03.333511
CA;181.101.14.230:1080;FAIL;
DK;142.93.245.247:30588;FAIL;
FR;54.36.4.69:61432;188.165.211.29;0:00:00.679880
AR;186.152.120.155:1080;45.235.99.88;0:00:13.682541
IN;27.116.51.181:6667;FAIL;
AR;181.7.201.154:1080;45.235.99.83;0:00:03.619538
FR;51.68.134.245:25623;51.68.134.240;0:00:03.046891
US;192.111.139.165:19402;FAIL;
AR;186.126.140.70:1080;FAIL;
US;184.178.172.5:15303;FAIL;
AR;186.126.25.102:1080;200.73.134.139;0:00:18.534001
US;181.3.66.118:1080;FAIL;
BR;186.126.141.239:1080;177.67.82.171;0:00:20.168977
DE;78.46.200.13:22039;78.46.200.13;0:00:03.381044
BR;186.152.119.220:1080;FAIL;
AR;186.152.33.185:10808;FAIL;
US;181.102.84.53:1080;FAIL;
AR;186.152.31.215:1080;FAIL;
BR;186.152.194.140:1080;FAIL;
US;173.236.184.154:22960;173.236.184.139;0:00:02.895083
FR;137.74.153.106:1080;137.74.153.106;0:00:03.010125
AR;186.126.32.22:1080;138.99.7.145;0:00:07.475672
BR;181.101.11.43:1080;FAIL;
US;72.210.252.134:46164;FAIL;
BR;181.3.56.124:1080;FAIL;
AR;181.101.47.84:1080;FAIL;
CA;181.6.141.73:1080;FAIL;
MD;178.175.139.202:57772;178.175.139.202;0:00:01.611892
PH;210.16.73.82:1080;124.107.231.80;0:00:03.173570
AR;186.126.44.155:1080;200.89.175.133;0:00:08.703594
BR;181.101.60.197:1080;104.41.41.29;0:00:07.245720
KR;125.135.221.94:54557;FAIL;
US;186.126.62.200:1080;FAIL;
GB;178.62.79.115:35580;178.62.79.115;0:00:05.262268
FI;95.216.176.163:1089;95.216.176.163;0:00:09.142730
CA;186.126.21.113:1080;FAIL;
AR;181.3.38.147:1080;181.117.241.51;0:00:04.966959
US;70.166.167.38:57728;FAIL;
AR;181.3.78.111:1080;200.73.131.75;0:00:09.585425
BR;181.5.244.219:1080;FAIL;
FR;51.68.134.240:25623;51.68.134.240;0:00:08.593545
US;181.102.16.72:1080;FAIL;
FR;178.32.47.218:50939;178.32.47.218;0:00:06.439677
US;173.236.189.175:22960;FAIL;
AR;181.7.208.112:1080;FAIL;
IN;103.241.227.110:6667;FAIL;
US;147.135.116.172:53079;147.135.116.172;0:00:02.112520
AR;186.126.64.146:1080;FAIL;
CA;181.0.12.116:1080;FAIL;
US;198.8.94.170:39074;FAIL;
AR;181.3.76.4:1080;FAIL;
AR;181.7.204.60:1080;FAIL;
AR;181.3.28.148:1080;FAIL;
BR;181.3.74.230:1080;45.162.231.55;0:00:13.378087
US;113.73.72.183:1080;FAIL;
US;141.98.134.2:1080;141.98.134.2;0:00:03.583016
CA;192.111.130.5:17002;FAIL;
RU;185.233.202.27:1080;185.233.202.27;0:00:11.702264
DE;173.212.201.250:47492;173.212.201.250;0:00:07.449093
SG;206.189.158.28:7905;206.189.158.28;0:00:08.228267
US;173.236.190.7:7595;173.236.184.102;0:00:05.519787
US;173.236.188.46:22960;173.236.184.139;0:00:05.490614
US;173.236.185.99:22960;173.236.184.139;0:00:09.586001
AR;186.126.73.156:1080;200.73.130.62;0:00:03.150311
GB;157.245.34.127:61851;157.245.34.127;0:00:04.082666
TW;60.169.205.61:1080;FAIL;
BR;181.101.47.97:1080;191.233.232.45;0:00:03.439772
FR;51.68.134.253:25623;51.68.134.240;0:00:08.526576
AR;181.3.16.106:10808;FAIL;
US;173.236.190.93:22960;FAIL;
US;186.126.99.163:10808;FAIL;
AR;186.152.130.181:1080;200.73.138.194;0:00:10.460878
AR;186.152.15.200:1080;FAIL;
AR;181.5.232.149:1080;FAIL;
DE;165.22.17.195:5110;165.22.17.195;0:00:04.337353
FR;51.68.134.244:25623;51.68.134.240;0:00:05.794034
AR;186.126.80.182:1080;FAIL;
SG;206.189.158.28:53176;206.189.158.28;0:00:04.394778
AR;186.126.3.27:1080;FAIL;
AR;186.126.17.42:1080;FAIL;
BR;186.126.159.136:1080;FAIL;
BR;186.126.70.165:1080;54.207.134.244;0:00:07.969362
CL;181.5.217.57:1080;FAIL;
US;66.42.224.229:41679;FAIL;
BR;181.3.2.188:1080;FAIL;
FR;51.68.134.252:25623;51.68.134.240;0:00:03.820479
BR;181.83.226.81:1080;209.14.2.204;0:00:10.407002
US;104.238.212.43:1081;104.238.212.43;0:00:03.010979
BR;186.126.109.207:1080;FAIL;
BR;181.3.39.114:1080;FAIL;
FR;51.68.134.255:22308;51.68.134.240;0:00:05.837994
US;184.178.172.18:15280;FAIL;
FR;51.68.134.247:25623;51.68.134.240;0:00:05.294231
AR;181.3.84.123:1080;45.235.99.87;0:00:07.781855
AR;186.126.51.206:1080;FAIL;
BR;181.83.228.198:1080;FAIL;
AR;186.126.40.168:1080;FAIL;
US;181.0.8.189:1080;FAIL;
AR;181.101.35.11:1080;FAIL;
US;104.238.111.218:57978;104.238.111.218;0:00:06.871360
CA;181.3.20.113:10808;FAIL;
FR;51.75.42.95:25623;51.68.134.240;0:00:04.044253
US;173.236.187.212:22960;173.236.184.139;0:00:03.293691
BR;181.3.65.241:1080;FAIL;
US;173.236.186.231:22960;173.236.184.139;0:00:03.276001
US;165.227.177.113:24586;165.227.177.113;0:00:05.401278
CA;186.126.58.189:10808;51.222.141.137;0:00:04.245833
DE;176.9.160.118:22836;FAIL;
US;74.208.101.185:31200;FAIL;
US;186.126.166.22:1080;FAIL;
AR;181.101.33.157:1080;45.235.99.83;0:00:05.120106
AR;186.126.110.76:1080;FAIL;
US;186.126.170.254:1080;FAIL;
FR;51.68.134.251:25623;51.68.134.240;0:00:06.095322
BR;186.126.74.124:1080;FAIL;
IN;43.224.10.32:6667;FAIL;
US;95.217.132.133:3178;FAIL;
US;157.230.154.211:28030;157.230.154.211;0:00:03.840172
IN;140.238.250.54:1080;140.238.250.54;0:00:04.823383
AR;181.102.134.167:1080;FAIL;
AR;186.126.101.52:1080;FAIL;
CO;181.129.7.202:6699;181.129.7.202;0:00:02.020779
US;186.126.15.241:10808;FAIL;
AR;181.101.8.41:1080;FAIL;
AR;181.0.0.18:1080;FAIL;
SG;181.3.58.52:1080;FAIL;
AR;181.101.9.46:1080;FAIL;
SG;129.226.196.49:41789;129.226.196.49;0:00:02.896387
CA;192.111.129.145:16894;FAIL;
AR;181.3.51.132:1080;FAIL;
AR;181.3.10.74:1080;FAIL;
BR;181.3.9.61:1080;FAIL;
AR;181.3.49.78:1080;FAIL;
GB;181.101.52.44:1080;FAIL;
US;69.61.200.104:36181;FAIL;
BR;186.126.177.239:1080;FAIL;
BR;186.152.122.42:1080;FAIL;
CL;186.126.71.210:1080;170.239.87.87;0:00:10.699452
US;184.178.172.13:15311;FAIL;
BD;103.85.232.146:1080;FAIL;
US;161.35.137.49:28005;FAIL;
AR;181.101.45.131:1080;FAIL;
US;70.166.167.55:57745;FAIL;
AR;181.3.57.187:1080;FAIL;
NL;188.166.104.152:6683;FAIL;
US;95.217.132.133:3038;FAIL;
IN;103.241.227.98:6667;FAIL;
AR;181.102.47.46:1080;FAIL;
PL;5.226.69.12:41284;5.226.69.12;0:00:05.842418
AR;186.126.139.224:10808;FAIL;
AR;181.7.197.13:1080;FAIL;
AR;186.152.16.246:1080;FAIL;
US;113.73.72.177:1080;FAIL;
US;72.221.164.34:60671;FAIL;
BR;181.3.68.127:1080;FAIL;
US;173.236.186.236:22960;173.236.184.139;0:00:03.567567
AR;186.126.167.68:1080;FAIL;
IN;103.240.168.138:6667;FAIL;
US;104.248.0.141:17074;FAIL;
AR;181.5.219.126:1080;FAIL;
CA;186.152.115.63:1080;FAIL;
US;132.148.129.108:34289;132.148.129.108;0:00:06.245162
AR;186.126.138.242:1080;FAIL;
AR;181.102.16.55:1080;FAIL;
US;104.238.215.49:1081;104.238.212.43;0:00:07.598953
US;147.135.116.172:26522;147.135.116.172;0:00:03.047146
GB;178.62.79.49:51591;178.62.79.49;0:00:04.168867
AR;181.3.39.27:1080;FAIL;
BR;181.6.149.14:1080;201.76.56.248;0:00:10.817129
IN;27.116.51.85:6667;FAIL;
IN;103.216.82.22:6667;FAIL;
SG;206.189.158.28:44880;206.189.158.28;0:00:10.378409
SK;109.74.144.149:22743;109.74.144.149;0:00:07.030135
FR;51.68.134.241:25623;51.68.134.240;0:00:08.225295
AR;181.6.8.208:10808;200.73.132.2;0:00:14.850405
AR;186.152.4.160:1080;FAIL;
AR;181.3.46.25:1080;FAIL;
US;208.102.51.6:58208;FAIL;
AR;181.101.53.240:1080;200.73.132.115;0:00:09.802936
IN;103.251.225.16:6667;FAIL;
US;173.236.185.19:22960;FAIL;
FR;51.68.134.250:25623;FAIL;
US;50.62.35.16:41644;50.62.35.16;0:00:02.304961
BR;186.126.129.193:1080;FAIL;
US;166.62.85.224:13954;166.62.85.224;0:00:05.123121
US;47.100.88.171:20900;FAIL;
US;104.238.111.167:14416;FAIL;
US;64.34.217.33:40741;FAIL;
CA;192.252.215.5:16137;FAIL;
US;173.236.184.139:22960;173.236.184.139;0:00:04.575732
DE;46.101.218.6:39749;46.101.218.6;0:00:06.758081
AR;181.101.2.18:1080;FAIL;
US;66.228.36.18:61852;FAIL;
DE;173.212.201.250:23686;FAIL;
IN;43.224.10.35:6667;FAIL;
US;173.236.185.96:22960;FAIL;
AR;181.3.37.213:1080;200.73.130.62;0:00:08.508165
AR;181.3.49.28:1080;FAIL;
US;173.236.191.119:22960;173.236.184.139;0:00:08.729647
BR;181.3.67.154:1080;FAIL;
US;104.248.0.141:57391;104.248.0.141;0:00:03.865643
AR;186.152.149.227:1080;200.73.130.62;0:00:03.071001
CA;186.126.82.88:1080;FAIL;
AR;186.126.151.73:1080;200.73.130.62;0:00:05.884195
DE;173.212.201.250:54349;FAIL;
AR;181.0.5.196:1080;FAIL;
NL;142.93.137.235:6191;142.93.137.235;0:00:04.257492
SG;206.189.158.28:4454;FAIL;
US;157.230.154.211:32381;157.230.154.211;0:00:10.416110
AR;186.126.49.178:1080;FAIL;
CA;181.3.40.39:1080;FAIL;
US;95.217.132.133:3598;FAIL;
FR;51.68.134.249:30204;FAIL;
US;104.238.111.167:53308;FAIL;
DE;171.221.35.24:1080;FAIL;
NL;188.166.104.152:44924;FAIL;
SG;129.226.196.49:13181;129.226.196.49;0:00:13.210261
AR;181.3.55.161:1080;FAIL;
HK;101.132.120.74:1080;FAIL;
SE;95.217.132.133:3508;FAIL;
CA;186.126.129.149:1080;51.79.52.142;0:00:08.184306
AR;181.101.19.224:1080;FAIL;
AR;181.3.7.234:1080;FAIL;
AR;181.6.28.131:1080;FAIL;
BR;181.6.114.165:1080;209.14.2.57;0:00:18.254419
DE;173.212.201.250:33464;FAIL;
NL;146.185.132.87:55158;FAIL;
HK;150.109.148.234:1234;FAIL;
HU;85.90.161.117:2021;85.90.161.117;0:00:02.127226
AR;181.5.201.229:1080;200.73.132.119;0:00:15.974410
US;72.49.49.11:31034;FAIL;
US;97.74.6.64:45683;FAIL;
US;186.126.95.145:10808;FAIL;
DE;54.38.157.22:9999;54.38.157.22;0:00:08.000757
FR;51.68.134.250:30204;FAIL;
BR;186.126.89.33:1080;FAIL;
FR;51.68.134.242:30204;FAIL;
US;166.62.85.184:42828;166.62.85.184;0:00:04.136324
US;173.236.186.172:22960;173.236.184.139;0:00:04.403408
BR;181.83.228.40:1080;FAIL;
US;165.22.13.68:15576;165.22.13.68;0:00:04.907470
US;104.248.48.169:30588;FAIL;
SG;206.189.92.74:38888;FAIL;
AR;181.3.63.142:1080;FAIL;
AR;186.126.87.224:1080;FAIL;
BR;181.3.46.205:1080;FAIL;
CA;181.3.16.31:1080;FAIL;
SG;45.76.187.35:36600;FAIL;
US;173.236.186.230:22960;173.236.184.139;0:00:03.272663
RU;171.221.44.248:1080;FAIL;
US;181.7.201.96:1080;FAIL;
US;147.135.116.172:55546;147.135.116.172;0:00:05.626279
AR;181.3.29.244:1080;FAIL;
BR;186.152.147.113:1080;FAIL;
AR;181.102.81.144:1080;FAIL;
US;104.248.0.141:30247;104.248.0.141;0:00:01.176155
US;104.238.212.43:1080;104.238.215.49;0:00:05.161615
BR;186.152.26.161:1080;191.252.102.212;0:00:09.528139
US;143.110.153.171:3240;FAIL;
PS;213.6.61.150:9999;FAIL;
IN;43.224.10.30:6667;FAIL;
AR;181.101.4.206:1080;FAIL;
SG;206.189.158.28:48500;FAIL;
FR;54.36.246.232:11380;54.36.246.232;0:00:07.263434
DE;213.136.89.190:4374;FAIL;
SG;206.189.158.28:64028;FAIL;
CL;186.126.131.207:1080;FAIL;
IN;43.224.10.36:6667;43.224.10.36;0:00:08.870324
CA;181.3.93.39:1080;FAIL;
NL;142.93.138.78:63421;142.93.138.78;0:00:02.779517
NL;146.185.132.87:49041;146.185.132.87;0:00:08.279986
DE;95.217.132.133:3008;FAIL;
BR;181.101.52.45:1080;FAIL;
US;192.169.201.24:51100;FAIL;
BR;181.3.24.19:1080;FAIL;
AR;186.126.15.57:1080;FAIL;
PL;5.226.69.12:42717;FAIL;
DE;213.136.89.190:13492;FAIL;
BR;181.102.141.53:1080;FAIL;
US;74.208.102.54:31200;74.208.102.54;0:00:10.078336
GB;95.217.132.133:3273;FAIL;
CA;159.203.42.128:28393;159.203.42.128;0:00:04.454060
BR;181.101.29.81:1080;FAIL;
CA;181.3.84.102:1080;FAIL;
US;173.236.189.156:7595;173.236.184.102;0:00:06.332096
FR;51.68.134.246:30204;FAIL;
BR;181.6.24.228:1080;FAIL;
US;95.217.132.133:3503;FAIL;
AR;186.126.54.106:1080;FAIL;
SG;206.189.158.28:48751;FAIL;
NL;178.62.136.189:51423;178.62.136.189;0:00:06.756095
US;173.236.187.42:22960;173.236.184.139;0:00:07.256691
IN;43.224.10.46:6667;FAIL;
US;206.189.231.206:2106;FAIL;
SG;95.217.132.133:3286;FAIL;
SG;129.226.196.49:22157;129.226.196.49;0:00:09.336891
US;173.236.186.241:22960;173.236.184.139;0:00:02.345419
HK;119.28.81.177:20412;FAIL;
RU;31.25.243.40:9432;FAIL;
CA;181.3.65.57:1080;51.222.13.156;0:00:06.097943
AR;181.0.16.160:1080;FAIL;
UA;31.128.248.2:1080;FAIL;
HK;36.150.108.65:1080;FAIL;
RU;31.25.243.40:9159;FAIL;
US;181.0.26.16:1080;FAIL;
CA;181.6.61.241:1080;FAIL;
FR;51.68.134.243:25623;51.68.134.240;0:00:02.797034
BR;181.3.56.31:1080;20.195.214.142;0:00:05.865545
US;147.135.116.172:47283;147.135.116.172;0:00:07.138716
SG;113.77.87.43:1081;FAIL;
FR;51.68.134.255:25623;51.68.134.240;0:00:02.196854
IN;103.216.82.37:6667;103.216.82.37;0:00:04.271719
HK;223.199.179.145:1080;FAIL;
US;104.238.111.167:29182;104.238.111.167;0:00:07.471943
GB;46.101.56.138:33232;FAIL;
DE;213.136.89.190:51808;213.136.89.190;0:00:01.532093
NL;142.93.137.235:1429;142.93.137.235;0:00:04.408165
BR;181.5.210.85:1080;FAIL;
US;67.227.193.162:24595;67.227.193.162;0:00:07.794617
FR;51.68.134.248:25623;51.68.134.240;0:00:07.714408
HK;153.37.113.125:1080;42.3.24.58;0:00:03.530263
US;104.248.0.141:23668;104.248.0.141;0:00:01.404311
AR;186.126.84.156:1080;200.73.128.105;0:00:15.717142
PH;210.16.73.81:1080;FAIL;
FR;51.68.134.252:30204;FAIL;
CA;181.5.242.212:1080;FAIL;
AR;181.6.14.34:1080;FAIL;
NL;146.185.132.87:59746;FAIL;
SG;206.189.158.28:15615;FAIL;
GB;159.65.26.54:34787;159.65.26.54;0:00:07.312364
FR;51.68.134.254:25623;51.68.134.240;0:00:09.785792
SG;206.189.158.28:11007;FAIL;
AR;186.152.26.173:1080;FAIL;
US;206.189.231.206:53323;FAIL;
US;192.169.201.24:7495;FAIL;
AR;181.101.57.210:1080;FAIL;
US;173.236.184.50:7595;FAIL;
US;181.7.211.6:1080;FAIL;
AR;186.126.80.109:1080;FAIL;
CA;181.3.67.17:1080;FAIL;
US;165.22.13.68:25327;165.22.13.68;0:00:06.029895
CA;159.203.42.128:47524;159.203.42.128;0:00:09.931594
AR;181.101.57.64:1080;200.73.133.154;0:00:12.503640
BR;181.6.134.15:1080;FAIL;
AR;181.6.35.81:1080;FAIL;
US;173.236.186.228:22960;FAIL;
CA;181.102.111.148:1080;FAIL;
US;181.3.39.201:1080;FAIL;
DE;95.217.132.133:3412;FAIL;
US;206.189.231.206:50825;206.189.231.206;0:00:01.618712
SG;206.189.158.28:47419;FAIL;
DE;45.149.76.184:9051;FAIL;
GB;159.65.26.54:2975;FAIL;
US;64.34.216.68:40741;64.34.205.58;0:00:13.192013
US;173.236.188.107:7595;173.236.184.102;0:00:03.604567
US;166.62.85.224:42790;FAIL;
DE;181.101.10.10:1080;78.47.73.135;0:00:20.253722
RU;95.107.37.109:3109;85.26.186.44;0:00:04.610048
AR;181.3.29.168:1080;FAIL;
AR;181.6.128.215:1080;FAIL;
US;95.217.132.133:3132;FAIL;
AR;186.126.120.70:1080;FAIL;
UA;80.73.9.238:1080;FAIL;
IN;43.224.10.42:6667;43.224.10.42;0:00:12.005869
US;206.189.180.62:7934;FAIL;
AR;181.3.52.116:1080;FAIL;
AR;181.3.91.214:1080;FAIL;
DE;213.136.89.190:56844;FAIL;
BR;181.7.198.151:1080;191.252.113.106;0:00:15.269279
US;104.248.0.141:54251;FAIL;
GB;176.58.100.26:27016;FAIL;
HK;113.240.216.243:1080;FAIL;
AR;186.126.66.41:1080;FAIL;
US;173.236.189.250:7595;FAIL;
BR;181.5.230.16:1080;191.252.113.106;0:00:19.073131
US;50.62.35.16:29643;FAIL;
IN;103.21.163.76:6667;103.21.163.76;0:00:08.463147
DK;65.21.49.222:9174;FAIL;
US;104.238.97.215:7772;FAIL;
AR;181.3.68.52:1080;FAIL;
AR;186.126.92.77:1080;FAIL;
US;95.217.132.133:3141;FAIL;
BR;186.126.168.161:1080;FAIL;
DE;46.4.156.212:18588;FAIL;
SG;206.189.158.28:7476;FAIL;
AR;181.6.114.157:1080;200.73.132.187;0:00:13.969104
US;181.102.141.210:1080;FAIL;
BR;181.5.212.118:1080;FAIL;
SG;45.76.187.35:44560;45.76.187.35;0:00:09.819446
AR;186.152.150.124:1080;FAIL;
AR;186.126.141.216:1080;FAIL;
CA;186.152.114.192:1080;FAIL;
US;173.236.191.150:22960;173.236.184.139;0:00:09.824398
AR;181.7.207.196:1080;FAIL;
JP;138.91.19.96:1953;138.91.19.96;0:00:12.281648
CL;186.126.48.110:1080;FAIL;
US;74.208.101.185:44614;74.208.102.54;0:00:10.538888
AR;181.101.53.210:1080;FAIL;
US;65.21.49.222:9270;FAIL;
US;173.236.189.19:22960;FAIL;
US;95.217.132.133:3137;FAIL;
AR;186.126.42.157:1080;FAIL;
US;173.236.189.188:22960;173.236.184.139;0:00:05.774545
US;8.210.163.246:50001;FAIL;
DE;213.136.89.190:5136;FAIL;
US;173.236.186.235:22960;173.236.184.139;0:00:04.093197
AR;186.126.176.41:1080;FAIL;
US;173.236.189.191:22960;173.236.184.139;0:00:02.491511
US;173.236.188.227:7595;FAIL;
SG;206.189.158.28:21471;206.189.158.28;0:00:07.368676
US;95.217.132.133:3463;FAIL;
US;173.236.186.1:22960;FAIL;
BR;186.126.101.194:1080;FAIL;
AR;181.101.48.228:1080;FAIL;
US;95.217.132.133:3443;FAIL;
HK;119.28.81.177:59430;FAIL;
AR;181.3.27.242:10808;FAIL;
AR;181.0.30.128:1080;FAIL;
US;173.236.186.167:22960;173.236.184.139;0:00:06.491244
PL;5.226.69.12:46975;5.226.69.12;0:00:07.836800
NL;142.93.137.235:38902;142.93.137.235;0:00:02.734874
US;173.236.188.12:7595;FAIL;
DE;213.136.89.190:52010;FAIL;
US;173.236.188.156:7595;FAIL;
BR;181.3.36.182:1080;FAIL;
FR;51.75.42.92:25623;51.68.134.240;0:00:09.193243
US;173.236.189.132:22960;173.236.184.139;0:00:03.395630
US;173.236.185.29:22960;FAIL;
AR;186.126.50.32:1080;FAIL;
RU;31.25.243.40:9261;FAIL;
IR;5.56.134.237:45698;5.56.134.237;0:00:02.117181
ID;103.224.103.116:1080;FAIL;
CN;110.90.223.72:57114;FAIL;
CN;59.61.160.63:16790;FAIL;
CN;119.187.146.163:1080;119.187.146.163;0:00:03.103184
CN;59.61.160.179:16790;59.61.160.179;0:00:02.662718
CN;113.123.0.217:1080;FAIL;
CN;111.1.36.135:9053;115.238.101.42;0:00:02.840302
CN;111.1.36.132:9053;115.238.101.39;0:00:04.400966
CN;3.131.207.170:11098;FAIL;
CN;117.174.160.105:1080;117.174.160.105;0:00:05.121497
CN;36.27.223.80:35880;120.33.231.36;0:00:05.118523
CN;60.168.25.143:4216;FAIL;
CN;47.104.16.8:6667;FAIL;
CN;114.236.90.5:1080;FAIL;
ID;139.255.89.4:1080;139.255.89.2;0:00:03.779929
CN;111.225.153.226:57114;FAIL;
CN;134.175.90.111:8889;FAIL;
CN;111.1.36.132:9055;FAIL;
CN;121.206.250.10:57114;121.206.250.10;0:00:02.844775
CN;42.193.148.214:1080;FAIL;
CN;111.1.36.134:9053;115.238.101.41;0:00:03.048803
CN;39.96.175.55:1080;FAIL;
CN;47.92.252.178:3129;FAIL;
CN;122.152.219.54:57164;122.152.219.54;0:00:06.862155
ID;36.89.86.49:56845;36.89.86.49;0:00:03.790392
CN;36.27.223.80:35101;FAIL;
CN;36.27.223.80:34638;106.114.146.84;0:00:05.630091
CN;218.64.122.99:7302;218.64.122.99;0:00:09.361461
ID;36.94.126.50:1080;36.94.126.50;0:00:05.162022
CN;47.100.19.147:3129;47.100.19.147;0:00:11.339600
CN;122.152.219.54:1749;FAIL;
CN;59.61.160.153:16790;59.61.160.153;0:00:08.683302
unable to load file from base commit

@ -0,0 +1,23 @@
from arango import ArangoClient
from getpass import getpass
from config import *
from time import sleep

# Connect to ArangoDB, retrying the password up to five times.
# Bug fix: pwd must exist before the loop — previously the
# `if pwd == None` check raised NameError on the first iteration
# whenever ../password_arango.txt was missing.
pwd = None
for attempt in range(0, 6, 1):
    if attempt == 5:
        # Too many failed attempts — give up.
        exit()
    try:
        # When the script runs on the Macbook the password is kept in a file.
        with open("../password_arango.txt") as f:
            pwd = f.readline()
    except FileNotFoundError:
        if pwd is None:
            pwd = getpass(f'Lösenord för {user_arango}: ')
    try:
        db = ArangoClient(hosts=host_arango).db(db_arango, username=user_arango, password=pwd)
        db.collection('members').random()  # Probe to verify the password/connection.
        break
    except Exception:
        print("Fel lösenord.")
        # Force a fresh prompt on the next attempt instead of looping
        # forever on the same wrong password.
        pwd = None
        sleep(1)

@ -0,0 +1,74 @@
from datetime import datetime
from getpass import getpass
from time import sleep
from arango import ArangoClient
from json2html import json2html
def now():
    """Return the current local date and time as ``YYYY-MM-DD_HH:MM:SS``."""
    timestamp_format = "%Y-%m-%d_%H:%M:%S"
    return datetime.now().strftime(timestamp_format)
def write_stats(db, continuous=False):
    """Collect collection counts and vendor statistics and persist them.

    Writes one summary document into the 'stats' collection (keyed by the
    current date+hour) and regenerates website/fb-webbapp/stats.html.

    Args:
        db: python-arango database handle.
        continuous (bool): If True, repeat the pass every 24 hours forever;
            otherwise run once and return.
    """
    while True:
        d = {}
        # Document count for every non-system collection.
        for col in db.collections():
            if not col['system']:
                d[col['name']] = db.collection(col['name']).count()
        # The stats collection itself is not a useful statistic.
        # NOTE(review): raises KeyError if no 'stats' collection exists yet.
        del d['stats']
        #d['time'] = now()
        # Number of members already processed (checked == true).
        cursor = db.aql.execute(
            """
            FOR doc IN members
                FILTER doc.checked == true
                COLLECT WITH COUNT INTO length
                RETURN length
            """
        )
        d['checked_members'] = cursor.next()
        # How many accounts per vendor remain active.
        cursor = db.aql.execute(
            '''
            for doc in profiles
                filter has(doc, "vendor")
                COLLECT vendor = doc.vendor WITH COUNT INTO length
                RETURN {
                    "vendor" : vendor,
                    "active" : length
                }
            ''')
        d['active_vendors'] = [doc for doc in cursor]
        # Key on date+hour ("%Y-%m-%d_%H" is 13 chars) so at most one
        # document per hour is kept; overwrite replaces within the hour.
        d['_key'] = now()[:13]
        db.insert_document( "stats", d, overwrite=True)
        # Rewrite the HTML report from scratch (truncate, then write).
        with open('website/fb-webbapp/stats.html', 'a+') as html:
            html.truncate(0)
            html.write('<!DOCTYPE html> <br>')
            html.write(json2html.convert(json = d))
        # Sleep and continue later, or stop after a single pass.
        if continuous:
            sleep(86400)
        else:
            break
# ArangoDB connection settings.
user_arango = "Lasse"
db_arango = "facebook"
host_arango = "http://192.168.0.4:8529"
# Open the connection to ArangoDB.
# Prompt interactively for the Arango password.
pwd = getpass(f'Arangolösenord för {user_arango}:').strip()
db = ArangoClient(hosts=host_arango).db(
    db_arango, username=user_arango, password=pwd
)
# Run forever, refreshing the stats once a day.
write_stats(db, continuous=True)

@ -0,0 +1,131 @@
class Profile:
    def __init__(self, profile, container, proxieservers):
        """Creates a new profile to do searches with.

        Args:
            profile (dict): Document fetched from database.
            container (str): Docker container that runs the script.
            proxieservers: Proxy servers used when fetching a replacement
                profile.
        """
        self.doc = profile
        # Account credentials and settings.
        self.name = self.doc["name"].strip()
        self.email = self.doc["email"]
        self.pwd = self.doc["pwd"]
        self.server = self.doc["server"]
        self.cookie = self.doc["cookie"]
        self.useragent = self.doc["useragent"]
        self.proxieservers = proxieservers
        self.blocked = False
        self.container = str(container)
        self.users_checked = 0
        # Route all traffic through the profile's proxies.
        session = requests.Session()
        session.proxies = self.doc['proxies']
        # Start the browser.
        user_agent = self.useragent
        self.browser = RoboBrowser(
            session=session, user_agent=user_agent, history=False, parser="lxml"
        )
        # Prefer a locally pickled cookie jar; fall back to the cookie stored
        # in the database; otherwise start logged out.
        try:
            self.browser.session.cookies = pickle.load(
                open("data/cookie_{}.pkl".format(self.name), "rb")
            )
            self.logged_in = True
        except Exception:
            try:
                self.browser.session.cookies.update(self.cookie)
                self.logged_in = True
            except Exception:
                self.logged_in = False

    def update_time(self):
        """Stamp the profile document as in use and update it in ArangoDB."""
        self.doc["in_use"] = nowstamp()
        db.update_document(self.doc, check_rev=False)

    def viewing(self):
        """Return the currently loaded page as parsed HTML."""
        return self.browser.parsed

    def open(self, url):
        """Open *url* in the browser, retrying on failure.

        After five consecutive failures the profile is considered dead:
        it is archived in 'blocked_profiles' (if the connection was refused)
        and this object is re-initialised with a fresh profile from the
        database.
        """
        n = 0
        while True:
            n += 1
            sleep(1)
            try:
                self.browser.open(url)
                # Skip Facebook's "add phone number" wizard if it appears.
                # Bug fix: substring test must run on the page text, not on
                # the parsed soup object.
                if '/a/nux/wizard/nav.php?step=phone&amp;skip' in str(self.viewing()):
                    self.browser.open(url_bas + '/a/nux/wizard/nav.php?step=phone&amp;skip')
                break
            except Exception as e:
                print(e)
                print(n)
                _print(self, None, f'Kunde inte öppna url {url}')
                if n == 5:
                    # Bug fix: `'...' in e` on the exception object raises
                    # TypeError — compare against str(e).
                    if 'Connection refused' in str(e):
                        # Store the error as text so the document serializes.
                        self.doc['e'] = str(e)
                        db.insert_document('blocked_profiles', self.doc)
                    n = 0
                    from arangodb import get_profile, remove_profile
                    # Remove the old profile from the database and replace the
                    # contents of this object with a freshly fetched profile.
                    remove_profile(self)
                    # Bug fix: __init__ requires the proxieservers argument.
                    self.__init__(
                        get_profile(self.proxieservers),
                        self.container,
                        self.proxieservers,
                    )
                    _print(self, None, f'Ny profil hämtad {self.email}')
                    self.update_time()
                else:
                    sleep(40)

    def accept_cookies(self):
        """Accept Facebook's cookie-consent dialog if one is shown."""
        self.browser.open("https://mbasic.facebook.com")
        soup = BeautifulSoup(str(self.browser.parsed), "lxml")
        # NOTE(review): the `not in` condition looks inverted — it visits the
        # consent page when "accept all" is absent. Confirm against the live
        # mbasic markup before changing.
        if "accept all" not in soup.text.lower():
            sleep_(2)
            cookie_accept_url = "https://mbasic.facebook.com/cookie/consent-page"
            self.browser.open(cookie_accept_url)
            sleep_(2)
            try:
                form = self.browser.get_form()
                self.browser.submit_form(form)
                _print(self, None, f"Accepterade cookies för {self.name}")
                sleep_(2)
                update_cookie(self.browser.session.cookies, self)
            except Exception:
                try:
                    write_error(12, self, soup=self.browser.parsed)
                except Exception:
                    pass
                _print(self, None, f"Accepterade inte cookies för {self.name}")

    def login(self):
        """Log in to Facebook with the profile's credentials."""
        print("Loggar in {}".format(self.name))
        # Go to the login page.
        self.browser.open("https://mbasic.facebook.com/login")
        # Check whether the browser is already logged in.
        soup = BeautifulSoup(str(self.browser.parsed), "lxml")
        if "log out" in soup.text.lower():
            print("Redan inloggad.")
        try:
            # Find and fill in the login form.
            form = self.browser.get_form(id="login_form")
            form["email"].value = self.email
            form["pass"].value = self.pwd
            self.browser.submit_form(form, submit=form["login"])
            # Wait briefly and refresh the cookie.
            print("Loggade in.")
            sleep_(2)
            self.open(url_bas)
            sleep_(2)
        except TypeError:
            # get_form returned None (no login form on the page).
            try:
                write_error(11, self, soup=soup, profile=self.name)
            except Exception:
                pass

@ -0,0 +1,14 @@
from flask import Flask, render_template
import json
from json2html import json2html

app = Flask(__name__)


@app.route("/")
def stats():
    """Serve the pre-rendered statistics page (templates/stats.html)."""
    return render_template("stats.html")


if __name__ == "__main__":
    # Debug server — for local development only.
    app.run(debug=True)

@ -0,0 +1,15 @@
import subprocess
import requests
from time import sleep

# Make sure the WireGuard interface is down before the loop starts.
# NOTE(review): only this first call uses sudo; the calls inside the loop
# do not — confirm whether the loop is expected to run as root.
subprocess.run(['sudo', 'wg-quick', 'down', 'integrity'])

with open('ip.txt', 'a+') as f:
    while True:
        # Bring the VPN up, log the resulting public IP, then tear it down.
        subprocess.run(['wg-quick', 'up', 'integrity'])
        sleep(5)
        # Bug fix: without a timeout a stalled tunnel hangs the loop forever.
        ip = requests.get('https://api.ipify.org', timeout=30).text
        print(ip)
        f.write(f'{ip}\n')
        # Bug fix: the loop never exits, so flush each entry or the buffered
        # lines are lost when the process is killed.
        f.flush()
        subprocess.run(['wg-quick', 'down', 'integrity'])
        sleep(5)

@ -6,7 +6,7 @@ idna==2.10
lxml==4.6.2 lxml==4.6.2
pycparser==2.20 pycparser==2.20
PyJWT==2.0.1 PyJWT==2.0.1
PyNaCl==1.4.0 #PyNaCl==1.4.0
PySocks==1.7.1 PySocks==1.7.1
python-arango==7.1.0 python-arango==7.1.0
requests==2.25.1 requests==2.25.1
@ -18,3 +18,4 @@ soupsieve==2.2
toml==0.10.2 toml==0.10.2
urllib3==1.26.3 urllib3==1.26.3
Werkzeug==1.0.1 Werkzeug==1.0.1
json2html

@ -1,12 +1,17 @@
{ {
"folders": [ "folders": [
{ {
"path": "." "path": "."
}, },
{ {
"path": "facebook" "path": "../mrkoll"
} },
], {
"settings": { "path": "facebook"
} }
],
"settings": {
"python.pythonPath": "/Users/Lasse/Datorgemensamt/Programmeringsprojekt/Facebook/fb-scraper/.venv/bin/python"
},
} }
Loading…
Cancel
Save