pull/5/head
Lasse Edfast 5 years ago
parent ef6fe686da
commit 52353def15
  1. 6
      Dockerfile
  2. 19
      docker/free/Dockerfile
  3. 14
      docker/mrkoll/Dockerfile
  4. 244
      facebook/__main__.py
  5. 304
      facebook/arangodb.py
  6. 96
      facebook/classes.py
  7. 8
      facebook/config.py
  8. BIN
      facebook/face_enc
  9. 110
      facebook/faces.py
  10. 185
      facebook/gephi (kopia).py
  11. 207
      facebook/gephi.py
  12. 61
      facebook/helpers.py
  13. 70
      facebook/images.py
  14. 168
      facebook/images_pi.py
  15. 45
      facebook/modemtest.py
  16. 90
      facebook/scrapers.py
  17. 557
      facebook/socks5free.csv
  18. 23
      facebook/start_database.py
  19. 131
      facebook/testclass.py
  20. 15
      integrity.py
  21. 2
      requirements.txt
  22. 4
      workspace.code-workspace

@ -3,11 +3,13 @@ FROM python:3.8
WORKDIR /
COPY data/ .
COPY requirements.txt .
RUN pip install -r requirements.txt
ADD . .
COPY facebook/accs.py facebook/classes.py facebook/config.py facebook/helpers.py facebook/__main__.py facebook/arangodb.py facebook/scrapers.py /facebook/
ENTRYPOINT [ "python", "facebook/__main__.py" ]
@ -15,5 +17,5 @@ CMD ["",""]
# BUILD:
# docker buildx create --use
#docker buildx build --platform linux/arm -t l3224/fb-scraper:pi --push .
#docker buildx build --platform linux/arm,linux/arm64 -t l3224/fb-scraper:pi --push .

@ -0,0 +1,19 @@
# Image for the "free proxies" variant of the scraper.
FROM python:3.8
WORKDIR /
# Install dependencies before ADD so the pip layer is cached across code changes.
COPY requirements.txt .
RUN pip install -r requirements.txt
ADD . .
# Run the scraper with the free-proxy profile pool selected (-p free).
ENTRYPOINT [ "python", "facebook/__main__.py", "-p free" ]
CMD ["",""]
# BUILD:
# docker buildx create --use
#docker buildx build --file docker/free/Dockerfile --platform linux/arm -t l3224/fb-scraper:free --push .

@ -0,0 +1,14 @@
# Image for the mrkoll.se lookup script.
FROM python:3.8
WORKDIR /
# Install dependencies before ADD so the pip layer is cached across code changes.
COPY requirements.txt .
RUN pip install -r requirements.txt
ADD . .
ENTRYPOINT [ "python", "facebook/mrkoll.py" ]
# BUILD:
# docker buildx build --file docker/mrkoll/Dockerfile --platform linux/arm -t l3224/fb-scraper:mrkoll --push .

@ -1,142 +1,158 @@
import random
import traceback
from getopt import GetoptError, getopt
from getopt import getopt
from sys import argv
from time import sleep
from subprocess import check_output
from re import split
from socket import gethostname
from datetime import datetime
from config import set_pwd
from random import randint
from arangodb import *
from classes import Profile, User
from helpers import sleep_, write_error, _print
from scrapers import profile_picture_reactions
if __name__ == "__main__":
print()
proxieservers = 'mullvad'
def blocked_profile(profile):
""" Tar bort profilen som blivit blockad och returnerar en ny. """
report_blocked(profile)
remove_profile(profile)
return new_profile()
# Argument och alternativ
# Variabler som kan ändras
url_other_pictures = [] # Fylls eventuellt på
test = False
write = True
mode = 'all'
pwd = None
def new_profile():
""" Hämtar en ny profil. """
profile = Profile(get_profile(), container)
if profile.logged_in == False:
profile.accept_cookies()
sleep_(2)
profile.login()
return profile
argv = argv[1:]
opts, args = getopt(argv, "bim:u:o:p:wl:", ["backup", "images", "mode=", "user=", "other=", "profiles=", "write", "password="])
for o, a in opts:
print(o)
if o in ['-l', "--password"]:
pwd = a.strip()
set_pwd(pwd)
# Importera andra moduler
from config import url_bas
from arangodb import (
blocked_profile,
new_profile,
backup,
get_user,
check_for_user,
friends_of_user,
)
from classes import Profile, User
from helpers import sleep_, write_error, _print, check_profile_status, update_cookie
from scrapers import profile_picture_reactions
for o, a in opts:
# Bestäm vilka profiler/proxies som ska användas
if o in ['-p', '--profiles']:
proxieservers = a.strip()
print(f'Proxieservers: {proxieservers}')
# Bestäm mode
if o in ["-m", "--mode"]:
mode = a.strip()
if mode == "single":
mode_nr = 1.7
elif mode == "few":
mode_nr = 1.4
elif mode == "force":
mode_nr = 1
if __name__ == "__main__":
print()
# Bestäm user
if o in ["-u", "--user"]:
if a == 'test': # För att testa profiler i profiles_test
test = True
container = str(a.strip())
if all([a.strip()[:4] == "leak", len(a) < 7]) or a == 'test':
sleep(randint(0, 40)/10) # För att docker service inte ska gå vidare exakt samtidigt
lookups = "leak_lookups"
userdoc = get_user(collection=lookups)
elif a.strip()[:7] == "lookups":
lookups = "lookups"
userdoc = get_user(collection=lookups)
if 'other' in userdoc:
url_other_pictures = userdoc['other']
else:
url_other_pictures = []
elif a == 'test':
lookups = "leak_lookups"
userdoc = get_user(collection=lookups)
else:
lookups = "lookups"
userdoc = {'_key': a}
# Hämta namn för containern där skriptet körs
if gethostname() not in ['macbook.local']: # TODO Lägg till för studiodatorn
try:
containers = check_output(['docker', 'lookingup', 'ls']).decode()
container = split('\W\W+', containers.split('\n')[1])[-1]
except FileNotFoundError:
pass
else:
container = 'macbook'
if o in ["-o", "--other"]:
url_other_pictures = a.split(",")
if o in ["-b", "--backup"]:
backup(db)
if o in ['-w', "--write"]:
write = False
# Argument och alternativ
argv = argv[1:]
try:
opts, args = getopt(argv, "bm:u:o:", ['backup=',"mode=", "user=", "other="])
for o, a in opts:
# mode_nr används för hur ofta profile ska roteras
if o in ["-m", "--mode"]:
mode = a.strip()
if mode == 'single':
mode_nr = 1.7
elif mode == 'few':
mode_nr = 1.4
elif mode == 'force':
mode_nr = 1
else:
mode = 'all'
mode_nr = 1
for o, a in opts:
if o in ["-u", "--user"]:
try:
if a.strip()== 'leak':
lookups = 'leak_lookups'
user = get_user(collection='leak_lookups')['_key']
else:
lookups = 'lookups'
user = a
user = User(str(user).strip(), mode)
except StopIteration:
raise Exception
if o in ["-o", "--other"]:
url_other_pictures = a.split(',')
if o in ['-b', '--backup']:
while True:
backup(db)
sleep(21600)
if 'userdoc' not in globals():
lookups = "lookups"
userdoc = {'_key': str(input("Vem/vilka vill du kolla bilder för? ")).strip()}
if "user" not in globals():
lookups = 'lookups'
user = User(str(input("Vem/vilka vill du kolla bilder för? ")).strip(), mode)
except GetoptError:
lookups = 'lookups'
user = User(str(input("Vem/vilka vill du kolla bilder för? ")).strip(), mode)
print('Mode:', mode)
print('Write:', write)
mode = input("Söka efter alla, första/sida eller första? (all, few, single)? ").lower().strip()
if mode == '':
mode = 'all'
# Hämta en användare att kolla upp
user = User(str(userdoc['_key']).strip(), mode, other_pictures=url_other_pictures)
if "url_other_pictures" in globals():
l = []
for url in url_other_pictures:
l.append(url[url.find('facebook.com') + 12:])
l.append(url[url.find("facebook.com") + 12 :])
user.url_other_pictures = l
# Hämta profil
profile = new_profile()
profile = new_profile(container, proxieservers)
profile.write = write
update_cookie(profile.browser.session.cookies, profile)
sleep(3)
# Gå igenom de användare som efterfrågats
#try:
while True:
if lookups == 'leak_lookups':
profile.browser.open(url_bas + "/" + user.username)
url = profile.browser.state.url.strip('/').strip('?_rdr')
user = User(str(url[url.rfind('/') + 1:]).strip(), mode)
if lookups == "leak_lookups":
id = user.username
check_profile_status(profile, user)
if profile.blocked:
profile = blocked_profile(profile, proxieservers=proxieservers)
profile.open(url_bas + "/" + user.username)
url = profile.browser.state.url.strip("/").strip("?_rdr")
if "php?" not in url:
user = User(str(url[url.rfind("/") + 1 :]).strip(), mode)
user.id = id
sleep_(4)
print(f"Kollar profilbilder för {user.username}")
if 'container' not in globals:
container = str(user.username)
profile.container = container
_print(profile, user, f"Börjar med profilen {profile.name}")
if "container" not in globals():
container = str(user.username)
profile.container = container
profile.users_checked += 1
# Set för kollade bilder och kollade medlemmar
#all_pictures = set([doc["_key"] for doc in db.collection("pictures").all()])
#all_pictures_start = all_pictures.copy()
#members_checked = checked_members()
# Hämta reaktioner för den första användaren
if any([not check_for_user(user.username), mode == 'force']):
if any([not check_for_user(user.username, mode=mode), mode == "force"]):
try:
while True:
# Uppdatera in_use
profile.update_time()
profile = profile_picture_reactions(profile, user, first_user=True, mode=mode)
profile = profile_picture_reactions(
profile, user, first_user=True, mode=mode
)
if profile.blocked:
profile = blocked_profile(profile)
profile = blocked_profile(profile, proxieservers=proxieservers)
else:
break
except:
@ -148,17 +164,20 @@ if __name__ == "__main__":
_print(profile, user, "\nVänner att kolla:")
friends_unchecked = []
for friend in friends:
for friend in friends:
if not check_for_user(friend):
print(friend)
friends_unchecked.append(friend)
_print(profile, user, [friends_unchecked], silent=True)
_print(profile, user, f'Totalt: {len(friends_unchecked)}')
print()
# Hämta reaktioner för users vänner (som reagerat)
count_friends = 0
for friend in friends_unchecked:
if datetime.now().strftime("%H") == '03' and int(datetime.now().strftime("%M")) < 30: # Sov för att kunna säkerhetskopieraa
sleep(1800)
count_friends += 1
user = User(str(friend), mode, other_pictures=[])
sleep_(2)
@ -166,11 +185,10 @@ if __name__ == "__main__":
# Uppdatera in_use
profile.update_time()
try:
p = profile_picture_reactions(
profile, user, mode=mode
)
if isinstance(p, Profile):
profile = p
if not check_for_user(user.username):
p = profile_picture_reactions(profile, user, mode=mode)
if isinstance(p, Profile):
profile = p
except Exception as e: # Fel4
write_error(
@ -190,25 +208,29 @@ if __name__ == "__main__":
# Rotera fb-profiler
if count_friends > 2 * mode_nr:
if random.randrange(0, 2, 1) == 1:
profile = new_profile()
profile = new_profile(container, proxieservers=proxieservers)
count_friends = 0
_print(profile, user, f"Växlar till {profile.name}")
elif count_friends > 4 * mode_nr:
profile = new_profile()
profile = new_profile(container, proxieservers=proxieservers)
count_friends = 0
_print(profile, user, f"Växlar till {profile.name}")
elif profile.blocked:
_print(profile, user, f"Tar bort {profile.name}\n".upper(), sleeptime=1)
profile = blocked_profile(profile)
_print(profile, user, f"Växlar till {profile.name}")
profile = blocked_profile(profile, proxieservers=proxieservers)
_print(profile, None, f"Klar med alla vänner.")
sleep(3)
# Hämta ny användare från databasen när alla är genomgångna
while True:
user = get_user(collection=lookups)
if user == None:
new_user = get_user(collection=lookups)
print(new_user)
_print(profile, None, f"Ny user hämtad")
if new_user == None:
sleep(300)
_print(profile, None, "Väntar på ny user.")
else:
user = User(str(user['_key']).strip(), mode)
user = User(str(new_user["_key"]), mode)
_print(profile, user, f"Förberett ny user: {user.username}")
break

@ -2,38 +2,34 @@ from getpass import getpass
from random import randint
from time import sleep
import json
from datetime import datetime
import nacl.secret
import nacl.utils
from arango import ArangoClient
from config import *
# Starta koppling till arangodb
# Avkryptera lösen till arango
try:
# Om scriptet körs på Macbook finns löseordet i en fil
with open("password_arango.txt") as f:
pwd = f.readline()
except FileNotFoundError:
for i in range(0, 6, 1):
if i == 5:
exit()
try:
key = "sssladnnklja" + getpass()
pwd = (
nacl.secret.SecretBox(key.encode())
.decrypt(pwd_arango, encoder=nacl.encoding.HexEncoder)
.decode("utf-8")
)
break
except:
print("Fel lösenord.")
sleep(1)
db = ArangoClient(hosts=host_arango).db(db_arango, username=user_arango, password=pwd)
for i in range(0, 6, 1):
if i == 5:
exit()
try:
# Om scriptet körs på Macbook finns lösenordet i en fil
with open("../password_arango.txt") as f:
pwd = f.readline()
except FileNotFoundError:
if pwd == None:
pwd = getpass(f'Lösenord för {user_arango}: ')
from helpers import now, _print, nowstamp
try:
db = ArangoClient(hosts=host_arango).db(db_arango, username=user_arango, password=pwd)
db.collection('members').random() # För att testa löseordet/kopplingen.
break
except:
print("Fel lösenord.")
sleep(1)
from helpers import now, _print, nowstamp, sleep_
from classes import Profile
def checked_members():
@ -64,39 +60,44 @@ def count_docs(col):
return cursor.next()
def report_blocked(profile, users):
db.insert_document(
"reports",
{
"_key": now(),
"profile": profile.name,
"users": [user.username for user in users],
},
overwrite=True,
)
def write_report(users):
db.insert_document(
"reports",
{"_key": now(), "users": [user.username for user in users]},
overwrite=True,
)
def report_blocked(profile):
try:
db.insert_document(
"reports",
{
"_key": str(profile.name).replace(' ', ''),
"profile": profile.__dict__
},
overwrite=True,
)
except:
_print(profile, profile.container, f'Kunde inte rapportera blockerad: {profile.name}.')
def get_profile(db=db, proxieservers='mullvad', collection='profiles'):
""" Hämtar profil från profiles """
if proxieservers != 'mullvad':
collection = f'profiles_{proxieservers}' #TODO Byt namn på profiles till profiles_mullvad i DB
def get_profile(db=db):
""" Hämtar profil från profiles """
cursor = db.aql.execute(
"""
FOR doc IN profiles
FILTER doc.in_use < @inuse
RETURN doc
""",
bind_vars={"inuse": nowstamp() - 900},
)
profiles = [profile for profile in cursor]
profile = profiles[randint(0, len(profiles) - 1)]
return profile
while True:
cursor = db.aql.execute(
"""
FOR doc IN @@col
FILTER doc.in_use < @inuse
RETURN doc
""",
bind_vars={"inuse": nowstamp() - 1200, '@col': collection}
)
profiles = [profile for profile in cursor]
if profiles == []:
sleep(180)
if proxieservers=='test': # Om det är ett test
profile = profiles[0]
else:
profile = profiles[randint(0, len(profiles) - 1)]
return profile
def friends_of_user(user):
@ -113,11 +114,12 @@ def friends_of_user(user):
def remove_profile(profile):
""" Tar bort en blockerad profil från databasen. """
_print(profile, None, f"Tar bort {profile.name}.")
db.collection("profiles").delete(
profile.doc["_key"], silent=True, ignore_missing=True
)
_print(profile, None, f"{profile.name} blockerad och borttagen {now()}.")
_print(profile, profile.container, f"{profile.name} blockerad och borttagen {now()}.")
# TODO #2 Bättre funktion för backup av databasen
@ -129,16 +131,18 @@ def arango_connect(pwd):
)
def check_for_user(username):
def check_for_user(username, mode=''):
""" Checks if a user exist in db and if it's checked """
# TODO Skulle kunna kolla ex mode också
checked = False
if db.collection("members").has(username):
if db.collection('members').get(username)['checked'] == True:
checked = True
else:
checked = False
else:
checked = False
member = db.collection('members').get(username)
if 'checked' in member:
if member['checked'] == True:
checked = True
if mode == 'all':
if 'mode' in member:
if member['mode'] in ['few', 'solo']:
checked = False
return checked
@ -149,49 +153,157 @@ def check_for_picture(id):
def get_user(collection="lookups"):
""" Hämtar användare att kolla upp från lookups """
cursor = db.aql.execute(
"""
FOR doc IN @@col
RETURN doc
""",
bind_vars={"@col": collection},
)
try:
doc = cursor.next()
if "other" not in doc:
doc["other"] = []
if collection == "leak_lookups":
doc = db.collection("leak_lookups").random()
doc["other"] = []
db.collection(collection).delete(doc["_key"])
except StopIteration:
doc = None
else:
cursor = db.aql.execute(
"""
FOR doc IN @@col
RETURN doc
""",
bind_vars={"@col": collection},
)
try:
doc = cursor.next()
if "other" not in doc:
doc["other"] = []
db.collection(collection).delete(doc["_key"])
except StopIteration:
doc = None
return doc
def backup(db):
"""Skapar en json-backup för specificerade collections.
"""Skapar en json-backup och statistik för specificerade collections.
Args:
db: databaskoppling till aktuell databas
"""
d = {}
for col in ["members", "pictures", "picture_reactions", "profiles"]:
l = []
for doc in db.collection(col).all():
l.append(doc)
d[col] = l
with open("data/backup.json", "w") as f:
json.dump(d, f)
print(f"Senaste backup: {now()}")
def used_servers():
while True:
if not datetime.now().strftime("%H") == '03' and int(datetime.now().strftime("%M")) < 10:
sleep(120)
continue
collections = ["members", "pictures", "picture_reactions", "profiles", "stats"]
for col in collections:
l = []
count = 0
icount = 0
for doc in db.collection(col).all():
count += 1
l.append(doc)
if count == 1000000:
icount += 1
count = 0
with open(f"data/backup_{col}_{icount}.json", "w") as f:
json.dump(l, f)
l = []
icount += 1
with open(f"data/backup_{col}_{icount}.json", "w") as f:
json.dump(l, f)
l = []
print(f"Senaste backup: {now()}")
write_stats()
sleep(82800)
def write_stats(continuous=False):
while True:
d = {}
for col in db.collections():
if not col['system']:
d[col['name']] = db.collection(col['name']).count()
del d['stats']
#d['time'] = now()
cursor = db.aql.execute(
"""
FOR doc IN members
FILTER doc.checked == true
COLLECT WITH COUNT INTO length
RETURN length
"""
)
d['checked_members'] = cursor.next()
# Hur många konton per säljare som finns kvar
cursor = db.aql.execute(
'''
for doc in profiles
filter has(doc, "vendor")
COLLECT vendor = doc.vendor WITH COUNT INTO length
RETURN {
"vendor" : vendor,
"active" : length
}
''')
d['active_vendors'] = [doc for doc in cursor]
d['_key'] = now()[:13]
db.insert_document( "stats", d, overwrite=True)
if continuous:
sleep(86400)
else:
break
def blocked_profile(profile, proxieservers):
""" Tar bort profilen som blivit blockad och returnerar en ny. """
_print(profile, None, f'Rapporterar att {profile.name} blockats.')
report_blocked(profile)
_print(profile, None, f'Tar bort {profile.name} från databasen.')
remove_profile(profile)
_print(profile, None, f'Hämtar en ny profil.')
profile = new_profile(profile.container, proxieservers)
return profile
def new_profile(container, proxieservers):
""" Hämtar en ny profil. """
profile = Profile(get_profile(proxieservers=proxieservers), container, proxieservers)
_print(profile, None, f'Hämtade profilen {profile.name}. Login = {profile.logged_in}.')
if profile.logged_in == False:
profile.accept_cookies()
sleep_(2)
profile.login()
sleep_(2)
try:
profile.open(url_bas)
if "accept all" in profile.viewing().text.lower():
_print(profile, None, f'Accepterar cookies {profile.name}.')
profile.accept_cookies()
sleep_(3)
except:
pass
return profile
def find_id():
"https://mbasic.facebook.com/leif.jonsson.98499/about?lst=100064897389168%3A100000134933241%3A1615858816"
cursor = db.aql.execute(
"""
FOR doc IN profiles
RETURN doc._key
"""
for doc in members
filter has(doc, "about")
filter doc.facebook_id == ''
filter doc.about != false
return doc
""",
)
return [doc for doc in cursor]
n = 0
for doc in cursor:
about = doc['about']
try:
doc['facebook_id'] = about[about.find('%')+3: about.rfind('%')]
db.update_document(doc, silent=True, check_rev=False)
#sleep(0.01)
n += 1
print(n, end = '\r')
except AttributeError:
pass
db = ArangoClient(hosts=host_arango).db(db_arango, username=user_arango, password=pwd)

@ -1,6 +1,6 @@
import pickle
import random
from datetime import datetime
from time import sleep
import requests
import werkzeug
@ -10,8 +10,8 @@ werkzeug.cached_property = werkzeug.utils.cached_property
from robobrowser import RoboBrowser
from arangodb import db
from config import *
from helpers import sleep_, update_cookie, write_error, nowstamp
from config import url_bas
from helpers import sleep_, update_cookie, write_error, nowstamp, _print
class User:
@ -82,6 +82,8 @@ class Picture:
self.url = ""
self.no_reactions = ""
self.reactions = []
self.src = ""
def add_to_db(self):
db.insert_document(
@ -93,6 +95,7 @@ class Picture:
"url": self.url,
"no_reactions": self.no_reactions,
"user": self.user,
"src": self.src,
},
overwrite_mode="update",
silent=True,
@ -101,11 +104,12 @@ class Picture:
class Profile:
def __init__(self, profile, container):
def __init__(self, profile, container, proxieservers):
"""Creates a new profile to do searches with.
Args:
profile (dict): Document fetched from database.
container (str): Docker container that runs the script.
"""
self.doc = profile
@ -118,46 +122,79 @@ class Profile:
self.cookie = self.doc["cookie"]
self.useragent = self.doc["useragent"]
self.proxieservers = proxieservers
self.blocked = False
self.container = container
self.container = str(container)
self.users_checked = 0
self.write = True
# Ange proxies
session = requests.Session()
session.proxies = {
"https": "socks5://'8155249667566524'@{}".format(self.server),
"http": "socks5://'8155249667566524'@{}".format(self.server),
}
session.proxies = self.doc["proxies"]
# Starta browser
# user_agent = "Mozilla/5.0 (iPhone; CPU iPhone OS 10_3 like Mac OS X) AppleWebKit/602.1.50 (KHTML, like Gecko) CriOS/56.0.2924.75 Mobile/14E5239e Safari/602.1"
user_agent = self.useragent
self.browser = RoboBrowser(
session=session, user_agent=user_agent, history=False, parser="lxml"
)
# TODO Ta bort gamla metoden om nya (hämta från doc) fungerar
# try:
# # Försök hämta cookie från fil
# self.browser.session.cookies = pickle.load(
# open("data/cookie_{}.pkl".format(self.name), "rb")
# )
# self.logged_in = True
try:
self.browser.session.cookies = pickle.load(
open("data/cookie_{}.pkl".format(self.name), "rb")
)
self.browser.session.cookies.update(self.cookie)
self.logged_in = True
except:
try:
self.browser.session.cookies.update(self.cookie)
self.logged_in = True
except:
self.logged_in = False
self.logged_in = False
def update_time(self):
""" Uppdatera dokumentet i arango. """
"""Uppdatera dokumentet i arango."""
self.doc["in_use"] = nowstamp()
db.update_document(self.doc, check_rev=False)
def viewing(self):
""" Returnerar browser i html-format """
"""Returnerar browser i html-format"""
return self.browser.parsed
def open(self, url):
n = 0
while True:
n += 1
sleep(1)
try:
# Försök öppna url, om det misslyckas så vänta lite och försök sen igen
self.browser.open(url)
if "/a/nux/wizard/nav.php?step=phone&amp;skip" in self.viewing():
self.browser.open(
url_bas + "/a/nux/wizard/nav.php?step=phone&amp;skip"
)
break
except Exception as e:
print(e)
print(n)
_print(self, None, f"Kunde inte öppna url {url}")
if n == 5:
if "Connection refused" in e:
self.doc["e"] = e
db.insert_document("blocked_profiles", self.doc)
n = 0
from arangodb import get_profile, remove_profile
# Ta bort den gamla profilen från databasen och ersätt profile med nytt innehåll från ny profil
remove_profile(self)
self.__init__(get_profile(self.proxieservers), self.container)
_print(self, None, f"Ny profil hämtad {self.email}")
self.update_time()
else:
sleep(40)
def accept_cookies(self):
""" Accepterar cookies """
"""Accepterar cookies"""
self.browser.open("https://mbasic.facebook.com")
soup = BeautifulSoup(str(self.browser.parsed), "lxml")
if "accept all" not in soup.text.lower():
@ -168,7 +205,7 @@ class Profile:
try:
form = self.browser.get_form()
self.browser.submit_form(form)
print(f"Accepterade cookies för {self.name}")
_print(self, None, f"Accepterade cookies för {self.name}")
sleep_(2)
update_cookie(self.browser.session.cookies, self)
except:
@ -176,10 +213,10 @@ class Profile:
write_error(12, self, soup=self.browser.parsed)
except:
pass
print(f"Accepterade inte cookies för {self.name}")
_print(self, None, f"Accepterade inte cookies för {self.name}")
def login(self):
""" Loggar in på Facebook. """
"""Loggar in på Facebook."""
print("Loggar in {}".format(self.name))
@ -199,16 +236,17 @@ class Profile:
# Vänta lite och uppdatera cookie
print("Loggade in.")
sleep_(2)
self.open(url_bas)
sleep_(2)
except TypeError:
try:
write_error(11, self, soup=soup, profile=self.name)
except:
pass
def unused(self):
""" Sätter user till False för valda profiler """
self.doc["in_use"] = False
db.update_document(self.doc, silent=True, check_rev=False)
def update_cookie(self, cookie):
self.cookie = cookie
db.update_document({"_id": self.doc["_id"], "cookie": cookie}, check_rev=False)
class Proxies:
@ -232,7 +270,6 @@ class Friend:
self.username = ""
self.url = ""
self.name = ""
self.single = ""
def add_to_db(self):
db.insert_document(
@ -265,4 +302,3 @@ class Reaction:
"picture": self.picture_id,
"reaction": self.type,
}

@ -1,10 +1,16 @@
def set_pwd(_pwd):
global pwd
pwd = _pwd
# Info för arangodb
user_arango = "Lasse"
pwd_arango = "4c071768bedc259288361c07aafd8535fca546086fada4e7b5de4e2bb26b0e70fa8d348c998b90d032a5b8f3fdbae1881b843021e3475198e6fb45f58d8dc450bd52f77d"
db_arango = "facebook"
host_arango = 'http://192.168.0.3:8529'
host_arango = 'http://192.168.0.4:8529'
#host_arango = "http://arango.lasseedfast.se"
# Andra uppgifter
url_bas = "https://mbasic.facebook.com"
user_agent = "Mozilla/5.0 (iPhone; CPU iPhone OS 10_3 like Mac OS X) AppleWebKit/602.1.50 (KHTML, like Gecko) CriOS/56.0.2924.75 Mobile/14E5239e Safari/602.1"
mullvad = '8155249667566524'

Binary file not shown.

@ -0,0 +1,110 @@
import os
import pickle
import time
import cv2
import face_recognition
def build_data():
    """Build the ``face_enc`` pickle with known face encodings.

    Walks ``../profile_pictures/<member>/*.jpg``, computes one encoding per
    detected face and stores the encodings together with the member name so
    ``identify_face()`` can match against them later.
    """
    knownEncodings = []
    knownNames = []
    members = os.listdir('../profile_pictures')
    for member in members:
        # Skip macOS Finder metadata entries.
        if '.DS_Store' in member:
            continue
        # Collect every jpg belonging to this member.
        imagePaths = []
        for path in os.listdir(f'../profile_pictures/{member}'):
            if '.jpg' in path:
                imagePaths.append(f'../profile_pictures/{member}/{path}')
        # Loop over the image paths.
        for imagePath in imagePaths:
            print(imagePath)
            # OpenCV loads BGR; dlib/face_recognition expects RGB ordering.
            image = cv2.imread(imagePath)
            rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
            # Locate faces in the RGB image.
            boxes = face_recognition.face_locations(rgb, number_of_times_to_upsample=2)  # ,model='hog'
            # Fix: compute the embeddings from the same RGB image the boxes
            # were located in (the original passed the BGR `image` here).
            encodings = face_recognition.face_encodings(rgb, boxes)
            for encoding in encodings:
                knownEncodings.append(encoding)
                knownNames.append(member)
    # Persist encodings along with their names for later use.
    data = {"encodings": knownEncodings, "names": knownNames}
    with open("face_enc", "wb") as f:
        pickle.dump(data, f)
def identify_face(imagePath):
    """Detect and label known faces in the image at *imagePath*.

    Loads the encodings written by ``build_data()`` from ``face_enc``,
    matches every face found in the image against them and displays the
    image with a named rectangle per detected face.

    Args:
        imagePath (str): Path to the image file to analyse.
    """
    # Path of the haarcascade xml bundled with the cv2 install.
    cascPathface = os.path.dirname(
        cv2.__file__) + "/data/haarcascade_frontalface_alt2.xml"
    faceCascade = cv2.CascadeClassifier(cascPathface)
    # Load the known faces and embeddings saved by build_data().
    # Fix: the original leaked the file handle; use a context manager.
    with open('face_enc', "rb") as f:
        data = pickle.loads(f.read())
    image = cv2.imread(imagePath)
    rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    # Greyscale copy for the haarcascade detector.
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    faces = faceCascade.detectMultiScale(
        gray,
        scaleFactor=1.1,
        minNeighbors=5,
        minSize=(60, 60),
        flags=cv2.CASCADE_SCALE_IMAGE,
    )
    # Facial embeddings for every face found in the input image.
    encodings = face_recognition.face_encodings(rgb)
    names = []
    # Loop over the facial embeddings in case there are multiple faces.
    for encoding in encodings:
        # True for every stored encoding this face matches closely.
        matches = face_recognition.compare_faces(data["encodings"], encoding)
        # Default when no encoding matches.
        name = "Unknown"
        if True in matches:
            # Indexes of the matching stored encodings.
            matchedIdxs = [i for (i, b) in enumerate(matches) if b]
            counts = {}
            # Count how often each known person matched this face.
            for i in matchedIdxs:
                name = data["names"][i]
                counts[name] = counts.get(name, 0) + 1
            # Keep the person with the highest match count.
            name = max(counts, key=counts.get)
            print(counts)
        names.append(name)
    # NOTE(review): `faces` comes from the haarcascade detector while `names`
    # comes from face_recognition; the two can disagree on count and order,
    # so labels may not line up with boxes — confirm before relying on this.
    for ((x, y, w, h), name) in zip(faces, names):
        # Draw the predicted face name on the image.
        cv2.rectangle(image, (x, y), (x + w, y + h), (0, 255, 0), 2)
        cv2.putText(image, name, (x, y), cv2.FONT_HERSHEY_SIMPLEX,
                    0.75, (0, 255, 0), 2)
    cv2.imshow("Frame", image)
    cv2.waitKey(0)
if __name__ == "__main__":
    # Demo invocation; the guard keeps it from running on import.
    identify_face('/Users/Lasse/Datorgemensamt/Programmeringsprojekt/Facebook/fb-scraper/profile_pictures/millington.jiang/4138068259557849.jpg')

@ -0,0 +1,185 @@
import locale
import re
from datetime import datetime
import networkx as nx
import pandas as pd
from numpy.core.numeric import NaN
locale.setlocale(locale.LC_TIME, "en_US")
from arangodb import db
def nodes_from_list(
    nodes, collection="members", return_fields="{'_key': doc._key, 'name': doc.name}"
):
    """Fetch the documents whose ``_id`` is in *nodes* from *collection*.

    Args:
        nodes (list): Document ``_id`` values to look up.
        collection (str, optional): ArangoDB collection to search. Defaults to "members".
        return_fields (str, optional): AQL projection interpolated into the
            query — pass only trusted values.

    Returns:
        list: One projected document per matching node.
    """
    query = f"""
    FOR doc IN @@ecollection
        FILTER doc._id IN @nodes
        RETURN {return_fields}
    """
    bound = {"@ecollection": collection, "nodes": nodes}
    return list(db.aql.execute(query, bind_vars=bound))
def edges_from_nodes(
    nodes, edge_collections=None, simple=True, mode="or"
):
    """Return the edges connected to the chosen nodes.

    Args:
        nodes (list): Document ``_id`` values the edges must touch.
        edge_collections (list, optional): Edge collections to query.
            Defaults to ["picture_reactions"].
        simple (bool, optional): If True, only _from/_to/_id/_key are
            returned per edge. Defaults to True.
        mode (str, optional): "or" keeps edges touching any listed node,
            "and" only edges between two listed nodes. Interpolated directly
            into the AQL — pass only trusted values.

    Returns:
        list: Edge documents (projected when ``simple`` is True).
    """
    # Fix: mutable default argument replaced with the None idiom.
    if edge_collections is None:
        edge_collections = ["picture_reactions"]
    if simple:
        return_fields = (
            "{'_to': doc._to, '_from': doc._from, '_id':doc._id, '_key':doc._key}"
        )
    else:
        return_fields = "doc"
    edges = []
    for collection in edge_collections:
        aql = f"""
        FOR doc IN @@edge_collection
            FILTER doc._from IN @nodes {mode} doc._to IN @nodes
            RETURN {return_fields}
        """
        cursor = db.aql.execute(
            aql,
            bind_vars={
                "@edge_collection": collection,
                "nodes": nodes,
            },
        )
        edges.extend(cursor)
    return edges
def convert_date(date):
    """Parse a Facebook-style date string and return it as ``YYYY-MM-DD``.

    Tries several English date formats in order; as a last resort assumes
    year 2021 (hard-coded, as in the original) for day-month strings
    without a year. Returns "" when nothing matches.

    Args:
        date (str): Date text scraped from Facebook, e.g. "12 Mar 2021".

    Returns:
        str: ISO-style date string, or "" if the input could not be parsed.
    """
    # Fix: the original formatted with "%Y-%d-%d", repeating the day where
    # the month belongs; the try-pyramid is flattened into a format loop.
    for fmt in ("%d %b %Y", "%d %B %Y", "%b %d, %Y", "%B %d, %Y"):
        try:
            return datetime.strptime(date, fmt).strftime("%Y-%m-%d")
        except ValueError:
            continue
    try:
        # Dates without a year (e.g. "12 Mar"); year 2021 assumed.
        return datetime.strptime(date + " 2021", "%d %b %Y").strftime("%Y-%m-%d")
    except ValueError:
        return ""
def export_network(members, n=2):
    """Export a .gexf network file built around a list of members.

    Starting from *members*, collects everyone who interacted with them,
    keeps only users connected to at least *n* of the seed network (or who
    are first-degree contacts), and writes the resulting reaction graph.

    Args:
        members (list): Usernames (member ``_key`` values) seeding the network.
        n (int, optional): Minimum shared contacts for inclusion. Defaults to 2.
    """
    # Fix: the original f-string (f"data/-.join({members}).-old.gexf") never
    # actually joined the member names into the filename.
    filename = f"data/{'-'.join(members)}-old.gexf"
    ids = []
    for member in members:
        ids.append(f"members/{member}")
    friends = set()
    # Everyone the seed members interacted with, from the database.
    edges = edges_from_nodes(ids)
    for edge in edges:
        friends.add(edge["_from"])
        friends.add(edge["_to"])
    edges = edges_from_nodes(list(friends))
    # Map every user to the set of users they interacted with.
    d = {}
    for i in edges:
        _to = i["_to"]
        _from = i["_from"]
        if _to not in d:
            d[_to] = set([i["_from"]])
        else:
            d[_to] = d[_to] | set([i["_from"]])
        if _from not in d:
            d[_from] = set([i["_to"]])
        else:
            d[_from] = d[_from] | set([i["_to"]])
    # Keep only users who reacted with at least n of the seed's contacts,
    # plus the first-degree contacts themselves.
    friends = set(friends)
    members = []
    for key, value in d.items():
        if len(value & friends) >= n or key in friends:
            members.append(key)
    # Edges strictly between the selected members.
    edges = pd.DataFrame(
        edges_from_nodes(members, mode="and", simple=False),
        columns=["_key", "_to", "_from", "reaction", "picture"],
    )
    edges.set_index("_key", inplace=True)
    # Users that actually appear in the final edge list.
    members = list(set(edges["_from"].unique()) | set(edges["_to"].unique()))
    # Nodes for the network.
    nodes = nodes_from_list(
        members
    )  # , return_fields="{'id':doc._key, 'label':doc.name")
    nodes = [(i["_key"], i) for i in nodes]
    # Strip the "members/" prefix and clean up picture ids.
    edges._from = edges._from.apply(lambda x: x[8:])
    edges._to = edges._to.apply(lambda x: x[8:])
    edges.picture = edges.picture.apply(
        lambda x: re.search(r"\d+", x).group()
    )  # Remove url noise where the picture got a bad id
    # Fetch picture dates so edges can carry a date attribute.
    p = ["pictures/" + i for i in edges.picture.unique().tolist()]
    d = {}
    pictures = nodes_from_list(
        p, collection="pictures", return_fields="{'id': doc._key, 'date':doc.date}"
    )
    for picture in pictures:
        d[picture["id"]] = convert_date(picture["date"])
    edges["date"] = edges.picture.apply(lambda x: d[x])
    # Build the graph from the edge table.
    G = nx.from_pandas_edgelist(
        edges,
        source="_from",
        target="_to",
        edge_attr=["reaction", "date"],  # , "now"
        create_using=nx.MultiDiGraph,
    )
    # Attach the node metadata and export.
    G.add_nodes_from(nodes)
    nx.write_gexf(
        G,
        filename
    )
if __name__ == "__main__":
    # Export a sample network when run as a script.
    export_network(["linda.kakuli"])
    # export_network(input('Member: '))

@ -1,6 +1,7 @@
import locale
import re
from datetime import datetime
from sys import argv
import networkx as nx
import pandas as pd
@ -25,27 +26,21 @@ def nodes_from_list(
return [doc for doc in cursor]
def edges_from_nodes(
nodes, edge_collections=["picture_reactions"], simple=True, mode="or"
nodes, edge_collections=["picture_reactions"], mode="or"
):
"""
Returnerar en df med relationer för valda noder och relationtabeller.
Returnerar en dict med relationer för valda noder och relationtabeller.
Args:
nodes (list): Noder som ska ingå i relationerna
edge_collections (list, optional): Relationtabeller att hämta relationer från. Defaults to ['messages'].
simple (bool, optional): Simple ger bara _from, _to och _key. Defaults to True.
Returns:
pd.DataFrame: DataFrame.
dict: Dict med relationer
"""
if simple:
return_fields = (
"{'_to': doc._to, '_from': doc._from, '_id':doc._id, '_key':doc._key}"
)
else:
return_fields = "doc"
edges = []
@ -53,14 +48,14 @@ def edges_from_nodes(
aql_edges = f"""
FOR doc IN @@edge_collection
FILTER doc._from IN @nodes {mode} doc._to IN @nodes
RETURN {return_fields}
RETURN doc
"""
cursor = db.aql.execute(
aql_edges,
bind_vars={
"@edge_collection": collection,
"nodes": nodes,
},
}, stream=True
)
edges = edges + [doc for doc in cursor]
@ -89,17 +84,32 @@ def convert_date(date):
# return f'{new_date.date().year}-{new_date.date().month}-{new_date.date().day}'
def export_network(member, n=2):
""" Exporterar en gexf-fil med noder utifrån en medlem. """
def get_edges(member, n=2, lookups=[], common=True):
""" Returnerar en df med edges för vald member.
Args:
member (str): Username for member.
lookups (list): Användare att hitta gemensamt nätverk för
noncommon (bool): Om den ena användarens förstakontakter ska räknas till den andra användarens nätverk
Returns:
df: Dataframe with edges
"""
member = f"members/{member}"
lookups = [f"members/{i}" for i in lookups]
member_friends = set()
# Hämta relationer kopplade till member från databasen
for edge in edges_from_nodes([member]):
member_friends.add(edge["_from"])
member_friends.add(edge["_to"])
edges = edges_from_nodes(list(member_friends))
member_friends = list(member_friends)
if not common:
# Ta bort de andra i lookups så inte de får kompisars kompisar
member_friends = [friend for friend in member_friends if friend not in lookups] # ! Ska den här vara kvar?
for member in lookups:
member_friends.append(member)
edges = edges_from_nodes(member_friends)
# Skapa en dict där det syns vem som har interagerat med hur många
d = {}
@ -125,29 +135,46 @@ def export_network(member, n=2):
# Skapa df med edges
edges = pd.DataFrame(
edges_from_nodes(members, mode="and", simple=False),
edges_from_nodes(members, mode="and"),
columns=["_key", "_to", "_from", "reaction", "picture"],
)
edges.set_index("_key", inplace=True)
# En lista på användare att ta med till nätverket
members = list(set(edges["_from"].unique()) | set(edges["_to"].unique()))
return edges
# Skapa noder till nätverket
nodes = nodes_from_list(
members
) # , return_fields="{'id':doc._key, 'label':doc.name")
nodes = [(i["_key"], i) for i in nodes]
def members_from_edges(edges):
    """Collect every user that occurs in the edge table.

    Args:
        edges (df): Dataframe with "_from" and "_to" columns.

    Returns:
        list: Unique members appearing as source or target of any edge.
    """
    participants = set(edges["_from"]) | set(edges["_to"])
    return list(participants)
def edges_for_network(edges):
""" Prepare edges for the network
Args:
edges (df): Dataframe with edges
Returns:
df: Dataframe with edges prepared for network.
"""
# Lägg till några kolumner i edges-tabellen
edges._from = edges._from.apply(lambda x: x[8:])
edges._to = edges._to.apply(lambda x: x[8:])
edges._from = edges._from.apply(lambda x: x[8:]) # Ta bort "members/"
edges._to = edges._to.apply(lambda x: x[8:]) # Ta bort "members/"
edges.picture = edges.picture.apply(
lambda x: re.search("\d+", x).group()
) # Rensa bort url-info i de fall bilden har fått fel id
# Hämta bilder för att kunna lägga datum till edges
p = ["pictures/" + i for i in edges.picture.unique().tolist()]
d = {}
pictures = nodes_from_list(
p, collection="pictures", return_fields="{'id': doc._key, 'date':doc.date}"
@ -157,25 +184,145 @@ def export_network(member, n=2):
edges["date"] = edges.picture.apply(lambda x: d[x])
return edges
def export_network(member):
""" Exporterar en gexf-fil med noder utifrån en medlem. """
filename = f"data/{member}_.gexf"
edges = get_edges(member, n=3)
members = members_from_edges(edges)
# Skapa graf utifrån relationer
G = nx.from_pandas_edgelist(
edges,
edges_for_network(edges),
source="_from",
target="_to",
edge_attr=["reaction", "date"], #, "now"
create_using=nx.MultiDiGraph,
)
## Skapa noder till nätverket
nodes = nodes_from_list(
members
) # , return_fields="{'id':doc._key, 'label':doc.name")
# Lägg till noderna i grafen
G.add_nodes_from(nodes)
G.add_nodes_from([(i["_key"], i) for i in nodes])
# Exportera till filer
nx.write_gexf(
G,
f"data/network_test.gexf",
filename
)
def common_friends(d, n=2):
    """Return the friends that occur in at least ``n`` of the given networks.

    Args:
        d (dict): Maps a member to an iterable of that member's friends.
        n (int, optional): Minimum number of networks a friend must appear
            in to count as common. Defaults to 2.

    Returns:
        list: Friends present in at least ``n`` networks, in first-seen order.

    Exits the process when no friend reaches the threshold.
    """
    from collections import Counter

    # Count in how many networks each friend occurs; set() deduplicates
    # friends inside a single network so nobody is counted twice per member.
    occurrences = Counter(
        friend for friends in d.values() for friend in set(friends)
    )
    shared = [friend for friend, count in occurrences.items() if count >= n]
    if not shared:
        print('Inga gemensamma i nätverken.')
        exit()
    return shared
if __name__ == "__main__":
    # NOTE(review): this call looks like diff residue — it runs before the
    # rest of the setup and reappears, commented out, at the bottom of the
    # file. Confirm it should be removed.
    export_network("asifasghar")
    # Seed members whose combined network should be exported.
    lookups = [
        'katherine.zimmerman.754',
        'boogiesaman.bakhtiari',
        'lena.tidestromsagstrom',
        'bibi.rodoo',
        'mina.benaissa',
        'henrik.johnsson.73',
        'fabian.asserback',
        '100005696055822',
        'fia.wiren',
        'daniel.kjellander.5'
    ]
    print('Samlar data för:')
    for i in lookups:
        print(i)
    # NOTE(review): the printed string opens a '(' that is never closed.
    print(f'({len(lookups)} stycken\n')
    # How many of a member's friends a friend-of-friend must know to be kept.
    if len(lookups) == 1:
        n = 1
    elif len(argv) > 1:
        n = int(argv[1])  # Threshold can be overridden on the command line.
    else:
        #from math import sqrt
        n = round(len(lookups)/2.2 + 1)  # Heuristic scaling with group size.
    print(f'n = {n}')
    # Short member lists give a readable file name; longer ones a timestamp.
    if len(lookups) <= 3:
        filename = f"../data/{'-'.join(lookups).replace('.','')}.gexf"
    else:
        from datetime import datetime
        filename = f"../data/{datetime.now()}.gexf"
    # A single seed member needs no common-friend filtering.
    if len (lookups) == 1:
        export_network(lookups[0])
        exit()
    # Collect each seed member's network of friends.
    d = {}
    for member in lookups:
        edges = get_edges(member, lookups=lookups, common = False, n=n)
        friends = members_from_edges(edges)
        d[member] = friends
        print(member, len(friends))
    # Filter down to friends shared between the networks.
    common = common_friends(d)
    print('Common friends: ', len(common))
    edges = pd.DataFrame(edges_from_nodes(common, mode='and')) # 'and': both endpoints must be common friends; use 'or' to relax.
    members = members_from_edges(edges)
    edges = edges_for_network(edges)
    # Build the graph from the relations.
    G = nx.from_pandas_edgelist(
        edges,
        source="_from",
        target="_to",
        edge_attr=["reaction", "date"], #, "now"
        create_using=nx.MultiDiGraph,
    )
    ## Create the nodes for the network.
    nodes = nodes_from_list(
        members
    )  # , return_fields="{'id':doc._key, 'label':doc.name")
    # Add the nodes to the graph.
    G.add_nodes_from([(i["_key"], i) for i in nodes])
    # Export to file.
    nx.write_gexf(
        G,
        filename
    )
    #export_network("asifasghar")
    # export_network(input('Member: '))

@ -2,8 +2,9 @@ import pickle
import random
from datetime import datetime
from time import sleep
import json
from arangodb import db
from config import url_bas
def sleep_(t):
@ -12,7 +13,7 @@ def sleep_(t):
"""
variation = 4 # Testa olika sovlängder för att inte få användaren blockerad
sleep(t * variation * random.randrange(85, 115, 1) / 100)
if random.randrange(0, 60, 1) == 1:
if random.randrange(0, 50, 1) == 1:
longsleep = random.randrange(200, 300)
print('')
for s in range(0, longsleep):
@ -21,11 +22,14 @@ def sleep_(t):
print()
sleep(random.randrange(0, 10, 1) / 4)
# TODO #1 save cookies to db
def update_cookie(cookies, profile):
    """Persist the browser's current cookies on the profile.

    Args:
        cookies: cookie jar exposing ``get_dict()`` — presumably a requests
            CookieJar; TODO confirm.
        profile: Profile object that stores the cookie dict.
    """
    # NOTE(review): the live pickle write below duplicates the commented-out
    # version that follows — this looks like diff residue where the old
    # file-based persistence was replaced by profile.update_cookie().
    # Confirm whether the pickle dump should still run.
    with open("data/cookie_{}.pkl".format(profile.name), "wb") as f:
        pickle.dump(cookies, f)
    # with open("data/cookie_{}.pkl".format(profile.name), "wb") as f:
    #     pickle.dump(cookies, f)
    # cookies_dict = json.dumps(dict(cookies))
    profile.update_cookie(cookies.get_dict())
def write_error(nr, profile, e=" ", traceback="", soup="", user="", url="", url_name=""):
@ -97,18 +101,27 @@ def _print(profile, user, text, end='\n', silent=False, sleeptime=0):
end (str, optional): The end value for print. Defaults to '\n'.
silent (bool, optional): If a print should be done in the terminal. Defaults to False.
"""
from classes import User
if silent == False:
print(text, end=end)
if isinstance(user, User):
if profile.write == False:
return None
if profile.container[:4] == 'leak' and len(profile.container) < 7:
_key = f'{profile.container}_{now()[2:10]}'
elif profile.container[:7] == 'lookups':
_key = f'{profile.container}_{now()[2:10]}'
else:
_key = profile.container
try:
if isinstance(text, list):
to_print = {user.username: text}
else:
to_print = f"{user.username} - {text.strip()}"
else:
except:
if isinstance(text, list):
to_print = {user: text}
else:
@ -117,8 +130,40 @@ def _print(profile, user, text, end='\n', silent=False, sleeptime=0):
db.insert_document(
"prints",
{'_key': str(profile.container), 'print':{now(): to_print}},
{'_key': _key, 'print':{now(): to_print}},
overwrite_mode="update",
silent=True,
)
sleep(sleeptime)
def check_profile_status(profile, user):
    """Inspect the page the profile is currently viewing and react to it.

    Detects block/rate-limit pages, cookie banners and a logged-out state,
    and takes the matching action (flag as blocked, accept cookies, or
    re-login and return to the user's photos page).

    Args:
        profile: Profile wrapping the browser session.
        user: User currently being scraped; ``user.url_photos`` is re-opened
            after a cookie banner or a re-login.

    Returns:
        The (possibly mutated) profile; ``profile.blocked`` is set to True
        when Facebook has blocked or disabled the account.
    """
    if profile.browser._cursor == -1:  # No page has been opened yet.
        profile.open(url_bas)
    if any(
        [
            "It looks like you were misusing this feature by going too fast."
            in profile.viewing().text,
            "Access Denied" in profile.viewing().text,
            "Your Account Has Been Disabled" in profile.viewing().text
        ]
    ):
        # The account is blocked — log the page and flag the profile.
        _print(profile, user, f"{profile.name} blocked\n".upper(), sleeptime=1)
        _print(profile, user, profile.viewing().text, sleeptime=1)
        profile.blocked = True  # Currently the profile is just dropped; could perhaps be handled differently.
    elif "accept all" in profile.viewing().text.lower():
        # Cookie banner — accept it and return to the photos page.
        profile.accept_cookies()
        sleep_(3)
        profile.open(user.url_photos)
    elif (
        profile.viewing().find("title").text.strip() == "Log in to Facebook | Facebook"
    ):
        # Session lost — log in again and return to the photos page.
        sleep_(5)
        profile.login()
        sleep_(5)
        profile.open(user.url_photos)
    return profile
from arangodb import db

@ -0,0 +1,70 @@
import requests
import os
from datetime import date, datetime, timedelta
from time import sleep
from arangodb import db
def download_image(url, user, id):
    """Download one picture to ``../profile_pictures/<user>/<id>.jpg``.

    Args:
        url (str): Direct URL to the image.
        user (str): Username the picture belongs to; used as directory name.
        id (str): Picture id; used as the file name (shadows the builtin,
            kept for interface compatibility).

    Exits the whole process when the link has expired or access is denied,
    since every remaining link in the batch would fail the same way.
    """
    # Ensure the user's directory exists. makedirs(exist_ok=True) replaces
    # the racy isdir()+mkdir() pair and also creates missing parents.
    os.makedirs(f'../profile_pictures/{user}', exist_ok=True)
    # Download the image.
    r = requests.get(url)
    if r.text == 'URL signature expired':
        print('För gammal länk.')
        exit()
    elif r.status_code == 403:
        exit()
    with open(f'../profile_pictures/{user}/{id}.jpg', 'wb') as handler:
        handler.write(r.content)
def get_pictures(day):
    """Download the checked profile pictures of members fetched on a day.

    Selects members whose ``fetched`` field equals ``day`` and that have a
    ``checked_pictures`` list but no ``pictures_downloaded`` flag, downloads
    (up to 10 of) their pictures, then marks the member as downloaded.

    Args:
        day (str): Date key compared against ``members.fetched``
            (presumably 'YYYYMMDD' — verify against callers).
    """
    cursor = db.aql.execute(
        """
        for doc in members
        filter doc.fetched == @date
        filter has(doc, "checked_pictures")
        filter not has(doc, "pictures_downloaded")
        return {'member': doc._key, 'pictures':doc.checked_pictures}
        """,
        bind_vars={'date': day}
    )
    for doc in cursor:
        # Extract the fbid part of each stored picture URL.
        pictures = []
        for picture in doc['pictures']:
            pictures.append(picture[picture.find('fbid=')+5:])
        # NOTE(review): rebinding `cursor` here shadows the outer cursor; the
        # outer for-loop keeps its own iterator so this works, but it is
        # fragile. Also, `limit 10` caps downloads per member even though the
        # member is then flagged as done — confirm that is intended.
        cursor = db.aql.execute(
            """
            for doc in pictures
            filter doc._key in @list
            limit 10
            return {'_key': doc._key, 'user':doc.user, 'url': doc.src}
            """,
            bind_vars={"list": pictures},
        )
        for picture in cursor:
            download_image(picture['url'], picture['user'], picture['_key'])
            print(picture['_key'])
            sleep(2)  # Pause between downloads to stay under the radar.
        # Flag the member so the pictures are not downloaded again.
        db.update_document({'_id': 'members/' + str(doc['member']), 'pictures_downloaded': True}, silent=True, check_rev=False)
def old_pics():
    """Backfill: download pictures for members fetched in the last 60 days."""
    # makedirs(exist_ok=True) replaces the racy isdir()+mkdir() pair; the
    # original also used pointless f-string prefixes on constant paths.
    os.makedirs('../profile_pictures', exist_ok=True)
    start = date.today()
    # Walk backwards one day at a time, starting with yesterday.
    for offset in range(1, 60):
        day = start - timedelta(days=offset)
        get_pictures(day.strftime('%Y%m%d'))

@ -0,0 +1,168 @@
import os
from datetime import date, datetime, timedelta
from getpass import getpass
from time import sleep
import random
import requests
import urllib3
urllib3.disable_warnings()
from arango import ArangoClient
def download_image(url, user, id):
    """Download one picture via a random Mullvad SOCKS5 proxy and mirror it to Nextcloud.

    Relies on the module-level globals ``servers_mullvad`` and ``auth``
    (both assigned in the __main__ section of this file).

    Args:
        url (str): Direct URL to the image.
        user (str): Username; used as folder name locally and on Nextcloud.
        id (str): Picture id; used as the file name.
    """
    # Download the image, retrying with a fresh proxy on connection errors.
    while True:
        try:
            # Pick a random SOCKS5 exit so repeated requests come from
            # different IPs.
            server = servers_mullvad[random.randint(0, len(servers_mullvad)-1)]
            # SECURITY: hard-coded Mullvad account number embedded in the
            # proxy URL — should be moved to configuration/secrets.
            proxies = {
                "https": "socks5://'8155249667566524'@{}".format(server),
                "http": "socks5://'8155249667566524'@{}".format(server),
            }
            r = requests.get(url, proxies=proxies)
            break
        except requests.exceptions.ConnectionError:
            sleep(300)  # Back off five minutes before retrying.
    if r.text == "URL signature expired":
        print("För gammal länk.")
        exit()
    elif r.status_code == 403:
        exit()
    # Save the image locally first.
    image_name = f"profile_pictures/{user}/{id}.jpg"
    img_data = r.content
    with open(image_name, "wb") as handler:
        handler.write(img_data)
    # Upload the file to Nextcloud via WebDAV, retrying until it succeeds.
    nc_path = f"https://nc.lasseedfast.se/remote.php/dav/files/Lasse/profile_pictures/{user}/{id}.jpg"
    headers = {"Content-type": "image/jpeg", "Slug": "heart"}
    while True:
        try:
            # NOTE(review): open() without a context manager leaks the file
            # handle on each retry; bare except retries on *any* error.
            r = requests.put(
                nc_path, data=open(image_name, "rb"), headers=headers, auth=auth, verify=False
            )
            break
        except:
            print('Kunde inte ladda upp', nc_path)
            sleep(5)
    print(f"{user}\t{id}\t{r.status_code}")
def get_pictures(day):
    """Download checked profile pictures for members fetched on ``day``.

    For every member fetched on the given day that has ``checked_pictures``
    but no ``pictures_downloaded`` flag: create a per-user folder on
    Nextcloud and locally, download (up to 10 of) the pictures, then mark
    the member as done.

    Args:
        day: Date key compared against ``members.fetched`` (stringified).
    """
    cursor = db.aql.execute(
        """
        for doc in members
        filter doc.fetched == @date
        filter has(doc, "checked_pictures")
        filter not has(doc, "pictures_downloaded")
        return {'member': doc._key, 'pictures':doc.checked_pictures}
        """,
        bind_vars={"date": str(day)},
    )
    for doc in cursor:
        user = doc["member"]
        # Create the user's picture folder on Nextcloud (WebDAV MKCOL)...
        nc_path = f"https://nc.lasseedfast.se/remote.php/dav/files/Lasse/profile_pictures/{user}"
        while True:
            try:
                requests.request("MKCOL", nc_path, verify=False, auth=auth)
                break
            except:  # NOTE(review): bare except retries on *any* error — confirm.
                print('Kunde inte skapa', nc_path)
                sleep(5)
        # ...and locally (kept as backup).
        if not os.path.isdir(f"profile_pictures/{user}"):
            os.mkdir(f"profile_pictures/{user}")
        # Extract the fbid part of each stored picture URL.
        pictures = []
        for picture in doc["pictures"]:
            pictures.append(picture[picture.find("fbid=") + 5 :])
        # NOTE(review): rebinding `cursor` shadows the outer cursor; the
        # outer for-loop keeps its own iterator so this works, but it is
        # fragile. `limit 10` caps downloads per member even though the
        # member is then flagged as done — confirm that is intended.
        cursor = db.aql.execute(
            """
            for doc in pictures
            filter doc._key in @list
            limit 10
            return {'_key': doc._key, 'user':doc.user, 'url': doc.src}
            """,
            bind_vars={"list": pictures},
        )
        for picture in cursor:
            while True:
                download_image(picture["url"], picture["user"], picture["_key"])
                sleep(1)
                break  # NOTE(review): always exits after one pass — the while is redundant.
        # Flag the member so the pictures are not downloaded again.
        db.update_document(
            {"_id": "members/" + str(doc["member"]), "pictures_downloaded": True},
            silent=True,
            check_rev=False,
        )
# Kept for reference: one-off backfill that walked the previous 60 days.
# def old_pics():
#     if not os.path.isdir(f'profile_pictures'):
#         os.mkdir(f'profile_pictures')
#     start = date.today()
#     for i in range(1,60):
#         d = start - timedelta(days=i)
#         get_pictures(d.strftime('%Y%m%d'))


if __name__ == '__main__':
    # ArangoDB connection info.
    user_arango = "Pi"
    db_arango = "facebook"
    host_arango = "http://192.168.0.3:8529"
    # Open the ArangoDB connection; the password is asked for interactively.
    pwd = getpass("Arangolösenord för Pi: ")
    db = ArangoClient(hosts=host_arango).db(db_arango, username=user_arango, password=pwd)
    # SECURITY: hard-coded Nextcloud credentials committed to the repo —
    # move to an environment variable or secret store and rotate.
    auth = ("Lasse", "affix-sip-jejune-epigraph-ENTROPY-stupefy1")
    # Mullvad SOCKS5 exits used to spread requests over many IPs.
    servers_mullvad = [
        "se15-wg.socks5.mullvad.net:1080",
        "se17-wg.socks5.mullvad.net:1080",
        "se18-wg.socks5.mullvad.net:1080",
        "se19-wg.socks5.mullvad.net:1080",
        "se21-wg.socks5.mullvad.net:1080",
        "se22-wg.socks5.mullvad.net:1080",
        "se23-wg.socks5.mullvad.net:1080",
        "se3-wg.socks5.mullvad.net:1080",
        "se5-wg.socks5.mullvad.net:1080",
        "se9-wg.socks5.mullvad.net:1080",
        "se10-wg.socks5.mullvad.net:1080",
        "se2-wg.socks5.mullvad.net:1080",
        "se6-wg.socks5.mullvad.net:1080",
        "se7-wg.socks5.mullvad.net:1080",
        "se8-wg.socks5.mullvad.net:1080",
        "se13-wg.socks5.mullvad.net:1080",
        "se14-wg.socks5.mullvad.net:1080",
        "se26-wg.socks5.mullvad.net:1080",
        "se27-wg.socks5.mullvad.net:1080",
        "se28-wg.socks5.mullvad.net:1080",
    ]
    if not os.path.isdir("profile_pictures"):
        os.mkdir("profile_pictures")
    # Poll forever: fetch today's and yesterday's batches every five minutes.
    while True:
        today = date.today().strftime('%Y%m%d')
        get_pictures(today)
        yesterday = date.today() - timedelta(days=1)
        get_pictures(yesterday.strftime('%Y%m%d'))
        sleep(300)

@ -0,0 +1,45 @@
import subprocess
import requests
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from time import sleep
# Reboot the 4G modem via its web UI to force a new external IP, then
# print the IP before and after.
# SECURITY: the modem admin credentials are hard-coded below — move them
# to configuration and rotate.

# Wait until the machine is associated with the modem's WiFi network.
while True:
    wlan = subprocess.Popen(['iwgetid'], stdout=subprocess.PIPE)
    wlan = wlan.communicate()[0].decode()
    if '4G-UFI-5671' in wlan:
        print('Sucess')  # NOTE(review): typo for "Success" in output text.
        break
    else:
        sleep(20)

print('Nuvarande ip:', requests.get('https://api.ipify.org').text)

# Set up selenium browser
options = Options()
options.headless = True
browser = webdriver.Chrome(options=options)

# Login to modem
# NOTE(review): find_element_by_* and switch_to_alert() were removed in
# Selenium 4 — pin selenium<4 or migrate to find_element(By.ID, ...) and
# switch_to.alert. Confirm which selenium version is deployed.
browser.get('http://192.168.100.1/cellweb/login.asp')
sleep(3)
username = browser.find_element_by_id("user_name")
password = browser.find_element_by_id("user_password")
username.send_keys("admin")
password.send_keys("1340asde")

# Go to reboot and accept
browser.find_element_by_xpath("/html/body/section/form/button").click()  # Login
sleep(1)
browser.find_element_by_xpath("/html/body/section/div[2]/div[6]/a").click()  # More
sleep(1)
browser.find_element_by_xpath("/html/body/section[2]/div/div[2]/div/a").click()  # Reboot
sleep(1)
browser.find_element_by_xpath("/html/body/div[4]/div/div/div[2]/div[2]").click()  # Accept
sleep(1)
browser.switch_to_alert().accept()  # Accept again (alert)
browser.close()

# Give the modem time to reboot, then show the new external IP.
sleep(120)
print('Ny ip:', requests.get('https://api.ipify.org').text)

@ -1,16 +1,15 @@
import re
import traceback
import requests
from arangodb import db
from arangodb import db, check_for_picture
from classes import Friend, Picture, Reaction
from config import *
from helpers import sleep_, update_cookie, write_error, _print
from helpers import sleep_, update_cookie, write_error, _print, check_profile_status
def profile_picture_reactions(
profile, user, first_user=False, mode="all"
):
#try:
def profile_picture_reactions(profile, user, first_user=False, mode="all"):
# try:
# Fixa url:er osv
@ -25,25 +24,11 @@ def profile_picture_reactions(
user.url_photos = user.url + "/photos"
# Gå till sidan för profilbilder
profile.browser.open(user.url_photos)
#print(profile.viewing())
profile.open(user.url_photos)
# print(profile.viewing())
sleep_(4)
if "Your Account Has Been Disabled." in profile.viewing():
_print(profile, user, f"{profile.name} blocked\n".upper(), sleeptime=1)
profile.blocked = True
return profile
elif 'It looks like you were misusing this feature by going too fast.' in profile.viewing().text:
profile.blocked = True # Nu tar jag bort dem, kan göras på annat sätt kanske?
return profile
elif "accept all" in profile.viewing().text.lower():
profile.accept_cookies()
profile.browser.open(user.url_photos)
elif profile.viewing().find('title').text.strip() == 'Log in to Facebook | Facebook':
sleep_(5)
profile.login()
sleep_(5)
profile.browser.open(user.url_photos)
profile = check_profile_status(profile, user)
user.name = user.username # Om inte namnet hittas senare
try:
@ -65,13 +50,17 @@ def profile_picture_reactions(
if first_user == True:
_print(profile, user, profile.viewing().prettify())
exit()
_print(profile, user, f"Hämtar reaktioner på profilbilder för {user.name} ({user.username})")
_print(
profile,
user,
f"Hämtar reaktioner på profilbilder för {user.name} ({user.username})",
)
# Hitta länk till olika saker hos användarem, inkl facebook-id
for a in profile.viewing().find_all("a", href=True):
if "Profile pictures" in a.text:
user.url_album = url_bas + a["href"] # Länk till album för profilbulder
user.url_album = url_bas + a["href"] # Länk till album för profilbilder
if "profile_id" in a["href"]:
l = a["href"]
try:
@ -83,6 +72,7 @@ def profile_picture_reactions(
user.url_likes = url_bas + a["href"]
if "About" in a.text:
user.url_about = url_bas + a["href"]
user.id = user.url_about[user.url_about.find('%')+3: user.url_about.rfind('%')]
if "Timeline" in a.text:
user.url_timeline = url_bas + a["href"]
if "Cover photos" in a.text:
@ -114,7 +104,7 @@ def profile_picture_reactions(
user.checked()
user.add_to_db()
_print(profile, user, "Hittar inget album för profilbilder.")
write_error(#fel7
write_error( # fel7
7,
profile,
soup=profile.viewing(),
@ -127,7 +117,7 @@ def profile_picture_reactions(
# Normalfallet där användaren har profilbildsalbum
else:
profile.browser.open(user.url_album)
profile.open(user.url_album)
# Samla alla profilbilder i en lista
url_pics = user.url_other_pictures
@ -139,7 +129,7 @@ def profile_picture_reactions(
user.profile_pictures = len(url_pics)
user.pictures = url_pics
except:
_print(profile, user, 'Hittade inga profilbilder'.upper())
_print(profile, user, "Hittade inga profilbilder".upper())
user.profile_pictures = 0
user.pictures = url_pics
user.checked()
@ -150,8 +140,13 @@ def profile_picture_reactions(
# Gå igenom valda bilder.
for pic in url_pics:
if check_for_picture(pic[pic.find("fbid=") + 5 :]):
continue
# Skriv ut vilken bild som behandlas.
_print(profile, user,
_print(
profile,
user,
f"Bild {url_pics.index(pic) + 1} av {user.profile_pictures}",
end="\r",
)
@ -190,7 +185,7 @@ def check_picture(url_picture, user, profile):
sleep_(5)
try:
profile.browser.open(picture.url)
profile.open(picture.url)
except Exception as e: # Fel3
write_error(
3,
@ -219,9 +214,16 @@ def check_picture(url_picture, user, profile):
user=user,
traceback=traceback.format_exc(),
)
# TODO #3 lägg till fler bilder som kan gås igenom om det är få profilbilder.
# Hämta länkar för bilden att anvrända sen
try:
for img in profile.viewing().find_all('img'):
if 'https://scontent' in img['src']:
picture.src = img['src']
except Exception as e:
pass
# Hämta länkar för bilden att använda sen
# _print(profile, user, profile.viewing().prettify())
for a in profile.viewing().find_all("a", href=True):
if all(
@ -251,7 +253,7 @@ def check_picture(url_picture, user, profile):
# Hämta reaktioner för bilden
sleep_(3)
profile.browser.open(url_reactions)
profile.open(url_reactions)
update_cookie(profile.browser.session.cookies, profile)
@ -285,7 +287,7 @@ def check_picture(url_picture, user, profile):
try:
sleep_(4)
profile.browser.open(url_limit)
profile.open(url_limit)
url_limit = ""
update_cookie(profile.browser.session.cookies, profile)
@ -299,9 +301,19 @@ def check_picture(url_picture, user, profile):
friend.name = friend_html.text
friend.url = friend_html["href"]
if "profile.php" in friend.url:
friend.username = friend.url[friend.url.find("id=") + 3 :]
if "&paipv" in friend.url:
friend.username = friend.url[
friend.url.find("=") + 1 : friend.url.find("&")
]
else:
friend.username = friend.url[friend.url.find("id=") + 3 :]
else:
friend.username = friend.url[friend.url.find("/") + 1 :]
if "?" in friend.url:
friend.username = friend.url[
friend.url.find("/") + 1 : friend.url.find("?")
]
else:
friend.username = friend.url[friend.url.find("/") + 1 :]
reaction = Reaction(user.username, friend.username, picture.id)
for type in ["Love", "Wow", "Like", "Care", "Sad", "Angry", "Haha"]:
@ -309,7 +321,10 @@ def check_picture(url_picture, user, profile):
reaction.type = type
picture.reactions.append(reaction.get_dict())
# Lägg till vännens profil till arrango
friend.add_to_db()
try:
friend.add_to_db()
except:
_print(profile, user, f"Kunde inte lägga till vän {friend.url}")
except AttributeError as e: # Fel1
write_error(
@ -344,3 +359,4 @@ def check_picture(url_picture, user, profile):
traceback=traceback.format_exc(),
)
pass

@ -0,0 +1,557 @@
DE;91.198.137.31:3552;FAIL;
FR;54.36.4.70:61432;188.165.211.29;0:00:00.601284
DE;138.68.82.88:1080;138.68.82.88;0:00:05.222863
DE;172.104.142.154:35819;172.104.142.154;0:00:02.972221
MD;185.14.31.113:443;FAIL;
NL;146.185.132.87:31284;146.185.132.87;0:00:00.541678
UA;46.151.197.254:8080;46.151.197.254;0:00:02.210311
FI;135.181.184.170:54048;135.181.184.170;0:00:00.720659
FR;151.106.34.139:1080;FAIL;
NL;88.202.177.242:1090;FAIL;
UA;95.67.99.99:33871;FAIL;
DE;138.201.5.46:1080;138.201.5.46;0:00:07.487491
DE;159.69.106.103:1080;159.69.106.103;0:00:00.630095
NL;51.15.78.50:1080;51.15.78.50;0:00:00.564266
NL;88.202.177.242:1080;FAIL;
SG;113.77.85.215:1081;149.129.48.241;0:00:03.688375
RU;95.107.37.109:1105;FAIL;
KZ;109.229.161.151:1225;FAIL;
RU;84.22.137.26:9025;84.22.137.26;0:00:09.468929
US;149.28.126.83:1081;149.28.126.83;0:00:01.023434
AR;181.3.58.168:1080;FAIL;
US;67.227.193.162:34496;67.227.193.162;0:00:01.318698
NL;146.185.132.87:44795;146.185.132.87;0:00:02.952634
US;198.58.119.187:50398;69.164.194.35;0:00:01.449008
AR;186.126.79.171:1080;FAIL;
CA;192.252.211.197:14921;FAIL;
CA;192.252.209.155:14455;FAIL;
CZ;89.187.144.153:1080;89.187.144.153;0:00:01.096993
US;209.141.53.246:1080;FAIL;
US;192.111.137.37:18762;FAIL;
CA;192.252.208.67:14287;FAIL;
US;67.55.185.240:1888;FAIL;
NL;142.93.137.235:54866;142.93.137.235;0:00:04.162599
US;192.111.135.18:18301;FAIL;
US;192.111.138.29:4145;FAIL;
SG;45.77.36.30:24574;45.77.36.30;0:00:02.664875
US;70.185.68.133:4145;FAIL;
FR;51.68.134.242:25623;51.68.134.240;0:00:05.998615
FR;193.70.45.126:32821;193.70.45.126;0:00:03.586748
US;98.162.25.7:31653;FAIL;
US;72.223.168.86:57481;FAIL;
DE;213.136.89.190:18461;213.136.89.190;0:00:03.407266
DE;101.53.158.48:9051;FAIL;
PL;5.226.69.12:50477;5.226.69.12;0:00:08.327345
US;98.162.25.29:31679;FAIL;
IN;103.209.64.19:6667;FAIL;
AR;186.126.42.65:1080;200.73.130.62;0:00:11.137412
US;72.221.196.157:35904;FAIL;
US;72.206.181.105:64935;FAIL;
SG;113.77.86.73:1081;149.129.55.120;0:00:02.697133
CA;192.252.214.20:15864;FAIL;
RU;109.72.231.37:1080;109.72.231.37;0:00:01.095943
TR;188.132.179.124:60088;188.132.179.124;0:00:01.228944
AR;181.3.72.8:1080;200.73.132.176;0:00:17.562909
GB;157.245.34.127:32215;157.245.34.127;0:00:06.500380
RU;31.7.232.178:1080;31.7.232.178;0:00:08.192440
US;72.223.168.73:57494;FAIL;
AR;186.126.135.164:1080;200.73.130.62;0:00:14.713391
IN;165.22.220.151:36362;165.22.220.151;0:00:05.533314
US;192.111.137.35:4145;FAIL;
BR;186.126.143.88:1080;FAIL;
BR;181.3.51.12:1080;209.14.2.12;0:00:08.158021
US;104.238.215.49:1080;104.238.215.49;0:00:04.300450
AR;186.126.163.43:1080;FAIL;
BR;181.6.94.90:1080;FAIL;
AR;181.3.23.13:1080;FAIL;
VN;113.160.188.21:1080;113.160.188.21;0:00:05.010119
FI;135.181.184.170:22497;135.181.184.170;0:00:09.929478
FR;51.68.134.247:30204;FAIL;
AR;181.101.2.92:1080;200.73.132.106;0:00:03.141000
BR;181.101.26.136:1080;FAIL;
BR;181.3.71.184:1080;191.252.103.251;0:00:03.371414
AR;181.102.21.228:1080;FAIL;
AR;181.3.37.59:1080;FAIL;
AR;186.126.177.123:1080;FAIL;
AR;186.126.151.29:1080;45.235.98.221;0:00:05.725761
AR;181.3.61.217:1080;FAIL;
AR;181.5.222.133:1080;FAIL;
FR;51.68.134.241:30204;FAIL;
HK;1.65.196.134:1080;1.65.196.134;0:00:04.107338
BR;181.3.8.101:1080;54.232.66.92;0:00:25.527846
UA;91.229.123.191:1080;94.153.23.177;0:00:07.271681
AR;181.3.62.188:1080;FAIL;
KR;222.99.47.68:8888;FAIL;
KR;119.28.73.113:22225;158.247.225.109;0:00:02.975846
AR;181.102.5.177:1080;45.235.99.87;0:00:04.846713
AR;181.101.16.232:1080;FAIL;
AR;181.101.12.108:1080;FAIL;
AR;181.101.38.248:1080;45.235.99.87;0:00:12.370835
AR;181.3.59.102:1080;FAIL;
FR;195.154.178.247:20152;FAIL;
DE;46.101.218.6:24040;46.101.218.6;0:00:02.524995
US;173.236.188.154:7595;173.236.184.102;0:00:07.522997
AR;181.3.4.18:1080;200.69.236.22;0:00:03.333511
CA;181.101.14.230:1080;FAIL;
DK;142.93.245.247:30588;FAIL;
FR;54.36.4.69:61432;188.165.211.29;0:00:00.679880
AR;186.152.120.155:1080;45.235.99.88;0:00:13.682541
IN;27.116.51.181:6667;FAIL;
AR;181.7.201.154:1080;45.235.99.83;0:00:03.619538
FR;51.68.134.245:25623;51.68.134.240;0:00:03.046891
US;192.111.139.165:19402;FAIL;
AR;186.126.140.70:1080;FAIL;
US;184.178.172.5:15303;FAIL;
AR;186.126.25.102:1080;200.73.134.139;0:00:18.534001
US;181.3.66.118:1080;FAIL;
BR;186.126.141.239:1080;177.67.82.171;0:00:20.168977
DE;78.46.200.13:22039;78.46.200.13;0:00:03.381044
BR;186.152.119.220:1080;FAIL;
AR;186.152.33.185:10808;FAIL;
US;181.102.84.53:1080;FAIL;
AR;186.152.31.215:1080;FAIL;
BR;186.152.194.140:1080;FAIL;
US;173.236.184.154:22960;173.236.184.139;0:00:02.895083
FR;137.74.153.106:1080;137.74.153.106;0:00:03.010125
AR;186.126.32.22:1080;138.99.7.145;0:00:07.475672
BR;181.101.11.43:1080;FAIL;
US;72.210.252.134:46164;FAIL;
BR;181.3.56.124:1080;FAIL;
AR;181.101.47.84:1080;FAIL;
CA;181.6.141.73:1080;FAIL;
MD;178.175.139.202:57772;178.175.139.202;0:00:01.611892
PH;210.16.73.82:1080;124.107.231.80;0:00:03.173570
AR;186.126.44.155:1080;200.89.175.133;0:00:08.703594
BR;181.101.60.197:1080;104.41.41.29;0:00:07.245720
KR;125.135.221.94:54557;FAIL;
US;186.126.62.200:1080;FAIL;
GB;178.62.79.115:35580;178.62.79.115;0:00:05.262268
FI;95.216.176.163:1089;95.216.176.163;0:00:09.142730
CA;186.126.21.113:1080;FAIL;
AR;181.3.38.147:1080;181.117.241.51;0:00:04.966959
US;70.166.167.38:57728;FAIL;
AR;181.3.78.111:1080;200.73.131.75;0:00:09.585425
BR;181.5.244.219:1080;FAIL;
FR;51.68.134.240:25623;51.68.134.240;0:00:08.593545
US;181.102.16.72:1080;FAIL;
FR;178.32.47.218:50939;178.32.47.218;0:00:06.439677
US;173.236.189.175:22960;FAIL;
AR;181.7.208.112:1080;FAIL;
IN;103.241.227.110:6667;FAIL;
US;147.135.116.172:53079;147.135.116.172;0:00:02.112520
AR;186.126.64.146:1080;FAIL;
CA;181.0.12.116:1080;FAIL;
US;198.8.94.170:39074;FAIL;
AR;181.3.76.4:1080;FAIL;
AR;181.7.204.60:1080;FAIL;
AR;181.3.28.148:1080;FAIL;
BR;181.3.74.230:1080;45.162.231.55;0:00:13.378087
US;113.73.72.183:1080;FAIL;
US;141.98.134.2:1080;141.98.134.2;0:00:03.583016
CA;192.111.130.5:17002;FAIL;
RU;185.233.202.27:1080;185.233.202.27;0:00:11.702264
DE;173.212.201.250:47492;173.212.201.250;0:00:07.449093
SG;206.189.158.28:7905;206.189.158.28;0:00:08.228267
US;173.236.190.7:7595;173.236.184.102;0:00:05.519787
US;173.236.188.46:22960;173.236.184.139;0:00:05.490614
US;173.236.185.99:22960;173.236.184.139;0:00:09.586001
AR;186.126.73.156:1080;200.73.130.62;0:00:03.150311
GB;157.245.34.127:61851;157.245.34.127;0:00:04.082666
TW;60.169.205.61:1080;FAIL;
BR;181.101.47.97:1080;191.233.232.45;0:00:03.439772
FR;51.68.134.253:25623;51.68.134.240;0:00:08.526576
AR;181.3.16.106:10808;FAIL;
US;173.236.190.93:22960;FAIL;
US;186.126.99.163:10808;FAIL;
AR;186.152.130.181:1080;200.73.138.194;0:00:10.460878
AR;186.152.15.200:1080;FAIL;
AR;181.5.232.149:1080;FAIL;
DE;165.22.17.195:5110;165.22.17.195;0:00:04.337353
FR;51.68.134.244:25623;51.68.134.240;0:00:05.794034
AR;186.126.80.182:1080;FAIL;
SG;206.189.158.28:53176;206.189.158.28;0:00:04.394778
AR;186.126.3.27:1080;FAIL;
AR;186.126.17.42:1080;FAIL;
BR;186.126.159.136:1080;FAIL;
BR;186.126.70.165:1080;54.207.134.244;0:00:07.969362
CL;181.5.217.57:1080;FAIL;
US;66.42.224.229:41679;FAIL;
BR;181.3.2.188:1080;FAIL;
FR;51.68.134.252:25623;51.68.134.240;0:00:03.820479
BR;181.83.226.81:1080;209.14.2.204;0:00:10.407002
US;104.238.212.43:1081;104.238.212.43;0:00:03.010979
BR;186.126.109.207:1080;FAIL;
BR;181.3.39.114:1080;FAIL;
FR;51.68.134.255:22308;51.68.134.240;0:00:05.837994
US;184.178.172.18:15280;FAIL;
FR;51.68.134.247:25623;51.68.134.240;0:00:05.294231
AR;181.3.84.123:1080;45.235.99.87;0:00:07.781855
AR;186.126.51.206:1080;FAIL;
BR;181.83.228.198:1080;FAIL;
AR;186.126.40.168:1080;FAIL;
US;181.0.8.189:1080;FAIL;
AR;181.101.35.11:1080;FAIL;
US;104.238.111.218:57978;104.238.111.218;0:00:06.871360
CA;181.3.20.113:10808;FAIL;
FR;51.75.42.95:25623;51.68.134.240;0:00:04.044253
US;173.236.187.212:22960;173.236.184.139;0:00:03.293691
BR;181.3.65.241:1080;FAIL;
US;173.236.186.231:22960;173.236.184.139;0:00:03.276001
US;165.227.177.113:24586;165.227.177.113;0:00:05.401278
CA;186.126.58.189:10808;51.222.141.137;0:00:04.245833
DE;176.9.160.118:22836;FAIL;
US;74.208.101.185:31200;FAIL;
US;186.126.166.22:1080;FAIL;
AR;181.101.33.157:1080;45.235.99.83;0:00:05.120106
AR;186.126.110.76:1080;FAIL;
US;186.126.170.254:1080;FAIL;
FR;51.68.134.251:25623;51.68.134.240;0:00:06.095322
BR;186.126.74.124:1080;FAIL;
IN;43.224.10.32:6667;FAIL;
US;95.217.132.133:3178;FAIL;
US;157.230.154.211:28030;157.230.154.211;0:00:03.840172
IN;140.238.250.54:1080;140.238.250.54;0:00:04.823383
AR;181.102.134.167:1080;FAIL;
AR;186.126.101.52:1080;FAIL;
CO;181.129.7.202:6699;181.129.7.202;0:00:02.020779
US;186.126.15.241:10808;FAIL;
AR;181.101.8.41:1080;FAIL;
AR;181.0.0.18:1080;FAIL;
SG;181.3.58.52:1080;FAIL;
AR;181.101.9.46:1080;FAIL;
SG;129.226.196.49:41789;129.226.196.49;0:00:02.896387
CA;192.111.129.145:16894;FAIL;
AR;181.3.51.132:1080;FAIL;
AR;181.3.10.74:1080;FAIL;
BR;181.3.9.61:1080;FAIL;
AR;181.3.49.78:1080;FAIL;
GB;181.101.52.44:1080;FAIL;
US;69.61.200.104:36181;FAIL;
BR;186.126.177.239:1080;FAIL;
BR;186.152.122.42:1080;FAIL;
CL;186.126.71.210:1080;170.239.87.87;0:00:10.699452
US;184.178.172.13:15311;FAIL;
BD;103.85.232.146:1080;FAIL;
US;161.35.137.49:28005;FAIL;
AR;181.101.45.131:1080;FAIL;
US;70.166.167.55:57745;FAIL;
AR;181.3.57.187:1080;FAIL;
NL;188.166.104.152:6683;FAIL;
US;95.217.132.133:3038;FAIL;
IN;103.241.227.98:6667;FAIL;
AR;181.102.47.46:1080;FAIL;
PL;5.226.69.12:41284;5.226.69.12;0:00:05.842418
AR;186.126.139.224:10808;FAIL;
AR;181.7.197.13:1080;FAIL;
AR;186.152.16.246:1080;FAIL;
US;113.73.72.177:1080;FAIL;
US;72.221.164.34:60671;FAIL;
BR;181.3.68.127:1080;FAIL;
US;173.236.186.236:22960;173.236.184.139;0:00:03.567567
AR;186.126.167.68:1080;FAIL;
IN;103.240.168.138:6667;FAIL;
US;104.248.0.141:17074;FAIL;
AR;181.5.219.126:1080;FAIL;
CA;186.152.115.63:1080;FAIL;
US;132.148.129.108:34289;132.148.129.108;0:00:06.245162
AR;186.126.138.242:1080;FAIL;
AR;181.102.16.55:1080;FAIL;
US;104.238.215.49:1081;104.238.212.43;0:00:07.598953
US;147.135.116.172:26522;147.135.116.172;0:00:03.047146
GB;178.62.79.49:51591;178.62.79.49;0:00:04.168867
AR;181.3.39.27:1080;FAIL;
BR;181.6.149.14:1080;201.76.56.248;0:00:10.817129
IN;27.116.51.85:6667;FAIL;
IN;103.216.82.22:6667;FAIL;
SG;206.189.158.28:44880;206.189.158.28;0:00:10.378409
SK;109.74.144.149:22743;109.74.144.149;0:00:07.030135
FR;51.68.134.241:25623;51.68.134.240;0:00:08.225295
AR;181.6.8.208:10808;200.73.132.2;0:00:14.850405
AR;186.152.4.160:1080;FAIL;
AR;181.3.46.25:1080;FAIL;
US;208.102.51.6:58208;FAIL;
AR;181.101.53.240:1080;200.73.132.115;0:00:09.802936
IN;103.251.225.16:6667;FAIL;
US;173.236.185.19:22960;FAIL;
FR;51.68.134.250:25623;FAIL;
US;50.62.35.16:41644;50.62.35.16;0:00:02.304961
BR;186.126.129.193:1080;FAIL;
US;166.62.85.224:13954;166.62.85.224;0:00:05.123121
US;47.100.88.171:20900;FAIL;
US;104.238.111.167:14416;FAIL;
US;64.34.217.33:40741;FAIL;
CA;192.252.215.5:16137;FAIL;
US;173.236.184.139:22960;173.236.184.139;0:00:04.575732
DE;46.101.218.6:39749;46.101.218.6;0:00:06.758081
AR;181.101.2.18:1080;FAIL;
US;66.228.36.18:61852;FAIL;
DE;173.212.201.250:23686;FAIL;
IN;43.224.10.35:6667;FAIL;
US;173.236.185.96:22960;FAIL;
AR;181.3.37.213:1080;200.73.130.62;0:00:08.508165
AR;181.3.49.28:1080;FAIL;
US;173.236.191.119:22960;173.236.184.139;0:00:08.729647
BR;181.3.67.154:1080;FAIL;
US;104.248.0.141:57391;104.248.0.141;0:00:03.865643
AR;186.152.149.227:1080;200.73.130.62;0:00:03.071001
CA;186.126.82.88:1080;FAIL;
AR;186.126.151.73:1080;200.73.130.62;0:00:05.884195
DE;173.212.201.250:54349;FAIL;
AR;181.0.5.196:1080;FAIL;
NL;142.93.137.235:6191;142.93.137.235;0:00:04.257492
SG;206.189.158.28:4454;FAIL;
US;157.230.154.211:32381;157.230.154.211;0:00:10.416110
AR;186.126.49.178:1080;FAIL;
CA;181.3.40.39:1080;FAIL;
US;95.217.132.133:3598;FAIL;
FR;51.68.134.249:30204;FAIL;
US;104.238.111.167:53308;FAIL;
DE;171.221.35.24:1080;FAIL;
NL;188.166.104.152:44924;FAIL;
SG;129.226.196.49:13181;129.226.196.49;0:00:13.210261
AR;181.3.55.161:1080;FAIL;
HK;101.132.120.74:1080;FAIL;
SE;95.217.132.133:3508;FAIL;
CA;186.126.129.149:1080;51.79.52.142;0:00:08.184306
AR;181.101.19.224:1080;FAIL;
AR;181.3.7.234:1080;FAIL;
AR;181.6.28.131:1080;FAIL;
BR;181.6.114.165:1080;209.14.2.57;0:00:18.254419
DE;173.212.201.250:33464;FAIL;
NL;146.185.132.87:55158;FAIL;
HK;150.109.148.234:1234;FAIL;
HU;85.90.161.117:2021;85.90.161.117;0:00:02.127226
AR;181.5.201.229:1080;200.73.132.119;0:00:15.974410
US;72.49.49.11:31034;FAIL;
US;97.74.6.64:45683;FAIL;
US;186.126.95.145:10808;FAIL;
DE;54.38.157.22:9999;54.38.157.22;0:00:08.000757
FR;51.68.134.250:30204;FAIL;
BR;186.126.89.33:1080;FAIL;
FR;51.68.134.242:30204;FAIL;
US;166.62.85.184:42828;166.62.85.184;0:00:04.136324
US;173.236.186.172:22960;173.236.184.139;0:00:04.403408
BR;181.83.228.40:1080;FAIL;
US;165.22.13.68:15576;165.22.13.68;0:00:04.907470
US;104.248.48.169:30588;FAIL;
SG;206.189.92.74:38888;FAIL;
AR;181.3.63.142:1080;FAIL;
AR;186.126.87.224:1080;FAIL;
BR;181.3.46.205:1080;FAIL;
CA;181.3.16.31:1080;FAIL;
SG;45.76.187.35:36600;FAIL;
US;173.236.186.230:22960;173.236.184.139;0:00:03.272663
RU;171.221.44.248:1080;FAIL;
US;181.7.201.96:1080;FAIL;
US;147.135.116.172:55546;147.135.116.172;0:00:05.626279
AR;181.3.29.244:1080;FAIL;
BR;186.152.147.113:1080;FAIL;
AR;181.102.81.144:1080;FAIL;
US;104.248.0.141:30247;104.248.0.141;0:00:01.176155
US;104.238.212.43:1080;104.238.215.49;0:00:05.161615
BR;186.152.26.161:1080;191.252.102.212;0:00:09.528139
US;143.110.153.171:3240;FAIL;
PS;213.6.61.150:9999;FAIL;
IN;43.224.10.30:6667;FAIL;
AR;181.101.4.206:1080;FAIL;
SG;206.189.158.28:48500;FAIL;
FR;54.36.246.232:11380;54.36.246.232;0:00:07.263434
DE;213.136.89.190:4374;FAIL;
SG;206.189.158.28:64028;FAIL;
CL;186.126.131.207:1080;FAIL;
IN;43.224.10.36:6667;43.224.10.36;0:00:08.870324
CA;181.3.93.39:1080;FAIL;
NL;142.93.138.78:63421;142.93.138.78;0:00:02.779517
NL;146.185.132.87:49041;146.185.132.87;0:00:08.279986
DE;95.217.132.133:3008;FAIL;
BR;181.101.52.45:1080;FAIL;
US;192.169.201.24:51100;FAIL;
BR;181.3.24.19:1080;FAIL;
AR;186.126.15.57:1080;FAIL;
PL;5.226.69.12:42717;FAIL;
DE;213.136.89.190:13492;FAIL;
BR;181.102.141.53:1080;FAIL;
US;74.208.102.54:31200;74.208.102.54;0:00:10.078336
GB;95.217.132.133:3273;FAIL;
CA;159.203.42.128:28393;159.203.42.128;0:00:04.454060
BR;181.101.29.81:1080;FAIL;
CA;181.3.84.102:1080;FAIL;
US;173.236.189.156:7595;173.236.184.102;0:00:06.332096
FR;51.68.134.246:30204;FAIL;
BR;181.6.24.228:1080;FAIL;
US;95.217.132.133:3503;FAIL;
AR;186.126.54.106:1080;FAIL;
SG;206.189.158.28:48751;FAIL;
NL;178.62.136.189:51423;178.62.136.189;0:00:06.756095
US;173.236.187.42:22960;173.236.184.139;0:00:07.256691
IN;43.224.10.46:6667;FAIL;
US;206.189.231.206:2106;FAIL;
SG;95.217.132.133:3286;FAIL;
SG;129.226.196.49:22157;129.226.196.49;0:00:09.336891
US;173.236.186.241:22960;173.236.184.139;0:00:02.345419
HK;119.28.81.177:20412;FAIL;
RU;31.25.243.40:9432;FAIL;
CA;181.3.65.57:1080;51.222.13.156;0:00:06.097943
AR;181.0.16.160:1080;FAIL;
UA;31.128.248.2:1080;FAIL;
HK;36.150.108.65:1080;FAIL;
RU;31.25.243.40:9159;FAIL;
US;181.0.26.16:1080;FAIL;
CA;181.6.61.241:1080;FAIL;
FR;51.68.134.243:25623;51.68.134.240;0:00:02.797034
BR;181.3.56.31:1080;20.195.214.142;0:00:05.865545
US;147.135.116.172:47283;147.135.116.172;0:00:07.138716
SG;113.77.87.43:1081;FAIL;
FR;51.68.134.255:25623;51.68.134.240;0:00:02.196854
IN;103.216.82.37:6667;103.216.82.37;0:00:04.271719
HK;223.199.179.145:1080;FAIL;
US;104.238.111.167:29182;104.238.111.167;0:00:07.471943
GB;46.101.56.138:33232;FAIL;
DE;213.136.89.190:51808;213.136.89.190;0:00:01.532093
NL;142.93.137.235:1429;142.93.137.235;0:00:04.408165
BR;181.5.210.85:1080;FAIL;
US;67.227.193.162:24595;67.227.193.162;0:00:07.794617
FR;51.68.134.248:25623;51.68.134.240;0:00:07.714408
HK;153.37.113.125:1080;42.3.24.58;0:00:03.530263
US;104.248.0.141:23668;104.248.0.141;0:00:01.404311
AR;186.126.84.156:1080;200.73.128.105;0:00:15.717142
PH;210.16.73.81:1080;FAIL;
FR;51.68.134.252:30204;FAIL;
CA;181.5.242.212:1080;FAIL;
AR;181.6.14.34:1080;FAIL;
NL;146.185.132.87:59746;FAIL;
SG;206.189.158.28:15615;FAIL;
GB;159.65.26.54:34787;159.65.26.54;0:00:07.312364
FR;51.68.134.254:25623;51.68.134.240;0:00:09.785792
SG;206.189.158.28:11007;FAIL;
AR;186.152.26.173:1080;FAIL;
US;206.189.231.206:53323;FAIL;
US;192.169.201.24:7495;FAIL;
AR;181.101.57.210:1080;FAIL;
US;173.236.184.50:7595;FAIL;
US;181.7.211.6:1080;FAIL;
AR;186.126.80.109:1080;FAIL;
CA;181.3.67.17:1080;FAIL;
US;165.22.13.68:25327;165.22.13.68;0:00:06.029895
CA;159.203.42.128:47524;159.203.42.128;0:00:09.931594
AR;181.101.57.64:1080;200.73.133.154;0:00:12.503640
BR;181.6.134.15:1080;FAIL;
AR;181.6.35.81:1080;FAIL;
US;173.236.186.228:22960;FAIL;
CA;181.102.111.148:1080;FAIL;
US;181.3.39.201:1080;FAIL;
DE;95.217.132.133:3412;FAIL;
US;206.189.231.206:50825;206.189.231.206;0:00:01.618712
SG;206.189.158.28:47419;FAIL;
DE;45.149.76.184:9051;FAIL;
GB;159.65.26.54:2975;FAIL;
US;64.34.216.68:40741;64.34.205.58;0:00:13.192013
US;173.236.188.107:7595;173.236.184.102;0:00:03.604567
US;166.62.85.224:42790;FAIL;
DE;181.101.10.10:1080;78.47.73.135;0:00:20.253722
RU;95.107.37.109:3109;85.26.186.44;0:00:04.610048
AR;181.3.29.168:1080;FAIL;
AR;181.6.128.215:1080;FAIL;
US;95.217.132.133:3132;FAIL;
AR;186.126.120.70:1080;FAIL;
UA;80.73.9.238:1080;FAIL;
IN;43.224.10.42:6667;43.224.10.42;0:00:12.005869
US;206.189.180.62:7934;FAIL;
AR;181.3.52.116:1080;FAIL;
AR;181.3.91.214:1080;FAIL;
DE;213.136.89.190:56844;FAIL;
BR;181.7.198.151:1080;191.252.113.106;0:00:15.269279
US;104.248.0.141:54251;FAIL;
GB;176.58.100.26:27016;FAIL;
HK;113.240.216.243:1080;FAIL;
AR;186.126.66.41:1080;FAIL;
US;173.236.189.250:7595;FAIL;
BR;181.5.230.16:1080;191.252.113.106;0:00:19.073131
US;50.62.35.16:29643;FAIL;
IN;103.21.163.76:6667;103.21.163.76;0:00:08.463147
DK;65.21.49.222:9174;FAIL;
US;104.238.97.215:7772;FAIL;
AR;181.3.68.52:1080;FAIL;
AR;186.126.92.77:1080;FAIL;
US;95.217.132.133:3141;FAIL;
BR;186.126.168.161:1080;FAIL;
DE;46.4.156.212:18588;FAIL;
SG;206.189.158.28:7476;FAIL;
AR;181.6.114.157:1080;200.73.132.187;0:00:13.969104
US;181.102.141.210:1080;FAIL;
BR;181.5.212.118:1080;FAIL;
SG;45.76.187.35:44560;45.76.187.35;0:00:09.819446
AR;186.152.150.124:1080;FAIL;
AR;186.126.141.216:1080;FAIL;
CA;186.152.114.192:1080;FAIL;
US;173.236.191.150:22960;173.236.184.139;0:00:09.824398
AR;181.7.207.196:1080;FAIL;
JP;138.91.19.96:1953;138.91.19.96;0:00:12.281648
CL;186.126.48.110:1080;FAIL;
US;74.208.101.185:44614;74.208.102.54;0:00:10.538888
AR;181.101.53.210:1080;FAIL;
US;65.21.49.222:9270;FAIL;
US;173.236.189.19:22960;FAIL;
US;95.217.132.133:3137;FAIL;
AR;186.126.42.157:1080;FAIL;
US;173.236.189.188:22960;173.236.184.139;0:00:05.774545
US;8.210.163.246:50001;FAIL;
DE;213.136.89.190:5136;FAIL;
US;173.236.186.235:22960;173.236.184.139;0:00:04.093197
AR;186.126.176.41:1080;FAIL;
US;173.236.189.191:22960;173.236.184.139;0:00:02.491511
US;173.236.188.227:7595;FAIL;
SG;206.189.158.28:21471;206.189.158.28;0:00:07.368676
US;95.217.132.133:3463;FAIL;
US;173.236.186.1:22960;FAIL;
BR;186.126.101.194:1080;FAIL;
AR;181.101.48.228:1080;FAIL;
US;95.217.132.133:3443;FAIL;
HK;119.28.81.177:59430;FAIL;
AR;181.3.27.242:10808;FAIL;
AR;181.0.30.128:1080;FAIL;
US;173.236.186.167:22960;173.236.184.139;0:00:06.491244
PL;5.226.69.12:46975;5.226.69.12;0:00:07.836800
NL;142.93.137.235:38902;142.93.137.235;0:00:02.734874
US;173.236.188.12:7595;FAIL;
DE;213.136.89.190:52010;FAIL;
US;173.236.188.156:7595;FAIL;
BR;181.3.36.182:1080;FAIL;
FR;51.75.42.92:25623;51.68.134.240;0:00:09.193243
US;173.236.189.132:22960;173.236.184.139;0:00:03.395630
US;173.236.185.29:22960;FAIL;
AR;186.126.50.32:1080;FAIL;
RU;31.25.243.40:9261;FAIL;
IR;5.56.134.237:45698;5.56.134.237;0:00:02.117181
ID;103.224.103.116:1080;FAIL;
CN;110.90.223.72:57114;FAIL;
CN;59.61.160.63:16790;FAIL;
CN;119.187.146.163:1080;119.187.146.163;0:00:03.103184
CN;59.61.160.179:16790;59.61.160.179;0:00:02.662718
CN;113.123.0.217:1080;FAIL;
CN;111.1.36.135:9053;115.238.101.42;0:00:02.840302
CN;111.1.36.132:9053;115.238.101.39;0:00:04.400966
CN;3.131.207.170:11098;FAIL;
CN;117.174.160.105:1080;117.174.160.105;0:00:05.121497
CN;36.27.223.80:35880;120.33.231.36;0:00:05.118523
CN;60.168.25.143:4216;FAIL;
CN;47.104.16.8:6667;FAIL;
CN;114.236.90.5:1080;FAIL;
ID;139.255.89.4:1080;139.255.89.2;0:00:03.779929
CN;111.225.153.226:57114;FAIL;
CN;134.175.90.111:8889;FAIL;
CN;111.1.36.132:9055;FAIL;
CN;121.206.250.10:57114;121.206.250.10;0:00:02.844775
CN;42.193.148.214:1080;FAIL;
CN;111.1.36.134:9053;115.238.101.41;0:00:03.048803
CN;39.96.175.55:1080;FAIL;
CN;47.92.252.178:3129;FAIL;
CN;122.152.219.54:57164;122.152.219.54;0:00:06.862155
ID;36.89.86.49:56845;36.89.86.49;0:00:03.790392
CN;36.27.223.80:35101;FAIL;
CN;36.27.223.80:34638;106.114.146.84;0:00:05.630091
CN;218.64.122.99:7302;218.64.122.99;0:00:09.361461
ID;36.94.126.50:1080;36.94.126.50;0:00:05.162022
CN;47.100.19.147:3129;47.100.19.147;0:00:11.339600
CN;122.152.219.54:1749;FAIL;
CN;59.61.160.153:16790;59.61.160.153;0:00:08.683302
unable to load file from base commit

@ -0,0 +1,23 @@
from arango import ArangoClient
from getpass import getpass
from config import *
from time import sleep

# Connect to ArangoDB, retrying up to five times on a failed login.
# The password is read from a local file when one is present (e.g. when
# the script runs on the development machine); otherwise the user is
# prompted for it.
pwd = None  # must exist before the loop: the first pass may hit FileNotFoundError
for attempt in range(6):
    if attempt == 5:
        # Five failed attempts: give up.
        exit()
    try:
        # On the dev machine the password lives in a file one level up.
        with open("../password_arango.txt") as f:
            pwd = f.readline().strip()  # drop the trailing newline, if any
    except FileNotFoundError:
        if pwd is None:
            pwd = getpass(f'Lösenord för {user_arango}: ')
    try:
        db = ArangoClient(hosts=host_arango).db(
            db_arango, username=user_arango, password=pwd
        )
        db.collection('members').random()  # cheap call to validate the credentials
        break
    except Exception:
        print("Fel lösenord.")
        pwd = None  # discard the bad password so the next attempt re-reads/re-prompts
        sleep(1)

@ -0,0 +1,131 @@
class Profile:
    """A Facebook account used for scraping, wrapping a RoboBrowser session."""

    def __init__(self, profile, container, proxieservers):
        """Creates a new profile to do searches with.

        Args:
            profile (dict): Document fetched from database.
            container (str): Docker container that runs the script.
            proxieservers: Proxy servers, kept so a replacement profile
                can be fetched if this one gets blocked.
        """
        self.doc = profile
        # Account credentials
        self.name = self.doc["name"].strip()
        self.email = self.doc["email"]
        self.pwd = self.doc["pwd"]
        self.server = self.doc["server"]
        self.cookie = self.doc["cookie"]
        self.useragent = self.doc["useragent"]
        self.proxieservers = proxieservers
        self.blocked = False
        self.container = str(container)
        self.users_checked = 0
        # Configure proxies on the HTTP session
        session = requests.Session()
        session.proxies = self.doc['proxies']
        # Start the browser
        user_agent = self.useragent
        self.browser = RoboBrowser(
            session=session, user_agent=user_agent, history=False, parser="lxml"
        )
        # Prefer a cookie jar pickled to disk; fall back to the cookie stored
        # in the database document; otherwise start logged out.
        try:
            self.browser.session.cookies = pickle.load(
                open("data/cookie_{}.pkl".format(self.name), "rb")
            )
            self.logged_in = True
        except:
            try:
                self.browser.session.cookies.update(self.cookie)
                self.logged_in = True
            except:
                self.logged_in = False

    def update_time(self):
        """Update the profile document in Arango with the current timestamp."""
        self.doc["in_use"] = nowstamp()
        db.update_document(self.doc, check_rev=False)

    def viewing(self):
        """Return the current browser page as parsed HTML (soup)."""
        return self.browser.parsed

    def open(self, url):
        """Open *url* in the browser, retrying on failure.

        After five consecutive failures the profile is treated as dead:
        it is archived in 'blocked_profiles' (when the connection was
        refused), removed from the database, and this object is
        re-initialised in place with a freshly fetched profile.
        """
        n = 0
        while True:
            n += 1
            sleep(1)
            try:
                self.browser.open(url)
                # Skip Facebook's "add phone number" wizard if it appears.
                # str(...): 'in' on the soup object only matches direct
                # children, not a substring of the page HTML.
                if '/a/nux/wizard/nav.php?step=phone&amp;skip' in str(self.viewing()):
                    self.browser.open(url_bas + '/a/nux/wizard/nav.php?step=phone&amp;skip')
                break
            except Exception as e:
                print(e)
                print(n)
                _print(self, None, f'Kunde inte öppna url {url}')
                if n == 5:
                    # str(e): membership test on the exception object itself
                    # raises TypeError.
                    if 'Connection refused' in str(e):
                        self.doc['e'] = str(e)  # store text, not the exception object
                        db.insert_document('blocked_profiles', self.doc)
                    n = 0
                    from arangodb import get_profile, remove_profile
                    # Remove the old profile from the database and replace this
                    # object's state with the content of a new profile.
                    remove_profile(self)
                    # __init__ takes three arguments; proxieservers must be
                    # passed along or the re-init raises TypeError.
                    self.__init__(
                        get_profile(self.proxieservers), self.container, self.proxieservers
                    )
                    _print(self, None, f'Ny profil hämtad {self.email}')
                    self.update_time()
                else:
                    sleep(40)

    def accept_cookies(self):
        """Accept Facebook's cookie consent page for this profile."""
        self.browser.open("https://mbasic.facebook.com")
        soup = BeautifulSoup(str(self.browser.parsed), "lxml")
        if "accept all" not in soup.text.lower():
            sleep_(2)
            cookie_accept_url = "https://mbasic.facebook.com/cookie/consent-page"
            self.browser.open(cookie_accept_url)
            sleep_(2)
            try:
                # Submit the consent form and persist the refreshed cookie jar.
                form = self.browser.get_form()
                self.browser.submit_form(form)
                _print(self, None, f"Accepterade cookies för {self.name}")
                sleep_(2)
                update_cookie(self.browser.session.cookies, self)
            except:
                try:
                    write_error(12, self, soup=self.browser.parsed)
                except:
                    pass
                _print(self, None, f"Accepterade inte cookies för {self.name}")

    def login(self):
        """Log this profile in on Facebook (mbasic), if not already logged in."""
        print("Loggar in {}".format(self.name))
        # Go to the login page
        self.browser.open("https://mbasic.facebook.com/login")
        # Check whether the browser is already logged in
        soup = BeautifulSoup(str(self.browser.parsed), "lxml")
        if "log out" in soup.text.lower():
            print("Redan inloggad.")
        try:
            # Find and fill in the login form
            form = self.browser.get_form(id="login_form")
            form["email"].value = self.email
            form["pass"].value = self.pwd
            self.browser.submit_form(form, submit=form["login"])
            # Wait a little and refresh the cookie
            print("Loggade in.")
            sleep_(2)
            self.open(url_bas)
            sleep_(2)
        except TypeError:
            # get_form returned None (no login form on the page)
            try:
                write_error(11, self, soup=soup, profile=self.name)
            except:
                pass

@ -0,0 +1,15 @@
import subprocess
import requests
from time import sleep

# Cycle the 'integrity' WireGuard tunnel forever, logging the public IP
# observed through each fresh connection to ip.txt (one IP per line).

# Make sure the tunnel is down before the first iteration.
# NOTE(review): only this initial call uses sudo — presumably the loop
# itself runs with sufficient privileges; confirm against deployment.
subprocess.run(['sudo', 'wg-quick', 'down', 'integrity'])
with open('ip.txt', 'a+') as f:
    while True:
        subprocess.run(['wg-quick', 'up', 'integrity'])
        sleep(5)
        try:
            # Timeout so a dead tunnel cannot hang the loop forever.
            ip = requests.get('https://api.ipify.org', timeout=30).text
            print(ip)
            f.write(f'{ip}\n')
            f.flush()  # persist each line immediately; the loop never exits
        except requests.RequestException as e:
            # Keep cycling even if one lookup fails; the tunnel is still
            # torn down below before the next iteration.
            print(e)
        subprocess.run(['wg-quick', 'down', 'integrity'])
        sleep(5)

@ -6,7 +6,7 @@ idna==2.10
lxml==4.6.2
pycparser==2.20
PyJWT==2.0.1
PyNaCl==1.4.0
#PyNaCl==1.4.0
PySocks==1.7.1
python-arango==7.1.0
requests==2.25.1

@ -5,8 +5,12 @@
},
{
"path": "facebook"
},
{
"path": "../mrkoll"
}
],
"settings": {
"python.pythonPath": "/Users/Lasse/.pyenv/versions/3.9.5/bin/python"
}
}
Loading…
Cancel
Save