commit
57dc486798
26 changed files with 2525 additions and 547 deletions
@ -0,0 +1,19 @@ |
||||
|
||||
FROM python:3.8 |
||||
|
||||
WORKDIR / |
||||
|
||||
COPY requirements.txt . |
||||
|
||||
RUN pip install -r requirements.txt |
||||
|
||||
ADD . . |
||||
|
||||
ENTRYPOINT [ "python", "facebook/__main__.py", "-p free" ] |
||||
|
||||
CMD ["",""] |
||||
|
||||
# BUILD: |
||||
# docker buildx create --use |
||||
#docker buildx build --file docker/free/Dockerfile --platform linux/arm -t l3224/fb-scraper:free --push . |
||||
|
||||
@ -0,0 +1,14 @@ |
||||
|
||||
FROM python:3.8 |
||||
|
||||
WORKDIR / |
||||
|
||||
COPY requirements.txt . |
||||
|
||||
RUN pip install -r requirements.txt |
||||
|
||||
ADD . . |
||||
|
||||
ENTRYPOINT [ "python", "facebook/mrkoll.py" ] |
||||
|
||||
# docker buildx build --file docker/mrkoll/Dockerfile --platform linux/arm -t l3224/fb-scraper:mrkoll --push . |
||||
@ -1,221 +1,227 @@ |
||||
import random |
||||
import traceback |
||||
from getopt import GetoptError, getopt |
||||
from sys import argv, exit |
||||
from getopt import getopt |
||||
from sys import argv |
||||
from time import sleep |
||||
from subprocess import check_output |
||||
from re import split |
||||
from socket import gethostname |
||||
|
||||
from arangodb import db, write_report, backup, report_blocked, get_profile, remove_profile, checked_members, friends_of_user |
||||
from classes import Profile, User |
||||
from helpers import sleep_, write_error, _print |
||||
from scrapers import profile_picture_reactions |
||||
|
||||
|
||||
def finish(): |
||||
""" Avslutar: skriver rapport och gör profilerna oanvända """ |
||||
for profile in profiles: |
||||
profile.unused() |
||||
write_report(users, list(all_pictures.difference(all_pictures_start))) |
||||
exit() |
||||
from datetime import datetime |
||||
from config import set_pwd |
||||
from random import randint |
||||
|
||||
if __name__ == "__main__": |
||||
print() |
||||
|
||||
|
||||
if gethostname() not in ['macbook.local']: # Lägg till för studiodatorn |
||||
# Hämta namn för containern där skriptet körs |
||||
try: |
||||
containers = check_output(['docker', 'container', 'ls']).decode() |
||||
container = split('\W\W+', containers.split('\n')[1])[-1] |
||||
except FileNotFoundError: |
||||
pass |
||||
else: |
||||
container_name = 'macbook' |
||||
proxieservers = 'mullvad' |
||||
|
||||
# Argument och alternativ |
||||
|
||||
# Variabler som kan ändras |
||||
url_other_pictures = [] # Fylls eventuellt på |
||||
test = False |
||||
write = True |
||||
mode = 'all' |
||||
pwd = None |
||||
proxieservers = 'mullvad' |
||||
|
||||
argv = argv[1:] |
||||
try: |
||||
opts, args = getopt(argv, "bm:u:o:", ['backup=',"mode=", "users=", "other="]) |
||||
|
||||
opts, args = getopt(argv, "bim:u:o:p:wl:", ["backup", "images", "mode=", "user=", "other=", "profiles=", "write", "password="]) |
||||
|
||||
for o, a in opts: |
||||
# mode_nr används för hur ofta profile ska roteras |
||||
print(o) |
||||
if o in ['-l', "--password"]: |
||||
pwd = a.strip() |
||||
|
||||
set_pwd(pwd) |
||||
|
||||
# Importera andra moduler |
||||
from config import url_bas |
||||
from arangodb import ( |
||||
blocked_profile, |
||||
new_profile, |
||||
backup, |
||||
get_user, |
||||
check_for_user, |
||||
friends_of_user, |
||||
) |
||||
from classes import Profile, User |
||||
from helpers import sleep_, write_error, _print, check_profile_status, update_cookie |
||||
from scrapers import profile_picture_reactions |
||||
|
||||
for o, a in opts: |
||||
|
||||
# Bestäm vilka profiler/proxies som ska användas |
||||
if o in ['-p', '--profiles']: |
||||
proxieservers = a.strip() |
||||
print(f'Proxieservers: {proxieservers}') |
||||
|
||||
# Bestäm mode |
||||
if o in ["-m", "--mode"]: |
||||
mode = a |
||||
if mode == 'single': |
||||
mode = a.strip() |
||||
if mode == "single": |
||||
mode_nr = 1.7 |
||||
elif mode == 'few': |
||||
elif mode == "few": |
||||
mode_nr = 1.4 |
||||
elif mode == 'force': |
||||
mode_nr = 1 |
||||
else: |
||||
mode = 'all' |
||||
elif mode == "solo": |
||||
mode_nr = 1.4 |
||||
elif mode == "force": |
||||
mode_nr = 1 |
||||
|
||||
for o, a in opts: |
||||
# Bestäm user |
||||
if o in ["-u", "--user"]: |
||||
try: |
||||
users = [ |
||||
User(str(i).strip(), mode) |
||||
for i in [(str(i).strip()) for i in a.split(",")] |
||||
] |
||||
except StopIteration: |
||||
raise Exception |
||||
if a == 'test': # För att testa profiler i profiles_test |
||||
test = True |
||||
container = str(a.strip()) |
||||
if all([a.strip()[:4] == "leak", len(a) < 7]) or a == 'test': |
||||
sleep(randint(0, 40)/10) # För att docker service inte ska gå vidare exakt samtidigt |
||||
lookups = "leak_lookups" |
||||
userdoc = get_user(collection=lookups) |
||||
elif a.strip()[:7] == "lookups": |
||||
lookups = "lookups" |
||||
userdoc = get_user(collection=lookups) |
||||
if 'other' in userdoc: |
||||
url_other_pictures = userdoc['other'] |
||||
else: |
||||
url_other_pictures = [] |
||||
elif a == 'test': |
||||
lookups = "leak_lookups" |
||||
userdoc = get_user(collection=lookups) |
||||
else: |
||||
lookups = "lookups" |
||||
userdoc = {'_key': a} |
||||
|
||||
if o in ["-o", "--other"]: |
||||
url_other_picture = a |
||||
if o in ['-b', '--backup']: |
||||
while True: |
||||
url_other_pictures = a.split(",") |
||||
if o in ["-b", "--backup"]: |
||||
backup(db) |
||||
sleep(21600) |
||||
if o in ['-w', "--write"]: |
||||
write = False |
||||
|
||||
|
||||
if "users" not in globals(): |
||||
users = [ |
||||
User(str(i).strip(), mode) |
||||
for i in input("Vem/vilka vill du kolla bilder för? ").split(",") |
||||
] |
||||
if 'userdoc' not in globals(): |
||||
lookups = "lookups" |
||||
userdoc = {'_key': str(input("Vem/vilka vill du kolla bilder för? ")).strip()} |
||||
|
||||
except GetoptError: |
||||
users = [ |
||||
User(str(i).strip(), mode) |
||||
for i in input("Vem/vilka vill du kolla bilder för? ").split(",") |
||||
] |
||||
|
||||
mode = input("Söka efter alla, första/sida eller första? (all, few, single)? ").lower().strip() |
||||
if mode == '': |
||||
mode = 'all' |
||||
print('Mode:', mode) |
||||
print('Write:', write) |
||||
|
||||
if "url_other_picture" in globals(): |
||||
users[0].url_other_picture = url_other_picture[url_other_picture.find('facebook.com') + 12:] |
||||
# Hämta en användare att kolla upp |
||||
user = User(str(userdoc['_key']).strip(), mode, other_pictures=url_other_pictures) |
||||
|
||||
print("Kollar profilbilder för:") |
||||
for user in users: |
||||
print("-", user.username) |
||||
print() |
||||
if "url_other_pictures" in globals(): |
||||
l = [] |
||||
for url in url_other_pictures: |
||||
l.append(url[url.find("facebook.com") + 12 :]) |
||||
user.url_other_pictures = l |
||||
|
||||
if 'container' not in globals(): |
||||
usernames = [user.username for user in users] |
||||
if len(usernames) == 1: |
||||
container = usernames[0] |
||||
else: |
||||
container = '-'.join(usernames) |
||||
|
||||
# Skapa tre olika profiler att besöka Facebook med |
||||
profiles = [] |
||||
for i in range(0, 3): |
||||
doc = get_profile() |
||||
profile = Profile(doc, container) |
||||
profile.browser.open("https://api.ipify.org") |
||||
print(f"Profil {profile.name} använder IP-adress {profile.viewing().text}." |
||||
) |
||||
if profile.logged_in == False: |
||||
profile.accept_cookies() |
||||
sleep_(2) |
||||
profile.login() |
||||
profiles.append(profile) |
||||
print() |
||||
sleep(3) |
||||
# Hämta profil |
||||
profile = new_profile(container, proxieservers) |
||||
profile.write = write |
||||
|
||||
profile_nr = 1 |
||||
profile = profiles[profile_nr] |
||||
|
||||
_print(profile.container, user.username, f"Börjar med profilen {profile.name}") |
||||
update_cookie(profile.browser.session.cookies, profile) |
||||
sleep(3) |
||||
|
||||
# Gå igenom de användare som efterfrågats |
||||
try: |
||||
while True: |
||||
for user in users: |
||||
# Set för kollade bilder och kollade medlemmar |
||||
all_pictures = set([doc["_key"] for doc in db.collection("pictures").all()]) |
||||
all_pictures_start = all_pictures.copy() |
||||
members_checked = checked_members() |
||||
profile.container = user.username |
||||
|
||||
if lookups == "leak_lookups": |
||||
id = user.username |
||||
check_profile_status(profile, user) |
||||
if profile.blocked: |
||||
profile = blocked_profile(profile, proxieservers=proxieservers) |
||||
profile.open(url_bas + "/" + user.username) |
||||
url = profile.browser.state.url.strip("/").strip("?_rdr") |
||||
if "php?" not in url: |
||||
user = User(str(url[url.rfind("/") + 1 :]).strip(), mode) |
||||
user.id = id |
||||
sleep_(4) |
||||
container = str(user.username) |
||||
profile.container = container |
||||
|
||||
if "container" not in globals(): |
||||
container = str(user.username) |
||||
profile.container = container |
||||
|
||||
profile.users_checked += 1 |
||||
|
||||
# Hämta reaktioner för den första användaren |
||||
if any([user.username not in members_checked, mode == 'force']): |
||||
if any([not check_for_user(user.username, mode=mode), mode == "force"]): |
||||
try: |
||||
profile_picture_reactions(profile, user, all_pictures, first_user=True, mode=mode) |
||||
while True: |
||||
# Uppdatera in_use |
||||
profile.update_time() |
||||
profile = profile_picture_reactions( |
||||
profile, user, first_user=True, mode=mode |
||||
) |
||||
if profile.blocked: |
||||
profile = blocked_profile(profile, proxieservers=proxieservers) |
||||
else: |
||||
break |
||||
except: |
||||
_print(profile.container, user.username, traceback.format_exc()) |
||||
if len(users) == 1: |
||||
for profile in profiles: |
||||
profile.unused() |
||||
friends = friends_of_user(user.username) |
||||
friends_unchecked = list(set(friends) - set(members_checked)) |
||||
_print(profile, user, traceback.format_exc()) |
||||
|
||||
if mode == 'solo': |
||||
exit() |
||||
|
||||
_print(profile.container, user.username, f"\nKlar med, {user.username}\n") |
||||
_print(profile.container, user.username, f"Vänner som reagerat: {len(friends)}") |
||||
_print(profile.container, user.username, "\nVänner att kolla:") |
||||
friends = friends_of_user(user.username) |
||||
_print(profile, user, f"\nKlar med, {user.username}\n") |
||||
_print(profile, user, f"Vänner som reagerat: {len(friends)}") |
||||
_print(profile, user, "\nVänner att kolla:") |
||||
|
||||
for friend in friends_unchecked: |
||||
friends_unchecked = [] |
||||
for friend in friends: |
||||
if not check_for_user(friend): |
||||
print(friend) |
||||
_print(profile.container, user.username, ', '.join([friend for friend in friends_unchecked]), silent=True) |
||||
friends_unchecked.append(friend) |
||||
|
||||
_print(profile, user, [friends_unchecked], silent=True) |
||||
_print(profile, user, f'Totalt: {len(friends_unchecked)}') |
||||
print() |
||||
|
||||
# Hämta reaktioner för users vänner (som reagerat) |
||||
count_friends = 0 |
||||
for friend in friends_unchecked: |
||||
if datetime.now().strftime("%H") == '03' and int(datetime.now().strftime("%M")) < 30: # Sov för att kunna säkerhetskopieraa |
||||
sleep(1800) |
||||
count_friends += 1 |
||||
user = User(str(friend), mode) |
||||
user = User(str(friend), mode, other_pictures=[]) |
||||
sleep_(2) |
||||
|
||||
# Uppdatera in_use |
||||
profile.update_time() |
||||
try: |
||||
profile_picture_reactions( |
||||
profile, user, all_pictures, mode=mode |
||||
) |
||||
if not check_for_user(user.username): |
||||
p = profile_picture_reactions(profile, user, mode=mode) |
||||
if isinstance(p, Profile): |
||||
profile = p |
||||
|
||||
except Exception as e: # Fel4 |
||||
write_error( |
||||
4, |
||||
profile, |
||||
e=e, |
||||
user=user.username, |
||||
profile=profile.container, |
||||
user=user, |
||||
traceback=traceback.format_exc(), |
||||
soup=profile.viewing(), |
||||
) |
||||
_print(profile.container, user.username, f"\nFel: {str(user.username)}\n") |
||||
_print(profile, user, f"\nFel: {str(user.username)}\n") |
||||
sleep_(15) |
||||
|
||||
if profile.blocked == False: |
||||
_print(profile.container, user.username, f"Klar med {user.username} \n") |
||||
if not profile.blocked: |
||||
_print(profile, user, f"Klar med {user.username} \n") |
||||
|
||||
# Rotera fb-profiler |
||||
if count_friends > 5 * mode_nr: |
||||
if count_friends > 2 * mode_nr: |
||||
if random.randrange(0, 2, 1) == 1: |
||||
profile_nr += 1 |
||||
profile = new_profile(container, proxieservers=proxieservers) |
||||
count_friends = 0 |
||||
_print(profile.container, user.username, f"Växlar till {profiles[profile_nr].name}") |
||||
elif count_friends > 9 * mode_nr: |
||||
profile_nr += 1 |
||||
_print(profile, user, f"Växlar till {profile.name}") |
||||
elif count_friends > 4 * mode_nr: |
||||
profile = new_profile(container, proxieservers=proxieservers) |
||||
count_friends = 0 |
||||
_print(profile.container, user.username, f"Växlar till {profiles[profile_nr].name}") |
||||
|
||||
if profile_nr > len(profiles) - 1: |
||||
profile_nr = 0 |
||||
|
||||
elif profile.blocked == True: |
||||
# Ta bort profilen ur databasen |
||||
report_blocked(profile, users) |
||||
remove_profile(profile.doc) |
||||
# Ta bort från listan på fb-profiler som används |
||||
profiles.remove(profile) |
||||
# Försök lägga till en ny fb-profil (om det finns en skapad och ledig i databasen) |
||||
try: |
||||
doc = get_profile() |
||||
profiles[profile_nr] = Profile(doc, container) |
||||
_print(profile.container, user.username, f"Laddat ny profil: {profiles[profile_nr].name}") |
||||
sleep(3) |
||||
except e: |
||||
_print(profile.container, user.username, "Det behövs nya profiler...") |
||||
if len(profiles) == 0: |
||||
break |
||||
for s in range(0, 1600 / len(profiles)): |
||||
print(user, f"Sover {600-s} sekunder till... ", end="\r") |
||||
profile_nr += 1 |
||||
_print(profile.container, user.username, f"Försöker med {profiles[profile_nr].name}.") |
||||
|
||||
profile = profiles[profile_nr] |
||||
_print(profile, user, f"Växlar till {profile.name}") |
||||
|
||||
elif profile.blocked: |
||||
profile = blocked_profile(profile, proxieservers=proxieservers) |
||||
|
||||
except: |
||||
finish() |
||||
_print(profile, None, f"Klar med alla vänner.") |
||||
|
||||
@ -1,9 +1,18 @@ |
||||
from getpass import getpass |
||||
def set_pwd(_pwd=None): |
||||
global pwd |
||||
if _pwd == None: |
||||
_pwd = getpass('Lösenord för Arango-användaren:') |
||||
pwd = _pwd |
||||
|
||||
# Info för arangodb |
||||
user_arango = "Lasse" |
||||
pwd_arango = "4c071768bedc259288361c07aafd8535fca546086fada4e7b5de4e2bb26b0e70fa8d348c998b90d032a5b8f3fdbae1881b843021e3475198e6fb45f58d8dc450bd52f77d" |
||||
db_arango = "facebook" |
||||
host_arango = "http://arango.lasseedfast.se" |
||||
host_arango = 'http://192.168.0.4:8529' |
||||
#host_arango = "http://arango.lasseedfast.se" |
||||
|
||||
# Andra uppgifter |
||||
url_bas = "https://mbasic.facebook.com" |
||||
user_agent = "Mozilla/5.0 (iPhone; CPU iPhone OS 10_3 like Mac OS X) AppleWebKit/602.1.50 (KHTML, like Gecko) CriOS/56.0.2924.75 Mobile/14E5239e Safari/602.1" |
||||
mullvad = '8155249667566524' |
||||
Binary file not shown.
@ -0,0 +1,110 @@ |
||||
import os |
||||
import pickle |
||||
import time |
||||
|
||||
import cv2 |
||||
import face_recognition |
||||
|
||||
|
||||
def build_data(): |
||||
""" Build the face_enc file with data to recognize from """ |
||||
|
||||
knownEncodings = [] |
||||
knownNames = [] |
||||
|
||||
members = os.listdir('../profile_pictures') |
||||
|
||||
#get paths of each file in folder named Images |
||||
#Images here contains my data(folders of various persons) |
||||
for member in members: |
||||
if '.DS_Store' in member: |
||||
continue |
||||
imagePaths = [] |
||||
for path in os.listdir(f'../profile_pictures/{member}'): |
||||
if '.jpg' in path: |
||||
imagePaths.append(f'../profile_pictures/{member}/{path}') |
||||
|
||||
# loop over the image paths |
||||
for imagePath in imagePaths: |
||||
print(imagePath) |
||||
# load the input image and convert it from BGR (OpenCV ordering) |
||||
# to dlib ordering (RGB) |
||||
image = cv2.imread(imagePath) |
||||
rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) |
||||
#Use Face_recognition to locate faces |
||||
boxes = face_recognition.face_locations(rgb, number_of_times_to_upsample = 2) #,model='hog' |
||||
# compute the facial embedding for the face |
||||
encodings = face_recognition.face_encodings(image, boxes) |
||||
# loop over the encodings |
||||
for encoding in encodings: |
||||
knownEncodings.append(encoding) |
||||
knownNames.append(member) |
||||
#save emcodings along with their names in dictionary data |
||||
data = {"encodings": knownEncodings, "names": knownNames} |
||||
#use pickle to save data into a file for later use |
||||
with open("face_enc", "wb") as f: |
||||
f.write(pickle.dumps(data)) |
||||
f.close() |
||||
|
||||
def identify_face(imagePath): |
||||
#find path of xml file containing haarcascade file |
||||
cascPathface = os.path.dirname( |
||||
cv2.__file__) + "/data/haarcascade_frontalface_alt2.xml" |
||||
# load the harcaascade in the cascade classifier |
||||
faceCascade = cv2.CascadeClassifier(cascPathface) |
||||
# load the known faces and embeddings saved in last file |
||||
data = pickle.loads(open('face_enc', "rb").read()) |
||||
#Find path to the image you want to detect face and pass it here |
||||
image = cv2.imread(imagePath) |
||||
rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) |
||||
#convert image to Greyscale for haarcascade |
||||
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) |
||||
faces = faceCascade.detectMultiScale(gray, |
||||
scaleFactor=1.1, |
||||
minNeighbors=5, |
||||
minSize=(60, 60), |
||||
flags=cv2.CASCADE_SCALE_IMAGE) |
||||
|
||||
# the facial embeddings for face in input |
||||
encodings = face_recognition.face_encodings(rgb) |
||||
names = [] |
||||
# loop over the facial embeddings incase |
||||
# we have multiple embeddings for multiple fcaes |
||||
for encoding in encodings: |
||||
#Compare encodings with encodings in data["encodings"] |
||||
#Matches contain array with boolean values and True for the embeddings it matches closely |
||||
#and False for rest |
||||
matches = face_recognition.compare_faces(data["encodings"], |
||||
encoding) |
||||
#set name =unknown if no encoding matches |
||||
name = "Unknown" |
||||
# check to see if we have found a match |
||||
if True in matches: |
||||
#Find positions at which we get True and store them |
||||
matchedIdxs = [i for (i, b) in enumerate(matches) if b] |
||||
counts = {} |
||||
# loop over the matched indexes and maintain a count for |
||||
# each recognized face face |
||||
for i in matchedIdxs: |
||||
#Check the names at respective indexes we stored in matchedIdxs |
||||
name = data["names"][i] |
||||
#increase count for the name we got |
||||
counts[name] = counts.get(name, 0) + 1 |
||||
#set name which has highest count |
||||
name = max(counts, key=counts.get) |
||||
print(counts) |
||||
|
||||
# update the list of names |
||||
names.append(name) |
||||
# loop over the recognized faces |
||||
for ((x, y, w, h), name) in zip(faces, names): |
||||
# rescale the face coordinates |
||||
# draw the predicted face name on the image |
||||
cv2.rectangle(image, (x, y), (x + w, y + h), (0, 255, 0), 2) |
||||
cv2.putText(image, name, (x, y), cv2.FONT_HERSHEY_SIMPLEX, |
||||
0.75, (0, 255, 0), 2) |
||||
cv2.imshow("Frame", image) |
||||
cv2.waitKey(0) |
||||
|
||||
|
||||
identify_face('/Users/Lasse/Datorgemensamt/Programmeringsprojekt/Facebook/fb-scraper/profile_pictures/millington.jiang/4138068259557849.jpg') |
||||
@ -0,0 +1,185 @@ |
||||
import locale |
||||
import re |
||||
from datetime import datetime |
||||
|
||||
import networkx as nx |
||||
import pandas as pd |
||||
from numpy.core.numeric import NaN |
||||
|
||||
locale.setlocale(locale.LC_TIME, "en_US") |
||||
|
||||
from arangodb import db |
||||
|
||||
|
||||
def nodes_from_list( |
||||
nodes, collection="members", return_fields="{'_key': doc._key, 'name': doc.name}" |
||||
): |
||||
aql_edges = f""" |
||||
FOR doc IN @@ecollection |
||||
FILTER doc._id IN @nodes |
||||
RETURN {return_fields} |
||||
""" |
||||
cursor = db.aql.execute( |
||||
aql_edges, bind_vars={"@ecollection": collection, "nodes": nodes} |
||||
) |
||||
return [doc for doc in cursor] |
||||
|
||||
|
||||
def edges_from_nodes( |
||||
nodes, edge_collections=["picture_reactions"], simple=True, mode="or" |
||||
): |
||||
""" |
||||
Returnerar en df med relationer för valda noder och relationtabeller. |
||||
|
||||
Args: |
||||
nodes (list): Noder som ska ingå i relationerna |
||||
edge_collections (list, optional): Relationtabeller att hämta relationer från. Defaults to ['messages']. |
||||
simple (bool, optional): Simple ger bara _from, _to och _key. Defaults to True. |
||||
|
||||
Returns: |
||||
pd.DataFrame: DataFrame. |
||||
""" |
||||
|
||||
if simple: |
||||
return_fields = ( |
||||
"{'_to': doc._to, '_from': doc._from, '_id':doc._id, '_key':doc._key}" |
||||
) |
||||
else: |
||||
return_fields = "doc" |
||||
|
||||
edges = [] |
||||
|
||||
for collection in edge_collections: |
||||
aql = f""" |
||||
FOR doc IN @@edge_collection |
||||
FILTER doc._from IN @nodes {mode} doc._to IN @nodes |
||||
RETURN {return_fields} |
||||
""" |
||||
cursor = db.aql.execute( |
||||
aql, |
||||
bind_vars={ |
||||
"@edge_collection": collection, |
||||
"nodes": nodes, |
||||
}, |
||||
) |
||||
|
||||
edges = edges + [doc for doc in cursor] |
||||
|
||||
return edges |
||||
|
||||
|
||||
def convert_date(date): |
||||
try: |
||||
new_date = datetime.strptime(date, "%d %b %Y") |
||||
except ValueError: |
||||
try: |
||||
new_date = datetime.strptime(date, "%d %B %Y") |
||||
except ValueError: |
||||
try: |
||||
new_date = datetime.strptime(date, "%b %d, %Y") |
||||
except ValueError: |
||||
try: |
||||
new_date = datetime.strptime(date, "%B %d, %Y") |
||||
except ValueError: |
||||
try: |
||||
new_date = datetime.strptime(date + " 2021", "%d %b %Y") |
||||
except ValueError: |
||||
return "" |
||||
return new_date.strftime("%Y-%d-%d") |
||||
# return f'{new_date.date().year}-{new_date.date().month}-{new_date.date().day}' |
||||
|
||||
|
||||
def export_network(members, n=2): |
||||
""" Exporterar en gexf-fil med noder utifrån en lista med medlemmar. """ |
||||
|
||||
filename = f"data/-.join({members}).-old.gexf" |
||||
ids = [] |
||||
for member in members: |
||||
ids.append(f"members/{member}") |
||||
friends = set() |
||||
|
||||
# Hämta relationer kopplade till members från databasen |
||||
edges = edges_from_nodes(ids) |
||||
for edge in edges: |
||||
friends.add(edge["_from"]) |
||||
friends.add(edge["_to"]) |
||||
edges = edges_from_nodes(list(friends)) |
||||
|
||||
# Skapa en dict där det syns vem som har interagerat med hur många |
||||
d = {} |
||||
for i in edges: |
||||
_to = i["_to"] |
||||
_from = i["_from"] |
||||
if _to not in d: |
||||
d[_to] = set([i["_from"]]) |
||||
else: |
||||
d[_to] = d[_to] | set([i["_from"]]) |
||||
|
||||
if _from not in d: |
||||
d[_from] = set([i["_to"]]) |
||||
else: |
||||
d[_from] = d[_from] | set([i["_to"]]) |
||||
|
||||
# Sålla ut så bara medlemmar som reagerat med [n] två av grundanvändarens vänner kommer med |
||||
friends = set(friends) |
||||
members = [] |
||||
for key, value in d.items(): |
||||
if len(value & friends) >= n or key in friends: |
||||
members.append(key) |
||||
|
||||
# Skapa df med edges |
||||
edges = pd.DataFrame( |
||||
edges_from_nodes(members, mode="and", simple=False), |
||||
columns=["_key", "_to", "_from", "reaction", "picture"], |
||||
) |
||||
edges.set_index("_key", inplace=True) |
||||
|
||||
# En lista på användare att ta med till nätverket |
||||
members = list(set(edges["_from"].unique()) | set(edges["_to"].unique())) |
||||
|
||||
# Skapa noder till nätverket |
||||
nodes = nodes_from_list( |
||||
members |
||||
) # , return_fields="{'id':doc._key, 'label':doc.name") |
||||
nodes = [(i["_key"], i) for i in nodes] |
||||
|
||||
# Lägg till några kolumner i edges-tabellen |
||||
edges._from = edges._from.apply(lambda x: x[8:]) |
||||
edges._to = edges._to.apply(lambda x: x[8:]) |
||||
edges.picture = edges.picture.apply( |
||||
lambda x: re.search("\d+", x).group() |
||||
) # Rensa bort url-info i de fall bilden har fått fel id |
||||
|
||||
# Hämta bilder för att kunna lägga datum till edges |
||||
p = ["pictures/" + i for i in edges.picture.unique().tolist()] |
||||
d = {} |
||||
pictures = nodes_from_list( |
||||
p, collection="pictures", return_fields="{'id': doc._key, 'date':doc.date}" |
||||
) |
||||
for picture in pictures: |
||||
d[picture["id"]] = convert_date(picture["date"]) |
||||
|
||||
edges["date"] = edges.picture.apply(lambda x: d[x]) |
||||
|
||||
# Skapa graf utifrån relationer |
||||
G = nx.from_pandas_edgelist( |
||||
edges, |
||||
source="_from", |
||||
target="_to", |
||||
edge_attr=["reaction", "date"], #, "now" |
||||
create_using=nx.MultiDiGraph, |
||||
) |
||||
|
||||
# Lägg till noderna i grafen |
||||
G.add_nodes_from(nodes) |
||||
|
||||
# Exportera till filer |
||||
nx.write_gexf( |
||||
G, |
||||
filename |
||||
) |
||||
|
||||
|
||||
if __name__ == "__main__": |
||||
export_network(["linda.kakuli"]) |
||||
# export_network(input('Member: ')) |
||||
@ -0,0 +1,70 @@ |
||||
import requests |
||||
import os |
||||
from datetime import date, datetime, timedelta |
||||
from time import sleep |
||||
|
||||
from arangodb import db |
||||
|
||||
|
||||
def download_image(url, user, id): |
||||
|
||||
# Kolla så användarmappen finns |
||||
if not os.path.isdir(f'../profile_pictures/{user}'): |
||||
os.mkdir(f'../profile_pictures/{user}') |
||||
|
||||
# Ladda ner bilden |
||||
r = requests.get(url) |
||||
if r.text == 'URL signature expired': |
||||
print('För gammal länk.') |
||||
exit() |
||||
elif r.status_code == 403: |
||||
exit() |
||||
img_data = r.content |
||||
with open(f'../profile_pictures/{user}/{id}.jpg', 'wb') as handler: |
||||
handler.write(img_data) |
||||
|
||||
|
||||
def get_pictures(day): |
||||
cursor = db.aql.execute( |
||||
""" |
||||
for doc in members |
||||
filter doc.fetched == @date |
||||
filter has(doc, "checked_pictures") |
||||
filter not has(doc, "pictures_downloaded") |
||||
return {'member': doc._key, 'pictures':doc.checked_pictures} |
||||
""", |
||||
bind_vars={'date': day} |
||||
) |
||||
|
||||
for doc in cursor: |
||||
pictures = [] |
||||
for picture in doc['pictures']: |
||||
pictures.append(picture[picture.find('fbid=')+5:]) |
||||
|
||||
|
||||
cursor = db.aql.execute( |
||||
""" |
||||
for doc in pictures |
||||
filter doc._key in @list |
||||
limit 10 |
||||
return {'_key': doc._key, 'user':doc.user, 'url': doc.src} |
||||
""", |
||||
bind_vars={"list": pictures}, |
||||
) |
||||
|
||||
for picture in cursor: |
||||
download_image(picture['url'], picture['user'], picture['_key']) |
||||
print(picture['_key']) |
||||
sleep(2) |
||||
|
||||
db.update_document({'_id': 'members/' + str(doc['member']), 'pictures_downloaded': True}, silent=True, check_rev=False) |
||||
|
||||
def old_pics(): |
||||
if not os.path.isdir(f'../profile_pictures'): |
||||
os.mkdir(f'../profile_pictures') |
||||
start = date.today() |
||||
for i in range(1,60): |
||||
d = start - timedelta(days=i) |
||||
get_pictures(d.strftime('%Y%m%d')) |
||||
|
||||
|
||||
@ -0,0 +1,169 @@ |
||||
import os |
||||
from datetime import date, datetime, timedelta |
||||
from getpass import getpass |
||||
from time import sleep |
||||
import random |
||||
|
||||
import requests |
||||
import urllib3 |
||||
|
||||
urllib3.disable_warnings() |
||||
from arango import ArangoClient |
||||
|
||||
|
||||
def download_image(url, user, id): |
||||
|
||||
# Ladda ner bilden |
||||
while True: |
||||
try: |
||||
server = servers_mullvad[random.randint(0, len(servers_mullvad)-1)] |
||||
proxies = { |
||||
"https": "socks5://'8155249667566524'@{}".format(server), |
||||
"http": "socks5://'8155249667566524'@{}".format(server), |
||||
} |
||||
r = requests.get(url, proxies=proxies) |
||||
break |
||||
except requests.exceptions.ConnectionError: |
||||
sleep(300) |
||||
|
||||
if r.text == "URL signature expired": |
||||
print("För gammal länk.") |
||||
exit() |
||||
elif r.status_code == 403: |
||||
exit() |
||||
|
||||
image_name = f"/ssd/profile_pictures/{user}/{id}.jpg" |
||||
img_data = r.content |
||||
with open(image_name, "wb") as handler: |
||||
handler.write(img_data) |
||||
|
||||
#nc_path = f"https://nc.lasseedfast.se/remote.php/dav/files/Lasse/profile_pictures/{user}/{id}.jpg" |
||||
|
||||
# headers = {"Content-type": "image/jpeg", "Slug": "heart"} |
||||
# while True: |
||||
# try: |
||||
# r = requests.put( |
||||
# nc_path, data=open(image_name, "rb"), headers=headers, auth=auth, verify=False |
||||
# ) |
||||
# break |
||||
|
||||
# except: |
||||
# print('Kunde inte ladda upp', nc_path) |
||||
# sleep(5) |
||||
|
||||
print(f"{user}\t{id}\t{r.status_code}") |
||||
|
||||
|
||||
def get_pictures(day): |
||||
cursor = db.aql.execute( |
||||
""" |
||||
for doc in members |
||||
filter doc.fetched == @date |
||||
filter has(doc, "checked_pictures") |
||||
filter not has(doc, "pictures_downloaded") |
||||
return {'member': doc._key, 'pictures':doc.checked_pictures} |
||||
""", |
||||
bind_vars={"date": str(day)}, |
||||
) |
||||
|
||||
# Skapa en lista med bilder att gå igenom. |
||||
images = [] |
||||
for doc in cursor: |
||||
images.append(doc) |
||||
for doc in images: |
||||
user = doc["member"] |
||||
|
||||
# # Skapa mapp för användarens bilder på NC... |
||||
# nc_path = f"https://nc.lasseedfast.se/remote.php/dav/files/Lasse/profile_pictures/{user}" |
||||
# while True: |
||||
# try: |
||||
# requests.request("MKCOL", nc_path, verify=False, auth=auth) |
||||
# break |
||||
# except: |
||||
# print('Kunde inte skapa', nc_path) |
||||
# sleep(5) |
||||
|
||||
# ...och på datorn (för backup) |
||||
if not os.path.isdir(f"/ssd/profile_pictures/{user}"): |
||||
os.mkdir(f"/ssd/profile_pictures/{user}") |
||||
|
||||
pictures = [] |
||||
for picture in doc["pictures"]: |
||||
pictures.append(picture[picture.find("fbid=") + 5 :]) |
||||
|
||||
cursor = db.aql.execute( |
||||
""" |
||||
for doc in pictures |
||||
filter doc._key in @list |
||||
limit 10 |
||||
return {'_key': doc._key, 'user':doc.user, 'url': doc.src} |
||||
""", |
||||
bind_vars={"list": pictures}, |
||||
) |
||||
|
||||
for picture in cursor: |
||||
while True: |
||||
download_image(picture["url"], picture["user"], picture["_key"]) |
||||
sleep(1) |
||||
break |
||||
|
||||
db.update_document( |
||||
{"_id": "members/" + str(doc["member"]), "pictures_downloaded": True}, |
||||
silent=True, |
||||
check_rev=False, |
||||
) |
||||
|
||||
|
||||
# def old_pics(): |
||||
# if not os.path.isdir(f'profile_pictures'): |
||||
# os.mkdir(f'profile_pictures') |
||||
# start = date.today() |
||||
# for i in range(1,60): |
||||
# d = start - timedelta(days=i) |
||||
# get_pictures(d.strftime('%Y%m%d')) |
||||
|
||||
|
||||
if __name__ == '__main__': |
||||
# Info för arangodb |
||||
user_arango = "Lasse" |
||||
db_arango = "facebook" |
||||
host_arango = "http://192.168.0.4:8529" |
||||
|
||||
|
||||
# Starta koppling till arangodb |
||||
# Avkryptera lösen till arango |
||||
pwd = getpass(f"Arangolösenord för {user_arango}: ") |
||||
|
||||
|
||||
db = ArangoClient(hosts=host_arango).db(db_arango, username=user_arango, password=pwd) |
||||
auth = ("Lasse", "affix-sip-jejune-epigraph-ENTROPY-stupefy1") |
||||
|
||||
servers_mullvad = [ |
||||
"se15-wg.socks5.mullvad.net:1080", |
||||
"se17-wg.socks5.mullvad.net:1080", |
||||
"se18-wg.socks5.mullvad.net:1080", |
||||
"se19-wg.socks5.mullvad.net:1080", |
||||
"se21-wg.socks5.mullvad.net:1080", |
||||
"se22-wg.socks5.mullvad.net:1080", |
||||
"se23-wg.socks5.mullvad.net:1080", |
||||
"se3-wg.socks5.mullvad.net:1080", |
||||
"se5-wg.socks5.mullvad.net:1080", |
||||
"se9-wg.socks5.mullvad.net:1080", |
||||
"se10-wg.socks5.mullvad.net:1080", |
||||
"se2-wg.socks5.mullvad.net:1080", |
||||
"se6-wg.socks5.mullvad.net:1080", |
||||
"se7-wg.socks5.mullvad.net:1080", |
||||
"se8-wg.socks5.mullvad.net:1080", |
||||
"se13-wg.socks5.mullvad.net:1080", |
||||
"se14-wg.socks5.mullvad.net:1080", |
||||
"se26-wg.socks5.mullvad.net:1080", |
||||
"se27-wg.socks5.mullvad.net:1080", |
||||
"se28-wg.socks5.mullvad.net:1080", |
||||
] |
||||
|
||||
while True: |
||||
today = date.today().strftime('%Y%m%d') |
||||
get_pictures(today) |
||||
yesterday = date.today() - timedelta(days=1) |
||||
get_pictures(yesterday.strftime('%Y%m%d')) |
||||
sleep(300) |
||||
@ -0,0 +1,45 @@ |
||||
import subprocess
import requests
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from time import sleep

# Wait until this machine is back on the 4G router's WLAN, then reboot the
# modem through its web UI (to force a new public IP) and report the result.

# Block until the expected WLAN is joined; re-check every 20 s.
while True:
    wlan = subprocess.Popen(['iwgetid'], stdout=subprocess.PIPE)
    wlan =wlan.communicate()[0].decode()
    if '4G-UFI-5671' in wlan:
        print('Sucess')
        break
    else:
        sleep(20)

print('Nuvarande ip:', requests.get('https://api.ipify.org').text)

# Set up selenium browser
# NOTE(review): `options.headless` and the `find_element_by_*` /
# `switch_to_alert()` APIs below were removed in Selenium 4 -- this script
# assumes Selenium 3 is pinned; confirm against requirements.txt.
options = Options()
options.headless = True
browser = webdriver.Chrome(options=options)

# Login to modem
# SECURITY(review): hard-coded modem credentials in source -- move to config.
browser.get('http://192.168.100.1/cellweb/login.asp')
sleep(3)
username = browser.find_element_by_id("user_name")
password = browser.find_element_by_id("user_password")
username.send_keys("admin")
password.send_keys("1340asde")

# Navigate to the reboot action and confirm it; the sleeps give each page
# transition time to render before the next absolute-XPath lookup.
browser.find_element_by_xpath("/html/body/section/form/button").click()  # Login
sleep(1)
browser.find_element_by_xpath("/html/body/section/div[2]/div[6]/a").click()  # More
sleep(1)
browser.find_element_by_xpath("/html/body/section[2]/div/div[2]/div/a").click()  # Reboot
sleep(1)
browser.find_element_by_xpath("/html/body/div[4]/div/div/div[2]/div[2]").click()  # Accept
sleep(1)
browser.switch_to_alert().accept()  # Accept again (alert)
browser.close()
# Give the modem time to come back up before reporting the new IP.
sleep(120)


print('Ny ip:', requests.get('https://api.ipify.org').text)
||||
|
unable to load file from base commit
|
@ -0,0 +1,23 @@ |
||||
from arango import ArangoClient
from getpass import getpass
from config import *
from time import sleep

# Establish a database connection, trying the password file first and then
# prompting interactively; give up after five failed attempts.
# NOTE(review): `pwd`, `user_arango`, `db_arango` and `host_arango` are
# presumably supplied by `from config import *` above -- if the file is
# missing AND config does not define `pwd`, the `if pwd == None` check below
# raises NameError on the first iteration; confirm against config.py.
for i in range(0, 6, 1):
    if i == 5:
        # Five attempts exhausted -- abort the script.
        exit()
    try:
        # If the script runs on the Macbook the password is kept in a file.
        with open("../password_arango.txt") as f:
            pwd = f.readline()
    except FileNotFoundError:
        # No password file: fall back to an interactive prompt, but only
        # if config didn't already provide one.  (Style note: `pwd is None`
        # would be the idiomatic comparison.)
        if pwd == None:
            pwd = getpass(f'Lösenord för {user_arango}: ')

    try:
        db = ArangoClient(hosts=host_arango).db(db_arango, username=user_arango, password=pwd)
        db.collection('members').random()  # To test the password/connection.
        break
    # NOTE(review): bare except also swallows network errors, reporting them
    # as a wrong password -- consider catching the arango auth error only.
    except:
        print("Fel lösenord.")
        sleep(1)
||||
@ -0,0 +1,74 @@ |
||||
from datetime import datetime |
||||
from getpass import getpass |
||||
from time import sleep |
||||
|
||||
from arango import ArangoClient |
||||
from json2html import json2html |
||||
|
||||
|
||||
def now():
    """Current local date and time, formatted as ``YYYY-MM-DD_HH:MM:SS``."""
    return f"{datetime.now():%Y-%m-%d_%H:%M:%S}"
||||
|
||||
def write_stats(db, continuous=False):
    """Collect database statistics, store them in the ``stats`` collection
    and render them to a static HTML page.

    Args:
        db: python-arango database handle.
        continuous (bool): when True, loop forever and refresh once a day;
            when False, run a single pass and return.
    """
    while True:
        # Per-collection document counts, skipping system collections and
        # the stats collection itself (which would be self-referential).
        d = {}
        for col in db.collections():
            if not col['system']:
                d[col['name']] = db.collection(col['name']).count()
        # pop() instead of del: tolerate a database without a stats
        # collection yet (del would raise KeyError on the first run).
        d.pop('stats', None)

        # Number of members already processed.
        cursor = db.aql.execute(
            """
            FOR doc IN members
            FILTER doc.checked == true
            COLLECT WITH COUNT INTO length
            RETURN length
            """
        )
        d['checked_members'] = cursor.next()

        # How many accounts remain per vendor.
        cursor = db.aql.execute(
            '''
            for doc in profiles
            filter has(doc, "vendor")
            COLLECT vendor = doc.vendor WITH COUNT INTO length
            RETURN {
                "vendor" : vendor,
                "active" : length
            }
            ''')
        d['active_vendors'] = [doc for doc in cursor]

        # Key on date+hour so reruns within the same hour overwrite the
        # previous snapshot instead of piling up documents.
        d['_key'] = now()[:13]
        db.insert_document("stats", d, overwrite=True)

        # Write a static HTML file ('w' truncates on open, replacing the
        # original 'a+' + truncate(0) combination).
        with open('website/fb-webbapp/stats.html', 'w') as html:
            html.write('<!DOCTYPE html> <br>')
            html.write(json2html.convert(json=d))

        # Sleep and refresh daily when running continuously.
        if continuous:
            sleep(86400)
        else:
            break
||||
|
||||
# ArangoDB connection info
user_arango = "Lasse"
db_arango = "facebook"
host_arango = "http://192.168.0.4:8529"

# Open the connection to ArangoDB.
# Prompt for the Arango password, stripping stray whitespace.
pwd = getpass(f'Arangolösenord för {user_arango}:').strip()

db = ArangoClient(hosts=host_arango).db(
    db_arango, username=user_arango, password=pwd
)

# Run forever, refreshing the stats once a day.
write_stats(db, continuous=True)
||||
@ -0,0 +1,131 @@ |
||||
class Profile: |
||||
    def __init__(self, profile, container, proxieservers):
        """Creates a new profile to do searches with.

        Args:
            profile (dict): Document fetched from database.
            container (str): Docker container that runs the script.
            proxieservers: Proxy servers available for rotation (kept for
                re-fetching a replacement profile later).
        """

        self.doc = profile

        # Account credentials from the database document.
        self.name = self.doc["name"].strip()
        self.email = self.doc["email"]
        self.pwd = self.doc["pwd"]
        self.server = self.doc["server"]
        self.cookie = self.doc["cookie"]
        self.useragent = self.doc["useragent"]

        self.proxieservers = proxieservers
        self.blocked = False
        self.container = str(container)
        self.users_checked = 0

        # Route all traffic for this profile through its assigned proxies.
        session = requests.Session()
        session.proxies = self.doc['proxies']

        # Start the browser with the profile's own user agent.
        user_agent = self.useragent
        self.browser = RoboBrowser(
            session=session, user_agent=user_agent, history=False, parser="lxml"
        )
        # Session-cookie fallback chain: prefer the pickled cookie jar on
        # disk, then the cookie stored in the database document, else start
        # logged out.  NOTE(review): the bare excepts hide errors beyond the
        # expected FileNotFoundError, and pickle.load(open(...)) never closes
        # the file handle -- consider a `with` block and narrower excepts.
        try:
            self.browser.session.cookies = pickle.load(
                open("data/cookie_{}.pkl".format(self.name), "rb")
            )
            self.logged_in = True
        except:
            try:
                self.browser.session.cookies.update(self.cookie)
                self.logged_in = True
            except:
                self.logged_in = False
||||
|
||||
def update_time(self): |
||||
""" Uppdatera dokumentet i arango. """ |
||||
self.doc["in_use"] = nowstamp() |
||||
db.update_document(self.doc, check_rev=False) |
||||
|
||||
def viewing(self): |
||||
""" Returnerar browser i html-format """ |
||||
return self.browser.parsed |
||||
|
||||
def open(self, url): |
||||
n = 0 |
||||
while True: |
||||
n += 1 |
||||
sleep(1) |
||||
try: |
||||
self.browser.open(url) |
||||
if '/a/nux/wizard/nav.php?step=phone&skip' in self.viewing(): |
||||
self.browser.open(url_bas + '/a/nux/wizard/nav.php?step=phone&skip') |
||||
break |
||||
except Exception as e: |
||||
print(e) |
||||
print(n) |
||||
_print(self, None, f'Kunde inte öppna url {url}') |
||||
if n == 5: |
||||
if 'Connection refused' in e: |
||||
self.doc['e'] = e |
||||
db.insert_document('blocked_profiles', self.doc) |
||||
n = 0 |
||||
from arangodb import get_profile, remove_profile |
||||
# Ta bort den gamla profilen från databasen och ersätt profile med nytt innehåll från ny profil |
||||
remove_profile(self) |
||||
self.__init__(get_profile(self.proxieservers), self.container) |
||||
_print(self, None, f'Ny profil hämtad {self.email}') |
||||
self.update_time() |
||||
else: |
||||
sleep(40) |
||||
|
||||
    def accept_cookies(self):
        """Accept Facebook's cookie-consent dialog for this profile and
        persist the updated session cookie."""
        self.browser.open("https://mbasic.facebook.com")
        soup = BeautifulSoup(str(self.browser.parsed), "lxml")
        # NOTE(review): the guard reads as inverted -- the consent flow runs
        # when "accept all" is NOT on the page; confirm this matches the
        # mbasic consent markup actually served to these accounts.
        if "accept all" not in soup.text.lower():
            sleep_(2)
            cookie_accept_url = "https://mbasic.facebook.com/cookie/consent-page"
            self.browser.open(cookie_accept_url)
            sleep_(2)
            try:
                # Submit the consent form and store the refreshed cookie.
                form = self.browser.get_form()
                self.browser.submit_form(form)
                _print(self, None, f"Accepterade cookies för {self.name}")
                sleep_(2)
                update_cookie(self.browser.session.cookies, self)
            # NOTE(review): bare excepts -- a missing form (get_form() ->
            # None) and any other error are treated alike; the inner except
            # even swallows failures of the error reporter itself.
            except:
                try:
                    write_error(12, self, soup=self.browser.parsed)
                except:
                    pass
                _print(self, None, f"Accepterade inte cookies för {self.name}")
||||
def login(self): |
||||
""" Loggar in på Facebook. """ |
||||
|
||||
print("Loggar in {}".format(self.name)) |
||||
|
||||
# Gå till log in-sidan |
||||
self.browser.open("https://mbasic.facebook.com/login") |
||||
|
||||
# Kolla om browser redan är inloggad |
||||
soup = BeautifulSoup(str(self.browser.parsed), "lxml") |
||||
if "log out" in soup.text.lower(): |
||||
print("Redan inloggad.") |
||||
try: |
||||
# Hitta och fyll i formulär |
||||
form = self.browser.get_form(id="login_form") |
||||
form["email"].value = self.email |
||||
form["pass"].value = self.pwd |
||||
self.browser.submit_form(form, submit=form["login"]) |
||||
# Vänta lite och uppdatera cookie |
||||
print("Loggade in.") |
||||
sleep_(2) |
||||
self.open(url_bas) |
||||
sleep_(2) |
||||
except TypeError: |
||||
try: |
||||
write_error(11, self, soup=soup, profile=self.name) |
||||
except: |
||||
pass |
||||
@ -0,0 +1,14 @@ |
||||
from flask import Flask, render_template
# NOTE(review): `json` and `json2html` are imported but unused here --
# candidates for removal if nothing else in this file needs them.
import json
from json2html import json2html


# Minimal Flask app that serves the pre-rendered statistics page.
app = Flask(__name__)


@app.route("/")
def stats():
    """Serve the static stats page (rendered elsewhere into templates/)."""
    return render_template("stats.html")

if __name__ == "__main__":
    # Development server only -- debug=True must not be used in production.
    app.run(debug=True)
||||
|
||||
@ -0,0 +1,15 @@ |
||||
import subprocess
import requests
from time import sleep

# Cycle the 'integrity' WireGuard tunnel forever, logging the public IP
# observed on each connection to ip.txt.

# Start from a known-down state.
# NOTE(review): only this first call uses sudo -- presumably the loop runs
# as root or with passwordless wg-quick; confirm, or the loop calls fail.
subprocess.run(['sudo', 'wg-quick', 'down', 'integrity'])

with open('ip.txt', 'a+') as f:
    while True:
        # Bring the tunnel up and give it a moment to settle.
        subprocess.run(['wg-quick', 'up', 'integrity'])
        sleep(5)
        ip = requests.get('https://api.ipify.org').text
        print(ip)
        f.write(f'{ip}\n')
        # The loop never exits, so the file is never closed -- flush each
        # line or buffered writes may never reach disk.
        f.flush()
        subprocess.run(['wg-quick', 'down', 'integrity'])
        sleep(5)
||||
Loading…
Reference in new issue