It was made as part of a CLI multi-tool for automated torrent creation. The main tool has been crashing after I gave it an SSH port-forwarding manager (for managing multi-router/snark setups), so I ripped out just the snark parsing/command part.
Mainly I wanted to do automatic load balancing by starting/stopping the next snark in the list, and maybe keep some stats; not sure where the sweet spot is yet.
How to use
If you do:
Code: Select all
snarkurl = ['127.0.0.1:8002', '127.0.0.1:8003']
Code: Select all
snarking = Snark(snarkurl)
Code: Select all
snarking.upspeed[0] = upload speed in K/s for snark on port 8002.
- upspeed / downspeed
- peer_connections / dht_peers
- tunnels_in / tunnels_out
- color (changes the color of up/downspeed for a CLI implementation using rich)
- online (bool)
- stalled (True if snark is reachable but stopped)
- nonce
Bottom has examples for sending start/stop commands to all snarks
Tool
privatebin.i2p link
cpaste.i2p link
Code: Select all
#!/usr/bin/env python3
from bs4 import BeautifulSoup
import requests
import re
# Example snark console urls. Scheme and trailing path are optional —
# anything containing host:port is accepted by Snark().
snarkurl = [
    'http://127.0.0.1:8001/snark/', '127.0.0.1:8002/', '127.0.0.1:8003',
    'https://127.0.0.1:8004', 'https://127.0.0.1:8005', '127.0.0.1:8006',
    '127.0.0.1:8007', '127.0.0.1:8008', '127.0.0.1:8009', '127.0.0.1:8010',
]
class Snark:
    """
    Poll a list of I2PSnark instances and record per-instance stats.

    Every attribute below is a list aligned with the order of ``urls``,
    e.g. ``Snark(urls).upspeed[0]`` is the upload speed in K/s of the
    first instance:
    url, upspeed, downspeed, peer_connections, dht_peers, dest,
    tunnels_in, tunnels_out, shortened_url, color, online, stalled, nonce.
    """

    # Seconds to wait for a snark console before declaring it offline.
    # The original had no timeout, so one hung router stalled the tool.
    REQUEST_TIMEOUT = 10

    def __init__(self, urls):
        """
        Fetch and parse the stats page of every snark in ``urls``.

        Keyword arguments:
        urls -- snark console urls; scheme and path are optional, any
                string containing host:port is accepted.
        """
        self.urls = urls
        self.url = []
        self.upspeed = []
        self.downspeed = []
        self.peer_connections = []
        self.dht_peers = []
        self.dest = []
        self.tunnels_in = []
        self.tunnels_out = []
        self.shortened_url = []
        self.color = []
        self.online = []
        self.stalled = []
        self.nonce = []
        # BUG FIX: the original iterated the module-level global
        # ``snarkurl`` instead of the ``urls`` argument, so the list
        # passed to the constructor was silently ignored.
        for item in self.urls:
            host_port, fullurl = self._get_short_url(item)
            stats = self._snark_parser(fullurl)
            self.url.append(stats[0])
            self.upspeed.append(stats[1])
            self.downspeed.append(stats[2])
            self.peer_connections.append(stats[3])
            self.dht_peers.append(stats[4])
            self.dest.append(stats[5])
            self.tunnels_in.append(stats[6])
            self.tunnels_out.append(stats[7])
            self.shortened_url.append(host_port)
            self.color.append(stats[8])
            self.online.append(stats[9])
            self.stalled.append(stats[10])
            self.nonce.append(stats[11])

    @staticmethod
    def _get_short_url(snarkurl):
        """
        Convert a snark console url to host:port form.

        Returns (host:port, full url of the .ajax/xhr1.html stats page).

        Keyword arguments:
        snarkurl -- url to a snark instance; scheme and path optional
        """
        m = re.search(r'(?:http.*://)?(?P<host>[^:/ ]+).?(?P<port>[0-9]*).*',
                      snarkurl)
        host_port = f"{m.group('host')}:{m.group('port')}"
        fullurl = f"http://{host_port}/i2psnark/.ajax/xhr1.html"
        return host_port, fullurl

    def _snark_get_table(self, url):
        """
        Fetch the snark stats page; return (rows, nonce <input> tag).

        rows is the list of <th> tags from the <tfoot> totals row, or
        False when the instance is unreachable or the page is malformed.

        Keyword arguments:
        url -- snark url given as http://host:port/i2psnark/.ajax/xhr1.html
        """
        try:
            response = requests.get(url, timeout=self.REQUEST_TIMEOUT)
            # Check the status before parsing; the original parsed the
            # body first and only then looked at status_code.
            if response.status_code != 200:
                return False, ''
            soup = BeautifulSoup(response.text, 'html.parser')
            get_nonce = soup.find('input')
            table = soup.find('tfoot')
            rows = table.find_all('th')
            return rows, get_nonce
        except Exception:
            # Deliberate best-effort: any network or parse failure just
            # means this snark is treated as offline by the caller.
            return False, ''

    @staticmethod
    def _parse_tag(tag, mod, html_string):
        """
        Return the strings found between <tag mod>...</tag> pairs.

        If mod is '' no space is inserted after the tag name, so plain
        tags like <code> still match.

        Keyword arguments:
        tag -- html tag name
        mod -- attribute string placed after the tag name ('' for none)
        html_string -- the html string to be searched
        """
        space = ' ' if mod else ''
        return re.findall(f"<{tag}{space}{mod}>(.*?)</{tag}>", html_string)

    @staticmethod
    def _speed_to_kbs(speed_string):
        """
        Convert a snark speed string ('x\\u202fK/s') to a float in K/s.

        Returns 0 for an empty string. Units M/s and B/s are scaled to
        K/s; the separator is U+202F (narrow no-break space) as emitted
        by the snark console.

        Keyword arguments:
        speed_string -- speed string given by snark, e.g. '12.3\\u202fK/s'
        """
        if not speed_string:
            return 0
        parts = speed_string.split('\u202f')
        value, unit = parts[0], parts[1]
        if unit == 'K/s':
            return float(value)
        if unit == 'M/s':
            return float(value) * 1000
        return float(value) / 1000

    @staticmethod
    def _parse_totals(row):
        """
        Split the totals cell of the snark footer into its fields.

        Takes the first <th> of the footer row and returns the text of
        its first <span>, split on ' • ', as a list of strings.

        Keyword arguments:
        row -- BeautifulSoup tag for the footer totals cell
        """
        span = row.find_all('span')
        return str(span[0]).split(' • ')

    def _snark_parser(self, url):
        """
        Parse one snark stats page into a flat stats tuple.

        Returns, in order: url, upspeed, downspeed, peer_connections,
        dht_peers, dest, tunnels_in, tunnels_out, color, online (bool),
        stalled (bool), nonce. An unreachable instance yields zeroed
        fields with dest 'OFFL'; a reachable-but-stopped instance yields
        dest 'STOP' and stalled=True.

        Keyword arguments:
        url -- snark url given as http://host:port/i2psnark/.ajax/xhr1.html
        """
        rows, get_nonce = self._snark_get_table(url)
        if rows is False:
            # Unreachable: red marker for the rich-based cli.
            return (url, 0, 0, 0, 0, 'OFFL', 0, 0, '[red1]', False, False, '')
        # BUG FIX: default the nonce so the except branch below can never
        # hit an unbound name when parsing fails before it is assigned.
        nonce = ''
        try:
            nonce = str(get_nonce).split(' ')[3].split('"')[1]
            # BUG FIX: moved inside the try — in the original a malformed
            # totals row raised an unhandled exception here.
            totals = self._parse_totals(rows[0])
            up_raw = self._parse_tag(
                'th', 'class="rateUp" title="Total upload speed"',
                str(rows[5]))
            down_raw = self._parse_tag(
                'th', 'class="rateDown" title="Total download speed"',
                str(rows[3]))
            peer_connections = totals[2].split(' ')[0]
            dht_raw = self._parse_tag('span', '', totals[3])[0].split(' ')[0]
            dht_peers = int(dht_raw.replace(',', ''))
            dest = self._parse_tag('code', '', totals[4])[0]
            tunnels = totals[4].split('</span>')[0].split('Tunnels: ')[1]
            tunnels_in = int(tunnels.split(' / ')[0].split(' ')[0])
            tunnels_out = int(tunnels.split(' / ')[1].split(' ')[0])
            upspeed = self._speed_to_kbs(up_raw[0])
            downspeed = self._speed_to_kbs(down_raw[0])
            return (url, upspeed, downspeed, peer_connections, dht_peers,
                    dest, tunnels_in, tunnels_out, '[green1]', True, False,
                    nonce)
        except Exception:
            # Reachable but stopped/paused: the totals row is missing
            # most fields, so any parse error here means 'stalled'.
            return (url, 0, 0, 0, 0, 'STOP', 0, 0, '[orange1]', False, True,
                    nonce)
def post_command_snark(snark_shortened_url, action, nonce):
    """
    Send a POST to a snark instance to start or stop all torrents.

    Keyword arguments:
    snark_shortened_url -- host:port with no http://
    action -- 'start' or 'stop' (case-insensitive)
    nonce -- form nonce for the snark instance

    Returns the requests.Response from the snark console.
    Raises ValueError for any other action. (BUG FIX: the original left
    command_key/command_value unbound for an unknown action and crashed
    later with a NameError.)
    """
    action = action.lower()
    if action == 'start':
        command_key, command_value = 'action_StartAll', 'Start All'
    elif action == 'stop':
        command_key, command_value = 'action_StopAll', 'Stop All'
    else:
        raise ValueError(f"action must be 'start' or 'stop', got {action!r}")
    # Browser-like headers: snark's form POST expects a matching
    # Referer/Origin for its same-origin checks.
    headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.5',
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:12.0) Gecko/20100101 Firefox/12.0',
        'Referer': f'http://{snark_shortened_url}/i2psnark/',
        'Origin': f'http://{snark_shortened_url}',
        'Connection': 'keep-alive',
        'Upgrade-Insecure-Requests': '1',
        'Sec-Fetch-Dest': 'iframe',
        'Sec-Fetch-Mode': 'navigate',
        'Sec-Fetch-Site': 'same-origin',
        'Sec-Fetch-User': '?1',
    }
    data = {
        'nonce': nonce,
        command_key: command_value,
    }
    # Timeout so one hung router cannot stall the whole tool; return the
    # response so callers can inspect the result (backward-compatible —
    # the original returned None and callers ignored it).
    return requests.post(f'http://{snark_shortened_url}/i2psnark/_post',
                         headers=headers, data=data, timeout=10)
if __name__ == '__main__':
    # BUG FIX: guard the demo driver so importing this module no longer
    # fires network requests at every configured snark.
    snarking = Snark(snarkurl)
    for i, short_url in enumerate(snarking.shortened_url):
        print(f"{short_url} {snarking.dest[i]} ▲ {int(snarking.upspeed[i])}K/s {snarking.downspeed[i]}K/s ▼ {snarking.peer_connections[i]} peers {snarking.dht_peers[i]} dht peers")
        ### start all stopped snarks
        # if snarking.online[i] == True:
        #     post_command_snark(snarking.shortened_url[i], 'start', snarking.nonce[i])
        ### start all paused snarks
        # if snarking.stalled[i] == True:
        #     post_command_snark(snarking.shortened_url[i], 'start', snarking.nonce[i])
        #### stop all online snarks
        # if snarking.online[i] == True:
        #     post_command_snark(snarking.shortened_url[i], 'stop', snarking.nonce[i])
http://o7jgnp7bubzdn7mxfqmghn3lzsjtpgkb ... ARKED.jpeg