master
emdee 1 year ago
parent ae22d14437
commit 9b743bb101

@ -48,7 +48,7 @@ exclusion: the ```--contact``` commandline arg is a comma sep list of conditions
More may be added later.
Because you don't want to exclude the introduction points to any onion
you want to connect to, ```--white_onions``` should whitelist the
you want to connect to, ```--white_services``` should whitelist the
introduction points to a comma sep list of onions, but is
currently broken in stem 1.8.0: see:
* https://github.com/torproject/stem/issues/96
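Editor's note: a hedged illustration of the two contact conditions named above. The helper name `bExcludeByContact` is mine, not part of the patch; the tests mirror the checks the main loop applies later in this diff, where `relay` is a stem descriptor with a `.contact` field.

```python
# Hedged sketch of the 'Empty' and 'NoEmail' --contact conditions; the helper
# name is illustrative, the tests mirror the main loop later in this patch.
def bExcludeByContact(relay, lConds):
    if 'Empty' in lConds and not relay.contact:
        return True   # relay publishes no ContactInfo at all
    if 'NoEmail' in lConds and relay.contact and '@' not in relay.contact:
        return True   # ContactInfo present but carries no email address
    return False
```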
@ -87,8 +87,11 @@ import socket
import time
import argparse
from io import StringIO
import ipaddr
# list(ipaddress._find_address_range(ipaddress.IPv4Network('172.16.0.0/12')))
from urllib3.util.ssl_match_hostname import CertificateError
import stem
from stem import InvalidRequest
from stem.control import Controller
from stem.connection import IncorrectPassword
@ -111,7 +114,7 @@ except ImportError as e:
coloredlogs = False
from trustor_poc import oDownloadUrl, idns_validate, TrustorError
from support_onions import sTorResolve, getaddrinfo, icheck_torrc, bAreWeConnected
from support_onions import icheck_torrc, bAreWeConnected, lIntroductionPoints, zResolveDomain, vwait_for_controller, yKNOWN_NODNS
global LOG
import logging
@ -122,43 +125,13 @@ LOG = logging.getLogger()
ETC_DIR = '/etc/tor/yaml'
aTRUST_DB = {}
aTRUST_DB_INDEX = {}
aFP_EMAIL = {}
sDETAILS_URL = "https://metrics.torproject.org/rs.html#details/"
# You can call this while bootstrapping
sEXCLUDE_EXIT_KEY = 'ExcludeNodes'
sINCLUDE_EXIT_KEY = 'ExitNodes'
sINCLUDE_GUARD_KEY = 'EntryNodes'
# maybe we should check these each time but we
# got them by sorting bad relays in the wild
lKNOWN_NODNS = [
'0x0.is',
'a9.wtf',
'artikel5ev.de',
'arvanode.net',
'dodo.pm',
'dra-family.github.io',
'eraldonion.org',
'galtland.network',
'interfesse.net',
'kryptonit.org',
'lonet.sh',
'moneneis.de',
'nx42.de',
'ormycloud.org',
'plied-privacy.net',
'redacted.org',
'rification-for-nusenu.net',
'sv.ch',
'thingtohide.nl',
'tikel10.org',
'tor-exit-2.aa78i2efsewr0neeknk.xyz',
'tor-exit-3.aa78i2efsewr0neeknk.xyz',
'torix-relays.org',
'tse.com',
'tuxli.org',
'w.digidow.eu',
'www.quintex.com',
]
def oMakeController(sSock='', port=9051):
import getpass
if sSock and os.path.exists(sSock):
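Editor's note: for orientation, a minimal sketch of what `oMakeController` amounts to, using stem's documented constructors; `make_controller` and the prompt text are illustrative, not from the patch.

```python
# Minimal sketch, assuming stem is installed: build a Controller from a Unix
# socket when one is given, otherwise from the control port, then authenticate.
import getpass
from stem.control import Controller

def make_controller(sSock='', port=9051):
    if sSock:
        controller = Controller.from_socket_file(path=sSock)
    else:
        controller = Controller.from_port(port=port)
    # A cookie-authenticated Tor accepts authenticate() without a password.
    password = getpass.getpass('Tor controller password (blank for cookie): ')
    controller.authenticate(password=password or None)
    return controller
```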
@ -191,7 +164,7 @@ def lYamlBadNodes(sFile,
global lKNOWN_NODNS
root = 'ExcludeDomains'
if root not in oBAD_NODES[oBAD_ROOT] or not oBAD_NODES[oBAD_ROOT][root]:
oBAD_NODES[oBAD_ROOT][root] = lKNOWN_NODNS
oBAD_NODES[oBAD_ROOT][root] = yaml.safe_load(StringIO(yKNOWN_NODNS))
else:
lKNOWN_NODNS = oBAD_NODES[oBAD_ROOT][root]
return l
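Editor's note: `yKNOWN_NODNS` itself is imported from `support_onions` and is not shown in this diff; the `yaml.safe_load(StringIO(...))` call above implies it is a YAML document whose body is a list of domains. A hedged stand-in, with contents borrowed from the list this commit removes:

```python
# Hedged sketch: the real yKNOWN_NODNS lives in support_onions; this stand-in
# only demonstrates the YAML-string-to-list conversion used above.
from io import StringIO
import yaml

yKNOWN_NODNS_SAMPLE = """
---
- a9.wtf
- interfesse.net
- tuxli.org
"""
assert yaml.safe_load(StringIO(yKNOWN_NODNS_SAMPLE)) == ['a9.wtf', 'interfesse.net', 'tuxli.org']
```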
@ -208,28 +181,10 @@ def lYamlGoodNodes(sFile='/etc/tor/torrc-goodnodes.yaml'):
o = yaml.safe_load(oFd)
oGOOD_NODES = o
if 'GuardNodes' in o[oGOOD_ROOT].keys():
l += o[oGOOD_ROOT]['GuardNodes']
l = o[oGOOD_ROOT]['GuardNodes']
# yq '.Nodes.IntroductionPoints|.[]' < /etc/tor/torrc-goodnodes.yaml
return l
def lIntroductionPoints(controller, lOnions):
"""not working in stem 1.8.3"""
l = []
for elt in lOnions:
desc = controller.get_hidden_service_descriptor(elt, await_result=True, timeout=None)
l = desc.introduction_points()
if l:
LOG.warn(f"{elt} NO introduction points\n")
continue
LOG.info(f"{elt} introduction points are...\n")
for introduction_point in l:
LOG.info(' %s:%s => %s' % (introduction_point.address,
introduction_point.port,
introduction_point.identifier))
l += [introduction_point.address]
return l
tBAD_URLS = set()
lATS = ['abuse', 'email']
lINTS = ['ciissversion', 'uplinkbw', 'signingkeylifetime', 'memory']
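Editor's note: the `torrc-goodnodes.yaml` layout is never shown whole in this diff; piecing together the keys that `lYamlGoodNodes` and `iMain` read (`GuardNodes`, `Services`, `Relays/ExitNodes`, `Relays/IntroductionPoints` under a `GoodNodes` root) gives the following hedged reconstruction of a minimal file.

```python
# Hedged reconstruction of the goodnodes YAML shape implied by this patch;
# all lists are left empty as placeholders.
import yaml

sGOODNODES_SAMPLE = """
GoodNodes:
  GuardNodes: []
  Services: []
  Relays:
    ExitNodes: []
    IntroductionPoints: []
"""
oGOOD = yaml.safe_load(sGOODNODES_SAMPLE)
assert 'GuardNodes' in oGOOD['GoodNodes']
assert 'IntroductionPoints' in oGOOD['GoodNodes']['Relays']
```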
@ -254,8 +209,16 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
a.update({'fps': []})
keys = list(a.keys())
if 'email' not in keys:
LOG.warn(f"{fp} 'email' not in {keys}")
a['email'] = ''
if 'ciissversion' not in keys:
aFP_EMAIL[fp] = a['email']
LOG.warn(f"{fp} 'ciissversion' not in {keys}")
a['ciissversion'] = 2
# test the url for fps and add it to the array
if 'proof' not in keys:
aFP_EMAIL[fp] = a['email']
LOG.warn(f"{fp} 'proof' not in {keys}")
return a
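Editor's note: for reference, a hedged sketch of the kind of dict `aVerifyContact` receives from `aParseContact` for a ContactInfo-style relay contact. The values are invented; the keys are the ones the defaults above guard against.

```python
# Invented example contact dict; only the key names come from this patch.
a_sample = {
    'email': 'tor[]example.org',   # obfuscated address, as relays publish it
    'ciissversion': 2,             # defaulted to 2 above when missing
    'proof': 'uri-rsa',            # the only proof type handled further down
    'url': 'https://example.org',  # copied from 'uri' when only that is given
}
```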
@ -267,9 +230,11 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
if 'url' not in keys:
if 'uri' not in keys:
a['url'] = ''
aFP_EMAIL[fp] = a['email']
LOG.warn(f"{fp} url and uri not in {keys}")
return a
a['url'] = a['uri']
aFP_EMAIL[fp] = a['email']
LOG.debug(f"{fp} 'uri' but not 'url' in {keys}")
# drop through
@ -280,25 +245,16 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
# domain should be a unique key for contacts
domain = a['url'][8:].strip('/')
if domain in lKNOWN_NODNS:
if lKNOWN_NODNS and domain in lKNOWN_NODNS:
LOG.warn(f"{domain} in lKNOWN_NODNS")
return {}
try:
ip = sTorResolve(domain)
except Exception as e:
ip = ''
ip = zResolveDomain(domain, lKNOWN_NODNS)
if ip == '':
try:
lpair = getaddrinfo(domain, 443)
except Exception as e:
LOG.warn("{e}")
lpair = None
lKNOWN_NODNS.append(domain)
if lpair is None:
LOG.warn(f"TorResolv and getaddrinfo failed for {domain}")
return a
ip = lpair[0]
aFP_EMAIL[fp] = a['email']
LOG.debug(f"{fp} {domain} does not resolve")
lKNOWN_NODNS.append(domain)
return {}
if a['proof'] not in ['uri-rsa']:
# only support uri for now
if False and ub_ctx:
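Editor's note: `zResolveDomain` comes from `support_onions` and its body is not part of this diff; judging from the code it replaces, it appears to fold the Tor-SOCKS lookup and the `getaddrinfo` fallback into one call that returns `''` on failure. A speculative sketch only:

```python
# Speculative sketch of zResolveDomain, reconstructed from the replaced code;
# sTorResolve/getaddrinfo here are the support_onions helpers, not stdlib.
def zResolveDomain_sketch(domain, lKnownNoDns=None):
    try:
        ip = sTorResolve(domain)              # resolve through the Tor SOCKS port
    except Exception:
        ip = ''
    if not ip:
        try:
            lpair = getaddrinfo(domain, 443)  # clearnet fallback
        except Exception:
            lpair = None
        if not lpair:
            return ''                         # caller records the domain as NoDNS
        ip = lpair[0]
    return ip
```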
@ -323,22 +279,26 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
LOG.warn(f"CertificateError downloading from {domain} {e}")
tBAD_URLS.add(a['url'])
except TrustorError as e:
LOG.warn(f"TrustorError downloading from {domain} {e.args}")
if e.args == "HTTP Errorcode 404":
aFP_EMAIL[fp] = a['email']
LOG.warn(f"TrustorError 404 from {domain} {e.args}")
else:
LOG.warn(f"TrustorError downloading from {domain} {e.args}")
tBAD_URLS.add(a['url'])
except (BaseException ) as e:
LOG.error(f"Exception {type(e)} downloading from {domain} {e}")
else:
if o.status_code >= 300:
LOG.warn(f"Error downloading from {domain} {o.status_code} {o.reason}")
aFP_EMAIL[fp] = a['email']
LOG.warn(f"Error from {domain} {o.status_code} {o.reason}")
# any reason to retry?
tBAD_URLS.add(a['url'])
return a
l = o.text.upper().strip().split('\n')
a['modified'] = time.time()
if not l:
# already squawked in lD
LOG.warn(f"Downloading from {domain} empty for {fp}")
tBAD_URLS.add(a['url'])
else:
a['fps'] = [elt for elt in l if elt and len(elt) == 40
and not elt.startswith('#')]
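Editor's note: a small worked example of the fingerprint extraction above. The proof URL is expected to serve one 40-character relay fingerprint per line, with blank lines and `#` comments ignored (the body is upper-cased first); the document body below is invented.

```python
# Worked example of the parsing above; the document body is invented.
text = """# rsa fingerprints
1234567890ABCDEF1234567890ABCDEF12345678

not a fingerprint
"""
l = text.upper().strip().split('\n')
fps = [elt for elt in l if elt and len(elt) == 40 and not elt.startswith('#')]
assert fps == ['1234567890ABCDEF1234567890ABCDEF12345678']
```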
@ -359,19 +319,6 @@ def aParseContact(contact, fp):
a = yaml.safe_load(oFd)
return a
def vwait_for_controller(controller, wait_boot):
if bAreWeConnected() is False:
raise SystemExit("we are not connected")
percent = i = 0
# You can call this while bootstrapping
while percent < 100 and i < wait_boot:
bootstrap_status = controller.get_info("status/bootstrap-phase")
progress_percent = re.match('.* PROGRESS=([0-9]+).*', bootstrap_status)
percent = int(progress_percent.group(1))
LOG.info(f"Bootstrapping {percent}%")
time.sleep(5)
i += 5
def vsetup_logging(log_level, logfile=''):
global LOG
add = True
@ -386,7 +333,7 @@ def vsetup_logging(log_level, logfile=''):
kwargs = dict(level=log_level,
force=True,
format='%(levelname)-4s %(message)s')
format='%(levelname)s %(message)s')
if logfile:
add = logfile.startswith('+')
@ -400,7 +347,7 @@ def vsetup_logging(log_level, logfile=''):
aKw = dict(level=log_level,
logger=LOG,
stream=sys.stdout if add else None,
fmt='%(levelname)-4s %(message)s'
fmt='%(levelname)s %(message)s'
)
coloredlogs.install(**aKw)
if logfile:
@ -414,6 +361,24 @@ def vsetup_logging(log_level, logfile=''):
LOG.addHandler(oHandler)
LOG.info(f"SSetting log_level to {log_level!s}")
logging._levelToName = {
logging.CRITICAL: 'CRITICAL',
logging.ERROR: 'ERROR',
logging.WARNING: 'WARN',
logging.INFO: 'INFO',
logging.DEBUG: 'DEBUG',
logging.NOTSET: 'NOTSET',
}
logging._nameToLevel = {
'CRITICAL': logging.CRITICAL,
'FATAL': logging.FATAL,
'ERROR': logging.ERROR,
'WARN': logging.WARNING,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG,
'NOTSET': logging.NOTSET,
}
def oMainArgparser(_=None):
@ -463,6 +428,10 @@ def oMainArgparser(_=None):
parser.add_argument('--bad_contacts', type=str,
default=os.path.join(ETC_DIR, 'badcontacts.yaml'),
help="Yaml file of bad contacts that bad FPs are using")
parser.add_argument('--strict_nodes', type=int, default=0,
choices=[0,1],
help="Set StrictNodes: 1 is less anonymous but more secure, although some sites may be unreachable")
parser.add_argument('--wait_boot', type=int, default=120,
help="Seconds to wait for Tor to booststrap")
parser.add_argument('--log_level', type=int, default=20,
@ -470,12 +439,12 @@ def oMainArgparser(_=None):
parser.add_argument('--bad_sections', type=str,
default='MyBadExit',
help="sections of the badnodes.yaml to use, comma separated, '' BROKEN")
parser.add_argument('--white_onions', type=str,
parser.add_argument('--white_services', type=str,
default='',
help="comma sep. list of onions to whitelist their introduction points - BROKEN")
parser.add_argument('--torrc_output', type=str, default='',
help="Write the torrc configuration to a file")
parser.add_argument('--proof_output', type=str, default=os.path.join(ETC_DIR, '/proof.yaml'),
parser.add_argument('--proof_output', type=str, default=os.path.join(ETC_DIR, 'proof.yaml'),
help="Write the proof data of the included nodes to a YAML file")
return parser
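Editor's note: a hedged usage sketch of the parser with the options this hunk adds or renames; the values are illustrative.

```python
# Illustrative invocation; only the flag names are taken from this patch.
parser = oMainArgparser()
oArgs = parser.parse_args(['--contact', 'Empty,NoEmail',
                           '--strict_nodes', '1',
                           '--proof_output', '/etc/tor/yaml/proof.yaml'])
assert oArgs.strict_nodes == 1
```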
@ -506,7 +475,8 @@ def vwrite_goodnodes(oArgs, oGOOD_NODES, slen):
def iMain(lArgs):
global aTRUST_DB
global aTRUST_DB_INDEX
global oBAD_NODES
global oGOOD_NODES
global lKNOWN_NODNS
parser = oMainArgparser()
oArgs = parser.parse_args(lArgs)
@ -519,18 +489,26 @@ def iMain(lArgs):
if sFile and os.path.exists(sFile):
icheck_torrc(sFile, oArgs)
twhitelist_set = set()
sFile = oArgs.proof_output
if sFile and os.path.exists(sFile):
try:
with open(sFile, 'rt') as oFd:
aTRUST_DB = yaml.safe_load(oFd)
assert type(aTRUST_DB) == dict
LOG.info(f"{len(aTRUST_DB.keys())} trusted contacts from {sFile}")
# reverse lookup of fps to contacts
# but...
for k,v in aTRUST_DB.items():
if 'modified' not in v.keys():
v['modified'] = time.time()
aTRUST_DB_INDEX[k] = v
if 'fps' in aTRUST_DB[k].keys():
for fp in aTRUST_DB[k]['fps']:
if fp in aTRUST_DB_INDEX:
continue
aTRUST_DB_INDEX[fp] = v
LOG.info(f"{len(aTRUST_DB_INDEX.keys())} good relays from {sFile}")
except Exception as e:
LOG.exception(f"Error reading YAML TrustDB {sFile} {e}")
@ -557,32 +535,40 @@ def iMain(lArgs):
if elt and elt != '{??}':
LOG.warn(f"{sEXCLUDE_EXIT_KEY} is in use already")
lGoodOverrideSet = lYamlGoodNodes(oArgs.good_nodes)
LOG.info(f"lYamlGoodNodes {len(lGoodOverrideSet)} from {oArgs.good_nodes}")
if oArgs.white_onions:
l = lIntroductionPoints(controller, oArgs.white_onions.split(','))
lGoodOverrideSet += l
twhitelist_set.update(set(lYamlGoodNodes(oArgs.good_nodes)))
LOG.info(f"lYamlGoodNodes {len(twhitelist_set)} GuardNodes from {oArgs.good_nodes}")
exit_excludelist = []
global oGOOD_NODES
t = set()
if 'IntroductionPoints' in oGOOD_NODES[oGOOD_ROOT]['Relays'].keys():
t = set(oGOOD_NODES[oGOOD_ROOT]['Relays']['IntroductionPoints'])
# not working = maybe when stem is updated
w = set(oGOOD_NODES[oGOOD_ROOT]['Services'])
if oArgs.white_services:
w.update(oArgs.white_services.split(','))
t.update(lIntroductionPoints(controller, w))
if len(t) > 0:
LOG.info(f"IntroductionPoints {len(t)} nodes")
twhitelist_set.update(t)
texclude_set = set()
if oArgs.bad_nodes and os.path.exists(oArgs.bad_nodes):
if False and oArgs.bad_sections:
# BROKEN
sections = oArgs.bad_sections.split(',')
exit_excludelist = lYamlBadNodes(oArgs.bad_nodes,
lexclude_list = set(lYamlBadNodes(oArgs.bad_nodes,
lWanted=sections,
section=sEXCLUDE_EXIT_KEY)
section=sEXCLUDE_EXIT_KEY))
else:
exit_excludelist = lYamlBadNodes(oArgs.bad_nodes)
texclude_set = set(lYamlBadNodes(oArgs.bad_nodes))
LOG.info(f"lYamlBadNodes {len(exit_excludelist)}")
LOG.info(f"lYamlBadNodes {len(texclude_set)}")
tProofGoodFps = set()
ttrust_db_index = aTRUST_DB_INDEX.keys()
iDnsContact = 0
lBadContactUrls = []
iFakeContact = 0
aBadContacts = {}
aProofUri = {}
lConds = oArgs.contact.split(',')
iR = 0
@ -594,7 +580,7 @@ def iMain(lArgs):
continue
relay.fingerprint = relay.fingerprint.upper()
sofar = f"G:{len(list(aProofUri.keys()))} U:{iDnsContact} F:{iFakeContact} BF:{len(exit_excludelist)} GF:{len(tProofGoodFps)} #{iR}"
sofar = f"G:{len(aTRUST_DB.keys())} U:{iDnsContact} F:{iFakeContact} BF:{len(texclude_set)} GF:{len(ttrust_db_index)} #{iR}"
if not relay.exit_policy.is_exiting_allowed():
if sEXCLUDE_EXIT_KEY == 'ExcludeNodes':
LOG.debug(f"{relay.fingerprint} not an exit {sofar}")
@ -602,14 +588,9 @@ def iMain(lArgs):
LOG.warn(f"{relay.fingerprint} not an exit {sofar}")
# continue
if relay.fingerprint in tProofGoodFps:
# we already have it.
continue
if relay.fingerprint in aTRUST_DB:
if aTRUST_DB[relay.fingerprint]['fps'] and \
relay.fingerprint in aTRUST_DB[relay.fingerprint]['fps']:
tProofGoodFps.add(relay.fingerprint)
# great contact had good fps and we are in them
if relay.fingerprint in aTRUST_DB_INDEX.keys():
# a cached entry
continue
if type(relay.contact) == bytes:
@ -618,45 +599,62 @@ def iMain(lArgs):
if ('Empty' in lConds and not relay.contact) or \
('NoEmail' in lConds and relay.contact and not '@' in relay.contact):
exit_excludelist.append(relay.fingerprint)
texclude_set.add(relay.fingerprint)
continue
if not relay.contact:
if not relay.contact or not 'ciissversion:' in relay.contact:
# should be unreachable: 'Empty' should always be in lConds
continue
c = relay.contact.lower()
# first rough cut
i = c.find('url:')
if i >=0: c = c[i+4:]
i = c.find(' ')
if i >=0: c = c[:i]
domain = c.replace('https://', '').replace('http://', '').strip('/')
if domain in lKNOWN_NODNS:
c = c.replace('https://', '').replace('http://', '').strip('/')
i = c.find('/')
if i >=0: c = c[:i]
domain = c
LOG.info(f"{relay.fingerprint} domain={domain}")
if domain and domain in lKNOWN_NODNS:
LOG.info(f"{relay.fingerprint} skipping in lKNOWN_NODNS {domain} {sofar}")
exit_excludelist.append(relay.fingerprint)
texclude_set.add(relay.fingerprint)
continue
if domain:
ip = zResolveDomain(domain, lKNOWN_NODNS)
if not ip:
LOG.warn(f"{relay.fingerprint} {domain} did not resolve {sofar}")
texclude_set.add(relay.fingerprint)
iFakeContact += 1
continue
elif 'dns-rsa' in relay.contact.lower():
LOG.info(f"skipping 'dns-rsa' {relay.fingerprint}.{domain} {sofar}")
if 'dns-rsa' in relay.contact.lower():
target = f"{relay.fingerprint}.{domain}"
LOG.info(f"skipping 'dns-rsa' {target} {sofar}")
iDnsContact += 1
elif 'proof:uri-rsa' in relay.contact.lower():
a = aParseContact(relay.contact, relay.fingerprint)
if not a:
LOG.warn(f"{relay.fingerprint} did not parse {sofar}")
exit_excludelist.append(relay.fingerprint)
texclude_set.add(relay.fingerprint)
continue
if 'url' in a and a['url']:
if a['url'] in tBAD_URLS:
# The fp is using a contact with a URL we know is bad
LOG.info(f"{relay.fingerprint} skipping in tBAD_URLS {a['url']} {sofar}")
exit_excludelist.append(relay.fingerprint)
iFakeContact += 1
texclude_set.add(relay.fingerprint)
continue
domain = a['url'].replace('https://', '').replace('http://', '')
if domain in lKNOWN_NODNS:
# The fp is using a contact with a URL we know is bogus
LOG.info(f"{relay.fingerprint} skipping in lKNOWN_NODNS {a['url']} {sofar}")
exit_excludelist.append(relay.fingerprint)
iFakeContact += 1
texclude_set.add(relay.fingerprint)
continue
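Editor's note: a worked example of the rough-cut URL extraction a few lines above; the sample contact string is invented but follows the ContactInfo `url:` convention the loop parses.

```python
# Worked example of the domain extraction above; the contact string is invented.
c = 'email:tor[]example.org url:https://example.org/ proof:uri-rsa ciissversion:2'.lower()
i = c.find('url:')
if i >= 0: c = c[i + 4:]
i = c.find(' ')
if i >= 0: c = c[:i]
c = c.replace('https://', '').replace('http://', '').strip('/')
i = c.find('/')
if i >= 0: c = c[:i]
domain = c
assert domain == 'example.org'
```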
@ -671,49 +669,45 @@ def iMain(lArgs):
LOG.warn(f"{relay.fingerprint} did NOT VERIFY {sofar}")
# If it's giving contact info that doesnt check out
# it could be a bad exit with fake contact info
exit_excludelist.append(relay.fingerprint)
texclude_set.add(relay.fingerprint)
aBadContacts[relay.fingerprint] = b
continue
if relay.fingerprint not in b['fps']:
LOG.warn(f"{relay.fingerprint} the FP IS NOT in the list of fps {sofar}")
# assume a fp is using a bogus contact
exit_excludelist.append(relay.fingerprint)
texclude_set.add(relay.fingerprint)
iFakeContact += 1
aBadContacts[relay.fingerprint] = b
continue
# great contact had good fps and we are in them
tProofGoodFps.union(b['fps'])
if relay.fingerprint in aProofUri.keys():
# a cached entry
continue
LOG.info(f"{relay.fingerprint} verified {b['url']} {sofar}")
# add our contact info to the trustdb
aProofUri[relay.fingerprint] = b
aTRUST_DB[relay.fingerprint] = b
for elt in b['fps']:
aTRUST_DB_INDEX[elt] = b
if oArgs.proof_output and oArgs.log_level <= 20:
# write as we go, clobbering the previous temp file
with open(proof_output_tmp, 'wt') as oFYaml:
yaml.dump(aProofUri, indent=2, stream=oFYaml)
yaml.dump(aTRUST_DB, indent=2, stream=oFYaml)
oFYaml.close()
exit_excludelist = list(set(exit_excludelist).difference(set(lGoodOverrideSet)))
texclude_set = texclude_set.difference(twhitelist_set)
if oArgs.proof_output and aProofUri:
if oArgs.proof_output and aTRUST_DB:
with open(proof_output_tmp, 'wt') as oFYaml:
yaml.dump(aProofUri, indent=2, stream=oFYaml)
LOG.info(f"Wrote {len(list(aProofUri))} proof details to {oArgs.proof_output}")
yaml.dump(aTRUST_DB, indent=2, stream=oFYaml)
LOG.info(f"Wrote {len(list(aTRUST_DB.keys()))} good contact details to {oArgs.proof_output}")
oFYaml.close()
if os.path.exists(oArgs.proof_output):
bak = oArgs.proof_output +'.bak'
os.rename(oArgs.proof_output, bak)
os.rename(proof_output_tmp, oArgs.proof_output)
if oArgs.torrc_output and exit_excludelist:
if oArgs.torrc_output and texclude_set:
with open(oArgs.torrc_output, 'wt') as oFTorrc:
oFTorrc.write(f"{sEXCLUDE_EXIT_KEY} {','.join(exit_excludelist)}\n")
oFTorrc.write(f"{sINCLUDE_EXIT_KEY} {','.join(tProofGoodFps)}\n")
oFTorrc.write(f"{sEXCLUDE_EXIT_KEY} {','.join(texclude_set)}\n")
oFTorrc.write(f"{sINCLUDE_EXIT_KEY} {','.join(aTRUST_DB_INDEX.keys())}\n")
oFTorrc.write(f"{sINCLUDE_GUARD_KEY} {','.join(o[oGOOD_ROOT]['GuardNodes'])}\n")
LOG.info(f"Wrote tor configuration to {oArgs.torrc_output}")
oFTorrc.close()
@ -724,40 +718,46 @@ def iMain(lArgs):
yaml.dump(aBadContacts, indent=2, stream=oFYaml)
oFYaml.close()
global oBAD_NODES
oBAD_NODES[oBAD_ROOT]['ExcludeNodes']['BadExit'] = exit_excludelist
oBAD_NODES[oBAD_ROOT]['ExcludeNodes']['BadExit'] = list(texclude_set)
oBAD_NODES[oBAD_ROOT]['ExcludeDomains'] = lKNOWN_NODNS
vwrite_badnodes(oArgs, oBAD_NODES, str(len(exit_excludelist)))
vwrite_badnodes(oArgs, oBAD_NODES, str(len(texclude_set)))
global oGOOD_NODES
oGOOD_NODES['GoodNodes']['Relays']['ExitNodes'] = tProofGoodFps
vwrite_goodnodes(oArgs, oGOOD_NODES, str(len(tProofGoodFps)))
oGOOD_NODES['GoodNodes']['Relays']['ExitNodes'] = list(aTRUST_DB_INDEX.keys())
# GuardNodes are read-only
vwrite_goodnodes(oArgs, oGOOD_NODES, str(len(ttrust_db_index)))
retval = 0
try:
logging.getLogger('stem').setLevel(30)
try:
if exit_excludelist:
LOG.info(f"{sEXCLUDE_EXIT_KEY} {len(exit_excludelist)} net bad exit nodes")
controller.set_conf(sEXCLUDE_EXIT_KEY, exit_excludelist)
if texclude_set:
LOG.info(f"{sEXCLUDE_EXIT_KEY} {len(texclude_set)} net bad exit relays")
controller.set_conf(sEXCLUDE_EXIT_KEY, texclude_set)
except stem.SocketClosed as e:
LOG.error(f"Failed setting {sEXCLUDE_EXIT_KEY} bad exit nodes in Tor")
LOG.error(f"Failed setting {sEXCLUDE_EXIT_KEY} bad exit relays in Tor")
retval += 1
try:
if tProofGoodFps:
LOG.info(f"{sINCLUDE_EXIT_KEY} {len(tProofGoodFps)} good nodes")
controller.set_conf(sINCLUDE_EXIT_KEY, tProofGoodFps)
if aTRUST_DB_INDEX.keys():
LOG.info(f"{sINCLUDE_EXIT_KEY} {len(aTRUST_DB_INDEX.keys())} good relays")
controller.set_conf(sINCLUDE_EXIT_KEY, aTRUST_DB_INDEX.keys())
except stem.SocketClosed as e:
LOG.error(f"Failed setting {sINCLUDE_EXIT_KEY} good exit nodes in Tor")
retval += 1
try:
o = oGOOD_NODES
if 'GuardNodes' in o[oGOOD_ROOT].keys():
LOG.info(f"{sINCLUDE_GUARD_KEY} {len(o[oGOOD_ROOT]['GuardNodes'])} guard nodes")
controller.set_conf(sINCLUDE_GUARD_KEY, o[oGOOD_ROOT]['GuardNodes'])
if 'GuardNodes' in oGOOD_NODES[oGOOD_ROOT].keys():
LOG.info(f"{sINCLUDE_GUARD_KEY} {len(oGOOD_NODES[oGOOD_ROOT]['GuardNodes'])} guard nodes")
# FixMe: override StrictNodes for now; Tor may be unusable otherwise
controller.set_conf(sINCLUDE_GUARD_KEY,
oGOOD_NODES[oGOOD_ROOT]['GuardNodes'])
cur = controller.get_conf('StrictNodes')
if oArgs.strict_nodes and int(cur) != oArgs.strict_nodes:
LOG.info(f"OVERRIDING StrictNodes to {oArgs.strict_nodes}")
controller.set_conf('StrictNodes', oArgs.strict_nodes)
else:
LOG.info(f"StrictNodes is set to {cur}")
except stem.SocketClosed as e:
LOG.errro(f"Failed setting {sINCLUDE_EXIT_KEY} good exit nodes in Tor")
retval += 1
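Editor's note: one hedged observation on the `set_conf` calls above. stem documents the value argument as a string or a list of strings, so converting the set and `dict_keys` views to plain lists (sorted, for a deterministic configuration) is a safe defensive form; this is a sketch, not part of the patch.

```python
# Defensive form of the set_conf calls above: sorted() yields a plain list,
# which set_conf documents as an accepted type, and keeps the generated
# configuration stable between runs.
controller.set_conf(sEXCLUDE_EXIT_KEY, sorted(texclude_set))
controller.set_conf(sINCLUDE_EXIT_KEY, sorted(aTRUST_DB_INDEX.keys()))
```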

@ -9,9 +9,11 @@ from requests.utils import (
select_proxy,
urldefragauth,
)
import urllib3
from urllib3.util import parse_url
from urllib3.util.retry import Retry
from urllib3.util import Timeout as TimeoutSauce
from urllib3.util.ssl_match_hostname import match_hostname as match_hostname
DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
@ -262,8 +264,15 @@ class HTTPSAdapter(HTTPAdapter):
return self.build_response(request, resp)
from urllib3.util.ssl_match_hostname import match_hostname as _match_hostname
def match_hostname(cert, hostname):
def ballow_subdomain_matching(hostname, dnsnames):
for elt in dnsnames:
if len(hostname.split('.')) > len(elt.split('.')) and \
hostname.endswith(elt):
# parent
return True
return False
def my_match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
rules are followed, but IP addresses are not accepted for *hostname*.
@ -316,6 +325,8 @@ def match_hostname(cert, hostname):
dnsnames.append(value)
if len(dnsnames) > 1:
# soften this to allow subdomain matching
if ballow_subdomain_matching(hostname, dnsnames):
return
raise CertificateError(
"hostname %r "
"doesn't match any of %s" % (hostname, ", ".join(map(repr, dnsnames)))
@ -327,5 +338,4 @@ def match_hostname(cert, hostname):
"no appropriate commonName or subjectAltName fields were found"
)
urllib3.util.ssl_match_hostname = match_hostname
urllib3.util.ssl_match_hostname.match_hostname = my_match_hostname
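Editor's note: a worked example of the softened matching, with invented hostnames: a certificate listing the parent domain is now accepted for a longer subdomain, while unrelated names still fail.

```python
# Invented hostnames; exercises ballow_subdomain_matching defined above.
assert ballow_subdomain_matching('relay.example.org', ['example.org'])
assert not ballow_subdomain_matching('example.org', ['other.net'])
```

Note that a bare `endswith` also accepts a name such as `x.badexample.org` for `example.org`; requiring the match to start at a `.` boundary would be stricter.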

@ -33,7 +33,6 @@ try:
except:
TorContactInfoParser = None
class TrustorError(Exception): pass
# https://stackoverflow.com/questions/2532053/validate-a-hostname-string
@ -235,7 +234,10 @@ def oDownloadUrl(domain, sCAfile, timeout=30, host='127.0.0.1', port=9050):
except Exception as e:
LOG.warn(f"Could not import HTTPSAdapter {e}")
HTTPSAdapter = None
raise SystemExit(f"{e}")
else:
LOG.info(f"Loaded HTTPSAdapter")
try:
with requests.sessions.Session() as session:
if HTTPSAdapter:
