From d11d95aafed8add1dcc6a9d36e85daf5c95af0b2 Mon Sep 17 00:00:00 2001
From: emdee
Date: Wed, 9 Nov 2022 05:43:26 +0000
Subject: [PATCH] add https_adapter.py

---
 exclude_badExits.py | 197 +++++++++++++++++++++++-----------
 https_adapter.py    | 285 ++++++++++++++++++++++++++++++++++++++++++++
 support_onions.py   | 184 ++++++++++++++++++++++++++
 3 files changed, 607 insertions(+), 59 deletions(-)
 create mode 100644 https_adapter.py
 create mode 100644 support_onions.py
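
Notes (this section sits before the first diff and is ignored by git-am):

Driving the checker from Python, roughly - a sketch assuming iMain()
feeds its lArgs list to the argparse options added below (its signature
suggests it does); the paths and ports shown are just the script's own
defaults, spelled out for clarity:

    from exclude_badExits import iMain
    iMain(['--proxy_ctl', '/run/tor/control',
           '--proxy_port', '9050',
           '--good_nodes', '/etc/tor/yaml/torrc-goodnodes.yaml',
           '--bad_nodes', '/etc/tor/yaml/torrc-badnodes.yaml',
           '--contact', 'Empty,NoEmail'])
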
diff --git a/exclude_badExits.py b/exclude_badExits.py
index 9dcb8a4..97e0291 100644
--- a/exclude_badExits.py
+++ b/exclude_badExits.py
@@ -87,6 +87,7 @@
 import time
 import argparse
 from io import StringIO
+from urllib3.util.ssl_match_hostname import CertificateError
 from stem import InvalidRequest
 from stem.control import Controller
 from stem.connection import IncorrectPassword
@@ -107,7 +108,10 @@ try:
     import coloredlogs
 except ImportError as e:
     coloredlogs = False
-from trustor_poc import lDownloadUrlFps, idns_validate
+
+import stem
+from trustor_poc import oDownloadUrl, idns_validate, TrustorError
+from support_onions import sTorResolve, getaddrinfo, bAreWeConnected
 
 global LOG
 import logging
@@ -115,6 +118,7 @@ import warnings
 warnings.filterwarnings('ignore')
 LOG = logging.getLogger()
 
+ETC_DIR = '/etc/tor/yaml'
 aTRUST_DB = {}
 sDETAILS_URL = "https://metrics.torproject.org/rs.html#details/"
 # You can call this while bootstrapping
@@ -122,6 +126,20 @@ sEXCLUDE_EXIT_KEY = 'ExcludeNodes'
 sINCLUDE_EXIT_KEY = 'ExitNodes'
 sINCLUDE_GUARD_KEY = 'EntryNodes'
 
+# maybe we should check these each time, but we
+# got them by sorting bad relays in the wild
+lKNOWN_NODNS = [
+    '0x0.is',
+    'a9.wtf',
+    'arvanode.net',
+    'dodo.pm',
+    'galtland.network',
+    'interfesse.net',
+    'kryptonit.org',
+    'nx42.de',
+    'tor-exit-2.aa78i2efsewr0neeknk.xyz',
+    'tor-exit-3.aa78i2efsewr0neeknk.xyz',
+]
 def oMakeController(sSock='', port=9051):
     import getpass
     if sSock and os.path.exists(sSock):
@@ -158,15 +176,17 @@ def icheck_torrc(sFile, oArgs):
     l = open(sFile, 'rt').readlines()
     a = {}
     for elt in l:
+        elt = elt.strip()
+        if not elt or not ' ' in elt: continue
         k,v = elt.split(' ', 1)
         a[k] = v
-    keys = list(a.keys())
+    keys = a
 
     if 'HashedControlPassword' not in keys:
         LOG.info('Add HashedControlPassword for security')
         print('run: tor --hashcontrolpassword ')
-    if 'ExcludeNodes' in keys:
-        elt = 'ExcludeNodes.ExcludeExitNodes.BadExit'
+    if 'ExcludeExitNodes' in keys:
+        elt = 'BadNodes.ExcludeExitNodes.BadExit'
         LOG.warn(f"Remove ExcludeNodes and move then to {oArgs.bad_nodes}")
         print(f"move to the {elt} section as a list")
     if 'GuardNodes' in keys:
@@ -174,7 +194,7 @@ def icheck_torrc(sFile, oArgs):
         LOG.warn(f"Remove GuardNodes and move then to {oArgs.good_nodes}")
         print(f"move to the {elt} section as a list")
     if 'ExcludeNodes' in keys:
-        elt = 'ExcludeNodes.ExcludeExitNodes.BadExit'
+        elt = 'BadNodes.ExcludeNodes.BadExit'
         LOG.warn(f"Remove ExcludeNodes and move then to {oArgs.bad_nodes}")
         print(f"move to the {elt} section as a list")
     if 'ControlSocket' not in keys and os.path.exists('/run/tor/control'):
@@ -250,19 +270,20 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
         a[elt] = a[elt].replace('[]', '@')
 
     a.update({'fps': []})
+    keys = list(a.keys())
 
     # test the url for fps and add it to the array
-    if 'proof' not in a:
-        LOG.warn(f"{fp} 'proof' not in {list(a.keys())}")
+    if 'proof' not in keys:
+        LOG.warn(f"{fp} 'proof' not in {keys}")
         return a
 
-    if 'url' not in a:
-        if 'uri' not in a:
+    if 'url' not in keys:
+        if 'uri' not in keys:
             a['url'] = ''
-            LOG.warn(f"{fp} url and uri not in {list(a.keys())}")
+            LOG.warn(f"{fp} url and uri not in {keys}")
             return a
         a['url'] = a['uri']
-        LOG.debug(f"{fp} 'uri' but not 'url' in {list(a.keys())}")
+        LOG.debug(f"{fp} 'uri' but not 'url' in {keys}")
         # drop through
 
     if a['url'].startswith('http:'):
@@ -270,9 +291,20 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
     elif not a['url'].startswith('https:'):
         a['url'] = 'https:' +a['url']
 
-    # domain should be a unique ket for contacts
+    # domain should be a unique key for contacts
    domain = a['url'][8:]
-
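+    # Resolve the contact domain before fetching the proof: first through
+    # tor's SocksPort (sTorResolve), then a plain getaddrinfo() as fallback;
+    # a domain that resolves nowhere cannot serve a proof, so give up early.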
+    try:
+        ip = sTorResolve(domain)
+    except Exception as e:
+        lpair = getaddrinfo(domain, 443)
+        if lpair is None:
+            LOG.warn(f"TorResolv and getaddrinfo failed for {domain}")
+            return a
+        ip = lpair[0]
+
     if a['proof'] not in ['uri-rsa']:
         # only support uri for now
         if False and ub_ctx:
             fp_domain = fp +'.'+domain
             if idns_validate(fp_domain,
@@ -285,18 +314,33 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
         LOG.warn(f"{fp} proof={a['proof']} not supported yet")
         return a
 
-    LOG.debug(f"{len(list(a.keys()))} contact fields for {fp}")
+    LOG.debug(f"{len(keys)} contact fields for {fp}")
     try:
         LOG.debug(f"Downloading from {domain} for {fp}")
-        l = lDownloadUrlFps(domain, https_cafile,
-                            timeout=timeout, host=host, port=port)
-    except Exception as e:
-        LOG.exception(f"Error downloading from {domain} for {fp} {e}")
+        o = oDownloadUrl(domain, https_cafile,
+                         timeout=timeout, host=host, port=port)
+        # requests response: text "reason", "status_code"
+    except AttributeError as e:
+        LOG.exception(f"AttributeError downloading from {domain} {e}")
+    except CertificateError as e:
+        LOG.warn(f"CertificateError downloading from {domain} {e}")
         lBAD_URLS += [a['url']]
+    except TrustorError as e:
+        LOG.warn(f"TrustorError downloading from {domain} {e.args}")
+        lBAD_URLS += [a['url']]
+    except BaseException as e:
+        LOG.error(f"Exception {type(e)} downloading from {domain} {e}")
     else:
+        if o.status_code >= 300:
+            LOG.warn(f"Error downloading from {domain} {o.status_code} {o.reason}")
+            # any reason to retry?
+            lBAD_URLS += [a['url']]
+            return a
+
+        l = o.text.upper().strip().split('\n')
         if not l: # already squacked in lD
-            LOG.warn(f"Downloading from {domain} failed for {fp}")
+            LOG.warn(f"Downloading from {domain} empty for {fp}")
             lBAD_URLS += [a['url']]
         else:
             a['fps'] = [elt for elt in l if elt and len(elt) == 40
@@ -308,7 +352,6 @@ def aParseContact(contact, fp):
     See the Tor ContactInfo Information Sharing Specification v2
     https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/
     """
-    contact = str(contact, 'UTF-8')
     l = [line for line in contact.strip().replace('"', '').split(' ')
          if ':' in line]
     LOG.debug(f"{fp} {len(l)} fields")
@@ -319,17 +362,6 @@ def aParseContact(contact, fp):
         a = yaml.safe_load(oFd)
     return a
 
-def bAreWeConnected():
-    # FixMe: Linux only
-    sFile = f"/proc/{os.getpid()}/net/route"
-    if not os.path.isfile(sFile): return None
-    i = 0
-    for elt in open(sFile, "r").readlines():
-        if elt.startswith('Iface'): continue
-        if elt.startswith('lo'): continue
-        i += 1
-    return i > 0
-
 def vwait_for_controller(controller, wait_boot):
     if bAreWeConnected() is False:
         raise SystemExit("we are not connected")
@@ -412,22 +444,22 @@ def oMainArgparser(_=None):
     parser.add_argument('--proxy_port', '--proxy-port', default=9050, type=int,
                         help='proxy control port')
     parser.add_argument('--proxy_ctl', '--proxy-ctl',
-                        default='/run/tor/control',
+                        default='/run/tor/control' if os.path.exists('/run/tor/control') else 9051,
                         type=str,
                         help='control socket - or port')
 
     parser.add_argument('--torrc',
-                        default='',
+                        default='/etc/tor/torrc-defaults',
                         type=str,
                         help='torrc to check for suggestions')
-    parser.add_argument('--timeout', default=30, type=int,
+    parser.add_argument('--timeout', default=60, type=int,
                         help='proxy download connect timeout')
 
     parser.add_argument('--good_nodes', type=str,
-                        default='/etc/tor/yaml/torrc-goodnodes.yaml',
+                        default=os.path.join(ETC_DIR, 'torrc-goodnodes.yaml'),
                         help="Yaml file of good nodes that should not be excluded")
     parser.add_argument('--bad_nodes', type=str,
-                        default='/etc/tor/yaml/torrc-badnodes.yaml',
+                        default=os.path.join(ETC_DIR, 'torrc-badnodes.yaml'),
                         help="Yaml file of bad nodes that should also be excluded")
     parser.add_argument('--contact', type=str, default='Empty,NoEmail',
                         help="comma sep list of conditions - Empty,NoEmail")
@@ -446,7 +478,7 @@ def oMainArgparser(_=None):
                         help="comma sep. list of onions to whitelist their introduction points - BROKEN")
     parser.add_argument('--torrc_output', type=str, default='',
                         help="Write the torrc configuration to a file")
-    parser.add_argument('--proof_output', type=str, default='',
+    parser.add_argument('--proof_output', type=str, default=os.path.join(ETC_DIR, 'proof.yaml'),
                         help="Write the proof data of the included nodes to a YAML file")
     return parser
 
@@ -457,7 +489,13 @@ def vwrite_badnodes(oArgs):
     tmp = oArgs.bad_nodes +'.tmp'
     bak = oArgs.bad_nodes +'.bak'
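+    # oBAD_NODES is a dict mirroring the YAML layout this script reads and
+    # writes (shape inferred from the keys used in iMain below):
+    #   BadNodes:
+    #     ExcludeNodes:
+    #       BadExit:
+    #         - <40-hex relay fingerprint>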
     with open(tmp, 'wt') as oFYaml:
         yaml.dump(oBAD_NODES, indent=2, stream=oFYaml)
-        LOG.info(f"Wrote {len(list(exit_excludelist))} proof details to {oArgs.bad_nodes}")
+        LOG.info(f"Wrote the bad-node sections to {oArgs.bad_nodes}")
         oFYaml.close()
     if os.path.exists(oArgs.bad_nodes):
         os.rename(oArgs.bad_nodes, bak)
@@ -470,7 +502,7 @@ def vwrite_goodnodes(oArgs):
     tmp = oArgs.good_nodes +'.tmp'
     bak = oArgs.good_nodes +'.bak'
     with open(tmp, 'wt') as oFYaml:
         yaml.dump(oGOOD_NODES, indent=2, stream=oFYaml)
-        LOG.info(f"Wrote {len(list(exit_excludelist))} proof details to {oArgs.good_nodes}")
+        LOG.info(f"Wrote {len(list(oGOOD_NODES.keys()))} good node sections to {oArgs.good_nodes}")
         oFYaml.close()
     if os.path.exists(oArgs.good_nodes):
         os.rename(oArgs.good_nodes, bak)
@@ -497,7 +529,7 @@ def iMain(lArgs):
     except:
         aTRUST_DB = {}
 
-    if oArgs.proxy_ctl.startswith('/') or os.path.exists(oArgs.proxy_ctl):
+    if os.path.exists(oArgs.proxy_ctl):
         controller = oMakeController(sSock=oArgs.proxy_ctl)
     else:
         port =int(oArgs.proxy_ctl)
@@ -543,18 +575,20 @@ def iMain(lArgs):
     lProofGoodFps = []
     iDnsContact = 0
-    iBadContact = 0
+    lBadContactUrls = []
     iFakeContact = 0
     aBadContacts = {}
     aProofUri = {}
     lConds = oArgs.contact.split(',')
+    iR = 0
 
     for relay in relays:
+        iR += 1
         if not is_valid_fingerprint(relay.fingerprint):
             LOG.warn('Invalid Fingerprint: %s' % relay.fingerprint)
             continue
         relay.fingerprint = relay.fingerprint.upper()
-        sofar = f"G:{len(list(aProofUri.keys()))} U:{iDnsContact} F:{iFakeContact} BF:{len(exit_excludelist)} GF:{len(lProofGoodFps)}"
+        sofar = f"G:{len(list(aProofUri.keys()))} U:{iDnsContact} F:{iFakeContact} BF:{len(exit_excludelist)} GF:{len(lProofGoodFps)} #{iR}"
 
         if not relay.exit_policy.is_exiting_allowed():
             if sEXCLUDE_EXIT_KEY == 'ExcludeNodes':
                 LOG.debug(f"{relay.fingerprint} not an exit {sofar}")
@@ -573,11 +607,24 @@ def iMain(lArgs):
             continue
 
         if relay.contact and b'dns-rsa' in relay.contact.lower():
-            LOG.info(f"{relay.fingerprint} skipping 'dns-rsa' {sofar}")
+            relay.contact = str(relay.contact, 'UTF-8')
+            c = relay.contact.lower()
+            i = c.find('url:')
+            if i >= 0:
+                c = c[i+4:]
+            i = c.find(' ')
+            if i >= 0:
+                c = c[:i]
+            LOG.info(f"{relay.fingerprint} skipping 'dns-rsa' {c} {sofar}")
             iDnsContact += 1
             continue
 
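+        # proof:uri-rsa, per the ContactInfo sharing spec linked in
+        # aParseContact: the contact's url is expected to serve the relay's
+        # fingerprint under .well-known/tor-relay/rsa-fingerprint.txt, which
+        # aVerifyContact() downloads and checks below.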
verify {sofar}") + LOG.warn(f"{relay.fingerprint} did NOT VERIFY {sofar}") # If it's giving contact info that doesnt check out # it could be a bad exit with fake contact info exit_excludelist.append(relay.fingerprint) @@ -605,7 +655,7 @@ def iMain(lArgs): continue if relay.fingerprint not in b['fps']: - LOG.warn(f"{relay.fingerprint} the fp is not in the list of fps {sofar}") + LOG.warn(f"{relay.fingerprint} the FP IS NOT in the list of fps {sofar}") # assume a fp is using a bogus contact exit_excludelist.append(relay.fingerprint) iFakeContact += 1 @@ -657,23 +707,39 @@ def iMain(lArgs): global oBAD_NODES oBAD_NODES['BadNodes']['ExcludeNodes']['BadExit'] = exit_excludelist vwrite_badnodes(oArgs) - # nothing changed vwrite_goodnodes(oArgs) + global oGOOD_NODES + oGOOD_NODES['GoodNodes']['Relays']['ExitNodes'] = lProofGoodFps + vwrite_goodnodes(oArgs) retval = 0 try: logging.getLogger('stem').setLevel(30) - if exit_excludelist: - LOG.info(f"{sEXCLUDE_EXIT_KEY} {len(exit_excludelist)} net bad exit nodes") - controller.set_conf(sEXCLUDE_EXIT_KEY, exit_excludelist) - - if lProofGoodFps: - LOG.info(f"{sINCLUDE_EXIT_KEY} {len(lProofGoodFps)} good nodes") - controller.set_conf(sINCLUDE_EXIT_KEY, lProofGoodFps) - - o = oGOOD_NODES - if 'GuardNodes' in o[oGOOD_ROOT].keys(): - LOG.info(f"{sINCLUDE_GUARD_KEY} {len(o[oGOOD_ROOT]['GuardNodes'])} guard nodes") - controller.set_conf(sINCLUDE_GUARD_KEY, o[oGOOD_ROOT]['GuardNodes']) + try: + if exit_excludelist: + LOG.info(f"{sEXCLUDE_EXIT_KEY} {len(exit_excludelist)} net bad exit nodes") + controller.set_conf(sEXCLUDE_EXIT_KEY, exit_excludelist) + + except stem.SocketClosed as e: + LOG.error(f"Failed setting {sEXCLUDE_EXIT_KEY} bad exit nodes in Tor") + retval += 1 + + try: + if lProofGoodFps: + LOG.info(f"{sINCLUDE_EXIT_KEY} {len(lProofGoodFps)} good nodes") + controller.set_conf(sINCLUDE_EXIT_KEY, lProofGoodFps) + except stem.SocketClosed as e: + LOG.error(f"Failed setting {sINCLUDE_EXIT_KEY} good exit nodes in Tor") + retval += 1 + + try: + o = oGOOD_NODES + if 'GuardNodes' in o[oGOOD_ROOT].keys(): + LOG.info(f"{sINCLUDE_GUARD_KEY} {len(o[oGOOD_ROOT]['GuardNodes'])} guard nodes") + controller.set_conf(sINCLUDE_GUARD_KEY, o[oGOOD_ROOT]['GuardNodes']) + except stem.SocketClosed as e: + LOG.errro(f"Failed setting {sINCLUDE_EXIT_KEY} good exit nodes in Tor") + retval += 1 + ) return retval except InvalidRequest as e: diff --git a/https_adapter.py b/https_adapter.py new file mode 100644 index 0000000..c242c95 --- /dev/null +++ b/https_adapter.py @@ -0,0 +1,263 @@ +# -*- mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*- + +from requests import adapters +from requests.utils import ( + DEFAULT_CA_BUNDLE_PATH, + get_auth_from_url, + get_encoding_from_headers, + prepend_scheme_if_needed, + select_proxy, + urldefragauth, +) +from urllib3.util import parse_url +from urllib3.util.retry import Retry +from urllib3.util import Timeout as TimeoutSauce + +DEFAULT_POOLBLOCK = False +DEFAULT_POOLSIZE = 10 +DEFAULT_RETRIES = 0 +DEFAULT_POOL_TIMEOUT = None + +class HTTPAdapter(adapters.HTTPAdapter): + def __init__(self, + pool_connections=DEFAULT_POOLSIZE, + pool_maxsize=DEFAULT_POOLSIZE, + max_retries=DEFAULT_RETRIES, + pool_block=DEFAULT_POOLBLOCK + ): + self.config = {} + self.proxy_manager = {} + + if isinstance(max_retries, Retry): + self.max_retries = max_retries + else: + max_retries = Retry.from_int(max_retries) + self.max_retries = max_retries + + self._pool_connections = pool_connections + self._pool_maxsize = pool_maxsize + 
+
+    def get_connection(self, url, proxies=None, use_forwarding_for_https=True):
+        """Returns a urllib3 connection for the given URL. This should not be
+        called from user code, and is only exposed for use when subclassing the
+        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+        :param url: The URL to connect to.
+        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
+        :rtype: urllib3.ConnectionPool
+        """
+        proxy = select_proxy(url, proxies)
+
+        if proxy:
+            proxy = prepend_scheme_if_needed(proxy, "http")
+            proxy_url = parse_url(proxy)
+            if not proxy_url.host:
+                raise InvalidProxyURL(
+                    "Please check proxy URL. It is malformed "
+                    "and could be missing the host."
+                )
+            proxy_manager = self.proxy_manager_for(proxy)
+            conn = proxy_manager.connection_from_url(url)
+        else:
+            # Only scheme should be lower case
+            parsed = urlparse(url)
+            url = parsed.geturl()
+            # urllib3's PoolManager.connection_from_url() does not accept
+            # use_forwarding_for_https; that option applies to proxied pools
+            conn = self.poolmanager.connection_from_url(url)
+
+        return conn
+
+    def send(
+        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
+    ):
+        """Sends PreparedRequest object. Returns Response object.
+
+        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+        :param stream: (optional) Whether to stream the request content.
+        :param timeout: (optional) How long to wait for the server to send
+            data before giving up, as a float, or a :ref:`(connect timeout,
+            read timeout) <timeouts>` tuple.
+        :type timeout: float or tuple or urllib3 Timeout object
+        :param verify: (optional) Either a boolean, in which case it controls whether
+            we verify the server's TLS certificate, or a string, in which case it
+            must be a path to a CA bundle to use
+        :param cert: (optional) Any user-provided SSL certificate to be trusted.
+        :param proxies: (optional) The proxies dictionary to apply to the request.
+        :rtype: requests.Response
+        """
+
+        try:
+            #? _socks_options
+            conn = self.get_connection(request.url, proxies, use_forwarding_for_https=True)
+        except LocationValueError as e:
+            raise InvalidURL(e, request=request)
+
+        self.cert_verify(conn, request.url, verify, cert)
+        url = self.request_url(request, proxies)
+        self.add_headers(
+            request,
+            stream=stream,
+            timeout=timeout,
+            verify=verify,
+            cert=cert,
+            proxies=proxies,
+        )
+
+        chunked = not (request.body is None or "Content-Length" in request.headers)
+
+        if isinstance(timeout, tuple):
+            try:
+                connect, read = timeout
+                timeout = TimeoutSauce(connect=connect, read=read)
+            except ValueError:
+                raise ValueError(
+                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
+                    f"or a single float to set both timeouts to the same value."
+                )
+        elif isinstance(timeout, TimeoutSauce):
+            pass
+        else:
+            timeout = TimeoutSauce(connect=timeout, read=timeout)
+
+        try:
+            if not chunked:
+                resp = conn.urlopen(
+                    method=request.method,
+                    url=url,
+                    body=request.body,
+                    headers=request.headers,
+                    redirect=False,
+                    assert_same_host=False,
+                    preload_content=False,
+                    decode_content=False,
+                    retries=self.max_retries,
+                    timeout=timeout,
+                )
+
+            # Send the request.
+            else:
+                if hasattr(conn, "proxy_pool"):
+                    conn = conn.proxy_pool
+
+                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
+
+                try:
+                    skip_host = "Host" in request.headers
+                    low_conn.putrequest(
+                        request.method,
+                        url,
+                        skip_accept_encoding=True,
+                        skip_host=skip_host,
+                    )
+
+                    for header, value in request.headers.items():
+                        low_conn.putheader(header, value)
+
+                    low_conn.endheaders()
+
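+                    # manual chunked framing: <hex length>\r\n<chunk>\r\n per
+                    # chunk, terminated by the final 0\r\n\r\n sequence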
+                    for i in request.body:
+                        low_conn.send(hex(len(i))[2:].encode("utf-8"))
+                        low_conn.send(b"\r\n")
+                        low_conn.send(i)
+                        low_conn.send(b"\r\n")
+                    low_conn.send(b"0\r\n\r\n")
+
+                    # Receive the response from the server
+                    r = low_conn.getresponse()
+
+                    resp = HTTPResponse.from_httplib(
+                        r,
+                        pool=conn,
+                        connection=low_conn,
+                        preload_content=False,
+                        decode_content=False,
+                    )
+                except Exception:
+                    # If we hit any problems here, clean up the connection.
+                    # Then, raise so that we can handle the actual exception.
+                    low_conn.close()
+                    raise
+
+        except (ProtocolError, OSError) as err:
+            raise ConnectionError(err, request=request)
+
+        except MaxRetryError as e:
+            if isinstance(e.reason, ConnectTimeoutError):
+                # TODO: Remove this in 3.0.0: see #2811
+                if not isinstance(e.reason, NewConnectionError):
+                    raise ConnectTimeout(e, request=request)
+
+            if isinstance(e.reason, ResponseError):
+                raise RetryError(e, request=request)
+
+            if isinstance(e.reason, _ProxyError):
+                raise ProxyError(e, request=request)
+
+            if isinstance(e.reason, _SSLError):
+                # This branch is for urllib3 v1.22 and later.
+                raise SSLError(e, request=request)
+
+            raise ConnectionError(e, request=request)
+
+        except ClosedPoolError as e:
+            raise ConnectionError(e, request=request)
+
+        except _ProxyError as e:
+            raise ProxyError(e)
+
+        except (_SSLError, _HTTPError) as e:
+            if isinstance(e, _SSLError):
+                # This branch is for urllib3 versions earlier than v1.22
+                raise SSLError(e, request=request)
+            elif isinstance(e, ReadTimeoutError):
+                raise ReadTimeout(e, request=request)
+            elif isinstance(e, _InvalidHeader):
+                raise InvalidHeader(e, request=request)
+            else:
+                raise
+
+        return self.build_response(request, resp)
diff --git a/support_onions.py b/support_onions.py
new file mode 100644
index 0000000..40d2c0e
--- /dev/null
+++ b/support_onions.py
@@ -0,0 +1,184 @@
+# -*- mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*-
+
+import os
+import sys
+import argparse
+import re
+import traceback
+import logging
+import shutil
+import json
+import socket
+import select
+from ctypes import *
+import time, contextlib
+import unittest
+from random import Random
+random = Random()
+
+try:
+    import stem
+    from stem.control import Controller
+except ImportError:
+    stem = False
+
+LOG = logging.getLogger()
+
+bHAVE_TORR = shutil.which('tor-resolve')
+
+def bAreWeConnected():
+    # FixMe: Linux only
+    sFile = f"/proc/{os.getpid()}/net/route"
+    if not os.path.isfile(sFile): return None
+    i = 0
+    for elt in open(sFile, "r").readlines():
+        if elt.startswith('Iface'): continue
+        if elt.startswith('lo'): continue
+        i += 1
+    return i > 0
+
+def oGetStemController(log_level=10, sock='/run/tor/control', port=9051):
+    # minimal stand-in: the callers below use this helper but the original
+    # did not ship it; authentication is left to stem (cookie auth assumed)
+    logging.getLogger('stem').setLevel(log_level)
+    if os.path.exists(sock):
+        controller = Controller.from_socket_file(path=sock)
+    else:
+        controller = Controller.from_port(port=port)
+    controller.authenticate()
+    return controller
+
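+# MAPADDRESS with a 0.0.0.0 source asks tor to pick an unused virtual
+# address mapping to `target` (see MAPADDRESS in tor's control-spec).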
+def sMapaddressResolv(target, iPort=9051):
+    if not stem:
+        LOG.warn('please install the stem Python package')
+        return ''
+
+    try:
+        controller = oGetStemController(log_level=10)
+
+        map_dict = {"0.0.0.0": target}
+        map_ret = controller.map_address(map_dict)
+
+        return map_ret
+    except Exception as e:
+        LOG.exception(e)
+    return ''
+
+def lIntroductionPoints(target, iPort=9051):
+    if stem == False: return ''
+    from stem import StreamStatus
+    from stem.control import EventType, Controller
+    import getpass
+    l = []
+    try:
+        controller = oGetStemController(log_level=10)
+        desc = controller.get_hidden_service_descriptor(target)
+        l = desc.introduction_points()
+        if not l:
+            LOG.warn(f"NO introduction points for {target}\n")
+            return l
+        LOG.debug(f"{len(l)} introduction points for {target}")
+        lp = []
+        for introduction_point in l:
+            lp.append('%s:%s => %s' % (introduction_point.address,
+                                       introduction_point.port,
+                                       introduction_point.identifier))
+        l = lp
+    except Exception as e:
+        LOG.exception(e)
+    return l
+
+def sTorResolve(target,
+                verbose=False,
+                sHost='127.0.0.1',
+                iPort=9050,
+                SOCK_TIMEOUT_SECONDS=10.0,
+                SOCK_TIMEOUT_TRIES=3,
+                ):
+    MAX_INFO_RESPONSE_PACKET_LENGTH = 8
+
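+    # SOCKS4a RESOLVE request (tor's 0xF0 extension): VN=0x04, CD=0xF0,
+    # DSTPORT=0x0000, DSTIP=0.0.0.1 (the SOCKS4a marker), an empty
+    # NUL-terminated USERID, then the hostname, also NUL-terminated.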
+    seb = b"\x04\xf0\x00\x00\x00\x00\x00\x01\x00"
+    seb += bytes(target, 'US-ASCII') + b"\x00"
+    assert len(seb) == 10+len(target), str(len(seb))+repr(seb)
+
+#    LOG.debug(f"0 Sending {len(seb)} to The TOR proxy {seb}")
+
+    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    sock.connect((sHost, iPort))
+
+    sock.settimeout(SOCK_TIMEOUT_SECONDS)
+    oRet = sock.sendall(seb)
+
+    i = 0
+    data = ''
+    while i < SOCK_TIMEOUT_TRIES:
+        i += 1
+        time.sleep(3)
+        lReady = select.select([sock.fileno()], [], [],
+                               SOCK_TIMEOUT_SECONDS)
+        if not lReady[0]: continue
+        try:
+            flags=socket.MSG_WAITALL
+            data = sock.recv(MAX_INFO_RESPONSE_PACKET_LENGTH, flags)
+        except socket.timeout:
+            LOG.warn("4 The TOR proxy " \
+                     +repr((sHost, iPort)) \
+                     +" didnt reply in " + str(SOCK_TIMEOUT_SECONDS) + " sec."
+                     +" #" +str(i))
+        except Exception as e:
+            LOG.error("4 The TOR proxy " \
+                      +repr((sHost, iPort)) \
+                      +" errored with " + str(e)
+                      +" #" +str(i))
+            sock.close()
+            raise SystemExit(4)
+        else:
+            if len(data) > 0: break
+
+    if len(data) == 0:
+        if i >= SOCK_TIMEOUT_TRIES:
+            sLabel = "5 No reply #"
+        else:
+            sLabel = "5 No data #"
+        LOG.info(sLabel +f"{i} from {sHost} {iPort}")
+        sock.close()
+        raise SystemExit(5)
+
+    assert len(data) >= 8
+    packet_sf = data[1]
+    if packet_sf == 90:
+        # 90 = request granted
+        assert f"{packet_sf}" == "90", f"packet_sf = {packet_sf}"
+        return f"{data[4]}.{data[5]}.{data[6]}.{data[7]}"
+    else:
+        # 91 = request rejected or failed
+        LOG.warn(f"tor-resolve failed for {target} from {sHost} {iPort}")
+        if bHAVE_TORR:
+            os.system(f"tor-resolve -4 {target} > /tmp/e 2>/dev/null")
+#        os.system("strace tor-resolve -4 "+target+" 2>&1|grep '^sen\|^rec'")
+
+    return ''
+
+def getaddrinfo(sHost, sPort):
+    # do this the explicit way - I've seen the compact connect fail
+    # >>> sHost, sPort = 'l27.0.0.1', 33446
+    # >>> sock.connect((sHost, sPort))
+    # socket.gaierror: [Errno -2] Name or service not known
+    try:
+        lElts = socket.getaddrinfo(sHost, int(sPort), socket.AF_INET)
+        lElts = list(filter(lambda elt: elt[1] == socket.SOCK_DGRAM, lElts))
+        assert len(lElts) == 1, repr(lElts)
+        lPair = lElts[0][-1]
+        assert len(lPair) == 2, repr(lPair)
+        assert type(lPair[1]) == int, repr(lPair)
+    except Exception as e:
+        LOG.exception(e)
+        return None
+    return lPair
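
A quick smoke test of the two new modules (sketch; assumes a local tor
with SocksPort 9050 and the PySocks extra installed so requests accepts
socks5h proxies - everything else named here ships in this patch):

    import requests
    from https_adapter import HTTPSAdapter
    from support_onions import sTorResolve, getaddrinfo

    s = requests.Session()
    s.mount('https://', HTTPSAdapter(max_retries=3))
    s.proxies = {'https': 'socks5h://127.0.0.1:9050'}
    print(sTorResolve('torproject.org'))       # SOCKS4a RESOLVE through tor
    print(getaddrinfo('torproject.org', 443))  # plain getaddrinfo fallback
    r = s.get('https://check.torproject.org/', timeout=60)
    print(r.status_code, r.reason)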