Merge pull request #1 from PavelRykov/master

Update to 1.3a and 1.3.1
wheever 2015-12-23 09:40:59 +08:00
commit 84376d8844
8 changed files with 1220 additions and 1075 deletions

CA.crt

@@ -1,52 +1,52 @@
-----BEGIN CERTIFICATE-----
MIID+DCCAuCgAwIBAgIBADANBgkqhkiG9w0BAQUFADBWMQswCQYDVQQGEwJDTjEX
MBUGA1UEChMOUHJveEhUVFBTUHJveHkxEjAQBgNVBAsTCXB5T3BlblNTTDEaMBgG
A1UEAxMRUHJveEhUVFBTUHJveHkgQ0EwHhcNMTQwNzI1MDI0NjM1WhcNMjQwNzI0
MDI0NjM1WjBWMQswCQYDVQQGEwJDTjEXMBUGA1UEChMOUHJveEhUVFBTUHJveHkx
EjAQBgNVBAsTCXB5T3BlblNTTDEaMBgGA1UEAxMRUHJveEhUVFBTUHJveHkgQ0Ew
ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDu5tWlBhw7kJ4aqv+Uc/Cy
TDNYvE5SV54vjJp7VwU1mVq/VUrcmSsNySrput4fc2jIYs98XH0IWRnEVSqbbtVg
tYKbMODRF2DTtdtfGrxjvPW1CJZTGPV8rln1dSTp1ZPOp9n3fHXRRjRyHTOFw+gs
0PorxQSEHZNtLQfpXD+ou5PL14hE3f/nO227eFsDR9QJGo14U610U+0T1bMX/7mc
0gbLiXoPqQKVFqBCD1Zsq/ZKbHf2jDkg5wjaK6vfGSUEyFaeYWnTVeUDdGy74XRO
ZDVLW1lDJM2glZ7I0s6WcgRrrYoDKWkAl/X+SeFkTQuikoUXU8JXGeWuul6p5RjT
MIID+DCCAuCgAwIBAgIBADANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJDTjEX
MBUGA1UECgwOUHJveEhUVFBTUHJveHkxEjAQBgNVBAsMCXB5T3BlblNTTDEaMBgG
A1UEAwwRUHJveEhUVFBTUHJveHkgQ0EwHhcNMTUxMDAxMDc0MDI1WhcNMjUwOTMw
MDc0MDI1WjBWMQswCQYDVQQGEwJDTjEXMBUGA1UECgwOUHJveEhUVFBTUHJveHkx
EjAQBgNVBAsMCXB5T3BlblNTTDEaMBgGA1UEAwwRUHJveEhUVFBTUHJveHkgQ0Ew
ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9mQTpJlPDjEqLnovcT/AL
YwjoP2Siowor2yeEKaGKJjBamu3OkYhS+2kzJhcii705uTCal/f6gDIlnhYXlPEh
L7Z0wsT9IePJSU9+yNtUrWYILfRg1+XkpZVqrPfjBk8usTjtC4kG9xRZno/TeZj/
2Qror/C989Hl+bqZ4p31/l1Jcml/W01PDiGcqESS15bKk24azJ1w69Zhjwn8uZKc
Mnq2myrJsl8fZ82gV2fV8yydhpDudPpHy8y/9U8FfsmODi75aH4A1NkK/2FZyBKE
1OEYd+JfL7QmBCCjIt9AREXA/77HSuj6OXoKWZ0AVuiHLA/psfcRL4+QXd1UtXbF
AgMBAAGjgdAwgc0wDwYDVR0TAQH/BAUwAwEB/zARBglghkgBhvhCAQEEBAMCAgQw
ewYDVR0lAQH/BHEwbwYIKwYBBQUHAwEGCCsGAQUFBwMCBggrBgEFBQcDBAYIKwYB
BQUHAwgGCisGAQQBgjcCARUGCisGAQQBgjcCARYGCisGAQQBgjcKAwEGCisGAQQB
gjcKAwMGCisGAQQBgjcKAwQGCWCGSAGG+EIEATALBgNVHQ8EBAMCAQYwHQYDVR0O
BBYEFCs59meqNHm2h0jDlW5BvWu6gnkuMA0GCSqGSIb3DQEBBQUAA4IBAQAib4M1
QIJoRs0cLQRn8HGVXB3OVYTRp5e+GIdEkxRb69/mOJT6e1rqlk86OkVakK083E2B
a3Vhl4XIW7HhnIPhH/ZjZcwZ/MDiSvpkJVcmWjEeA6PxSpjqgl1bxMlJhbY1e3Bo
ps3rE40vH9+Hq3ps3FCUGAhTnXGA42L//JgU3N2XTWQXfdkoi4eVsIaf4obQT5wD
ThmuTZNLTkJOzFxFqHpuxUHO1BMgKgHirul8Fy9ydj75MJfUCCQh7Prjqf0ch5Ou
LlwRF70nULYJ6KXIaPM9icT/Wo1jWsTT+FJlXj27esuLpth7DjunWZQRjKsg3p0/
DiaGIm0JYJEoRH+u
BBYEFKPBao+B+YH0tMNHNGoLv/3ncZyvMA0GCSqGSIb3DQEBCwUAA4IBAQCFZOPd
SldrKkekP/tO/WnGgXEus8z4Ei7TEAm6qkSJ/r0ZaTKmGek370xvVG4myl0Hngr+
F6blIUzGi8e9mp/2vONhPYKTAg+Y4h5tKz9S6SyvbypBMa4YNZw8DNfd4uVLL/b6
psQcYfMPMpRdM7GlLZbxY9AHyCaHZszc3bSBM/lIhLWJH0pR7QSZZ+cJUHYKODZ8
Cs8goAcA/mJ4h1g63EP1Snlw4U3vMJ8ZQRAeg46FAZATwte9SaahAq1kLql/P8jg
A4gM9xvfRgVOIrfxSHDlnw6gVK6u/WhD4SWIsS2JfNljgUmrcMWB37kNdT3i0yO7
Vydw/UIJw1pqktqz
-----END CERTIFICATE-----
-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDu5tWlBhw7kJ4a
qv+Uc/CyTDNYvE5SV54vjJp7VwU1mVq/VUrcmSsNySrput4fc2jIYs98XH0IWRnE
VSqbbtVgtYKbMODRF2DTtdtfGrxjvPW1CJZTGPV8rln1dSTp1ZPOp9n3fHXRRjRy
HTOFw+gs0PorxQSEHZNtLQfpXD+ou5PL14hE3f/nO227eFsDR9QJGo14U610U+0T
1bMX/7mc0gbLiXoPqQKVFqBCD1Zsq/ZKbHf2jDkg5wjaK6vfGSUEyFaeYWnTVeUD
dGy74XROZDVLW1lDJM2glZ7I0s6WcgRrrYoDKWkAl/X+SeFkTQuikoUXU8JXGeWu
ul6p5RjTAgMBAAECggEBAIzAi5cSpoehiMUFglcgh+tEYewh5bM11AkHuex5NKSD
maeoxNZ7l5Yjs5jJdBzrjgyhysTmaPQGKtw1oUi6YBT+W9i7T3lhYLbbKkg6JExP
BCSIzqheyCORRYcMYZczMkZLNmxNc8y8pCGnereQ2LWu2fX3udVs53MnWMXwh8EM
v3b6Nm16Z/YMAJ5tVF3x4q43D7tjUUUabATQ7iiZA3MZia0RP/WFooZVsNLR6wym
Th2JN6t2Lde06wdEQAjDqSzKSRy7UazynzY8VkDUhQ6c7voVL1DYdjwPdpGdELdE
nqoobg1b4PRLlNFsQVtRN6Xzm5dvlHZt8hH7xviFRiECgYEA+bxkyrLyafCeuLvZ
S8DXBi3nE2BaVUPxFuVqcTGvtZVQus7vJE119wJU7clx9gE8QRujVCw7vOJxAbuM
eDVD1fQOXyWhmXsAXnTyxgwRx5Sskfh3EhPUraOJ8bUkEUZChG0mXWP0JE1IwguD
yGx5yioZyeY+VWfRXAQjwKQc3g0CgYEA9OTfQIdwcVX6rVg2ay78gjhGjwB7vX5b
kApRkOt9T+Bhks4X7Zhdy6jfgvGHZ81CtghPKkiDE8tzb+0qmM4P2ksuPtFS9XHz
X4Ne9ZpxrKbSXB+ZuGmvf3U5YSs1XgzeSDryeU5+gICgKXCNYPkv7KNHvPLLCXGk
FRVz5Vwu+l8CgYEApTtZsKc/LorOaUmYVr+rROJgDfjlK579R3nYuDX4nHrDZCYA
IZPCpgB78qwi43IjzKwdn1Z429syi8PsB6iuTkPgsmEyj1rLfrN/4TshFFyw5Q9w
sJxATfZzEWIZD/jdj8Kr7IRyhwY/SmvV8GidF/5M+8GDonxDD8+JXC75Do0CgYB4
Sf97E8vgb6odAUphpYe0cu39ifA96NYSz263L2SzGnTzBjZspz3TLpTzHa40B3ew
MC6qh0mTLIj8PlOBER5qTClgA7OrChjQHphDLWSOuLZ1N7RrB8aDZdIUb4PcOZfp
hMLbYPn0JroFANsA+gVXR/CR1/RxJBfDn3D5BSmyIwKBgHv/LTB10VbBxd9IfXS4
E9lyBm5ZLX263bZDFdMmL1Dsa9fb8ZhbU2UTroVccMcoYBLmAjUh5o0hVw9gXTi7
H/cjK5ckO6R2iFc1wtsJ4GZkNj+TdOy7l+G4dZq5dECon41SdhKD3hvjYtuZdADS
NgbKfrz5Q13KcUB7jrOFevXD
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC9mQTpJlPDjEqL
novcT/ALYwjoP2Siowor2yeEKaGKJjBamu3OkYhS+2kzJhcii705uTCal/f6gDIl
nhYXlPEhL7Z0wsT9IePJSU9+yNtUrWYILfRg1+XkpZVqrPfjBk8usTjtC4kG9xRZ
no/TeZj/2Qror/C989Hl+bqZ4p31/l1Jcml/W01PDiGcqESS15bKk24azJ1w69Zh
jwn8uZKcMnq2myrJsl8fZ82gV2fV8yydhpDudPpHy8y/9U8FfsmODi75aH4A1NkK
/2FZyBKE1OEYd+JfL7QmBCCjIt9AREXA/77HSuj6OXoKWZ0AVuiHLA/psfcRL4+Q
Xd1UtXbFAgMBAAECggEAK5AHEtLdmCaZ0i6hkANF8jfVChfWtY+kfKMkFzfBiA5y
Ob8zOK0zl21wpHHyCtv0pFiqlDqqnYHrA72o8c4lAS0HTRibTlYFEnCntUfNLU2S
DfsRFVdF2R06kYIgiqcedmn93Gk0GMeYg2btQPfFcbOa0A/szphA+AhDGax6AtUD
gl7+QT4j5HE598ghtl5/DZ4tiw4cfuWjC6ph7tHbKKq9wCH6wQf9kcyIA4ozVBKV
fejE9t4BfVPxzbxN+Quu0+S5SGnKzg1uY+/99Jo1IqtJGQq1OlPFLjVnxUF1N+Wp
nJVBHorILQtGhYxW4QlWsHMdc7iB5r4eFSuKaivMGQKBgQDrCDviK35IuQylxKE8
Xu/eSwPpOwF4ibASgaPmJt+t4O4JLX1GLZX899KDIeXaAFqcp2EF4QUhX2ioGIiO
GGFFAmOHIDvCFfiNpM1m7F0Njj8gedFfT4Yhv73htUlh5zA8vfuv4PN4ZGfjK3L9
sW9OEMUDTey5D/6Wq/IZ8ZGTwwKBgQDOgyJSJQk8K0n4AGLPyP/wmXL4w/xi8IOC
kafs1XsQCn5OvKJZY5ZNyoSzhkKmlUQTO/tmZ5flOk6wVs34StSNSo+JQub5vEYi
gXVNwYB6oPYMtdfPYLSy59h0REugNfkunRj5crPyVttJiVZpxBJHxgnIqJcBj+WT
ehHNJpRK1wKBgFx4s97rj9ca/4/lCi8Phz6lsxc7gPuk6KKPYSX3W4A1BFKWFDjd
TKrn8mpnluCrzPrfm/vNKdCUkj+4z1lg3DxjkTckBn75V/6avbnl+0KPGeU0KJ1g
U3zJzPKV+hZL+J2dff4X+pL+piUp/ic0fX9wd6MyMJYrZdZwNmPguI8zAoGAARJF
F1AB4EIJPDQkTxen3EOviQLbSFgfFopS6LOi0856IUZxQS13Fig60AOeTObxV3g0
Ma/P5eyLg/avUt5wg9sjK38hW6JSatNpHGIonHpBTIeU+wpxZYw2X0QLcGVXSZqf
CoxByrwQny0LObk+rwij/FqDjgqFEmLLvNi6ZDkCgYEA3xgeLNBGf5ghYhgX9PKO
Y1Rg6y1ElqxMCoovkpNlA6bVkyxcYIItIW1npsSeM45x+6Blit74LuleE9UYoN8j
BC8ADhYN7ywb0juCnpLrKuWl/3XNg3wREhvhHfEK1agEysVFUohFwdtfyW4gNWia
wli1LGvTwY1aFj8K29VKvkE=
-----END PRIVATE KEY-----
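
A quick way to confirm which digest the regenerated CA carries is to load it with pyOpenSSL and read the signature algorithm; a minimal sketch, assuming CA.crt sits in the working directory (see the create_CA()/dummy_cert() change below):

import OpenSSL.crypto

# CA.crt stores the certificate PEM block first, followed by the private key,
# so load_certificate() picks up the certificate.
with open('CA.crt', 'rb') as fp:
    cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, fp.read())

# Old CA: b'sha1WithRSAEncryption'; regenerated CA: b'sha256WithRSAEncryption'
print(cert.get_signature_algorithm())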


@@ -38,7 +38,7 @@ def create_CA(capath):
OpenSSL.crypto.X509Extension(b"extendedKeyUsage", True, b"serverAuth,clientAuth,emailProtection,timeStamping,msCodeInd,msCodeCom,msCTLSign,msSGC,msEFS,nsSGC"),
OpenSSL.crypto.X509Extension(b"keyUsage", False, b"keyCertSign, cRLSign"),
OpenSSL.crypto.X509Extension(b"subjectKeyIdentifier", False, b"hash", subject=ca)])
ca.sign(key, 'sha1')
ca.sign(key, 'sha256')
with open(capath, 'wb') as fp:
fp.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, ca))
fp.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key))
@@ -77,7 +77,7 @@ def dummy_cert(cafile, certfile, commonname):
cert.get_subject().CN = '*' + commonname if commonname.startswith('.') else commonname
cert.set_serial_number(int(time.time()*10000))
cert.set_pubkey(ca.get_pubkey())
cert.sign(key, "sha1")
cert.sign(key, "sha256")
with open(certfile, 'wb') as fp:
fp.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cert))
fp.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key))
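
Both create_CA() and dummy_cert() now pass 'sha256' to sign(); nothing else about certificate generation changes. For context, a minimal self-signed-certificate sketch with pyOpenSSL; the helper name and CN handling below are illustrative, not code from the repo:

import OpenSSL

def make_selfsigned(common_name, days=3650):
    "Illustrative only: build and SHA-256-sign a self-signed certificate."
    key = OpenSSL.crypto.PKey()
    key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
    cert = OpenSSL.crypto.X509()
    cert.get_subject().CN = common_name
    cert.set_issuer(cert.get_subject())           # self-signed
    cert.set_serial_number(1)
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(days * 24 * 60 * 60)
    cert.set_pubkey(key)
    cert.sign(key, 'sha256')                      # was 'sha1' before this commit
    return cert, key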


@@ -1,6 +1,17 @@
ProxHTTPSProxyMII
=================
Version 1.3.1 (20151001)
--------------
* Certificates are now signed with SHA256 instead of SHA1
Version 1.3 (20150114)
--------------
+ Each request has a number ranging from 001 to 999 for reference. 000 is reserved for SSL requests that are not MITMed, such as those in the [BLACKLIST] and [SSL Pass-Thru] sections.
+ Log window now displays the number of bytes submitted via the POST method
Version 1.2 (20141221)
--------------


@@ -5,7 +5,7 @@
_name = 'ProxHTTPSProxyMII'
__author__ = 'phoenix'
__version__ = 'v1.2'
__version__ = 'v1.3.1'
CONFIG = "config.ini"
CA_CERTS = "cacert.pem"
@@ -24,7 +24,7 @@ urllib3.disable_warnings()
from socketserver import ThreadingMixIn
from http.server import HTTPServer, BaseHTTPRequestHandler
from urllib.parse import urlparse
from ProxyTool import ProxyRequestHandler, get_cert
from ProxyTool import ProxyRequestHandler, get_cert, counter
from colorama import init, Fore, Back, Style
init(autoreset=True)
@@ -127,13 +127,14 @@ class FrontRequestHandler(ProxyRequestHandler):
def do_CONNECT(self):
"Decrypt https request and dispatch to http handler"
# request line: CONNECT www.example.com:443 HTTP/1.1
self.host, self.port = self.path.split(":")
self.proxy, self.pool, self.noverify = pools.getpool(self.host)
if any((fnmatch.fnmatch(self.host, pattern) for pattern in pools.blacklist)):
# BLACK LIST
self.deny_request()
logger.info(Fore.CYAN + 'Denied by blacklist: %s' % self.host)
logger.info("%03d " % self.reqNum + Fore.CYAN + 'Denied by blacklist: %s' % self.host)
elif any((fnmatch.fnmatch(self.host, pattern) for pattern in pools.sslpasslist)):
# SSL Pass-Thru
if self.proxy and self.proxy.startswith('https'):
@@ -164,6 +165,9 @@ class FrontRequestHandler(ProxyRequestHandler):
def do_METHOD(self):
"Forward request to Proxomitron"
counter.increment_and_set(self, 'reqNum')
if self.ssltunnel:
# https request
host = self.host if self.port == '443' else "%s:%s" % (self.host, self.port)
@@ -172,14 +176,14 @@
if not self.bypass:
url = "http://%s%s" % (host, self.path)
# Tag the request so Proxomitron can recognize it
self.headers["Tagged"] = self.version_string()
self.headers["Tagged"] = self.version_string() + ":%d" % self.reqNum
else:
# http request
self.host = urlparse(self.path).hostname
if any((fnmatch.fnmatch(self.host, pattern) for pattern in pools.blacklist)):
# BLACK LIST
self.deny_request()
logger.info(Fore.CYAN + 'Denied by blacklist: %s' % self.host)
logger.info("%03d " % self.reqNum + Fore.CYAN + 'Denied by blacklist: %s' % self.host)
return
host = urlparse(self.path).netloc
self.proxy, self.pool, self.noverify = pools.getpool(self.host, httpmode=True)
@@ -191,30 +195,48 @@
prefix += '[B]'
pool = self.pool if self.bypass else proxpool
data_length = self.headers.get("Content-Length")
self.postdata = self.rfile.read(int(data_length)) if data_length else None
self.postdata = self.rfile.read(int(data_length)) if data_length and int(data_length) > 0 else None
if self.command == "POST" and "Content-Length" not in self.headers:
buffer = self.rfile.read()
if buffer:
logger.warning("%03d " % self.reqNum + Fore.RED +
'POST w/o "Content-Length" header (Bytes: %d | Transfer-Encoding: %s | HTTPS: %s',
len(buffer), "Transfer-Encoding" in self.headers, self.ssltunnel)
# Remove hop-by-hop headers
self.purge_headers(self.headers)
# pool.urlopen() expects a dict like headers container for http request
headers = urllib3._collections.HTTPHeaderDict()
[headers.add(key, value) for (key, value) in self.headers.items()]
r = None
# The code below from connectionpool.py expects the headers object to have copy() and update() methods
# That's why we can't pass self.headers directly when calling pool.urlopen()
#
# Merge the proxy headers. Only do this in HTTP. We have to copy the
# headers dict so we can safely change it without those changes being
# reflected in anyone else's copy.
# if self.scheme == 'http':
# headers = headers.copy()
# headers.update(self.proxy_headers)
headers = urllib3._collections.HTTPHeaderDict(self.headers)
try:
# Sometimes 302 redirect would fail with "BadStatusLine" exception, and IE11 doesn't restart the request.
# retries=1 instead of retries=False fixes it.
#! Retry may cause requests with the same reqNum to appear in the log window
r = pool.urlopen(self.command, url, body=self.postdata, headers=headers,
retries=1, redirect=False, preload_content=False, decode_content=False)
if not self.ssltunnel:
logger.info(Fore.GREEN + '%s "%s %s %s" %s %s' %
(prefix, self.command, url, self.request_version, r.status, r.getheader('Content-Length', '-')))
if self.command in ("GET", "HEAD"):
logger.info("%03d " % self.reqNum + Fore.GREEN + '%s "%s %s" %s %s' %
(prefix, self.command, url, r.status, r.getheader('Content-Length', '-')))
else:
logger.info("%03d " % self.reqNum + Fore.GREEN + '%s "%s %s %s" %s %s' %
(prefix, self.command, url, data_length, r.status, r.getheader('Content-Length', '-')))
self.send_response_only(r.status, r.reason)
# HTTPResponse.getheader() combines multiple same name headers into one
# https://login.yahoo.com would fail to login
# Use HTTPResponse.msg instead
# HTTPResponse.msg is easier to handle than urllib3._collections.HTTPHeaderDict
r.headers = r._original_response.msg
self.write_headers(r.headers)
self.purge_write_headers(r.headers)
if self.command == 'HEAD' or r.status in (100, 101, 204, 304):
if self.command == 'HEAD' or r.status in (100, 101, 204, 304) or r.getheader("Content-Length") == '0':
written = None
else:
written = self.stream_to_client(r)
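
The HTTPHeaderDict change above replaces the old add() loop: constructing urllib3._collections.HTTPHeaderDict(self.headers) directly gives urllib3 a headers object that keeps duplicate names and provides the copy()/update() methods the quoted connectionpool.py excerpt relies on. A small sketch of that behaviour, with made-up header values:

from urllib3._collections import HTTPHeaderDict

headers = HTTPHeaderDict({'Host': 'example.com'})
headers.add('Set-Cookie', 'a=1')
headers.add('Set-Cookie', 'b=2')        # duplicate names are kept, not overwritten
print(headers.getlist('Set-Cookie'))    # ['a=1', 'b=2']

merged = headers.copy()                 # the methods connectionpool.py expects
merged.update({'Connection': 'keep-alive'})
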
@@ -225,10 +247,10 @@ class FrontRequestHandler(ProxyRequestHandler):
# Regular https request exceptions should be handled by rear server
except urllib3.exceptions.TimeoutError as e:
self.sendout_error(url, 504, message="Timeout", explain=e)
logger.warning(Fore.YELLOW + '[F] %s on "%s %s"', e, self.command, url)
logger.warning("%03d " % self.reqNum + Fore.YELLOW + '[F] %s on "%s %s"', e, self.command, url)
except (urllib3.exceptions.HTTPError,) as e:
self.sendout_error(url, 502, message="HTTPError", explain=e)
logger.warning(Fore.YELLOW + '[F] %s on "%s %s"', e, self.command, url)
logger.warning("%03d " % self.reqNum + Fore.YELLOW + '[F] %s on "%s %s"', e, self.command, url)
finally:
if r:
# Release the connection back into the pool
@@ -246,6 +268,19 @@ class RearRequestHandler(ProxyRequestHandler):
def do_METHOD(self):
"Convert http request to https"
if self.headers.get("Tagged") and self.headers["Tagged"].startswith(_name):
self.reqNum = int(self.headers["Tagged"].split(":")[1])
# Remove the tag
del self.headers["Tagged"]
else:
self.sendout_error(self.path, 400,
explain="The proxy setting of the client is misconfigured.\n\n" +
"Please set the HTTPS proxy port to %s " % config.FRONTPORT +
"and check the Docs for other settings.")
logger.error(Fore.RED + Style.BRIGHT + "[Misconfigured HTTPS proxy port] " + self.path)
return
# request line: GET http://somehost.com/path?attr=value HTTP/1.1
url = "https" + self.path[4:]
self.host = urlparse(self.path).hostname
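
This block is the receiving half of the Tagged header change: the front server appends ":reqNum" to the tag it already adds for Proxomitron, and the rear server recovers the number here, answering untagged requests with a 400 as a misconfigured client. A sketch of the round trip, with made-up values and a stand-in for self.version_string():

_name = 'ProxHTTPSProxyMII'

# Front server side: tag the request before it goes through Proxomitron
version_string = _name + '/1.3.1'        # stand-in for self.version_string()
tagged = version_string + ":%d" % 42     # e.g. 'ProxHTTPSProxyMII/1.3.1:42'

# Rear server side: only tagged requests are accepted
if tagged.startswith(_name):
    reqNum = int(tagged.split(":")[1])   # -> 42
else:
    reqNum = None                        # the handler calls sendout_error(..., 400, ...)
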
@@ -254,29 +289,38 @@
data_length = self.headers.get("Content-Length")
self.postdata = self.rfile.read(int(data_length)) if data_length else None
self.purge_headers(self.headers)
# Remove the tag
del self.headers["Tagged"]
# pool.urlopen() expects a dict like headers container for http request
headers = urllib3._collections.HTTPHeaderDict()
[headers.add(key, value) for (key, value) in self.headers.items()]
r = None
# The code below from connectionpool.py expects the headers object to have copy() and update() methods
# That's why we can't pass self.headers directly when calling pool.urlopen()
#
# Merge the proxy headers. Only do this in HTTP. We have to copy the
# headers dict so we can safely change it without those changes being
# reflected in anyone else's copy.
# if self.scheme == 'http':
# headers = headers.copy()
# headers.update(self.proxy_headers)
headers = urllib3._collections.HTTPHeaderDict(self.headers)
try:
r = pool.urlopen(self.command, url, body=self.postdata, headers=headers,
retries=1, redirect=False, preload_content=False, decode_content=False)
if proxy:
logger.debug('Using Proxy - %s' % proxy)
color = Fore.RED if noverify else Fore.GREEN
logger.info(color + '%s "%s %s" %s %s' %
if self.command in ("GET", "HEAD"):
logger.info("%03d " % self.reqNum + color + '%s "%s %s" %s %s' %
(prefix, self.command, url, r.status, r.getheader('Content-Length', '-')))
else:
logger.info("%03d " % self.reqNum + color + '%s "%s %s %s" %s %s' %
(prefix, self.command, url, data_length, r.status, r.getheader('Content-Length', '-')))
self.send_response_only(r.status, r.reason)
# HTTPResponse.getheader() combines multiple same name headers into one
# https://login.yahoo.com would fail to login
# Use HTTPResponse.msg instead
# HTTPResponse.msg is easier to handle than urllib3._collections.HTTPHeaderDict
r.headers = r._original_response.msg
self.write_headers(r.headers)
self.purge_write_headers(r.headers)
if self.command == 'HEAD' or r.status in (100, 101, 204, 304):
if self.command == 'HEAD' or r.status in (100, 101, 204, 304) or r.getheader("Content-Length") == '0':
written = None
else:
written = self.stream_to_client(r)
@@ -285,13 +329,14 @@ class RearRequestHandler(ProxyRequestHandler):
except urllib3.exceptions.SSLError as e:
self.sendout_error(url, 417, message="SSL Certificate Failed", explain=e)
logger.error(Fore.RED + Style.BRIGHT + "[SSL Certificate Error] " + url)
logger.error("%03d " % self.reqNum + Fore.RED + Style.BRIGHT + "[SSL Certificate Error] " + url)
except urllib3.exceptions.TimeoutError as e:
self.sendout_error(url, 504, message="Timeout", explain=e)
logger.warning(Fore.YELLOW + '[R] %s on "%s %s"', e, self.command, url)
logger.warning("%03d " % self.reqNum + Fore.YELLOW + '[R]%s "%s %s" %s', prefix, self.command, url, e)
except (urllib3.exceptions.HTTPError,) as e:
self.sendout_error(url, 502, message="HTTPError", explain=e)
logger.warning(Fore.YELLOW + '[R] %s on "%s %s"', e, self.command, url)
logger.warning("%03d " % self.reqNum + Fore.YELLOW + '[R]%s "%s %s" %s', prefix, self.command, url, e)
finally:
if r:
# Release the connection back into the pool
@@ -317,7 +362,7 @@ try:
logger = logging.getLogger(__name__)
logger.setLevel(getattr(logging, config.LOGLEVEL, logging.INFO))
handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s %(message)s', datefmt='[%H:%M:%S]')
formatter = logging.Formatter('%(asctime)s %(message)s', datefmt='[%H:%M]')
handler.setFormatter(formatter)
logger.addHandler(handler)
@@ -332,7 +377,7 @@ try:
for worker in (frontserver.serve_forever, rearserver.serve_forever,
pools.reloadConfig):
thread = threading.Thread(target=worker)
thread.dameon = True
thread.daemon = True
thread.start()
print("=" * 76)
@@ -343,5 +388,7 @@ try:
print(' ParentServer : %s' % config.DEFAULTPROXY)
print(' Proxomitron : ' + config.PROXADDR)
print("=" * 76)
while True:
time.sleep(1)
except KeyboardInterrupt:
print("Quitting...")

ProxyTool.py

@@ -10,6 +10,7 @@ __version__ = '1.0'
import time
from datetime import datetime
import logging
import threading
import cgi
import socket
import select
@@ -88,6 +89,18 @@ def read_write(socket1, socket2, max_idling=10):
pass
if count == max_idling: break
class Counter:
reset_value = 999
def __init__(self, start=0):
self.lock = threading.Lock()
self.value = start
def increment_and_set(self, obj, attr):
with self.lock:
self.value = self.value + 1 if self.value < self.reset_value else 1
setattr(obj, attr, self.value)
counter = Counter()
class ProxyRequestHandler(BaseHTTPRequestHandler):
"""RequestHandler with do_CONNECT method defined
"""
@@ -96,6 +109,8 @@ class ProxyRequestHandler(BaseHTTPRequestHandler):
ssltunnel = False
# Override default value 'HTTP/1.0'
protocol_version = 'HTTP/1.1'
# To be set in each request
reqNum = 0
def do_CONNECT(self):
"Descrypt https request and dispatch to http handler"
@@ -128,13 +143,13 @@ class ProxyRequestHandler(BaseHTTPRequestHandler):
BaseHTTPRequestHandler.handle_one_request(self)
return
except (ConnectionError, FileNotFoundError) as e:
logger.warning(Fore.RED + "%s", e)
logger.warning("%03d " % self.reqNum + Fore.RED + "%s %s", self.server_version, e)
except (ssl.SSLEOFError, ssl.SSLError) as e:
if hasattr(self, 'url'):
# Happens after the tunnel is established
logger.warning(Fore.YELLOW + '"%s" while operating on established local SSL tunnel for [%s]' % (e, self.url))
logger.warning("%03d " % self.reqNum + Fore.YELLOW + '"%s" while operating on established local SSL tunnel for [%s]' % (e, self.url))
else:
logger.warning(Fore.YELLOW + '"%s" while trying to establish local SSL tunnel for [%s]' % (e, self.path))
logger.warning("%03d " % self.reqNum + Fore.YELLOW + '"%s" while trying to establish local SSL tunnel for [%s]' % (e, self.path))
self.close_connection = 1
def sendout_error(self, url, code, message=None, explain=None):
@@ -190,12 +205,12 @@ class ProxyRequestHandler(BaseHTTPRequestHandler):
break
server_conn.setblocking(True)
if b'200' in datas and b'established' in datas.lower():
logger.info(Fore.CYAN + '[P] SSL Pass-Thru: https://%s/' % self.path)
logger.info("%03d " % self.reqNum + Fore.CYAN + '[P] SSL Pass-Thru: https://%s/' % self.path)
self.wfile.write(("HTTP/1.1 200 Connection established\r\n" +
"Proxy-agent: %s\r\n\r\n" % self.version_string()).encode('ascii'))
read_write(self.connection, server_conn)
else:
logger.warning(Fore.YELLOW + 'Proxy %s failed.', self.proxy)
logger.warning("%03d " % self.reqNum + Fore.YELLOW + 'Proxy %s failed.', self.proxy)
if datas:
logger.debug(datas)
self.wfile.write(datas)
@@ -209,7 +224,7 @@ class ProxyRequestHandler(BaseHTTPRequestHandler):
def tunnel_traffic(self):
"Tunnel traffic to remote host:port"
logger.info(Fore.CYAN + '[D] SSL Pass-Thru: https://%s/' % self.path)
logger.info("%03d " % self.reqNum + Fore.CYAN + '[D] SSL Pass-Thru: https://%s/' % self.path)
server_conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
server_conn.connect((self.host, int(self.port)))
@@ -219,10 +234,10 @@ class ProxyRequestHandler(BaseHTTPRequestHandler):
read_write(self.connection, server_conn)
except TimeoutError:
self.wfile.write(b"HTTP/1.1 504 Gateway Timeout\r\n\r\n")
logger.warning(Fore.YELLOW + 'Timed Out: https://%s:%s/' % (self.host, self.port))
logger.warning("%03d " % self.reqNum + Fore.YELLOW + 'Timed Out: https://%s:%s/' % (self.host, self.port))
except socket.gaierror as e:
self.wfile.write(b"HTTP/1.1 503 Service Unavailable\r\n\r\n")
logger.warning(Fore.YELLOW + '%s: https://%s:%s/' % (e, self.host, self.port))
logger.warning("%03d " % self.reqNum + Fore.YELLOW + '%s: https://%s:%s/' % (e, self.host, self.port))
finally:
# We don't maintain a connection reuse pool, so close the connection anyway
server_conn.close()
@@ -249,7 +264,7 @@ class ProxyRequestHandler(BaseHTTPRequestHandler):
"Proxy-Connection", "Proxy-Authenticate"]:
del headers[name]
def write_headers(self, headers):
def purge_write_headers(self, headers):
self.purge_headers(headers)
for key, value in headers.items():
self.send_header(key, value)

cacert.pem

File diff suppressed because it is too large.

config.ini

@@ -22,8 +22,8 @@ LogLevel =
# [!seq] matches any character not in seq
[PROXY http://192.168.178.8:8123]
duckduckgo.com
*.s3.amazonaws.com
#duckduckgo.com
#*.s3.amazonaws.com
[PROXY http://192.168.178.8:8124]
test.com
@@ -57,6 +57,9 @@ secure.informaction.com
### Bypass Proxomitron and the Rear Server, Proxy setting still effective
### This section supports URL matching
[BYPASS URL]
http://www.abc.com/*
https://bcd.net/*
*://feedly.com/*
*.zip
*.rar
*.exe
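
These section entries are shell-style wildcards: the host lists are matched with fnmatch.fnmatch() in the handlers above, and the same wildcard syntax is assumed here for the [BYPASS URL] patterns. A quick sketch of how such patterns behave (URLs are made up):

import fnmatch

# Host patterns, as used for the proxy / blacklist / pass-thru sections
print(fnmatch.fnmatch('images.s3.amazonaws.com', '*.s3.amazonaws.com'))   # True

# URL patterns, as listed under [BYPASS URL]
bypass = ('*://feedly.com/*', '*.zip', '*.rar', '*.exe')
for url in ('https://feedly.com/i/latest', 'http://example.com/file.zip'):
    print(any(fnmatch.fnmatch(url, p) for p in bypass))                   # True, True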