Merge pull request #1 from PavelRykov/master

Update to 1.3a and 1.3.1
wheever 2015-12-23 09:40:59 +08:00
commit 84376d8844
8 changed files with 1220 additions and 1075 deletions

CA.crt

@@ -1,52 +1,52 @@
------BEGIN CERTIFICATE-----
-MIID+DCCAuCgAwIBAgIBADANBgkqhkiG9w0BAQUFADBWMQswCQYDVQQGEwJDTjEX
-MBUGA1UEChMOUHJveEhUVFBTUHJveHkxEjAQBgNVBAsTCXB5T3BlblNTTDEaMBgG
-A1UEAxMRUHJveEhUVFBTUHJveHkgQ0EwHhcNMTQwNzI1MDI0NjM1WhcNMjQwNzI0
-MDI0NjM1WjBWMQswCQYDVQQGEwJDTjEXMBUGA1UEChMOUHJveEhUVFBTUHJveHkx
-EjAQBgNVBAsTCXB5T3BlblNTTDEaMBgGA1UEAxMRUHJveEhUVFBTUHJveHkgQ0Ew
-ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDu5tWlBhw7kJ4aqv+Uc/Cy
-TDNYvE5SV54vjJp7VwU1mVq/VUrcmSsNySrput4fc2jIYs98XH0IWRnEVSqbbtVg
-tYKbMODRF2DTtdtfGrxjvPW1CJZTGPV8rln1dSTp1ZPOp9n3fHXRRjRyHTOFw+gs
-0PorxQSEHZNtLQfpXD+ou5PL14hE3f/nO227eFsDR9QJGo14U610U+0T1bMX/7mc
-0gbLiXoPqQKVFqBCD1Zsq/ZKbHf2jDkg5wjaK6vfGSUEyFaeYWnTVeUDdGy74XRO
-ZDVLW1lDJM2glZ7I0s6WcgRrrYoDKWkAl/X+SeFkTQuikoUXU8JXGeWuul6p5RjT
-AgMBAAGjgdAwgc0wDwYDVR0TAQH/BAUwAwEB/zARBglghkgBhvhCAQEEBAMCAgQw
-ewYDVR0lAQH/BHEwbwYIKwYBBQUHAwEGCCsGAQUFBwMCBggrBgEFBQcDBAYIKwYB
-BQUHAwgGCisGAQQBgjcCARUGCisGAQQBgjcCARYGCisGAQQBgjcKAwEGCisGAQQB
-gjcKAwMGCisGAQQBgjcKAwQGCWCGSAGG+EIEATALBgNVHQ8EBAMCAQYwHQYDVR0O
-BBYEFCs59meqNHm2h0jDlW5BvWu6gnkuMA0GCSqGSIb3DQEBBQUAA4IBAQAib4M1
-QIJoRs0cLQRn8HGVXB3OVYTRp5e+GIdEkxRb69/mOJT6e1rqlk86OkVakK083E2B
-a3Vhl4XIW7HhnIPhH/ZjZcwZ/MDiSvpkJVcmWjEeA6PxSpjqgl1bxMlJhbY1e3Bo
-ps3rE40vH9+Hq3ps3FCUGAhTnXGA42L//JgU3N2XTWQXfdkoi4eVsIaf4obQT5wD
-ThmuTZNLTkJOzFxFqHpuxUHO1BMgKgHirul8Fy9ydj75MJfUCCQh7Prjqf0ch5Ou
-LlwRF70nULYJ6KXIaPM9icT/Wo1jWsTT+FJlXj27esuLpth7DjunWZQRjKsg3p0/
-DiaGIm0JYJEoRH+u
------END CERTIFICATE-----
------BEGIN PRIVATE KEY-----
-MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDu5tWlBhw7kJ4a
-qv+Uc/CyTDNYvE5SV54vjJp7VwU1mVq/VUrcmSsNySrput4fc2jIYs98XH0IWRnE
-VSqbbtVgtYKbMODRF2DTtdtfGrxjvPW1CJZTGPV8rln1dSTp1ZPOp9n3fHXRRjRy
-HTOFw+gs0PorxQSEHZNtLQfpXD+ou5PL14hE3f/nO227eFsDR9QJGo14U610U+0T
-1bMX/7mc0gbLiXoPqQKVFqBCD1Zsq/ZKbHf2jDkg5wjaK6vfGSUEyFaeYWnTVeUD
-dGy74XROZDVLW1lDJM2glZ7I0s6WcgRrrYoDKWkAl/X+SeFkTQuikoUXU8JXGeWu
-ul6p5RjTAgMBAAECggEBAIzAi5cSpoehiMUFglcgh+tEYewh5bM11AkHuex5NKSD
-maeoxNZ7l5Yjs5jJdBzrjgyhysTmaPQGKtw1oUi6YBT+W9i7T3lhYLbbKkg6JExP
-BCSIzqheyCORRYcMYZczMkZLNmxNc8y8pCGnereQ2LWu2fX3udVs53MnWMXwh8EM
-v3b6Nm16Z/YMAJ5tVF3x4q43D7tjUUUabATQ7iiZA3MZia0RP/WFooZVsNLR6wym
-Th2JN6t2Lde06wdEQAjDqSzKSRy7UazynzY8VkDUhQ6c7voVL1DYdjwPdpGdELdE
-nqoobg1b4PRLlNFsQVtRN6Xzm5dvlHZt8hH7xviFRiECgYEA+bxkyrLyafCeuLvZ
-S8DXBi3nE2BaVUPxFuVqcTGvtZVQus7vJE119wJU7clx9gE8QRujVCw7vOJxAbuM
-eDVD1fQOXyWhmXsAXnTyxgwRx5Sskfh3EhPUraOJ8bUkEUZChG0mXWP0JE1IwguD
-yGx5yioZyeY+VWfRXAQjwKQc3g0CgYEA9OTfQIdwcVX6rVg2ay78gjhGjwB7vX5b
-kApRkOt9T+Bhks4X7Zhdy6jfgvGHZ81CtghPKkiDE8tzb+0qmM4P2ksuPtFS9XHz
-X4Ne9ZpxrKbSXB+ZuGmvf3U5YSs1XgzeSDryeU5+gICgKXCNYPkv7KNHvPLLCXGk
-FRVz5Vwu+l8CgYEApTtZsKc/LorOaUmYVr+rROJgDfjlK579R3nYuDX4nHrDZCYA
-IZPCpgB78qwi43IjzKwdn1Z429syi8PsB6iuTkPgsmEyj1rLfrN/4TshFFyw5Q9w
-sJxATfZzEWIZD/jdj8Kr7IRyhwY/SmvV8GidF/5M+8GDonxDD8+JXC75Do0CgYB4
-Sf97E8vgb6odAUphpYe0cu39ifA96NYSz263L2SzGnTzBjZspz3TLpTzHa40B3ew
-MC6qh0mTLIj8PlOBER5qTClgA7OrChjQHphDLWSOuLZ1N7RrB8aDZdIUb4PcOZfp
-hMLbYPn0JroFANsA+gVXR/CR1/RxJBfDn3D5BSmyIwKBgHv/LTB10VbBxd9IfXS4
-E9lyBm5ZLX263bZDFdMmL1Dsa9fb8ZhbU2UTroVccMcoYBLmAjUh5o0hVw9gXTi7
-H/cjK5ckO6R2iFc1wtsJ4GZkNj+TdOy7l+G4dZq5dECon41SdhKD3hvjYtuZdADS
-NgbKfrz5Q13KcUB7jrOFevXD
------END PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIID+DCCAuCgAwIBAgIBADANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJDTjEX
+MBUGA1UECgwOUHJveEhUVFBTUHJveHkxEjAQBgNVBAsMCXB5T3BlblNTTDEaMBgG
+A1UEAwwRUHJveEhUVFBTUHJveHkgQ0EwHhcNMTUxMDAxMDc0MDI1WhcNMjUwOTMw
+MDc0MDI1WjBWMQswCQYDVQQGEwJDTjEXMBUGA1UECgwOUHJveEhUVFBTUHJveHkx
+EjAQBgNVBAsMCXB5T3BlblNTTDEaMBgGA1UEAwwRUHJveEhUVFBTUHJveHkgQ0Ew
+ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9mQTpJlPDjEqLnovcT/AL
+YwjoP2Siowor2yeEKaGKJjBamu3OkYhS+2kzJhcii705uTCal/f6gDIlnhYXlPEh
+L7Z0wsT9IePJSU9+yNtUrWYILfRg1+XkpZVqrPfjBk8usTjtC4kG9xRZno/TeZj/
+2Qror/C989Hl+bqZ4p31/l1Jcml/W01PDiGcqESS15bKk24azJ1w69Zhjwn8uZKc
+Mnq2myrJsl8fZ82gV2fV8yydhpDudPpHy8y/9U8FfsmODi75aH4A1NkK/2FZyBKE
+1OEYd+JfL7QmBCCjIt9AREXA/77HSuj6OXoKWZ0AVuiHLA/psfcRL4+QXd1UtXbF
+AgMBAAGjgdAwgc0wDwYDVR0TAQH/BAUwAwEB/zARBglghkgBhvhCAQEEBAMCAgQw
+ewYDVR0lAQH/BHEwbwYIKwYBBQUHAwEGCCsGAQUFBwMCBggrBgEFBQcDBAYIKwYB
+BQUHAwgGCisGAQQBgjcCARUGCisGAQQBgjcCARYGCisGAQQBgjcKAwEGCisGAQQB
+gjcKAwMGCisGAQQBgjcKAwQGCWCGSAGG+EIEATALBgNVHQ8EBAMCAQYwHQYDVR0O
+BBYEFKPBao+B+YH0tMNHNGoLv/3ncZyvMA0GCSqGSIb3DQEBCwUAA4IBAQCFZOPd
+SldrKkekP/tO/WnGgXEus8z4Ei7TEAm6qkSJ/r0ZaTKmGek370xvVG4myl0Hngr+
+F6blIUzGi8e9mp/2vONhPYKTAg+Y4h5tKz9S6SyvbypBMa4YNZw8DNfd4uVLL/b6
+psQcYfMPMpRdM7GlLZbxY9AHyCaHZszc3bSBM/lIhLWJH0pR7QSZZ+cJUHYKODZ8
+Cs8goAcA/mJ4h1g63EP1Snlw4U3vMJ8ZQRAeg46FAZATwte9SaahAq1kLql/P8jg
+A4gM9xvfRgVOIrfxSHDlnw6gVK6u/WhD4SWIsS2JfNljgUmrcMWB37kNdT3i0yO7
+Vydw/UIJw1pqktqz
+-----END CERTIFICATE-----
+-----BEGIN PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC9mQTpJlPDjEqL
+novcT/ALYwjoP2Siowor2yeEKaGKJjBamu3OkYhS+2kzJhcii705uTCal/f6gDIl
+nhYXlPEhL7Z0wsT9IePJSU9+yNtUrWYILfRg1+XkpZVqrPfjBk8usTjtC4kG9xRZ
+no/TeZj/2Qror/C989Hl+bqZ4p31/l1Jcml/W01PDiGcqESS15bKk24azJ1w69Zh
+jwn8uZKcMnq2myrJsl8fZ82gV2fV8yydhpDudPpHy8y/9U8FfsmODi75aH4A1NkK
+/2FZyBKE1OEYd+JfL7QmBCCjIt9AREXA/77HSuj6OXoKWZ0AVuiHLA/psfcRL4+Q
+Xd1UtXbFAgMBAAECggEAK5AHEtLdmCaZ0i6hkANF8jfVChfWtY+kfKMkFzfBiA5y
+Ob8zOK0zl21wpHHyCtv0pFiqlDqqnYHrA72o8c4lAS0HTRibTlYFEnCntUfNLU2S
+DfsRFVdF2R06kYIgiqcedmn93Gk0GMeYg2btQPfFcbOa0A/szphA+AhDGax6AtUD
+gl7+QT4j5HE598ghtl5/DZ4tiw4cfuWjC6ph7tHbKKq9wCH6wQf9kcyIA4ozVBKV
+fejE9t4BfVPxzbxN+Quu0+S5SGnKzg1uY+/99Jo1IqtJGQq1OlPFLjVnxUF1N+Wp
+nJVBHorILQtGhYxW4QlWsHMdc7iB5r4eFSuKaivMGQKBgQDrCDviK35IuQylxKE8
+Xu/eSwPpOwF4ibASgaPmJt+t4O4JLX1GLZX899KDIeXaAFqcp2EF4QUhX2ioGIiO
+GGFFAmOHIDvCFfiNpM1m7F0Njj8gedFfT4Yhv73htUlh5zA8vfuv4PN4ZGfjK3L9
+sW9OEMUDTey5D/6Wq/IZ8ZGTwwKBgQDOgyJSJQk8K0n4AGLPyP/wmXL4w/xi8IOC
+kafs1XsQCn5OvKJZY5ZNyoSzhkKmlUQTO/tmZ5flOk6wVs34StSNSo+JQub5vEYi
+gXVNwYB6oPYMtdfPYLSy59h0REugNfkunRj5crPyVttJiVZpxBJHxgnIqJcBj+WT
+ehHNJpRK1wKBgFx4s97rj9ca/4/lCi8Phz6lsxc7gPuk6KKPYSX3W4A1BFKWFDjd
+TKrn8mpnluCrzPrfm/vNKdCUkj+4z1lg3DxjkTckBn75V/6avbnl+0KPGeU0KJ1g
+U3zJzPKV+hZL+J2dff4X+pL+piUp/ic0fX9wd6MyMJYrZdZwNmPguI8zAoGAARJF
+F1AB4EIJPDQkTxen3EOviQLbSFgfFopS6LOi0856IUZxQS13Fig60AOeTObxV3g0
+Ma/P5eyLg/avUt5wg9sjK38hW6JSatNpHGIonHpBTIeU+wpxZYw2X0QLcGVXSZqf
+CoxByrwQny0LObk+rwij/FqDjgqFEmLLvNi6ZDkCgYEA3xgeLNBGf5ghYhgX9PKO
+Y1Rg6y1ElqxMCoovkpNlA6bVkyxcYIItIW1npsSeM45x+6Blit74LuleE9UYoN8j
+BC8ADhYN7ywb0juCnpLrKuWl/3XNg3wREhvhHfEK1agEysVFUohFwdtfyW4gNWia
+wli1LGvTwY1aFj8K29VKvkE=
+-----END PRIVATE KEY-----


@@ -38,7 +38,7 @@ def create_CA(capath):
 OpenSSL.crypto.X509Extension(b"extendedKeyUsage", True, b"serverAuth,clientAuth,emailProtection,timeStamping,msCodeInd,msCodeCom,msCTLSign,msSGC,msEFS,nsSGC"),
 OpenSSL.crypto.X509Extension(b"keyUsage", False, b"keyCertSign, cRLSign"),
 OpenSSL.crypto.X509Extension(b"subjectKeyIdentifier", False, b"hash", subject=ca)])
-ca.sign(key, 'sha1')
+ca.sign(key, 'sha256')
 with open(capath, 'wb') as fp:
 fp.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, ca))
 fp.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key))
@@ -77,7 +77,7 @@ def dummy_cert(cafile, certfile, commonname):
 cert.get_subject().CN = '*' + commonname if commonname.startswith('.') else commonname
 cert.set_serial_number(int(time.time()*10000))
 cert.set_pubkey(ca.get_pubkey())
-cert.sign(key, "sha1")
+cert.sign(key, "sha256")
 with open(certfile, 'wb') as fp:
 fp.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cert))
 fp.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key))
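Both hunks above only change the digest used for signing the generated certificates from SHA-1 to SHA-256. A minimal standalone sketch of the same pyOpenSSL pattern, for reference; the function name and parameters below are illustrative and not taken from the repository:

```python
import OpenSSL

def make_sha256_ca(capath, bits=2048, days=3650):
    """Create a self-signed CA and write cert + key as PEM, signed with SHA-256."""
    key = OpenSSL.crypto.PKey()
    key.generate_key(OpenSSL.crypto.TYPE_RSA, bits)

    ca = OpenSSL.crypto.X509()
    ca.set_version(2)                              # X.509 v3
    ca.set_serial_number(1)
    ca.get_subject().CN = "ProxHTTPSProxy CA"
    ca.gmtime_adj_notBefore(0)
    ca.gmtime_adj_notAfter(days * 24 * 60 * 60)
    ca.set_issuer(ca.get_subject())                # self-signed
    ca.set_pubkey(key)
    ca.add_extensions([
        OpenSSL.crypto.X509Extension(b"basicConstraints", True, b"CA:TRUE"),
        OpenSSL.crypto.X509Extension(b"keyUsage", False, b"keyCertSign, cRLSign"),
    ])
    ca.sign(key, "sha256")                         # the SHA-1 -> SHA-256 switch shown above

    with open(capath, "wb") as fp:
        fp.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, ca))
        fp.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key))
```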


@@ -1,22 +1,33 @@
 ProxHTTPSProxyMII
 =================
+Version 1.3.1 (20151001)
+--------------
+* Certifications are now signed via SHA256 instead of SHA1
+
+Version 1.3 (20150114)
+--------------
++ Each request has a number ranged from 001 to 999 for reference. 000 is reserved for SSL requests not MITMed like those in [BLACKLIST] and [SSL Pass-Thru] sections.
++ Log window now displays the length of the bytes submitted in POST method
+
 Version 1.2 (20141221)
 --------------
 + Content is streamed to client, while not cached before sending
 * Fix config auto reload
 * Less exception traceback dumped
 * Tagged header changed from "Tagged:Proxomitron FrontProxy/*" to "Tagged:ProxHTTPSProxyMII FrontProxy/*"
 Version 1.1 (20141024)
 --------------
 + Support URL bypass
 + Handle both HTTP and HTTPS
 + Auto reload config upon chagnes
 Version 1.0 (20140729)
 --------------
 Initial release


@@ -1,119 +1,119 @@
 ProxHTTPSProxyMII
 =================
 Created to provide modern nag-free HTTPS connections for an HTTP proxy.
 How it works
 ----
 ![how it works](http://www.proxfilter.net/proxhttpsproxy/HowItWorks.gif)
 Eligible HTTP Proxies
 ----
 * The [Proxomitron](http://www.proxomitron.info), for which ProxHTTPSProxy was created :)
 * Any that have the ability to forward all requests with a "Tagged:ProxHTTPSProxyMII FrontProxy/*" header to the ProxHTTPSProxyMII rear server.
 * Any that can be ran as two instances, one for true http and another for "tagged" http
 * Any that will only be used to monitor https traffic
 Install
 ----
 * ProxHTTPSProxy's "CA.crt" to the Client's store of trusted certificate authorities.
 Configure
 ----
 * The Client to use the ProxHTTPSProxy front server at 127.0.0.1 on port 8079 for secure connections.
 * The HTTP proxy to receive requests at 127.0.0.1 on port 8080.
 * The HTTP proxy to forward requests to the ProxHTTPSProxy rear server at 127.0.0.1 on port 8081.
 * Edit "Config.ini" to change these requirements.
 Execute
 ----
 ProxHTTPSProxy.exe to start.
 Remember
 ----
 Be aware and careful! Use a direct connection when you don't want any mistakes made.
 Use at your own risk!
 Have fun!
 Discuss
 ----
 <http://prxbx.com/forums/showthread.php?tid=2172>
 Author
 ----
 * phoenix (aka whenever)
 * JJoe (test and doc)
 Proxomitron Tips
 ================
 To use
 ----
 * Add the ProxHTTPSProxy rear server to the Proxomitron's list of external proxies
 `127.0.0.1:8081 ProxHTTPSProxyMII`
 * Add to Proxomitron's "Bypass URLs that match this expression" field if it is empty
 `$OHDR(Tagged:ProxHTTPSProxyMII FrontProxy/*)$SETPROXY(127.0.0.1:8081)(^)`
 * Add to the beginning of the entry in Proxomitron's "Bypass URLs that match this expression" field if it is **not** empty
 `$OHDR(Tagged:ProxHTTPSProxyMII FrontProxy/*)$SETPROXY(127.0.0.1:8081)(^)|`
 Tips
 ----
 * Proxomitron always executes some commands in "Bypass URLs that match this expression" field. Adding the entry there allows the Proxomitron to use the rear server when in Bypass mode.
 This undocumented feature brings many possibilities but remember, an actual match triggers bypass of filtering!
 - `$OHDR(Tagged:ProxHTTPSProxyMII FrontProxy/*)` checks for the header that indicates an https request.
 - `$SETPROXY(127.0.0.1:8081)` is executed when found.
 - `(^)` expression never matches.
 * Identify https connections by testing for the "Tagged" request header that the ProxHTTPSProxy front server adds to the request.
 `$OHDR(Tagged:ProxHTTPSProxyMII FrontProxy/*)`
 * For local file requests, use an expression like
 `$USEPROXY(false)$RDIR(http://local.ptron/killed.gif)`
 * Before redirecting "Tagged" connections to external resources consider removing the "Tagged" header.
 * If needed, the Proxomitron can still do https. After adding the ssl files to the Proxomitron, use a header filter like
 ```
 [HTTP headers]
 In = FALSE
 Out = TRUE
 Key = "Tagged: Use Proxomitron for https://badcert.com"
 URL = "badcert.com$OHDR(Tagged:ProxHTTPSProxyMII FrontProxy/*)$USEPROXY(false)$RDIR(https://badcert.com)"
 ```
 This filter also removes the "Tagged" header.
 For the current sidki set
 ----
 1. Add the following two lines to Exceptions-U
 ```
 $OHDR(Tagged:ProxHTTPSProxyMII FrontProxy/*)$SET(keyword=$GET(keyword)i_proxy:3.)(^)
 ~(^$TST(keyword=i_proxy:[03].))$OHDR(Tagged:ProxHTTPSProxyMII FrontProxy/*)$SET(keyword=$GET(keyword)i_proxy:3.)(^)
 ```
 2. Redirect connections to http resources with an expression like
 `$USEPROXY(false)$SET(keyword=i_proxy:0.)$RDIR(http://local.ptron/killed.gif)`


@@ -5,7 +5,7 @@
 _name = 'ProxHTTPSProxyMII'
 __author__ = 'phoenix'
-__version__ = 'v1.2'
+__version__ = 'v1.3.1'
 CONFIG = "config.ini"
 CA_CERTS = "cacert.pem"
@@ -24,7 +24,7 @@ urllib3.disable_warnings()
 from socketserver import ThreadingMixIn
 from http.server import HTTPServer, BaseHTTPRequestHandler
 from urllib.parse import urlparse
-from ProxyTool import ProxyRequestHandler, get_cert
+from ProxyTool import ProxyRequestHandler, get_cert, counter
 from colorama import init, Fore, Back, Style
 init(autoreset=True)
@@ -127,13 +127,14 @@ class FrontRequestHandler(ProxyRequestHandler):
 def do_CONNECT(self):
 "Descrypt https request and dispatch to http handler"
 # request line: CONNECT www.example.com:443 HTTP/1.1
 self.host, self.port = self.path.split(":")
 self.proxy, self.pool, self.noverify = pools.getpool(self.host)
 if any((fnmatch.fnmatch(self.host, pattern) for pattern in pools.blacklist)):
 # BLACK LIST
 self.deny_request()
-logger.info(Fore.CYAN + 'Denied by blacklist: %s' % self.host)
+logger.info("%03d " % self.reqNum + Fore.CYAN + 'Denied by blacklist: %s' % self.host)
 elif any((fnmatch.fnmatch(self.host, pattern) for pattern in pools.sslpasslist)):
 # SSL Pass-Thru
 if self.proxy and self.proxy.startswith('https'):
@@ -164,6 +165,9 @@ class FrontRequestHandler(ProxyRequestHandler):
 def do_METHOD(self):
 "Forward request to Proxomitron"
+counter.increment_and_set(self, 'reqNum')
 if self.ssltunnel:
 # https request
 host = self.host if self.port == '443' else "%s:%s" % (self.host, self.port)
@@ -172,14 +176,14 @@ class FrontRequestHandler(ProxyRequestHandler):
 if not self.bypass:
 url = "http://%s%s" % (host, self.path)
 # Tag the request so Proxomitron can recognize it
-self.headers["Tagged"] = self.version_string()
+self.headers["Tagged"] = self.version_string() + ":%d" % self.reqNum
 else:
 # http request
 self.host = urlparse(self.path).hostname
 if any((fnmatch.fnmatch(self.host, pattern) for pattern in pools.blacklist)):
 # BLACK LIST
 self.deny_request()
-logger.info(Fore.CYAN + 'Denied by blacklist: %s' % self.host)
+logger.info("%03d " % self.reqNum + Fore.CYAN + 'Denied by blacklist: %s' % self.host)
 return
 host = urlparse(self.path).netloc
 self.proxy, self.pool, self.noverify = pools.getpool(self.host, httpmode=True)
@@ -191,30 +195,48 @@ class FrontRequestHandler(ProxyRequestHandler):
 prefix += '[B]'
 pool = self.pool if self.bypass else proxpool
 data_length = self.headers.get("Content-Length")
-self.postdata = self.rfile.read(int(data_length)) if data_length else None
+self.postdata = self.rfile.read(int(data_length)) if data_length and int(data_length) > 0 else None
+if self.command == "POST" and "Content-Length" not in self.headers:
+buffer = self.rfile.read()
+if buffer:
+logger.warning("%03d " % self.reqNum + Fore.RED +
+'POST w/o "Content-Length" header (Bytes: %d | Transfer-Encoding: %s | HTTPS: %s',
+len(buffer), "Transfer-Encoding" in self.headers, self.ssltunnel)
 # Remove hop-by-hop headers
 self.purge_headers(self.headers)
-# pool.urlopen() expects a dict like headers container for http request
-headers = urllib3._collections.HTTPHeaderDict()
-[headers.add(key, value) for (key, value) in self.headers.items()]
 r = None
+# Below code in connectionpool.py expect the headers to has a copy() and update() method
+# That's why we can't use self.headers directly when call pool.urlopen()
+#
+# Merge the proxy headers. Only do this in HTTP. We have to copy the
+# headers dict so we can safely change it without those changes being
+# reflected in anyone else's copy.
+# if self.scheme == 'http':
+# headers = headers.copy()
+# headers.update(self.proxy_headers)
+headers = urllib3._collections.HTTPHeaderDict(self.headers)
 try:
 # Sometimes 302 redirect would fail with "BadStatusLine" exception, and IE11 doesn't restart the request.
 # retries=1 instead of retries=False fixes it.
+#! Retry may cause the requests with the same reqNum appear in the log window
 r = pool.urlopen(self.command, url, body=self.postdata, headers=headers,
 retries=1, redirect=False, preload_content=False, decode_content=False)
 if not self.ssltunnel:
-logger.info(Fore.GREEN + '%s "%s %s %s" %s %s' %
-(prefix, self.command, url, self.request_version, r.status, r.getheader('Content-Length', '-')))
+if self.command in ("GET", "HEAD"):
+logger.info("%03d " % self.reqNum + Fore.GREEN + '%s "%s %s" %s %s' %
+(prefix, self.command, url, r.status, r.getheader('Content-Length', '-')))
+else:
+logger.info("%03d " % self.reqNum + Fore.GREEN + '%s "%s %s %s" %s %s' %
+(prefix, self.command, url, data_length, r.status, r.getheader('Content-Length', '-')))
 self.send_response_only(r.status, r.reason)
-# HTTPResponse.getheader() combines multiple same name headers into one
-# https://login.yahoo.com would fail to login
-# Use HTTPResponse.msg instead
+# HTTPResponse.msg is easier to handle than urllib3._collections.HTTPHeaderDict
 r.headers = r._original_response.msg
-self.write_headers(r.headers)
+self.purge_write_headers(r.headers)
-if self.command == 'HEAD' or r.status in (100, 101, 204, 304):
+if self.command == 'HEAD' or r.status in (100, 101, 204, 304) or r.getheader("Content-Length") == '0':
 written = None
 else:
 written = self.stream_to_client(r)
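The hunk above drops the manual add() loop and wraps the parsed request headers in urllib3's HTTPHeaderDict, because pool.urlopen() expects a headers container with copy() and update() and the raw BaseHTTPRequestHandler headers object is not one. A small standalone illustration of that hand-off, assuming urllib3 1.x; the URL and header values are made up:

```python
import urllib3

# Pretend these pairs came from BaseHTTPRequestHandler.headers.items()
raw_headers = [("Accept", "text/html"), ("Cookie", "a=1"), ("Cookie", "b=2")]

headers = urllib3._collections.HTTPHeaderDict()
for key, value in raw_headers:
    headers.add(key, value)        # add() keeps repeated names such as Cookie

pool = urllib3.PoolManager()
r = pool.urlopen("GET", "http://example.com/", headers=headers,
                 retries=1, redirect=False, preload_content=False, decode_content=False)
print(r.status, r.getheader("Content-Length", "-"))
r.release_conn()                   # hand the connection back, as the finally block does
```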
@@ -225,10 +247,10 @@ class FrontRequestHandler(ProxyRequestHandler):
 # Regular https request exceptions should be handled by rear server
 except urllib3.exceptions.TimeoutError as e:
 self.sendout_error(url, 504, message="Timeout", explain=e)
-logger.warning(Fore.YELLOW + '[F] %s on "%s %s"', e, self.command, url)
+logger.warning("%03d " % self.reqNum + Fore.YELLOW + '[F] %s on "%s %s"', e, self.command, url)
 except (urllib3.exceptions.HTTPError,) as e:
 self.sendout_error(url, 502, message="HTTPError", explain=e)
-logger.warning(Fore.YELLOW + '[F] %s on "%s %s"', e, self.command, url)
+logger.warning("%03d " % self.reqNum + Fore.YELLOW + '[F] %s on "%s %s"', e, self.command, url)
 finally:
 if r:
 # Release the connection back into the pool
@@ -246,6 +268,19 @@ class RearRequestHandler(ProxyRequestHandler):
 def do_METHOD(self):
 "Convert http request to https"
+if self.headers.get("Tagged") and self.headers["Tagged"].startswith(_name):
+self.reqNum = int(self.headers["Tagged"].split(":")[1])
+# Remove the tag
+del self.headers["Tagged"]
+else:
+self.sendout_error(self.path, 400,
+explain="The proxy setting of the client is misconfigured.\n\n" +
+"Please set the HTTPS proxy port to %s " % config.FRONTPORT +
+"and check the Docs for other settings.")
+logger.error(Fore.RED + Style.BRIGHT + "[Misconfigured HTTPS proxy port] " + self.path)
+return
 # request line: GET http://somehost.com/path?attr=value HTTP/1.1
 url = "https" + self.path[4:]
 self.host = urlparse(self.path).hostname
@@ -254,29 +289,38 @@ class RearRequestHandler(ProxyRequestHandler):
 data_length = self.headers.get("Content-Length")
 self.postdata = self.rfile.read(int(data_length)) if data_length else None
 self.purge_headers(self.headers)
-# Remove the tag
-del self.headers["Tagged"]
-# pool.urlopen() expects a dict like headers container for http request
-headers = urllib3._collections.HTTPHeaderDict()
-[headers.add(key, value) for (key, value) in self.headers.items()]
 r = None
+# Below code in connectionpool.py expect the headers to has a copy() and update() method
+# That's why we can't use self.headers directly when call pool.urlopen()
+#
+# Merge the proxy headers. Only do this in HTTP. We have to copy the
+# headers dict so we can safely change it without those changes being
+# reflected in anyone else's copy.
+# if self.scheme == 'http':
+# headers = headers.copy()
+# headers.update(self.proxy_headers)
+headers = urllib3._collections.HTTPHeaderDict(self.headers)
 try:
 r = pool.urlopen(self.command, url, body=self.postdata, headers=headers,
 retries=1, redirect=False, preload_content=False, decode_content=False)
 if proxy:
 logger.debug('Using Proxy - %s' % proxy)
 color = Fore.RED if noverify else Fore.GREEN
-logger.info(color + '%s "%s %s" %s %s' %
-(prefix, self.command, url, r.status, r.getheader('Content-Length', '-')))
+if self.command in ("GET", "HEAD"):
+logger.info("%03d " % self.reqNum + color + '%s "%s %s" %s %s' %
+(prefix, self.command, url, r.status, r.getheader('Content-Length', '-')))
+else:
+logger.info("%03d " % self.reqNum + color + '%s "%s %s %s" %s %s' %
+(prefix, self.command, url, data_length, r.status, r.getheader('Content-Length', '-')))
 self.send_response_only(r.status, r.reason)
-# HTTPResponse.getheader() combines multiple same name headers into one
-# https://login.yahoo.com would fail to login
-# Use HTTPResponse.msg instead
+# HTTPResponse.msg is easier to handle than urllib3._collections.HTTPHeaderDict
 r.headers = r._original_response.msg
-self.write_headers(r.headers)
+self.purge_write_headers(r.headers)
-if self.command == 'HEAD' or r.status in (100, 101, 204, 304):
+if self.command == 'HEAD' or r.status in (100, 101, 204, 304) or r.getheader("Content-Length") == '0':
 written = None
 else:
 written = self.stream_to_client(r)
@@ -285,13 +329,14 @@ class RearRequestHandler(ProxyRequestHandler):
 except urllib3.exceptions.SSLError as e:
 self.sendout_error(url, 417, message="SSL Certificate Failed", explain=e)
-logger.error(Fore.RED + Style.BRIGHT + "[SSL Certificate Error] " + url)
+logger.error("%03d " % self.reqNum + Fore.RED + Style.BRIGHT + "[SSL Certificate Error] " + url)
 except urllib3.exceptions.TimeoutError as e:
 self.sendout_error(url, 504, message="Timeout", explain=e)
-logger.warning(Fore.YELLOW + '[R] %s on "%s %s"', e, self.command, url)
+logger.warning("%03d " % self.reqNum + Fore.YELLOW + '[R]%s "%s %s" %s', prefix, self.command, url, e)
 except (urllib3.exceptions.HTTPError,) as e:
 self.sendout_error(url, 502, message="HTTPError", explain=e)
-logger.warning(Fore.YELLOW + '[R] %s on "%s %s"', e, self.command, url)
+logger.warning("%03d " % self.reqNum + Fore.YELLOW + '[R]%s "%s %s" %s', prefix, self.command, url, e)
 finally:
 if r:
 # Release the connection back into the pool
@@ -317,7 +362,7 @@ try:
 logger = logging.getLogger(__name__)
 logger.setLevel(getattr(logging, config.LOGLEVEL, logging.INFO))
 handler = logging.StreamHandler()
-formatter = logging.Formatter('%(asctime)s %(message)s', datefmt='[%H:%M:%S]')
+formatter = logging.Formatter('%(asctime)s %(message)s', datefmt='[%H:%M]')
 handler.setFormatter(formatter)
 logger.addHandler(handler)
@@ -332,7 +377,7 @@ try:
 for worker in (frontserver.serve_forever, rearserver.serve_forever,
 pools.reloadConfig):
 thread = threading.Thread(target=worker)
-thread.dameon = True
+thread.daemon = True
 thread.start()
 print("=" * 76)
@@ -343,5 +388,7 @@ try:
 print(' ParentServer : %s' % config.DEFAULTPROXY)
 print(' Proxomitron : ' + config.PROXADDR)
 print("=" * 76)
+while True:
+time.sleep(1)
 except KeyboardInterrupt:
 print("Quitting...")

ProxyTool.py

@@ -10,6 +10,7 @@ __version__ = '1.0'
 import time
 from datetime import datetime
 import logging
+import threading
 import cgi
 import socket
 import select
@@ -88,6 +89,18 @@ def read_write(socket1, socket2, max_idling=10):
 pass
 if count == max_idling: break
+class Counter:
+reset_value = 999
+def __init__(self, start=0):
+self.lock = threading.Lock()
+self.value = start
+def increment_and_set(self, obj, attr):
+with self.lock:
+self.value = self.value + 1 if self.value < self.reset_value else 1
+setattr(obj, attr, self.value)
+counter = Counter()
 class ProxyRequestHandler(BaseHTTPRequestHandler):
 """RequestHandler with do_CONNECT method defined
 """
@@ -96,6 +109,8 @@ class ProxyRequestHandler(BaseHTTPRequestHandler):
 ssltunnel = False
 # Override default value 'HTTP/1.0'
 protocol_version = 'HTTP/1.1'
+# To be set in each request
+reqNum = 0
 def do_CONNECT(self):
 "Descrypt https request and dispatch to http handler"
@@ -128,13 +143,13 @@ class ProxyRequestHandler(BaseHTTPRequestHandler):
 BaseHTTPRequestHandler.handle_one_request(self)
 return
 except (ConnectionError, FileNotFoundError) as e:
-logger.warning(Fore.RED + "%s", e)
+logger.warning("%03d " % self.reqNum + Fore.RED + "%s %s", self.server_version, e)
 except (ssl.SSLEOFError, ssl.SSLError) as e:
 if hasattr(self, 'url'):
 # Happens after the tunnel is established
-logger.warning(Fore.YELLOW + '"%s" while operating on established local SSL tunnel for [%s]' % (e, self.url))
+logger.warning("%03d " % self.reqNum + Fore.YELLOW + '"%s" while operating on established local SSL tunnel for [%s]' % (e, self.url))
 else:
-logger.warning(Fore.YELLOW + '"%s" while trying to establish local SSL tunnel for [%s]' % (e, self.path))
+logger.warning("%03d " % self.reqNum + Fore.YELLOW + '"%s" while trying to establish local SSL tunnel for [%s]' % (e, self.path))
 self.close_connection = 1
 def sendout_error(self, url, code, message=None, explain=None):
@@ -190,12 +205,12 @@ class ProxyRequestHandler(BaseHTTPRequestHandler):
 break
 server_conn.setblocking(True)
 if b'200' in datas and b'established' in datas.lower():
-logger.info(Fore.CYAN + '[P] SSL Pass-Thru: https://%s/' % self.path)
+logger.info("%03d " % self.reqNum + Fore.CYAN + '[P] SSL Pass-Thru: https://%s/' % self.path)
 self.wfile.write(("HTTP/1.1 200 Connection established\r\n" +
 "Proxy-agent: %s\r\n\r\n" % self.version_string()).encode('ascii'))
 read_write(self.connection, server_conn)
 else:
-logger.warning(Fore.YELLOW + 'Proxy %s failed.', self.proxy)
+logger.warning("%03d " % self.reqNum + Fore.YELLOW + 'Proxy %s failed.', self.proxy)
 if datas:
 logger.debug(datas)
 self.wfile.write(datas)
@@ -209,7 +224,7 @@ class ProxyRequestHandler(BaseHTTPRequestHandler):
 def tunnel_traffic(self):
 "Tunnel traffic to remote host:port"
-logger.info(Fore.CYAN + '[D] SSL Pass-Thru: https://%s/' % self.path)
+logger.info("%03d " % self.reqNum + Fore.CYAN + '[D] SSL Pass-Thru: https://%s/' % self.path)
 server_conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
 try:
 server_conn.connect((self.host, int(self.port)))
@@ -219,10 +234,10 @@ class ProxyRequestHandler(BaseHTTPRequestHandler):
 read_write(self.connection, server_conn)
 except TimeoutError:
 self.wfile.write(b"HTTP/1.1 504 Gateway Timeout\r\n\r\n")
-logger.warning(Fore.YELLOW + 'Timed Out: https://%s:%s/' % (self.host, self.port))
+logger.warning("%03d " % self.reqNum + Fore.YELLOW + 'Timed Out: https://%s:%s/' % (self.host, self.port))
 except socket.gaierror as e:
 self.wfile.write(b"HTTP/1.1 503 Service Unavailable\r\n\r\n")
-logger.warning(Fore.YELLOW + '%s: https://%s:%s/' % (e, self.host, self.port))
+logger.warning("%03d " % self.reqNum + Fore.YELLOW + '%s: https://%s:%s/' % (e, self.host, self.port))
 finally:
 # We don't maintain a connection reuse pool, so close the connection anyway
 server_conn.close()
@@ -249,7 +264,7 @@ class ProxyRequestHandler(BaseHTTPRequestHandler):
 "Proxy-Connection", "Proxy-Authenticate"]:
 del headers[name]
-def write_headers(self, headers):
+def purge_write_headers(self, headers):
 self.purge_headers(headers)
 for key, value in headers.items():
 self.send_header(key, value)

cacert.pem

File diff suppressed because it is too large.

config.ini

@@ -1,63 +1,66 @@
 ### The parent proxy has to support CONNECT method, if you want to proxy HTTPS requests
 ###
 ### Proxy setting applies to HTTPS requests only, as it is applied by the Rear Server
 ### HTTP requests are passed to and handled by Proxomitron, please set up Proxomitron for proxy
 ###
 ### Socks proxy support
 ### https://github.com/shazow/urllib3/pull/284
 [GENERAL]
 ProxAddr = http://localhost:8080
 FrontPort = 8079
 RearPort = 8081
 # DefaultProxy = http://127.0.0.1:8118
 # Proper values for LogLevel are ERROR, WARNING, INFO, DEBUG
 # Default is INFO if unset
 LogLevel =
 # * matches everything
 # ? matches any single character
 # [seq] matches any character in seq
 # [!seq] matches any character not in seq
 [PROXY http://192.168.178.8:8123]
-duckduckgo.com
-*.s3.amazonaws.com
+#duckduckgo.com
+#*.s3.amazonaws.com
 [PROXY http://192.168.178.8:8124]
 test.com
 ### Ignore SSL certificate verify, Use at your own risk!!!
 ### Proxy setting still effective
 [SSL No-Verify]
 *.12306.cn
 [BLACKLIST]
 *.doubleclick.net
 *.google-analytics.com
 ### Bypass Proxomitron and the Rear Server, Proxy setting still effective
 ### SSL certificate verify will be done by the browser
 [SSL Pass-Thru]
 pypi.python.org
 www.gstatic.com
 watson.telemetry.microsoft.com
 *.sync.services.mozilla.com
 *.mega.co.nz
 # Microsoft SmartScreen Filter Service
 *.smartscreen.microsoft.com
 urs.microsoft.com
 # NoScript uses https://secure.informaction.com/ipecho to detect the WAN IP
 # https://addons.mozilla.org/en-US/firefox/addon/noscript/privacy/
 secure.informaction.com
 ### Bypass Proxomitron and the Rear Server, Proxy setting still effective
 ### This section supports URL matching
 [BYPASS URL]
+http://www.abc.com/*
+https://bcd.net/*
+*://feedly.com/*
 *.zip
 *.rar
 *.exe
 *.pdf
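The wildcard syntax documented at the top of this config (`*`, `?`, `[seq]`, `[!seq]`) is fnmatch-style, and the main-script diff above shows hosts being checked with fnmatch.fnmatch against the [BLACKLIST] patterns. The proxy's own matching of the [BYPASS URL] section is not part of this diff, so the following is only a hedged sketch of how such patterns behave when applied to a host or a full URL:

```python
import fnmatch

blacklist = ["*.doubleclick.net", "*.google-analytics.com"]
bypass_urls = ["http://www.abc.com/*", "*://feedly.com/*", "*.pdf"]

def matches(value, patterns):
    """True if value matches any shell-style wildcard pattern."""
    return any(fnmatch.fnmatch(value, pattern) for pattern in patterns)

print(matches("ad.doubleclick.net", blacklist))              # True
print(matches("https://feedly.com/i/latest", bypass_urls))   # True
print(matches("http://example.com/paper.pdf", bypass_urls))  # True
print(matches("www.python.org", blacklist))                  # False
```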