@@ -15,3 +15,4 @@ target
configs
dist
.m2
.venv
@@ -0,0 +1,26 @@
#!/bin/bash
#
# Set up python venv to run scripts in bin
#
function die {
  echo 1>&2 "$0: ${1}"
  exit 1
}

BUBBLE_HOME="$(cd $(dirname ${0})/.. && pwd)"
cd ${BUBBLE_HOME} || die "Error changing to ${BUBBLE_HOME} dir"

if [[ ! -d "${BUBBLE_HOME}/.venv" ]] ; then
  python3 -m venv ./.venv || die "Error creating venv"
fi

. ${BUBBLE_HOME}/.venv/bin/activate || die "Error activating bubble venv"
python3 -m pip install requests || die "Error installing pip packages"

if [[ ! -z "${1}" ]] ; then
  script=${1}
  shift
  echo python3 "${script}" "${@}"
else
  echo "venv successfully set up"
fi
@@ -0,0 +1,78 @@
#!/usr/bin/python3
#
# Replay a stream, as if mitmproxy were sending filter requests via the filter/apply API
#
# Usage:
#
#   breplay_stream.py path_prefix
#
#   path_prefix : some file prefix
#
# This will list all files matching the prefix, sort them, and play them back.
# These files should come in triplets: a .url file, a .headers.json file, and a .data file.
#
# To capture requests for later playback:
#  * Set debug_stream_fqdn and debug_stream_uri in mitmproxy/bubble_config.py and restart the mitmproxy servers
#  * Make a matching request from a device whose traffic is routed through the mitmproxy
#  * Capture files will be written to /tmp/bubble_stream_[request-id]_chunkXXXX.[url, headers.json, data]
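#  * Replay them with: breplay_stream.py /tmp/bubble_stream_[request-id]  (any prefix that matches the captured files works)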
#

import glob
import json
import requests
import sys

HEADER_FILTER_PASSTHRU = 'X-Bubble-Passthru'


def log (message):
    print(message, file=sys.stderr, flush=True)


def replay_stream (prefix, out):
    url_files = glob.glob(prefix+'*.url')
    if url_files is None or len(url_files) == 0:
        log('No files found matching prefix: '+prefix)
        return
    url_files.sort()
    for u in url_files:
        chunk_file = replace_suffix(u, '.data')
        headers_file = replace_suffix(u, '.headers.json')
        with open(u, mode='r') as f:
            url = f.read()
        with open(headers_file, mode='r') as f:
            headers = json.load(f)
        with open(chunk_file, mode='rb') as f:
            chunk = f.read()
        log('sending '+str(len(chunk))+' bytes to '+url)
        try:
            response_data = replay_request(url, headers, chunk)
        except Exception as e:
            log('error sending filter request: '+repr(e))
            raise e
        log('received '+str(len(response_data))+' bytes')
        if len(response_data) > 0:
            out.write(response_data)


def replace_suffix(f, suffix):
    return f[0:f.rfind('.')] + suffix


def replay_request(url, headers, chunk):
    response = requests.post(url, data=chunk, headers=headers)
    if not response.ok:
        log('replay_request: Error fetching ' + url + ', HTTP status ' + str(response.status_code))
        return b''
    elif HEADER_FILTER_PASSTHRU in response.headers:
        log('replay_request: server returned X-Bubble-Passthru, not filtering subsequent requests')
        return chunk
    return response.content


if __name__ == "__main__":
    with open('/tmp/replay_response', mode='wb') as out:
        replay_stream(sys.argv[1], out)
        out.close()
@@ -24,7 +24,7 @@ import static org.cobbzilla.util.collection.ArrayUtil.shift;
public class BubbleMain {

    private static Map<String, Class<? extends MainBase>> mainClasses = MapBuilder.build(new Object[][]{
    private static final Map<String, Class<? extends MainBase>> mainClasses = MapBuilder.build(new Object[][]{
            {"server", BubbleServer.class},
            {"model", BubbleModelMain.class},
            {"script", BubbleScriptMain.class},
@@ -26,6 +26,7 @@ import bubble.service.boot.SelfNodeService;
import bubble.service.cloud.DeviceIdService;
import bubble.service.stream.ConnectionCheckResponse;
import bubble.service.stream.StandardRuleEngineService;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.cobbzilla.util.collection.ExpirationEvictionPolicy;
@@ -603,6 +604,16 @@ public class FilterHttpResource {
        return ok(summary);
    }

    @POST @Path(EP_LOGS+"/{requestId}")
    public Response requestLog(@Context Request req,
                               @Context ContainerRequest ctx,
                               @PathParam("requestId") String requestId,
                               JsonNode logData) {
        final FilterSubContext filterCtx = new FilterSubContext(req, requestId);
        log.error(" >>>>> REQUEST-LOG("+requestId+"): "+json(logData, COMPACT_MAPPER));
        return ok_empty();
    }

    @Path(EP_ASSETS+"/{requestId}/{appId}")
    public AppAssetsResource getAppAssetsResource(@Context Request req,
                                                  @Context ContainerRequest ctx,
@@ -39,9 +39,10 @@ import java.util.Map;
import static bubble.ApiConstants.HOME_DIR;
import static bubble.rule.RequestModifierRule.ICON_JS_TEMPLATE;
import static org.cobbzilla.util.daemon.ZillaRuntime.die;
import static org.cobbzilla.util.daemon.ZillaRuntime.empty;
import static bubble.rule.RequestModifierRule.ICON_JS_TEMPLATE_NAME;
import static org.cobbzilla.util.daemon.ZillaRuntime.*;
import static org.cobbzilla.util.io.FileUtil.abs;
import static org.cobbzilla.util.io.FileUtil.basename;
import static org.cobbzilla.util.io.regex.RegexReplacementFilter.DEFAULT_PREFIX_REPLACEMENT_WITH_MATCH;
import static org.cobbzilla.util.json.JsonUtil.json;
import static org.cobbzilla.util.security.ShaUtil.sha256_hex;
@@ -122,14 +123,16 @@ public abstract class AbstractAppRuleDriver implements AppRuleDriver {
        return loadTemplate(defaultSiteTemplate, requestModConfig().getSiteJsTemplate());
    }

    @Getter(lazy=true) private final long jarTime = configuration.getBubbleJar().lastModified();

    protected String loadTemplate(String defaultTemplate, String templatePath) {
        if (configuration.getEnvironment().containsKey("DEBUG_RULE_TEMPLATES")) {
            final File templateFile = new File(HOME_DIR + "/debugTemplates/" + templatePath);
            if (templateFile.exists()) {
                log.error("loadTemplate: debug file found (using it): "+abs(templateFile));
            final File templateFile = new File(HOME_DIR + "/debugTemplates/" + basename(templatePath));
            if (templateFile.exists() && templateFile.lastModified() > getJarTime()) {
                if (log.isDebugEnabled()) log.debug("loadTemplate: debug file found and newer than bubble jar, using it: "+abs(templateFile));
                return FileUtil.toStringOrDie(templateFile);
            } else {
                log.error("loadTemplate: debug file not found (using default): "+abs(templateFile));
                if (log.isDebugEnabled()) log.debug("loadTemplate: debug file not found or older than bubble jar, using default: "+abs(templateFile));
            }
        }
        return defaultTemplate;
@@ -161,7 +164,10 @@ public abstract class AbstractAppRuleDriver implements AppRuleDriver {
        RegexFilterReader reader = new RegexFilterReader(new InputStreamReader(in), filter).setMaxMatches(1);
        if (modConfig.hasAdditionalRegexReplacements()) {
            for (BubbleRegexReplacement re : modConfig.getAdditionalRegexReplacements()) {
                final RegexReplacementFilter f = new RegexReplacementFilter(re.getInsertionRegex(), re.getReplacement());
                final RegexReplacementFilter f = new RegexReplacementFilter(
                        re.getInsertionRegex(),
                        re.getReplacement().replace(NONCE_VAR, filterRequest.getScriptNonce())
                );
                reader = new RegexFilterReader(reader, f);
            }
        }
@@ -182,7 +188,8 @@ public abstract class AbstractAppRuleDriver implements AppRuleDriver {
            ctx.put(siteJsInsertionVar, siteJs);
        }
        if (showIcon) {
            ctx.put(CTX_ICON_JS, HandlebarsUtil.apply(getHandlebars(), ICON_JS_TEMPLATE, ctx));
            final String iconJs = loadTemplate(ICON_JS_TEMPLATE, ICON_JS_TEMPLATE_NAME);
            ctx.put(CTX_ICON_JS, HandlebarsUtil.apply(getHandlebars(), iconJs, ctx));
        }

        return HandlebarsUtil.apply(getHandlebars(), bubbleJsTemplate, ctx);
    }
@@ -12,6 +12,7 @@ public interface RequestModifierRule {
    RequestModifierConfig getRequestModifierConfig ();

    Class<RequestModifierRule> RMR = RequestModifierRule.class;
    String ICON_JS_TEMPLATE = stream2string(getPackagePath(RMR)+"/"+ RMR.getSimpleName()+"_icon.js.hbs");
    String ICON_JS_TEMPLATE_NAME = RMR.getSimpleName() + "_icon.js.hbs";
    String ICON_JS_TEMPLATE = stream2string(getPackagePath(RMR) + "/" + ICON_JS_TEMPLATE_NAME);
}
@@ -366,7 +366,7 @@ public class BubbleBlockRuleDriver extends TrafficAnalyticsRuleDriver implements
        if (bubbleBlockConfig.inPageBlocks()) {
            return filterInsertJs(in, filterRequest, filterCtx, BUBBLE_JS_TEMPLATE, EMPTY, BLOCK_STATS_JS, showStats);
        }
        log.warn(prefix+"inserting JS for stats...");
        log.warn(prefix+"inserting JS for stats into: "+request.getUrl()+" with Content-Type: "+filterRequest.getContentType());
        return filterInsertJs(in, filterRequest, filterCtx, getBubbleJsStatsTemplate(), null, null, showStats);
    }
@@ -161,9 +161,9 @@ public class AnsiblePrepService {
        if (installType == AnsibleInstallType.sage) return (int) (((double) memoryMB) * 0.6d);
        if (memoryMB >= 4096) return (int) (((double) memoryMB) * 0.6d);
        if (memoryMB >= 2048) return (int) (((double) memoryMB) * 0.5d);
        if (memoryMB >= 1024) return (int) (((double) memoryMB) * 0.23d);
        if (memoryMB >= 1024) return (int) (((double) memoryMB) * 0.196d);

        // no nodes are this small, API probably would not start, not enough memory
        return (int) (((double) memoryMB) * 0.22d);
        return (int) (((double) memoryMB) * 0.19d);
    }

}
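For reference, a rough sketch (Python, not part of the change) of what the revised multipliers above work out to; the sage and 4096MB+ branches keep the existing 0.6 factor and the 2048MB branch keeps 0.5:

# Approximate API memory (MB) produced by the formula above, per node size.
for memory_mb in (1024, 2048, 4096):
    if memory_mb >= 4096:
        factor = 0.6
    elif memory_mb >= 2048:
        factor = 0.5
    else:
        factor = 0.196          # was 0.23 before this change
    print(memory_mb, '->', int(memory_mb * factor))   # 1024 -> 200, 2048 -> 1024, 4096 -> 2457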
@@ -32,7 +32,7 @@ import static org.cobbzilla.util.io.NullInputStream.NULL_STREAM;
@Slf4j
class ActiveStreamState {

    public static final long DEFAULT_BYTE_BUFFER_SIZE = (8 * Bytes.KB);
    public static final int DEFAULT_BYTE_BUFFER_SIZE = (int) (8 * Bytes.KB);
    public static final long MAX_BYTE_BUFFER_SIZE = (64 * Bytes.KB);

    // do not wrap input with encoding stream until we have received at least this many bytes
@@ -194,7 +194,7 @@ class ActiveStreamState {
            return baseStream;
        }
        try {
            final InputStream wrapped = encoding.wrapInput(baseStream);
            final InputStream wrapped = encoding.wrapInput(baseStream, DEFAULT_BYTE_BUFFER_SIZE);
            if (log.isDebugEnabled()) log.debug(prefix+"returning baseStream wrapped in " + wrapped.getClass().getSimpleName());
            return wrapped;
        } catch (IOException e) {
@@ -5,8 +5,8 @@
package bubble.service.stream;

import bubble.model.app.AppMatcher;
import bubble.model.app.RuleDriver;
import bubble.model.app.AppRule;
import bubble.model.app.RuleDriver;
import bubble.rule.AppRuleDriver;
import lombok.Getter;
import lombok.Setter;
@@ -78,6 +78,7 @@ public class StandardAppPrimerService implements AppPrimerService {
                log.error("primeApps("+account.getName()+"): "+shortError(e), e);
            }
        }
        log.info("primeApps: completed");
    }

    public void prime(Account account) {
@@ -85,6 +85,7 @@ public class StandardRuleEngineService implements RuleEngineService {
    @Autowired private RedisService redis;

    public static final long MATCHERS_CACHE_TIMEOUT = MINUTES.toSeconds(15);
    // public static final long MATCHERS_CACHE_TIMEOUT = HOURS.toSeconds(15); // extend timeout when debugging replayed streams
    @Getter(lazy=true) private final RedisService matchersCache = redis.prefixNamespace(getClass().getSimpleName()+".matchers");

    public FilterMatchDecision preprocess(FilterMatchersRequest filter,
@@ -167,7 +168,7 @@ public class StandardRuleEngineService implements RuleEngineService {
            if (log.isDebugEnabled()) log.debug(prefix+"no request modifiers, returning passthru");
            return passthru(request);
        } else {
            log.info(prefix+" applying matchers: "+filterRequest.getMatcherNames());
            if (log.isDebugEnabled()) log.debug(prefix+" applying matchers: "+filterRequest.getMatcherNames()+" to uri: "+filterRequest.getMatchersResponse().getRequest().getUri());
        }

        // have we seen this request before?
@@ -1 +1 @@
bubble.version=Adventure 0.16.1
bubble.version=Adventure 0.17.0
@@ -2,9 +2,9 @@
[program:bubble]
stdout_logfile = /var/log/bubble/api-server-out.log
stderr_logfile = /var/log/bubble/api-server-err.log
command=sudo -u bubble bash -c "/usr/bin/java \
command=bash -c 'service postgresql restart && sudo -u bubble bash -c "/usr/bin/java \
  -Dfile.encoding=UTF-8 -Djava.net.preferIPv4Stack=true \
  -XX:+UseG1GC -XX:MaxGCPauseMillis=400 {{ bubble_java_opts }} \
  -cp /home/bubble/api/bubble.jar \
  bubble.server.BubbleServer \
  /home/bubble/api/bubble.env"
  /home/bubble/api/bubble.env"'
@@ -8,3 +8,5 @@ bubble_sage_ip4 = '{{ sage_ip4 }}'
bubble_sage_ip6 = '{{ sage_ip6 }}'
cert_validation_host = '{{ cert_validation_host }}'
debug_capture_fqdn = None
debug_stream_fqdn = None
debug_stream_uri = None
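To capture a stream for breplay_stream.py, these two settings are overridden on the mitmproxy host. A minimal sketch with illustrative values only; per the filter_chunk logic later in this change, chunks are captured when the request id contains debug_stream_fqdn and the request path equals debug_stream_uri:

# Example override in mitmproxy/bubble_config.py (values are illustrative)
debug_stream_fqdn = 'www.example.com'  # capture requests whose request id contains this host
debug_stream_uri = '/'                 # ...and whose path is exactly this URI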
@@ -13,7 +13,7 @@ testMode: {{#exists BUBBLE_TEST_MODE}}{{BUBBLE_TEST_MODE}}{{else}}false{{/exists}}
database:
  driver: org.postgresql.Driver
  url: jdbc:postgresql://127.0.0.1:5432/bubble
  url: jdbc:postgresql://127.0.0.1:5432/{{#exists BUBBLE_DB_NAME}}{{BUBBLE_DB_NAME}}{{else}}bubble{{/exists}}
  user: bubble
  password: '{{#exists BUBBLE_PG_PASSWORD}}{{BUBBLE_PG_PASSWORD}}{{else}}{{key_file '.BUBBLE_PG_PASSWORD'}}{{/exists}}'
@@ -11657,7 +11657,6 @@ warby
wards
wared
wares
warez
warks
warms
warns
@@ -11824,8 +11823,6 @@ widow
width
wield
wiels
wifed
wifey
wifie
wifty
wigan
@@ -11931,7 +11928,6 @@ wrate
wrath
wrawl
wreak
wreck
wrens
wrest
wrick
@@ -11943,7 +11939,6 @@ wrist
write
writs
wroke
wrong
wroot
wrote
wroth
@@ -4,11 +4,32 @@ if (typeof {{PAGE_PREFIX}}_icon_status === 'undefined') {
    let {{PAGE_PREFIX}}_icon_status = [];

    {{PAGE_PREFIX}}_log = function (data) {
        const logData = JSON.stringify(data);
        const requestOptions = {
            method: 'POST',
            body: logData
        };
        console.log('Logging to server: '+logData);
        fetch('/__bubble/api/filter/logs/{{BUBBLE_REQUEST_ID}}', requestOptions)
            .then(() => {
                console.log('Logged to server: '+logData);
            })
            .catch((error) => {
                console.error('Error logging "'+logData+'" to server: '+error);
            });
    }
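    // The fetch above posts to the new EP_LOGS endpoint added to FilterHttpResource in this change,
    // which logs the JSON on the server side and returns an empty response.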
    {{PAGE_PREFIX}}_addBubbleApp = function (app) {
        if (window.self === window.top) {
            if ({{PAGE_PREFIX}}_icon_status.find(a => a.app === app.app)) {
                {{PAGE_PREFIX}}_log('addBubbleApp: NOT adding app (already added): '+app.app);
            } else {
                {{PAGE_PREFIX}}_log('addBubbleApp: adding app: '+app.app);
                {{PAGE_PREFIX}}_icon_status.push(app);
            }
        }
    }

    {{PAGE_PREFIX}}_getAppIconImgSrc = function (app) {
        return '/__bubble/api/filter/assets/{{BUBBLE_REQUEST_ID}}/' + app.app + '/' + app.icon + '?raw=true';
@@ -75,23 +75,32 @@ const {{JS_PREFIX}}_app_refresh = function (displayFunc) {
                icon = 'icon-yellow';
            } else {
                icon = 'icon-red';
                if ({{JS_PREFIX}}_app_refresh_interval !== null) {
                    console.log('cancelling window.interval, red status');
                    window.clearInterval({{JS_PREFIX}}_app_refresh_interval);
                    {{JS_PREFIX}}_app_refresh_interval = null;
                }
            }
            {{JS_PREFIX}}_app.icon = icon;
            {{PAGE_PREFIX}}_setAppIconImg({{JS_PREFIX}}_app);
        } else if (Date.now() - {{JS_PREFIX}}_app_stats_last_change > {{JS_PREFIX}}_app_stats_timeout) {
        } else if ({{JS_PREFIX}}_app_refresh_interval !== null && Date.now() - {{JS_PREFIX}}_app_stats_last_change > {{JS_PREFIX}}_app_stats_timeout) {
            console.log('cancelling window.interval, stats unchanged for a while');
            window.clearInterval({{JS_PREFIX}}_app_refresh_interval);
            {{JS_PREFIX}}_app_refresh_interval = null;
        }
        if (typeof displayFunc === 'function') {
            displayFunc();
        }
    }
}).catch((error) => {
    console.log('cancelling window.interval, due to error: '+error);
    if ({{JS_PREFIX}}_app_refresh_interval !== null) {
        console.log('cancelling window.interval, due to error: ' + error);
        window.clearInterval({{JS_PREFIX}}_app_refresh_interval);
        {{JS_PREFIX}}_app_refresh_interval = null;
    } else {
        console.log('error: ' + error);
    }
});
}
@@ -0,0 +1,92 @@
function {{JS_PREFIX}}_getElementsByXPath(xpath, parent) {
    let results = [];
    let query = document.evaluate(xpath, parent || document, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE, null);
    for (let i = 0, length = query.snapshotLength; i < length; ++i) {
        results.push(query.snapshotItem(i));
    }
    return results;
}

const {{JS_PREFIX}}_site_host = location.protocol + '//' + window.location.hostname + '/';

function {{JS_PREFIX}}_apply_blocks(blocked_users) {
    let articles = {{JS_PREFIX}}_getElementsByXPath('//div[@role="article" and @aria-posinset]');
    let sitePrefix = {{JS_PREFIX}}_site_host;
    const log = {{PAGE_PREFIX}}_log;
    for (let i=0; i<articles.length; i++) {
        let article = articles[i];
        let authorLink = article.getElementsByTagName('a')[0];
        if (typeof authorLink.href === 'undefined') {
            continue;
        }
        let authorHref = authorLink.href;
        if (!authorHref.startsWith(sitePrefix)) continue;
        let authorName = authorHref.substring(sitePrefix.length);
        let qPos = authorName.indexOf('?');
        if (qPos !== -1) {
            authorName = authorName.substring(0, qPos);
            authorLink.href = sitePrefix + authorName;
        }
        if (blocked_users !== null && blocked_users.includes(authorName)) {
            if (article.className.indexOf('{{JS_PREFIX}}_bub_blocked') === -1) {
                log('removing post by author: ' + authorName);
                article.className = article.className += ' {{JS_PREFIX}}_bub_blocked';
                try {
                    const feedItem = article.parentElement.parentElement.parentElement.parentElement.parentElement;
                    log('nuking: '+feedItem.outerHTML.substring(0, 40))
                    feedItem.parentElement.removeChild(feedItem);
                } catch (e) {
                    log('error removing post by author: ' + authorName + ': ' + e);
                }
            } else {
                log('found post marked removed but still present (??) by author: ' + authorName);
            }
        } else {
            // have we visited this article before?
            if (article.innerHTML.indexOf('_bubble') === -1) {
                log('VISITING article node for author: '+authorName);
                try {
                    article.className = article.className += ' {{JS_PREFIX}}_bubble_block';
                    if (typeof authorLink.parentNode.parentNode.nextSibling === 'undefined' || authorLink.parentNode.parentNode.nextSibling === null) {
                        console.log('authorLink.parentNode.parentNode.nextSibling was undefined or null, skipping; outerHTML='+authorLink.parentNode.parentNode.outerHTML);
                        continue;
                    }
                    const spans = authorLink.parentNode.parentNode.nextSibling.getElementsByTagName('span');
                    const sepSpans = Array.from(spans).filter(s => s.innerHTML === ' · ');
                    if (sepSpans.length === 0) {
                        log('no insertion point found for author: ' + authorName);
                        continue;
                    }
                    sepSpans.map(sepSpan => {
                        const imgHolder = document.createElement('img');
                        imgHolder.src = '/__bubble/api/filter/assets/{{BUBBLE_REQUEST_ID}}/UserBlocker/icon?raw=true';
                        imgHolder.width = 16;
                        const blockLink = document.createElement('a');
                        blockLink.addEventListener("click", function (event) {
                            {{JS_PREFIX}}_block_user(authorName);
                            try {
                                const feedItem = article.parentElement.parentElement.parentElement.parentElement.parentElement;
                                log('click nuking: '+feedItem.outerHTML.substring(0, 40))
                                feedItem.parentElement.removeChild(feedItem);
                            } catch (e) {
                                log('click error removing post by author: ' + authorName + ': ' + e);
                            }
                            return false;
                        });
                        // blockLink.appendChild(line);
                        blockLink.appendChild(imgHolder);
                        // log('inserting block icon for ' + authorName + '...');
                        sepSpan.parentNode.appendChild(blockLink);
                        sepSpan.parentNode.appendChild(document.createTextNode(' · '));
                    });
                } catch (e) {
                    log('badness: '+e);
                }
            }
        }
    }
}
@@ -16,14 +16,14 @@ function {{JS_PREFIX}}_apply_blocks(blocked_users) {
    for (let i=0; i<authors.length; i++) {
        let author = authors[i];
        if (author.href && author.href.startsWith(sitePrefix)
            && author.href.indexOf('?', sitePrefix.length) == -1
            && author.href.indexOf('/', sitePrefix.length) == -1
            && author.className.indexOf('_bubble_blocked') == -1) {
            && author.href.indexOf('?', sitePrefix.length) === -1
            && author.href.indexOf('/', sitePrefix.length) === -1
            && author.className.indexOf('_bubble_blocked') === -1) {
            let authorName = author.href.substring(sitePrefix.length);
            if (blocked_users !== null && blocked_users.includes(authorName)) {
                // walk parents until we find the article, then go up 3 more divs
                let node = author;
                while (node.tagName.toUpperCase() != 'ARTICLE') {
                while (node.tagName.toUpperCase() !== 'ARTICLE') {
                    node = node.parentNode;
                    if (node === null) break;
                }
@@ -62,21 +62,21 @@ function {{JS_PREFIX}}_apply_blocks(blocked_users) {
            }

            // have we visited this tweet before?
            if (tweet.className.indexOf('{{JS_PREFIX}}_bubble_block') == -1) {
            if (tweet.className.indexOf('{{JS_PREFIX}}_bubble_block') === -1) {
                // console.log('VISITING tweet node for author: '+authorName);
                let authorWrapper = author.parentNode.parentNode;
                let authorDiv = authorWrapper.firstChild;
                const authorWrapper = author.parentNode.parentNode;
                const authorDiv = authorWrapper.firstChild;
                if (typeof authorDiv.tagName !== 'undefined' && authorDiv.tagName.toUpperCase() === 'DIV') {
                    let blockControl = document.createElement('div');
                    const blockControl = document.createElement('div');
                    blockControl.style.textAlign = 'center';
                    tweet.className = tweet.className += ' {{JS_PREFIX}}_bubble_block';
                    let line = document.createElement('hr');
                    let imgHolder = document.createElement('img');
                    const line = document.createElement('hr');
                    const imgHolder = document.createElement('img');
                    imgHolder.src = '/__bubble/api/filter/assets/{{BUBBLE_REQUEST_ID}}/UserBlocker/icon?raw=true';
                    imgHolder.width = 32;
                    let blockLink = document.createElement('a');
                    const blockLink = document.createElement('a');
                    blockLink.addEventListener("click", function (event) { {{JS_PREFIX}}_block_user(authorName); return false; });
                    blockLink.appendChild(line);
                    blockLink.appendChild(imgHolder);
@@ -0,0 +1,31 @@
[{
  "name": "UserBlocker",
  "children": {
    "AppSite": [{
      "name": "Facebook",
      "url": "https://facebook.com",
      "description": "what’s happening in the world and what people are talking about right now.",
      "template": true,
      "maxSecurityHosts": [
        "addtoany.com", "*.addtoany.com", "appspot.com", "*.appspot.com", "cdninstagram.com", "*.cdninstagram.com",
        "energized.pro", "*.energized.pro",
        "facebook.com", "*.facebook.com", "facebook.com", "*.facebook.com", "facebook.de", "*.facebook.de",
        "facebook.fr", "*.facebook.fr", "facebook.net", "*.facebook.net", "fb.co", "*.fb.co",
        "fbcdn.com", "*.fbcdn.com", "fbcdn.net", "*.fbcdn.net", "fb.com", "*.fb.com",
        "fb.me", "*.fb.me", "fbsbx.com", "*.fbsbx.com", "fburl.com", "*.fburl.com", "foursquare.com", "*.foursquare.com", "freebasics.com", "*.freebasics.com",
        "hootsuite.com", "*.hootsuite.com", "instagram.com", "*.instagram.com", "internet.org", "*.internet.org", "messenger.com", "*.messenger.com",
        "metrix.net", "*.metrix.net", "m.me", "*.m.me", "tfbnw.net", "*.tfbnw.net", "thefacebook.com", "*.thefacebook.com",
        "wechat.com", "*.wechat.com", "whatsapp.com", "*.whatsapp.com", "whatsapp.net", "*.whatsapp.net"
      ],
      "enableMaxSecurityHosts": true
    }],
    "AppRule": [{
      "name": "fb_user_blocker",
      "template": true,
      "driver": "JsUserBlockerRuleDriver",
      "config": {
        "siteJsTemplate": "bubble/rule/social/block/site/FB.js.hbs"
      }
    }]
  }
}]
@@ -0,0 +1,15 @@
[{
  "name": "UserBlocker",
  "children": {
    "AppMatcher": [{
      "name": "FBMatcher",
      "site": "Facebook",
      "template": true,
      "requestCheck": true,
      "requestModifier": true,
      "fqdn": "www.facebook.com",
      "urlRegex": ".*",
      "rule": "fb_user_blocker"
    }]
  }
}]
@@ -7,5 +7,7 @@
  "apps/user_block/mr/bubbleApp_userBlock_mr",
  "apps/user_block/mr/bubbleApp_userBlock_mr_matchers",
  "apps/user_block/twitter/bubbleApp_userBlock_twitter",
  "apps/user_block/twitter/bubbleApp_userBlock_twitter_matchers"
  "apps/user_block/twitter/bubbleApp_userBlock_twitter_matchers",
  "apps/user_block/fb/bubbleApp_userBlock_fb",
  "apps/user_block/fb/bubbleApp_userBlock_fb_matchers"
]
@@ -28,29 +28,11 @@ from mitmproxy.exceptions import TlsProtocolException
from mitmproxy.net import tls as net_tls

import json
import threading
import traceback
import signal
import sys

from bubble_api import bubble_log, bubble_conn_check, bubble_activity_log, REDIS, redis_set, \
    is_bubble_request, is_sage_request, is_not_from_vpn
from bubble_config import bubble_host, bubble_host_alias, bubble_sage_host, bubble_sage_ip4, bubble_sage_ip6, cert_validation_host

# Allow SIGQUIT to print stack traces to stderr
def dumpstacks(signal, frame):
    id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
    code = []
    for threadId, stack in sys._current_frames().items():
        code.append("\n# Thread: %s(%d)" % (id2name.get(threadId,""), threadId))
        for filename, lineno, name, line in traceback.extract_stack(stack):
            code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
            if line:
                code.append("  %s" % (line.strip()))
    print("\n------------------------------------- stack traces ------------------------------"+"\n".join(code), file=sys.stderr, flush=True)

signal.signal(signal.SIGQUIT, dumpstacks)

REDIS_DNS_PREFIX = 'bubble_dns_'
REDIS_CONN_CHECK_PREFIX = 'bubble_conn_check_'
REDIS_CHECK_DURATION = 60 * 60  # 1 hour timeout
@@ -0,0 +1,19 @@
import threading
import traceback
import signal
import sys


# Allow SIGUSR1 to print stack traces to stderr
def dumpstacks(signal, frame):
    id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
    code = []
    for threadId, stack in sys._current_frames().items():
        code.append("\n# Thread: %s(%d)" % (id2name.get(threadId,""), threadId))
        for filename, lineno, name, line in traceback.extract_stack(stack):
            code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
            if line:
                code.append("  %s" % (line.strip()))
    print("\n------------------------------------- stack traces ------------------------------"+"\n".join(code), file=sys.stderr, flush=True)


signal.signal(signal.SIGUSR1, dumpstacks)
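Since run_mitm.sh (later in this change) loads this module into mitmdump, the dump can be triggered from outside the process. A minimal sketch, assuming pgrep is available and mitmdump is the process name to match:

# Send SIGUSR1 to every running mitmdump; each prints its thread stacks to stderr.
import os, signal, subprocess

pids = subprocess.run(['pgrep', '-f', 'mitmdump'],
                      capture_output=True, text=True).stdout.split()
for pid in pids:
    os.kill(int(pid), signal.SIGUSR1)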
@@ -1,13 +1,14 @@
#
# Copyright (c) 2020 Bubble, Inc. All rights reserved. For personal (non-commercial) use, see license: https://getbubblenow.com/bubble-license/
#
import json
import re
import requests
import urllib
import uuid
import traceback

from mitmproxy.net.http import Headers

from bubble_config import bubble_port, bubble_host_alias, debug_capture_fqdn
from bubble_config import bubble_port, bubble_host_alias, debug_capture_fqdn, debug_stream_fqdn, debug_stream_uri
from bubble_api import CTX_BUBBLE_MATCHERS, CTX_BUBBLE_ABORT, CTX_BUBBLE_LOCATION, BUBBLE_URI_PREFIX, \
    CTX_BUBBLE_REQUEST_ID, CTX_CONTENT_LENGTH, CTX_CONTENT_LENGTH_SENT, bubble_log, get_flow_ctx, add_flow_ctx, \
    HEADER_USER_AGENT, HEADER_FILTER_PASSTHRU, HEADER_CONTENT_SECURITY_POLICY, REDIS, redis_set, parse_host_header
@@ -24,6 +25,7 @@ STANDARD_FILTER_HEADERS = {HEADER_CONTENT_TYPE: CONTENT_TYPE_BINARY}

REDIS_FILTER_PASSTHRU_PREFIX = '__chunk_filter_pass__'
REDIS_FILTER_PASSTHRU_DURATION = 600

DEBUG_STREAM_COUNTERS = {}


def add_csp_part(new_csp, part):
    if len(new_csp) > 0:
@@ -64,22 +66,8 @@ def ensure_bubble_script_csp(csp):

def filter_chunk(flow, chunk, req_id, user_agent, last, content_encoding=None, content_type=None, content_length=None, csp=None):
    if debug_capture_fqdn:
        host = None
        if flow.client_conn.tls_established:
            sni = flow.client_conn.connection.get_servername()
            if sni:
                host = str(sni)
        else:
            host_header = flow.request.host_header
            if host_header:
                m = parse_host_header.match(host_header)
                if m:
                    host = str(m.group("host").strip("[]"))
        if host:
            if host.startswith("b'"):
                host = host[2:-1]
            if host in debug_capture_fqdn:
                bubble_log('filter_chunk: debug_capture_fqdn detected, capturing: '+host)
        if debug_capture_fqdn in req_id:
            bubble_log('filter_chunk: debug_capture_fqdn detected, capturing: '+debug_capture_fqdn)
            f = open('/tmp/bubble_capture_'+req_id, mode='ab', buffering=0)
            f.write(chunk)
            f.close()
@@ -97,8 +85,7 @@ def filter_chunk(flow, chunk, req_id, user_agent, last, content_encoding=None, c
    params_added = False
    if chunk and content_type:
        params_added = True
        url = (url
               + '?type=' + urllib.parse.quote_plus(content_type))
        url = url + '?type=' + urllib.parse.quote_plus(content_type)
        if content_encoding:
            url = url + '&encoding=' + urllib.parse.quote_plus(content_encoding)
        if content_length:
@@ -111,15 +98,33 @@ def filter_chunk(flow, chunk, req_id, user_agent, last, content_encoding=None, c
    if csp:
        # bubble_log('filter_chunk: url='+url+' (csp='+csp+')')
        bubble_log('filter_chunk: url='+url+' (with csp)')
        bubble_log('filter_chunk: url='+url+' (with csp) (last='+str(last)+')')
        filter_headers = {
            HEADER_CONTENT_TYPE: CONTENT_TYPE_BINARY,
            HEADER_CONTENT_SECURITY_POLICY: csp
        }
    else:
        bubble_log('filter_chunk: url='+url+' (no csp)')
        bubble_log('filter_chunk: url='+url+' (no csp) (last='+str(last)+')')
        filter_headers = STANDARD_FILTER_HEADERS

    if debug_stream_fqdn and debug_stream_uri and debug_stream_fqdn in req_id and flow.request.path == debug_stream_uri:
        if req_id in DEBUG_STREAM_COUNTERS:
            count = DEBUG_STREAM_COUNTERS[req_id] + 1
        else:
            count = 0
        DEBUG_STREAM_COUNTERS[req_id] = count
        bubble_log('filter_chunk: debug_stream detected, capturing: '+debug_stream_fqdn)
        f = open('/tmp/bubble_stream_'+req_id+'_chunk'+"{:04d}".format(count)+'.data', mode='wb', buffering=0)
        if chunk is not None:
            f.write(chunk)
        f.close()
        f = open('/tmp/bubble_stream_'+req_id+'_chunk'+"{:04d}".format(count)+'.headers.json', mode='w')
        f.write(json.dumps(filter_headers))
        f.close()
        f = open('/tmp/bubble_stream_'+req_id+'_chunk'+"{:04d}".format(count)+'.url', mode='w')
        f.write(url)
        f.close()
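        # These .url / .headers.json / .data triplets are what the new breplay_stream.py
        # script (added earlier in this change) globs, sorts, and replays.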
    response = requests.post(url, data=chunk, headers=filter_headers)
    if not response.ok:
        err_message = 'filter_chunk: Error fetching ' + url + ', HTTP status ' + str(response.status_code)
@@ -174,8 +179,9 @@ def send_bubble_response(response):

def responseheaders(flow):
    if flow.request.path and flow.request.path.startswith(BUBBLE_URI_PREFIX):
        uri = 'http://127.0.0.1:' + bubble_port + '/' + flow.request.path[len(BUBBLE_URI_PREFIX):]
    path = flow.request.path
    if path and path.startswith(BUBBLE_URI_PREFIX):
        uri = 'http://127.0.0.1:' + bubble_port + '/' + path[len(BUBBLE_URI_PREFIX):]
        bubble_log('responseheaders: sending special bubble request to '+uri)
        headers = {
            'Accept' : 'application/json',
@@ -203,19 +209,27 @@ def responseheaders(flow):
        if abort_code is not None:
            abort_location = get_flow_ctx(flow, CTX_BUBBLE_LOCATION)
            if abort_location is not None:
                bubble_log('responseheaders: redirecting request with HTTP status '+str(abort_code)+' to: '+abort_location)
                bubble_log('responseheaders: redirecting request with HTTP status '+str(abort_code)+' to: '+abort_location+', path was: '+path)
                flow.response.headers = Headers()
                flow.response.headers[HEADER_LOCATION] = abort_location
                flow.response.status_code = abort_code
                flow.response.stream = lambda chunks: []
            else:
                bubble_log('responseheaders: aborting request with HTTP status '+str(abort_code))
                bubble_log('responseheaders: aborting request with HTTP status '+str(abort_code)+', path was: '+path)
                flow.response.headers = Headers()
                flow.response.status_code = abort_code
                flow.response.stream = lambda chunks: []

        elif flow.response.status_code // 100 != 2:
            bubble_log('responseheaders: response had HTTP status '+str(flow.response.status_code)+', returning as-is')
            bubble_log('responseheaders: response had HTTP status '+str(flow.response.status_code)+', returning as-is: '+path)
            pass

        elif flow.response.headers is None or len(flow.response.headers) == 0:
            bubble_log('responseheaders: response had HTTP status '+str(flow.response.status_code)+', and NO response headers, returning as-is: '+path)
            pass

        elif HEADER_CONTENT_LENGTH in flow.response.headers and flow.response.headers[HEADER_CONTENT_LENGTH] == "0":
            bubble_log('responseheaders: response had HTTP status '+str(flow.response.status_code)+', and '+HEADER_CONTENT_LENGTH+' was zero, returning as-is: '+path)
            pass

        else:
@@ -239,10 +253,10 @@ def responseheaders(flow):
                        typeRegex = '^text/html.*'
                    if re.match(typeRegex, content_type):
                        any_content_type_matches = True
                        bubble_log(prefix+'found at least one matcher for content_type ('+content_type+'), filtering')
                        bubble_log(prefix+'found at least one matcher for content_type ('+content_type+'), filtering: '+path)
                        break

                if not any_content_type_matches:
                    bubble_log(prefix+'no matchers for content_type ('+content_type+'), passing thru')
                    bubble_log(prefix+'no matchers for content_type ('+content_type+'), passing thru: '+path)
                    return

            if HEADER_CONTENT_ENCODING in flow.response.headers:
@@ -257,7 +271,7 @@ def responseheaders(flow):
                csp = None

            content_length_value = flow.response.headers.pop(HEADER_CONTENT_LENGTH, None)
            bubble_log(prefix+'content_encoding='+repr(content_encoding) + ', content_type='+repr(content_type))
            # bubble_log(prefix+'content_encoding='+repr(content_encoding) + ', content_type='+repr(content_type))
            flow.response.stream = bubble_modify(flow, req_id, user_agent, content_encoding, content_type, csp)
            if content_length_value:
                flow.response.headers['transfer-encoding'] = 'chunked'
@@ -268,10 +282,10 @@ def responseheaders(flow):
                if hasattr(ctx, 'ctx'):
                    ctx = ctx.ctx
                else:
                    bubble_log(prefix+'error finding server_conn. last ctx has no further ctx. type='+str(type(ctx))+' vars='+str(vars(ctx)))
                    bubble_log(prefix+'error finding server_conn for path '+path+'. last ctx has no further ctx. type='+str(type(ctx))+' vars='+str(vars(ctx)))
                    return
            if not hasattr(ctx, 'server_conn'):
                bubble_log(prefix+'error finding server_conn. ctx type='+str(type(ctx))+' vars='+str(vars(ctx)))
                bubble_log(prefix+'error finding server_conn for path '+path+'. ctx type='+str(type(ctx))+' vars='+str(vars(ctx)))
                return
            content_length = int(content_length_value)
            ctx.server_conn.rfile.fake_chunks = content_length
@@ -279,11 +293,11 @@ def responseheaders(flow):
                add_flow_ctx(flow, CTX_CONTENT_LENGTH_SENT, 0)

            else:
                bubble_log(prefix+'no matchers, passing thru')
                bubble_log(prefix+'no matchers, passing thru: '+path)
                pass
        else:
            bubble_log(prefix+'no '+HEADER_CONTENT_TYPE +' header, passing thru')
            bubble_log(prefix+'no '+HEADER_CONTENT_TYPE +' header, passing thru: '+path)
            pass
    else:
        bubble_log(prefix+'no '+CTX_BUBBLE_MATCHERS +' in ctx, passing thru')
        bubble_log(prefix+'no '+CTX_BUBBLE_MATCHERS +' in ctx, passing thru: '+path)
        pass
@@ -20,10 +20,10 @@ MITM_PORT_FILE=/home/mitmproxy/mitmproxy_port

TOTAL_MEM=$(free | grep -m 1 Mem: | awk '{print $2}')

# For 1GB system, MIN_PCT_FREE is 2%
# For 2GB system, MIN_PCT_FREE is 5%
# For 4GB system, MIN_PCT_FREE is 11%
MIN_PCT_FREE=$(expr $(expr $(expr ${TOTAL_MEM} / 500) \* 14) / 10000)
# For 1GB system, MIN_PCT_FREE is 1%
# For 2GB system, MIN_PCT_FREE is 3%
# For 4GB system, MIN_PCT_FREE is 7%
MIN_PCT_FREE=$(expr $(expr $(expr ${TOTAL_MEM} / 500) \* 99) / 100000)
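# (TOTAL_MEM is in KiB as reported by free, so e.g. ~1,000,000 / 500 = 2000; 2000 * 99 / 100000 = 1 -> 1%)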

# Start with MITM proxy turned on, or refresh value
if [[ ! -f ${BUBBLE_MITM_MARKER} ]] ; then
@@ -17,6 +17,7 @@ mitmdump \
  --set flow_detail=0 \
  --set stream_large_bodies=5m \
  --set keep_host_header \
  -s ./bubble_debug.py \
  -s ./dns_spoofing.py \
  -s ./bubble_conn_check.py \
  -s ./bubble_modify.py \
@@ -58,6 +58,7 @@
    mode: 0500
  with_items:
    - bubble_api.py
    - bubble_debug.py
    - dns_spoofing.py
    - bubble_conn_check.py
    - bubble_modify.py
@@ -2,7 +2,7 @@
  {
    "comment": "try to proxy a website, no rules should be applied",
    "request": {
      "uri": "p/kyuss.org"
      "uri": "p/git.bubblev.org"
    },
    "response": {
      "raw": true,
@@ -1 +1 @@
Subproject commit 74230fcc51ee1177a898de135a30b5b70a760e12
Subproject commit f1f0c443a93562b89df0ba27aaae81feab8d5f3c