Fix error with parsing domain list
parent c8dc0ff70d
commit 843b6efc4d
@@ -4,15 +4,15 @@ BLOATING_DOMAINS=$(cat bloating-domains.txt)
 bloat_firefox(){
   if [ "$#" = 0 ]; then
     echo "Bloating Firefox by bloating defined domain list..."
-    DOMAINS=$(printf '%s\n' "${BLOATING_DOMAINS[@]}")
+    DOMAINS_LIST=$(printf '%s\n' "${BLOATING_DOMAINS[@]}")
   else
     echo "Bloating Firefox by requested domain list..."
-    DOMAINS=`node filter-requested-domains.js "$1"`
+    DOMAINS_LIST=`node filter-requested-domains.js "$1"`
     echo "selected domains"
-    echo $DOMAINS
+    echo $DOMAINS_LIST
   fi

-  if [ -n "$DOMAINS" ]; then
+  if [ -n "$DOMAINS_LIST" ]; then
     while IFS= read -r DOMAIN; do
       # these domains return a 404 anyways, no need to waste time on them:
       if is_http_error "$DOMAIN"; then echo "skipping $DOMAIN"; continue; fi
@@ -26,7 +26,7 @@ bloat_firefox(){
       xdotool key Return
       sleep 1.5
       grab "$DOMAIN after"
-    done <<< "$DOMAINS"
+    done <<< "$DOMAINS_LIST"
   else
     echo "No need to bloat"
   fi
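With this rename, both branches leave a newline-separated list in DOMAINS_LIST, and the here-string (done <<< "$DOMAINS_LIST") feeds the while IFS= read -r loop one domain per line. A minimal usage sketch of the two call modes (the domain names below are placeholders, not repo data):

  # no argument: bloat every domain from bloating-domains.txt
  bloat_firefox

  # with an argument: "$1" is now expected to be a newline-separated
  # list, which filter-requested-domains.js reduces to the domains
  # not already in bloating-domains.txt
  bloat_firefox "$(printf '%s\n' example.org example.net)"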
bloating-domains.txt
@@ -1,15 +1,15 @@
 facebook.com
 google.com
-hotjar.com
-maps.google.com
-linkedin.com
-cookielaw.org
-googletagmanager.com
 googleapis.com
-www.google.com
+googletagmanager.com
+hotjar.com
+linkedin.com
+maps.google.com
 sirdata.com
-xandr.com
 site.adform.com
+www.cookiebot.com
+www.google.com
+xandr.com
 adtonos.com/pl/home-pl
 adtraction.com/pl
-www.cookiebot.com
+cookielaw.org
filter-requested-domains.js
@@ -4,11 +4,11 @@ const BLOATING_DOMAINS = (
   fs.readFileSync(pth.join(__dirname, "bloating-domains.txt")) + ""
 ).split("\n");
 const input = process.argv[2];
-const REQUESTED_DOMAINS = JSON.parse(input);
+const REQUESTED_DOMAINS = input.split('\n');

 const array_diff = REQUESTED_DOMAINS.filter(
   (v) => !BLOATING_DOMAINS.includes(v)
 );

 for (let i in array_diff) {
   console.log(array_diff[i]);
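The parsing fix itself is this last hunk: the shell script passes "$1" straight through, so JSON.parse(input) threw a SyntaxError whenever the argument was a plain domain list rather than a JSON array string; splitting on newlines matches the newline-separated list the caller apparently sends. A quick sketch of the new contract (example.org is a placeholder; facebook.com is taken from bloating-domains.txt):

  # the old code expected a JSON array: node filter-requested-domains.js '["example.org"]'
  # the new code expects newline-separated domains:
  node filter-requested-domains.js "$(printf '%s\n' facebook.com example.org)"
  # prints only example.org, because facebook.com is already in the
  # bloating list and array_diff filters it out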