
Commit 0e157ef

First commit (0 parents)

7 files changed: +242 -0 lines

.gitignore (+2)

@@ -0,0 +1,2 @@
.idea/
*.iml

README.md (+10)

@@ -0,0 +1,10 @@
# Firebase-scanner

This project contains various tools for automated scanning and vulnerability discovery in Firebase apps.


# Components

- `db-discovery.py` - Aggregates several services (DNSDumpster, Shodan) to attempt to discover Firebase project codes.
- `endpoint-discovery.py` - Run this tool on a Wireshark .pcap or a binary file to extract potential Firebase realtime DB endpoints.
- `scanner.py` - Checks which endpoints in the realtime DB are readable and/or writable and dumps that information. It can also dump everything it can read, and it optionally takes an auth token and a list of endpoints (from the endpoint-discovery script) to help it find more data.

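All three tools ultimately drive the Firebase realtime database REST API, where any path can be read or written as JSON by appending `.json` to the URL, optionally with an `?auth=` token. A minimal sketch of that probe (the project code `some-project` and the `users` path are placeholders, not real targets):

import requests

code, path, token = "some-project", "users", None  # placeholders for illustration

url = "https://{}.firebaseio.com/{}.json".format(code, path)
if token:
    url += "?auth={}".format(token)

r = requests.get(url)
# 200 with a JSON body means the path is world-readable;
# 401 means the security rules denied the unauthenticated read.
print(r.status_code, r.json())
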
db-discovery.py (+48)

@@ -0,0 +1,48 @@
import sys

import requests
from argparse import ArgumentParser, FileType
from dnsdumpster.DNSDumpsterAPI import DNSDumpsterAPI


def dnsdumpster():
    results = DNSDumpsterAPI().search('firebaseio.com')
    # Strip the ".firebaseio.com" suffix so only the bare project codes are returned
    return [domain['domain'].replace('.firebaseio.com', '')
            for domain in results['dns_records']['host']]


def is_firebase_project(code: str) -> bool:
    # A project code that does not exist answers 404
    r = requests.get("https://{}.firebaseio.com".format(code))
    return r.status_code != 404


def has_realtime_db(code: str) -> bool:
    # A deactivated realtime DB answers 423 (Locked)
    r = requests.options("https://{}.firebaseio.com/.json".format(code))
    return r.status_code != 423


def discover_dbs(args):
    db_candidates = []
    if args.type == "dnsdumpster":
        db_candidates = dnsdumpster()

    print("Discovered DBs:")
    for candidate in db_candidates:
        if is_firebase_project(candidate) and has_realtime_db(candidate):
            args.out.write(candidate + "\n")
            if args.out != sys.stdout:
                print(candidate)


def parse_args():
    parser = ArgumentParser()
    parser.add_argument('type', help="Look for potential dbs through this specified method", choices=["dnsdumpster"])
    parser.add_argument('--out', help="A file to dump results to", nargs='?', type=FileType('w'), default=sys.stdout)
    args = parser.parse_args()
    discover_dbs(args)


if __name__ == '__main__':
    parse_args()

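The two probes above assume Firebase's status-code behaviour: a non-existent project code answers 404, while a project whose realtime database has been deactivated answers 423 (Locked). A minimal sketch for sanity-checking a single candidate by hand (the code `example-app` is a placeholder, not a real target):

import requests

code = "example-app"  # placeholder candidate project code

exists = requests.get("https://{}.firebaseio.com".format(code)).status_code != 404
deactivated = requests.options("https://{}.firebaseio.com/.json".format(code)).status_code == 423

print("project exists:", exists, "| realtime DB deactivated:", deactivated)
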
endpoint-discovery.py (+72)

@@ -0,0 +1,72 @@
from argparse import ArgumentParser, FileType
import re
import sys


def search_pcap(args):
    import pyshark
    fname = args.file.name
    args.file.close()
    # Only keep HTTP requests whose Host header points at a firebase realtime DB
    cap = pyshark.FileCapture(input_file=fname, display_filter='http.host contains "firebaseio.com"')

    endpoints = []
    pattern = re.compile(r"https?://(.+)\.firebaseio\.com/(.*)")
    for packet in cap:
        uri = packet.http.request_full_uri
        match = pattern.match(uri)
        if match is None:
            continue
        path = match.group(2)
        endpoints.append(path)
    cap.close()
    return endpoints


def search_binary_exact(args):
    binary = args.file.read()
    pattern = re.compile(r"https?://(.+)\.firebaseio\.com/(.*)")
    # finditer (not findall) so we get match objects and can pull out group(2), the DB path
    endpoints = [match.group(2) for match in pattern.finditer(binary)]
    return endpoints


def search_binary_strings(args):
    binary = args.file.read()
    pattern = re.compile(r"\w{5,}")  # find all text with len >= 5
    endpoints = pattern.findall(binary)
    return endpoints


def discover_endpoints(args):
    endpoints = []
    if args.type == 'pcap':
        endpoints = search_pcap(args)
    elif args.type == 'binary_exact':
        endpoints = search_binary_exact(args)
    elif args.type == 'binary_strings':
        endpoints = search_binary_strings(args)

    cleaned_endpoints = []
    for endpoint in endpoints:
        if endpoint.startswith("/"):
            endpoint = endpoint[1:]
        if endpoint.endswith(".json"):
            endpoint = endpoint[:-5]
        cleaned_endpoints.append(endpoint)

    endpoints = cleaned_endpoints
    print("{} potential endpoints found.".format(len(endpoints)))
    data = "\n".join(endpoints) + "\n"
    args.out.write(data)
    args.out.close()


def parse_args():
    parser = ArgumentParser()
    parser.add_argument('type', help="Look for potential endpoints in the specified file type", choices=["pcap", "binary_exact", "binary_strings"])
    parser.add_argument('file', help="The file to search through", type=FileType('r', errors='ignore'))
    parser.add_argument('--out', help="A file to dump results to", nargs='?', type=FileType('w'), default=sys.stdout)
    args = parser.parse_args()
    discover_endpoints(args)


if __name__ == '__main__':
    parse_args()

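The `binary_exact` mode relies entirely on the URL regex above: group 1 captures the project code and group 2 the database path, which is then trimmed of a leading "/" and a trailing ".json". A small standalone illustration on a made-up string (the URL is fabricated for the example):

import re

pattern = re.compile(r"https?://(.+)\.firebaseio\.com/(.*)")

# The kind of string you might pull out of a decompiled app (made up here)
blob = "some_config=https://example-app.firebaseio.com/users/profile.json"

for match in pattern.finditer(blob):
    print("project code:", match.group(1))   # example-app
    print("endpoint path:", match.group(2))  # users/profile.json
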
endpoints.txt (+5)

@@ -0,0 +1,5 @@
test1
test2
test3
test4

requirements.txt (+2)

@@ -0,0 +1,2 @@
pypcapkit==0.14.3
requests==2.21.0

scanner.py (+103)

@@ -0,0 +1,103 @@
from typing import Tuple
from io import StringIO
from argparse import ArgumentParser, FileType
import requests
import json
import sys

common_endpoints = [
    "",
    "users",
    # TODO - users/$uid - extract uid from JWT?
    "groups",
    "messages",
    "posts",
    "chats",
]

# TODO - attempt using different values to check value filters
value_attempts = [
    "1",
    "0",
    "-1",
    "1566889891",
    "true",
    "false",
    "a",
    "null",
    "-JSOpn9ZC54A4P4RoqVa",
    "591dd66c-ffb0-4f7c-80f2-13345066a159"
]


def try_endpoint(args, site: str, endpoint: str) -> Tuple[bool, bool, str]:
    url = 'https://{}.firebaseio.com/{}.json'.format(site, endpoint)
    if args.auth is not None:
        url += '?auth={}'.format(args.auth)

    read_r = requests.get(url)
    # POST nothing (null) to test write access without overwriting data
    write_r = requests.post(url, data="null", headers={'Content-type': 'application/json'})
    data = read_r.json() if read_r.status_code != 401 else None
    print(data)
    return read_r.status_code != 401, write_r.status_code != 401, data


# from https://stackoverflow.com/a/7205107/2683545
def merge(a, b, path=None):
    """Merges b into a."""
    if path is None:
        path = []
    for key in b:
        if key in a:
            if isinstance(a[key], dict) and isinstance(b[key], dict):
                merge(a[key], b[key], path + [str(key)])
            elif a[key] == b[key]:
                pass  # same leaf value
            else:
                raise Exception('Conflict at %s' % '.'.join(path + [str(key)]))
        else:
            a[key] = b[key]
    return a


def scan_sites(args):
    firebase_sites = args.sites.read().splitlines()
    # Read the extra endpoints once, outside the per-site loop
    arg_endpoints = args.endpoints.read().splitlines()
    endpoint_list = common_endpoints + arg_endpoints
    output = {}
    for site in firebase_sites:
        print("Scanning site {}...".format(site))
        endpoint_info = {}
        db_dump = {}
        for endpoint in endpoint_list:
            dump_path = endpoint.split("/")
            read_success, write_success, data = try_endpoint(args, site, endpoint)
            if data is not None:
                # Store the data under the endpoint's path within the dump
                node = db_dump
                for part in dump_path[:-1]:
                    node = node.setdefault(part, {})
                leaf = dump_path[-1]
                if isinstance(data, dict):
                    node[leaf] = merge(node.get(leaf, {}), data)
                else:
                    node[leaf] = data
            if read_success or write_success:
                endpoint_info[endpoint] = {"read": read_success, "write": write_success}
        output[site] = {"info": endpoint_info, "dump": db_dump}
    args.out.write(json.dumps(output))
    args.out.close()


def parse_args():
    parser = ArgumentParser()
    parser.add_argument('sites', help="File containing list of firebase sites to scan [code].firebaseio.com", type=FileType('r'))
    # parser.add_argument('--methods', help="A list of HTTP methods to try", nargs="*", default=firebase_http_methods, choices=firebase_http_methods)
    parser.add_argument('--auth', help="An optional auth token to use")
    parser.add_argument('--endpoints', help="A list of known endpoints to check", nargs='?', type=FileType('r'), default=StringIO(""))
    parser.add_argument('--dirty', help="Should we modify the db to find more writable locations?", action="store_true", default=False)
    parser.add_argument('--out', help="A file to dump info to", nargs='?', type=FileType('w'), default=sys.stdout)
    args = parser.parse_args()
    scan_sites(args)


if __name__ == '__main__':
    parse_args()

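Since overlapping endpoints can return pieces of the same tree, `scan_sites` folds the per-endpoint reads into one dump with `merge`, which raises on conflicting leaf values instead of silently overwriting. A small illustration with made-up data (assumes scanner.py is importable from the working directory):

from scanner import merge  # the helper defined above

# Two partial reads of the same database, with made-up contents
a = {"users": {"alice": {"name": "Alice"}}}
b = {"users": {"alice": {"email": "alice@example.com"}, "bob": {"name": "Bob"}}}

print(merge(a, b))
# {'users': {'alice': {'name': 'Alice', 'email': 'alice@example.com'}, 'bob': {'name': 'Bob'}}}

# Conflicting scalar values at the same key raise instead of merging:
# merge({"role": "admin"}, {"role": "user"})  -> Exception: Conflict at role
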
0 commit comments
