Skip to content

Commit

Permalink
Merge pull request #36 from Azure/rbac-backfill
Browse files Browse the repository at this point in the history
Rbac backfill
  • Loading branch information
legra-ms authored Jan 19, 2021
2 parents aa28fb0 + fd19eca commit 5556a4f
Show file tree
Hide file tree
Showing 8 changed files with 462 additions and 325 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ If you choose to run Stormspotter without Docker, you must have [Python 3.8](htt

### Backend

The backend handles parsing data into Neo4j is built with [FastAPI](https://fastapi.tiangolo.com/). If you don't plan on uploading new content for the database, you may not need to run the backend at all. The backend is configured to run on port 9090. You may change this by changing the port number on line 5 of [app.py](stormfront-backend/app.py). If you do, you must also change the port in the Q-Uploader component in the [DatabaseView Component](stormfront/src/components/DatabaseView.vue) so that the uploads from the frontend get sent to the correct port where the backend resides.
The backend, which handles parsing data into Neo4j, is built with [FastAPI](https://fastapi.tiangolo.com/). If you don't plan on uploading new content for the database, you may not need to run the backend at all. The backend is configured to run on port 9090. You may change this by changing the port number on line 5 of [app.py](backend/app.py). If you do, you must also change the port in the Q-Uploader component in the [DatabaseView Component](frontend/src/components/DatabaseView.vue) so that the uploads from the frontend get sent to the correct port where the backend resides.

```
cd backend
Expand Down
2 changes: 1 addition & 1 deletion backend/backend/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
logging.getLogger("uvicorn.access").handlers = [InterceptHandler()]

app = FastAPI(
title="Stormfront-Backend",
title="Stormspotter-Backend",
description="API Handler for Stormspotter",
version="1.0.0",
)
Expand Down
2 changes: 1 addition & 1 deletion frontend/quasar.conf.js
Original file line number Diff line number Diff line change
Expand Up @@ -186,7 +186,7 @@ module.exports = function (ctx) {
builder: {
// https://www.electron.build/configuration/configuration

appId: "stormfront",
appId: "stormspotter",
},

// More info: https://quasar.dev/quasar-cli/developing-electron-apps/node-integration
Expand Down
1 change: 0 additions & 1 deletion stormcollector/build_collector.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
import subprocess
import sys
import time
import zipfile
from datetime import datetime

# import distutils.sysconfig
Expand Down
589 changes: 302 additions & 287 deletions stormcollector/poetry.lock

Large diffs are not rendered by default.

5 changes: 5 additions & 0 deletions stormcollector/sscollector.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,11 @@ def main():
"--aad", help="Only scan AAD assets", action="store_true"
)

parentParser.add_argument(
"--backfill",
help="Perform AAD enumeration only for object IDs associated with RBAC enumeration. Only applicable when --azure is specified.",
action="store_true",
)
parentParser.add_argument(
"--subs",
nargs="+",
Expand Down
159 changes: 128 additions & 31 deletions stormcollector/stormcollector/aad.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import asyncio
import time
from dataclasses import dataclass
from itertools import chain

import aiohttp
from loguru import logger
Expand All @@ -12,6 +13,8 @@


class _TokenEvent(asyncio.Event):
"""Handles manual refreshing of access tokens during AAD enumeration"""

def __init__(self, ctx: Context, base_url: str, objName: str) -> None:
super().__init__()
self.currentToken = None
Expand Down Expand Up @@ -73,13 +76,19 @@ async def expand(self, resource_id, prop):
return await expanded.json()

@logger.catch()
async def query_objects(self):
logger.info(f"Starting query for {self.__class__.__name__}")
async def query_objects(self, object_id: str = None):

# Prevent logging for each backfill item
if not object_id:
logger.info(f"Starting query for {self.__class__.__name__}")

self.session = aiohttp.ClientSession(connector=SSL_CONTEXT)
user_url = (
f"{self.base_url}/{self.tenant_id}/{self.resource}?{self.api_version}"
)
if object_id:
user_url = f"{self.base_url}/{self.tenant_id}/{self.resource}/{object_id}?{self.api_version}"
else:
user_url = (
f"{self.base_url}/{self.tenant_id}/{self.resource}?{self.api_version}"
)

next_link = True
while next_link:
Expand All @@ -94,15 +103,26 @@ async def query_objects(self):
if "odata.error" in response:
raise Exception(response)

for value in response["value"]:
parsedVal = await self.parse(value)
# If response contains value, it's normal enumeration.
if response.get("value"):
for value in response["value"]:
parsedVal = await self.parse(value)
await sqlite_writer(
OUTPUT_FOLDER / f"{self.__class__.__name__}.sqlite",
parsedVal,
)
if "odata.nextLink" in response:
user_url = f"{self.base_url}/{self.tenant_id}/{response['odata.nextLink']}&{self.api_version}"
else:
next_link = False
# Else it's backfill
else:
parsedVal = await self.parse(response)
await sqlite_writer(
OUTPUT_FOLDER / f"{self.__class__.__name__}.sqlite", parsedVal
OUTPUT_FOLDER / f"{self.__class__.__name__}.sqlite", parsedVal,
)
if "odata.nextLink" in response:
user_url = f"{self.base_url}/{self.tenant_id}/{response['odata.nextLink']}&{self.api_version}"
else:
next_link = False

await self.session.close()
self._token_event.token_refresh_task.cancel()

Expand Down Expand Up @@ -182,7 +202,7 @@ async def parse(self, value):
return value


async def query_aad(ctx: Context, args: argparse.Namespace):
async def query_aad(ctx: Context, args: argparse.Namespace, backfills: dict = None):
logger.info(f"Checking access for Azure AD: {ctx.cloud['AAD']}")
aad_types = AADObject.__subclasses__()

Expand Down Expand Up @@ -219,25 +239,102 @@ async def query_aad(ctx: Context, args: argparse.Namespace):
)
return await session.close()

await asyncio.gather(
*[
aad_type(
ctx=ctx,
tenant_id="beta",
base_url=ctx.cloud["GRAPH"],
api_version="",
).query_objects()
for aad_type in aad_types
]
)
if backfills:
await asyncio.gather(
list(
chain(
[
AADUser(
ctx=ctx,
tenant_id="beta",
base_url=ctx.cloud["GRAPH"],
api_version="",
).query_objects(obj)
for obj in backfills["User"]
],
[
AADGroup(
ctx=ctx,
tenant_id="beta",
base_url=ctx.cloud["GRAPH"],
api_version="",
).query_objects(obj)
for obj in backfills["Group"]
],
[
AADServicePrincipal(
ctx=ctx,
tenant_id="beta",
base_url=ctx.cloud["GRAPH"],
api_version="",
).query_objects(obj)
for obj in backfills["ServicePrincipal"]
],
)
)
)
else:
await asyncio.gather(
*[
aad_type(
ctx=ctx,
tenant_id="beta",
base_url=ctx.cloud["GRAPH"],
api_version="",
).query_objects()
for aad_type in aad_types
]
)
else:
logger.info(f"Starting enumeration for Azure AD: {ctx.cloud['AAD']}")
await asyncio.gather(
*[
aad_type(
ctx=ctx, tenant_id=tenantid, base_url=ctx.cloud["AAD"],
).query_objects()
for aad_type in aad_types
]
)

if backfills:
await asyncio.gather(
*list(
chain(
[
AADUser(
ctx=ctx,
tenant_id="beta",
base_url=ctx.cloud["GRAPH"],
api_version="",
).query_objects(obj)
for obj in backfills["User"]
],
[
AADGroup(
ctx=ctx,
tenant_id="beta",
base_url=ctx.cloud["GRAPH"],
api_version="",
).query_objects(obj)
for obj in backfills["Group"]
],
[
AADServicePrincipal(
ctx=ctx,
tenant_id="beta",
base_url=ctx.cloud["GRAPH"],
api_version="",
).query_objects(obj)
for obj in backfills["ServicePrincipal"]
],
)
)
)
else:
await asyncio.gather(
*[
aad_type(
ctx=ctx, tenant_id=tenantid, base_url=ctx.cloud["AAD"],
).query_objects()
for aad_type in aad_types
]
)
await session.close()


async def rbac_backfill(ctx: Context, args: argparse.Namespace, backfills: dict):
    """Run a second AAD enumeration pass restricted to the object IDs collected
    during RBAC enumeration.

    Thin wrapper around ``query_aad``: passing a non-empty ``backfills`` dict
    switches it into per-object-ID lookup mode instead of full enumeration.

    Args:
        ctx: Authentication/cloud context used for the AAD queries.
        args: Parsed CLI arguments (forwarded unchanged to ``query_aad``).
        backfills: Mapping of AAD object type name ("User", "Group",
            "ServicePrincipal", ...) to a set of object IDs seen in RBAC
            role assignments.
    """
    logger.info("Performing AAD backfill enumeration")
    await query_aad(ctx, args, backfills)
    logger.info("Completed AAD backfill enumeration")
27 changes: 24 additions & 3 deletions stormcollector/stormcollector/arm.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
from loguru import logger

from . import OUTPUT_FOLDER, SSL_CONTEXT
from .aad import rbac_backfill
from .auth import Context
from .utils import sqlite_writer

Expand All @@ -29,9 +30,14 @@ async def _query_resource(
except HttpResponseError as ex:
if "No registered resource provider found for location" in ex.message:
invalid_versions.append(api_version)
api_versions = re.search(
"The supported api-versions are '(.*?)'. The supported locations", ex.message
).groups()[0].split(', ')
api_versions = (
re.search(
"The supported api-versions are '(.*?)'. The supported locations",
ex.message,
)
.groups()[0]
.split(", ")
)
api_versions = list(
filter(lambda v: v not in invalid_versions, api_versions)
)
Expand Down Expand Up @@ -152,6 +158,7 @@ async def query_arm(ctx: Context, args: argparse.Namespace) -> None:
logger.error(f"No subscriptions found for {tenant.tenant_id}")
continue

# ENUMERATE MANAGEMENT CERTS
if ctx.cloud["MGMT"]:
certsTasks = [
asyncio.create_task(_query_management_certs(ctx, sub))
Expand All @@ -164,15 +171,29 @@ async def query_arm(ctx: Context, args: argparse.Namespace) -> None:
if await cert:
await sqlite_writer(certs_output, cert)

# ENUMERATE RBAC
executor = concurrent.futures.ThreadPoolExecutor(max_workers=len(sub_list))
rbacTasks = {executor.submit(_query_rbac, ctx, sub) for sub in sub_list}

backfills = {
"User": set(),
"Group": set(),
"ServicePrincipal": set(),
"Application": set(),
} # Dict of object IDs to hold for AAD enumeration

rbac_output = OUTPUT_FOLDER / f"rbac.sqlite"
for rbac in concurrent.futures.as_completed(*[rbacTasks]):
if rbac.result():
for role in rbac.result():
await sqlite_writer(rbac_output, role)
if args.backfill:
backfills[role["principal_type"]].add(role["principal_id"])

if args.backfill:
await rbac_backfill(ctx, args, backfills)

# ENUMERATE TENANT DATA
subTasks = [
asyncio.create_task(_query_subscription(ctx, sub)) for sub in sub_list
]
Expand Down

0 comments on commit 5556a4f

Please sign in to comment.