Merge pull request #67 from awslabs/authabac1_2

feat: Fine grained authorization rule definition

Showing 44 changed files with 4,302 additions and 2,013 deletions.
An ignore-style file (name not shown in this view) picks up one new entry:

```
@@ -334,3 +334,4 @@ tags
report
*.orig
```
A second ignore file grows from four entries to seven:

```
@@ -1,4 +1,7 @@
build
node_modules
cdk.out
.mypy_cache
.pytest_cache
ash_cf2cdk_output
ash
```
backend/backend/handlers/auth/finegrainedaccessconstraints.py (new file, 166 additions):
```python
# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

import json
from backend.handlers.auth import request_to_claims
import boto3
import logging
import os
import traceback
from backend.logging.logger import safeLogger
from backend.common.dynamodb import to_update_expr
from boto3.dynamodb.conditions import Key, Attr

logger = safeLogger(child=True, service="finegrainedpolicies", level="INFO")

region = os.environ['AWS_REGION']
dynamodb = boto3.resource('dynamodb', region_name=region)
table = dynamodb.Table(os.environ['TABLE_NAME'])

attrs = "name,groupPermissions,constraintId,description,criteria".split(",")
keys_attrs = {"#{f}".format(f=f): f for f in attrs}


class ValidationError(Exception):
    def __init__(self, code: int, resp: object) -> None:
        self.code = code
        self.resp = resp


def get_constraint(event, response):
    # Fetch a single constraint record by its key.
    key, constraint = get_constraint_from_event(event)

    response['body'] = table.get_item(
        Key=key,
        ExpressionAttributeNames=keys_attrs,
        ProjectionExpression=",".join(keys_attrs.keys()),
    )
    response['body']['constraint'] = response['body']['Item']


def get_constraints(event, response):
    # List every constraint stored under the "constraint" partition.
    result = table.query(
        ExpressionAttributeNames=keys_attrs,
        ProjectionExpression=",".join(keys_attrs.keys()),
        KeyConditionExpression=Key('entityType').eq('constraint') & Key('sk').begins_with('constraint#'),
    )
    logger.info(
        msg="ddb response",
        response=result
    )
    response['body']['constraints'] = result['Items']


# Constraint record shape:
# {
#     "identifier": "constraintId",
#     "name": "user defined name",
#     "description": "description",
#     "groupPermissions": [{ ... }],
#     "created": "utc timestamp",
#     "updated": "utc timestamp",
#     "criteria": [
#         {
#             "field": "fieldname",
#             "operator": "contains",  # one of contains, does not contain, is one of, is not one of
#             "value": "value"         # or ["value", "value"]
#         }
#     ]
# }


def get_constraint_from_event(event):
    # Build the DynamoDB key from the path parameter (if present) or the body's "identifier".
    constraint = None
    if 'body' in event:
        constraint = json.loads(event['body'])

    pathParameters = event.get('pathParameters', {})
    if 'constraintId' in pathParameters:
        constraintId = pathParameters['constraintId']
    else:
        constraintId = constraint['identifier']

    key = {
        'entityType': 'constraint',
        'sk': 'constraint#' + constraintId,
    }
    return key, constraint


def update_constraint(event, response):
    key, constraint = get_constraint_from_event(event)
    keys_map, values_map, expr = to_update_expr(constraint)

    logger.info(msg={
        "keys_map": keys_map,
        "values_map": values_map,
        "expr": expr,
    })

    table.update_item(
        Key=key,
        UpdateExpression=expr,
        ExpressionAttributeNames=keys_map,
        ExpressionAttributeValues=values_map,
        ReturnValues="UPDATED_NEW"
    )

    response['body']['constraint'] = constraint


def delete_constraint(event, response):
    key, constraint = get_constraint_from_event(event)
    table.delete_item(
        Key=key
    )
    response['body'] = {"message": "Constraint deleted."}


def lambda_handler(event, context):
    response = {
        'statusCode': 200,
        'body': {
            "requestid": event['requestContext']['requestId'],
        },
    }

    try:
        claims_and_roles = request_to_claims(event)

        if "super-admin" not in claims_and_roles['roles']:
            raise ValidationError(403, "Not Authorized")

        method = event['requestContext']['http']['method']
        pathParameters = event.get('pathParameters', {})

        # For GET requests, retrieve the constraints from the table and return them as a json object
        if method == 'GET' and 'constraintId' in pathParameters:
            get_constraint(event, response)

        if method == 'GET' and 'constraintId' not in pathParameters:
            get_constraints(event, response)

        # For POST requests, add the new constraint to the table and return the new constraint as a json object
        if method == 'POST':
            update_constraint(event, response)

        # For DELETE requests, remove the constraint from the table and return the deleted constraint as a json object
        if method == 'DELETE':
            delete_constraint(event, response)

        response['body'] = json.dumps(response['body'])
        return response

    except ValidationError as ex:
        response['statusCode'] = ex.code
        response['body']['error'] = ex.resp
        response['body'] = json.dumps(response['body'])
        return response

    except Exception as ex:
        logger.error(traceback.format_exc(), event)
        response['statusCode'] = 500
        response['body']['error'] = traceback.format_exc()
        response['body'] = json.dumps(response['body'])
        return response
```
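As a rough sketch of how this handler is driven, the example below builds a hypothetical HTTP API (payload v2) event for a super-admin creating a constraint. The constraint values, group name, and request ID are invented for illustration; only the routing and key construction are taken from the code above.

```python
import json

# Illustrative constraint following the record shape documented in the handler.
example_constraint = {
    "identifier": "constraint-123",     # hypothetical constraintId
    "name": "Analysts read public data",
    "description": "Analysts may only see assets whose databaseId contains 'public'",
    "groupPermissions": [{"groupId": "analysts", "permission": "Read"}],
    "criteria": [
        {"field": "databaseId", "operator": "contains", "value": "public"}
    ],
}

# Hypothetical API Gateway HTTP API (v2) event; only the fields the handler reads are shown.
example_event = {
    "requestContext": {
        "requestId": "example-request-id",
        "http": {"method": "POST"},     # POST routes to update_constraint()
    },
    "pathParameters": {},               # no constraintId, so the body's "identifier" is used
    "body": json.dumps(example_constraint),
}

# Assuming request_to_claims() reports the "super-admin" role for this caller,
# lambda_handler(example_event, None) would upsert an item with key
# {"entityType": "constraint", "sk": "constraint#constraint-123"} into TABLE_NAME
# and echo the constraint back in the JSON response body.
```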
An empty file is also added.

New file, 124 additions (file path not shown in this view):
```python
# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

import boto3
import os
from boto3.dynamodb.conditions import Key, Attr


class AuthEntities:

    def __init__(self, table):
        self.table = table

    def all_constraints(self):
        # Fetch every constraint record; filtering by group happens in
        # group_or_user_to_fine_grained_claims rather than in the query itself.
        attrs = "name,groupPermissions,constraintId,description,criteria,entityType".split(",")
        keys_attrs = {"#{f}".format(f=f): f for f in attrs}
        result = self.table.query(
            ExpressionAttributeNames=keys_attrs,
            ProjectionExpression=",".join(keys_attrs.keys()),
            KeyConditionExpression=Key("entityType").eq("constraint"),
        )
        return result['Items']

    def group_or_user_to_fine_grained_claims(self, groups):
        # Yield each constraint whose groupPermissions mention at least one of the caller's groups.
        constraints = self.all_constraints()
        for item in constraints:
            if len(groups & set([gp['groupId'] for gp in item['groupPermissions']])) > 0:
                yield item

    def _format_one_of_criteria(self, criteria):
        # "a, b" becomes 'field:("a" OR "b")'
        values = criteria['value'].split(",")
        values = ["\"{}\"".format(s.strip()) for s in values]
        values = " OR ".join(values)
        return f"{criteria['field']}:({values})"

    def claims_to_opensearch_filters(self, claims, groups):
        # Criteria within a claim are AND'ed together; separate claims are OR'ed.
        by_operator = {
            "contains": [],
            "does_not_contain": [],
            "is_one_of": [],
            "is_not_one_of": [],
        }
        claim_predicates = []
        for claim in claims:
            group_permission = [p for p in claim['groupPermissions'] if p['groupId'] in groups]

            predicates = []
            for criteria in claim['criteria']:

                if criteria['operator'] == "contains":
                    predicates.append(f"{criteria['field']}:({criteria['value']})")

                if criteria['operator'] == "does_not_contain":
                    predicates.append(f"-{criteria['field']}:({criteria['value']})")

                if criteria['operator'] == "is_one_of":
                    values_str = self._format_one_of_criteria(criteria)
                    predicates.append(f"{values_str}")

                if criteria['operator'] == "is_not_one_of":
                    values_str = self._format_one_of_criteria(criteria)
                    predicates.append(f"-{values_str}")

            claim_predicates.append("(" + " AND ".join(predicates) + ")")

        return {
            "query": {
                "query_string": {
                    "query": " OR ".join(claim_predicates)
                }
            }
        }
```
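To make the query translation concrete, here is a small hypothetical walkthrough of `claims_to_opensearch_filters`, assuming the `AuthEntities` class above is in scope; the claim, group, and field names are invented, and the output shown simply follows the string formatting in the method.

```python
# Illustrative only: a made-up claim and group, not data from the repository.
claims = [{
    "groupPermissions": [{"groupId": "analysts", "permission": "Read"}],
    "criteria": [
        {"field": "databaseId", "operator": "contains", "value": "public"},
        {"field": "assetType", "operator": "is_one_of", "value": "model, scene"},
    ],
}]

auth = AuthEntities(table=None)  # the table is not used by this method
filters = auth.claims_to_opensearch_filters(claims, groups={"analysts"})

# filters ==
# {
#     "query": {
#         "query_string": {
#             "query": '(databaseId:(public) AND assetType:("model" OR "scene"))'
#         }
#     }
# }
```

The listing continues with `claims_to_opensearch_agg`, which reuses these query strings inside an aggregation body: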
```python
    def claims_to_opensearch_agg(self, claims, groups):
        permissions = {
            "Read": [],
            "Edit": [],
            "Admin": []
        }
        for claim in claims:
            group_permission = [p for p in claim['groupPermissions'] if p['groupId'] in groups]

            # The group permission structure is as follows:
            # {
            #     "groupId": "group-id",
            #     "permissions": "PERMISSION"
            # }
            # Where PERMISSION is one of Read, Edit, Admin.
            # A group can have only 1 permission in a set of groups in a claim.
            #
            # Aggregate the criteria for each group by the permission.
            for permission in permissions.keys():
                for group in group_permission:
                    if group['permission'] == permission:
                        permissions[permission].append(claim)

        aggs = {
            "aggs": {
                "permissions": {
                    "filters": {
                        "filters": {}
                    }
                }
            }
        }

        for permission in permissions.keys():
            query_string = self.claims_to_opensearch_filters(permissions[permission], groups)['query']['query_string']
            if query_string['query'] == "":
                continue

            aggs["aggs"]["permissions"]["filters"]["filters"][permission] = {
                "query_string": query_string
            }

        return aggs
```
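Continuing the same invented data, here is a sketch of what `claims_to_opensearch_agg` assembles; the result is derived by reading the method above, not captured from a live OpenSearch call.

```python
aggs = auth.claims_to_opensearch_agg(claims, groups={"analysts"})

# Only the "Read" bucket is populated, because the sample claim grants the analysts
# group Read permission; the Edit and Admin buckets collect no claims, yield an
# empty query string, and are skipped.
#
# aggs ==
# {
#     "aggs": {
#         "permissions": {
#             "filters": {
#                 "filters": {
#                     "Read": {
#                         "query_string": {
#                             "query": '(databaseId:(public) AND assetType:("model" OR "scene"))'
#                         }
#                     }
#                 }
#             }
#         }
#     }
# }
#
# Merged into a search request, this buckets matching documents by the permission
# level the caller's groups hold.
```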