Format python files in blueprints #2079

Merged (2 commits) on Feb 15, 2024
5 changes: 2 additions & 3 deletions .github/workflows/linting.yml
@@ -68,10 +68,9 @@ jobs:
       - name: Check python formatting
         id: yapf
         run: |
-          yapf --style="{based_on_style: google, indent_width: 2, SPLIT_BEFORE_NAMED_ASSIGNS: false}" -p -d \
+          yapf --style="{based_on_style: google, indent_width: 2, SPLIT_BEFORE_NAMED_ASSIGNS: false}" -p -d -r \
             tools/*.py \
-            blueprints/cloud-operations/network-dashboard/src/*py \
-            blueprints/cloud-operations/network-dashboard/src/plugins/*py
+            blueprints
 
       - name: Check blueprint metadata
         id: metadata
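
The new -r flag makes yapf recurse into the whole blueprints tree, so newly added blueprint scripts are checked without listing each directory by hand. Assuming yapf is installed locally, the same check the workflow runs can be reproduced with:

  yapf --style="{based_on_style: google, indent_width: 2, SPLIT_BEFORE_NAMED_ASSIGNS: false}" -p -d -r tools/*.py blueprints

-d prints a unified diff of the required changes without touching any file; swapping it for -i applies the formatting in place.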
66 changes: 36 additions & 30 deletions blueprints/cloud-operations/adfs/scripts/ad-provisioning/main.py
@@ -30,12 +30,14 @@
 fake = Faker()
 
-@ click.group()
+
+@click.group()
 def cli():
-    pass
+  pass
 
 
-@ cli.command()
-@ click.option(
+@cli.command()
+@click.option(
     "--num-users",
     help="Number of users to create",
     default=10,
@@ -46,16 +48,17 @@ def cli():
     default="users.json",
 )
 def create_users(num_users, output_file):
-    rows = []
-    for i in range(1, num_users):
-        row = {}
-        row[FIELD_USER_FIRST_NAME] = fake.first_name()
-        row[FIELD_USER_LAST_NAME] = fake.last_name()
-        row[FIELD_USER_USERNAME] = row[FIELD_USER_FIRST_NAME].lower() + "." + \
-            row[FIELD_USER_LAST_NAME].lower()
-        row[FIELD_USER_PASSWORD] = fake.password()
-        rows.append(row)
-    write_json(output_file, rows)
+  rows = []
+  for i in range(1, num_users):
+    row = {}
+    row[FIELD_USER_FIRST_NAME] = fake.first_name()
+    row[FIELD_USER_LAST_NAME] = fake.last_name()
+    row[FIELD_USER_USERNAME] = row[FIELD_USER_FIRST_NAME].lower() + "." + \
+      row[FIELD_USER_LAST_NAME].lower()
+    row[FIELD_USER_PASSWORD] = fake.password()
+    rows.append(row)
+  write_json(output_file, rows)
 
 
 @cli.command()
 @click.option(
@@ -74,25 +77,28 @@ def create_users(num_users, output_file):
     default="memberships.json",
 )
 def create_memberships(users_file, groups_file, output_file):
-    users = read_json(users_file)
-    groups = read_json(groups_file)
-    rows = []
-    for group in groups:
-        members = random.sample(users, random.randint(0, len(users) - 1))
-        for member in members:
-            row = {}
-            row[FIELD_MEMBERSHIP_GROUP] = group
-            row[FIELD_MEMBERSHIP_MEMBER] = member[FIELD_USER_USERNAME]
-            rows.append(row)
-    write_json(output_file, rows)
+  users = read_json(users_file)
+  groups = read_json(groups_file)
+  rows = []
+  for group in groups:
+    members = random.sample(users, random.randint(0, len(users) - 1))
+    for member in members:
+      row = {}
+      row[FIELD_MEMBERSHIP_GROUP] = group
+      row[FIELD_MEMBERSHIP_MEMBER] = member[FIELD_USER_USERNAME]
+      rows.append(row)
+  write_json(output_file, rows)
 
 
 def write_json(file, rows):
-    with open(file, 'w') as f:
-        json.dump(rows, f, indent=2)
+  with open(file, 'w') as f:
+    json.dump(rows, f, indent=2)
 
 
 def read_json(file):
-    with open(file, 'r', encoding='UTF8') as f:
-        return json.load(f)
+  with open(file, 'r', encoding='UTF8') as f:
+    return json.load(f)
 
 
 if __name__ == "__main__":
-    cli()
+  cli()
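
These are pure yapf changes: the decorators lose the stray space after @, and bodies move from four-space to the two-space Google-style indent, with no change in behaviour (including the pre-existing quirk that range(1, num_users) generates num_users - 1 users). For reference, click derives command names from the function names, so a hypothetical local invocation would look like:

  python main.py create-users --num-users 20 --output-file users.json
  python main.py create-memberships --users-file users.json --groups-file groups.json

The option names for create-memberships are inferred here from the function signature; the full file has the exact declarations.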
@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 '''Cloud Function module to do simple instance tag enforcement.
 
 This module is designed to be plugged in a Cloud Function, attached to a PubSub
@@ -48,9 +47,7 @@
 from googleapiclient import discovery
 from googleapiclient.errors import HttpError
 
-
-_SELF_LINK_RE = re.compile(
-    r'/projects/([^/]+)/zones/([^/]+)/instances/([^/]+)')
+_SELF_LINK_RE = re.compile(r'/projects/([^/]+)/zones/([^/]+)/instances/([^/]+)')
 _TAG_SHARED_PREFIXES = ['shared-', 'gke-cluster-']

@@ -71,10 +68,8 @@ def _set_tags(project, zone, name, fingerprint, tags):
       if result['status'] == 'DONE':
         break
       time.sleep(1)
-      result = compute.zoneOperations().get(
-          project=project,
-          zone=zone,
-          operation=result['name']).execute()
+      result = compute.zoneOperations().get(project=project, zone=zone,
+                                            operation=result['name']).execute()
   except HttpError as e:
     raise Error('Error setting tags: %s' % e)
@@ -151,8 +146,8 @@ def main(event=None, context=None):
   if tags['items'] == valid_tags:
     logging.info('all tags are valid')
     return
-  logging.info('modify tags %s %s %s %s %s', project,
-               zone, name, tags['fingerprint'], valid_tags)
+  logging.info('modify tags %s %s %s %s %s', project, zone, name,
+               tags['fingerprint'], valid_tags)
   try:
     _set_tags(project, zone, name, tags.get('fingerprint'), valid_tags)
   except Error as e:
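
Joining the _SELF_LINK_RE pattern onto a single line still fits within the 80-column Google style. As a quick sketch of what it captures from an instance self link (the project, zone and instance names below are made up):

  import re

  _SELF_LINK_RE = re.compile(r'/projects/([^/]+)/zones/([^/]+)/instances/([^/]+)')
  m = _SELF_LINK_RE.search('https://www.googleapis.com/compute/v1'
                           '/projects/my-project/zones/europe-west1-b'
                           '/instances/test-vm')
  print(m.groups())  # ('my-project', 'europe-west1-b', 'test-vm')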
@@ -101,4 +101,4 @@ def main(project, delete=False, dry_run=False):
 
 
 if __name__ == '__main__':
-    main()
+  main()
@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 '''Cloud Function module to export data for a given day.
 
 This module is designed to be plugged in a Cloud Function, attached to Cloud
@@ -50,20 +49,24 @@ def _configure_logging(verbose=True):
 @click.option('--bq-project', required=True, help='Bigquery project to use.')
 @click.option('--bq-dataset', required=True, help='Bigquery dataset to use.')
 @click.option('--bq-table', required=True, help='Bigquery table name to use.')
-@click.option('--bq-table-overwrite', required=True, help='Overwrite existing BQ table or create new datetime() one.')
-@click.option('--target-node', required=True, help='Node in Google Cloud resource hierarchy.')
-@click.option('--read-time', required=False, help=(
-    'Day to take an asset snapshot in \'YYYYMMDD\' format, uses current day '
-    ' as default. Export will run at midnight of the specified day.'))
+@click.option('--bq-table-overwrite', required=True,
+              help='Overwrite existing BQ table or create new datetime() one.')
+@click.option('--target-node', required=True,
+              help='Node in Google Cloud resource hierarchy.')
+@click.option(
+    '--read-time', required=False, help=(
+        'Day to take an asset snapshot in \'YYYYMMDD\' format, uses current day '
+        ' as default. Export will run at midnight of the specified day.'))
 @click.option('--verbose', is_flag=True, help='Verbose output')
-def main_cli(project=None, bq_project=None, bq_dataset=None, bq_table=None, bq_table_overwrite=None, target_node=None,
-             read_time=None, verbose=False):
+def main_cli(project=None, bq_project=None, bq_dataset=None, bq_table=None,
+             bq_table_overwrite=None, target_node=None, read_time=None,
+             verbose=False):
   '''Trigger Cloud Asset inventory export to Bigquery. Data will be stored in
   the dataset specified on a dated table with the name specified.
   '''
   try:
-    _main(project, bq_project, bq_dataset, bq_table,
-          bq_table_overwrite, target_node, read_time, verbose)
+    _main(project, bq_project, bq_dataset, bq_table, bq_table_overwrite,
+          target_node, read_time, verbose)
   except RuntimeError:
     logging.exception('exception raised')

@@ -81,7 +84,9 @@ def main(event, context):
   logging.exception('exception in cloud function entry point')
 
 
-def _main(project=None, bq_project=None, bq_dataset=None, bq_table=None, bq_table_overwrite=None, target_node=None, read_time=None, verbose=False):
+def _main(project=None, bq_project=None, bq_dataset=None, bq_table=None,
+          bq_table_overwrite=None, target_node=None, read_time=None,
+          verbose=False):
   'Module entry point used by cli and cloud function wrappers.'
 
   _configure_logging(verbose)
@@ -92,8 +97,7 @@ def _main(project=None, bq_project=None, bq_dataset=None, bq_table=None,
     output_config.bigquery_destination.table = '%s_%s' % (
         bq_table, read_time.strftime('%Y%m%d'))
   else:
-    output_config.bigquery_destination.table = '%s_latest' % (
-        bq_table)
+    output_config.bigquery_destination.table = '%s_latest' % (bq_table)
   content_type = asset_v1.ContentType.RESOURCE
   output_config.bigquery_destination.dataset = 'projects/%s/datasets/%s' % (
       bq_project, bq_dataset)
@@ -106,12 +110,12 @@ def _main(project=None, bq_project=None, bq_dataset=None, bq_table=None,
           'read_time': read_time,
           'content_type': content_type,
           'output_config': output_config
-      }
-  )
+      })
   except (GoogleAPIError, googleapiclient.errors.HttpError) as e:
     logging.debug('API Error: %s', e, exc_info=True)
     raise RuntimeError(
-        'Error fetching Asset Inventory entries (resource manager node: %s)' % target_node, e)
+        'Error fetching Asset Inventory entries (resource manager node: %s)' %
+        target_node, e)
 
 
 if __name__ == '__main__':
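
The wrapping changes above leave the table-naming logic as it was: depending on the --bq-table-overwrite flag, the export targets either a dated table or a fixed _latest one. A minimal sketch of the two names it produces (the assets table name is illustrative):

  from datetime import datetime

  bq_table = 'assets'  # hypothetical table name
  read_time = datetime(2024, 2, 15)
  print('%s_%s' % (bq_table, read_time.strftime('%Y%m%d')))  # assets_20240215
  print('%s_latest' % bq_table)                              # assets_latest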
@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 '''Cloud Function module to export BQ table as JSON.
 
 This module is designed to be plugged in a Cloud Function, attached to Cloud
@@ -47,12 +46,18 @@ def _configure_logging(verbose=True):
 
 @click.command()
 @click.option('--bucket', required=True, help='GCS bucket for export')
-@click.option('--filename', required=True, help='Path and filename with extension to export e.g. folder/export.json .')
-@click.option('--format', required=True, help='The exported file format, e.g. NEWLINE_DELIMITED_JSON or CSV.')
-@click.option('--bq-dataset', required=True, help='Bigquery dataset where table for export is located.')
+@click.option(
+    '--filename', required=True,
+    help='Path and filename with extension to export e.g. folder/export.json .')
+@click.option(
+    '--format', required=True,
+    help='The exported file format, e.g. NEWLINE_DELIMITED_JSON or CSV.')
+@click.option('--bq-dataset', required=True,
+              help='Bigquery dataset where table for export is located.')
 @click.option('--bq-table', required=True, help='Bigquery table to export.')
 @click.option('--verbose', is_flag=True, help='Verbose output')
-def main_cli(bucket=None, filename=None, format=None, bq_dataset=None, bq_table=None, verbose=False):
+def main_cli(bucket=None, filename=None, format=None, bq_dataset=None,
+             bq_table=None, verbose=False):
   '''Trigger Cloud Asset inventory export from Bigquery to file. Data will be stored in
   the dataset specified on a dated table with the name specified.
   '''
@@ -75,7 +80,8 @@ def main(event, context):
   logging.exception('exception in cloud function entry point')
 
 
-def _main(bucket=None, filename=None, format=None, bq_dataset=None, bq_table=None, verbose=False):
+def _main(bucket=None, filename=None, format=None, bq_dataset=None,
+          bq_table=None, verbose=False):
   'Module entry point used by cli and cloud function wrappers.'
 
   _configure_logging(verbose)
@@ -84,17 +90,14 @@ def _main(bucket=None, filename=None, format=None, bq_dataset=None,
   dataset_ref = client.dataset(bq_dataset)
   table_ref = dataset_ref.table(bq_table)
   job_config = bigquery.job.ExtractJobConfig()
-  job_config.destination_format = (
-      getattr(bigquery.DestinationFormat, format))
-  extract_job = client.extract_table(
-      table_ref, destination_uri, job_config=job_config
-  )
+  job_config.destination_format = (getattr(bigquery.DestinationFormat, format))
+  extract_job = client.extract_table(table_ref, destination_uri,
+                                     job_config=job_config)
   try:
     extract_job.result()
   except (GoogleAPIError, googleapiclient.errors.HttpError) as e:
     logging.debug('API Error: %s', e, exc_info=True)
-    raise RuntimeError(
-        'Error exporting BQ table %s as a file' % bq_table, e)
+    raise RuntimeError('Error exporting BQ table %s as a file' % bq_table, e)
 
 
 if __name__ == '__main__':
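
A final note on a pattern the reformat keeps intact: getattr(bigquery.DestinationFormat, format) maps the raw --format string onto the matching constant of the google-cloud-bigquery DestinationFormat class, e.g.:

  from google.cloud import bigquery

  # 'NEWLINE_DELIMITED_JSON' and 'CSV' are valid attributes; an unknown
  # string raises AttributeError before any export job is created.
  fmt = getattr(bigquery.DestinationFormat, 'NEWLINE_DELIMITED_JSON')

Shadowing the built-in format with a parameter name predates this PR and is untouched here.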