author     Ryan Petrello <rpetrell@redhat.com>  2021-03-19 17:44:51 +0100
committer  Ryan Petrello <rpetrell@redhat.com>  2021-03-23 14:39:58 +0100
commit     c2ef0a65002b4c945fedfefc121dbc0a0390894f (patch)
tree       52ab6712c9673f16b3f6aa5213f565b0e4008863
parent     Merge pull request #9655 from ansible/jakemcdermott-patch-changelog (diff)
download   awx-c2ef0a65002b4c945fedfefc121dbc0a0390894f.tar.xz
           awx-c2ef0a65002b4c945fedfefc121dbc0a0390894f.zip
move code linting to a stricter pep8-esque auto-formatting tool, black
-rw-r--r--  CONTRIBUTING.md | 2
-rw-r--r--  Makefile | 23
-rw-r--r--  awx/__init__.py | 22
-rw-r--r--  awx/api/authentication.py | 11
-rw-r--r--  awx/api/conf.py | 35
-rw-r--r--  awx/api/exceptions.py | 5
-rw-r--r--  awx/api/fields.py | 27
-rw-r--r--  awx/api/filters.py | 92
-rw-r--r--  awx/api/generics.py | 256
-rw-r--r--  awx/api/metadata.py | 53
-rw-r--r--  awx/api/metrics.py | 8
-rw-r--r--  awx/api/pagination.py | 4
-rw-r--r--  awx/api/permissions.py | 65
-rw-r--r--  awx/api/renderers.py | 18
-rw-r--r--  awx/api/serializers.py | 2179
-rw-r--r--  awx/api/swagger.py | 36
-rw-r--r--  awx/api/urls/activity_stream.py | 5
-rw-r--r--  awx/api/urls/ad_hoc_command_event.py | 5
-rw-r--r--  awx/api/urls/credential_input_source.py | 5
-rw-r--r--  awx/api/urls/credential_type.py | 8
-rw-r--r--  awx/api/urls/instance.py | 10
-rw-r--r--  awx/api/urls/instance_group.py | 7
-rw-r--r--  awx/api/urls/inventory_script.py | 7
-rw-r--r--  awx/api/urls/inventory_source.py | 21
-rw-r--r--  awx/api/urls/job_event.py | 7
-rw-r--r--  awx/api/urls/job_host_summary.py | 8
-rw-r--r--  awx/api/urls/job_template.py | 21
-rw-r--r--  awx/api/urls/label.py | 10
-rw-r--r--  awx/api/urls/notification.py | 10
-rw-r--r--  awx/api/urls/oauth2.py | 30
-rw-r--r--  awx/api/urls/oauth2_root.py | 9
-rw-r--r--  awx/api/urls/organization.py | 28
-rw-r--r--  awx/api/urls/project.py | 14
-rw-r--r--  awx/api/urls/role.py | 9
-rw-r--r--  awx/api/urls/schedule.py | 7
-rw-r--r--  awx/api/urls/system_job.py | 8
-rw-r--r--  awx/api/urls/system_job_template.py | 21
-rw-r--r--  awx/api/urls/urls.py | 23
-rw-r--r--  awx/api/urls/user.py | 5
-rw-r--r--  awx/api/urls/webhooks.py | 6
-rw-r--r--  awx/api/urls/workflow_approval.py | 7
-rw-r--r--  awx/api/urls/workflow_approval_template.py | 5
-rw-r--r--  awx/api/urls/workflow_job_template.py | 28
-rw-r--r--  awx/api/versioning.py | 5
-rw-r--r--  awx/api/views/__init__.py | 709
-rw-r--r--  awx/api/views/inventory.py | 23
-rw-r--r--  awx/api/views/metrics.py | 11
-rw-r--r--  awx/api/views/mixin.py | 69
-rw-r--r--  awx/api/views/organization.py | 42
-rw-r--r--  awx/api/views/root.py | 93
-rw-r--r--  awx/api/views/webhooks.py | 35
-rw-r--r--  awx/asgi.py | 3
-rw-r--r--  awx/conf/access.py | 4
-rw-r--r--  awx/conf/apps.py | 2
-rw-r--r--  awx/conf/fields.py | 40
-rw-r--r--  awx/conf/license.py | 1
-rw-r--r--  awx/conf/migrations/0001_initial.py | 16
-rw-r--r--  awx/conf/migrations/0002_v310_copy_tower_settings.py | 32
-rw-r--r--  awx/conf/migrations/0003_v310_JSONField_changes.py | 12
-rw-r--r--  awx/conf/migrations/0004_v320_reencrypt.py | 4
-rw-r--r--  awx/conf/migrations/0005_v330_rename_two_session_settings.py | 13
-rw-r--r--  awx/conf/migrations/0006_v331_ldap_group_type.py | 8
-rw-r--r--  awx/conf/migrations/0007_v380_rename_more_settings.py | 8
-rw-r--r--  awx/conf/migrations/0008_subscriptions.py | 10
-rw-r--r--  awx/conf/migrations/_ldap_group_type.py | 6
-rw-r--r--  awx/conf/migrations/_reencrypt.py | 13
-rw-r--r--  awx/conf/migrations/_rename_setting.py | 15
-rw-r--r--  awx/conf/migrations/_subscriptions.py | 7
-rw-r--r--  awx/conf/models.py | 19
-rw-r--r--  awx/conf/registry.py | 19
-rw-r--r--  awx/conf/serializers.py | 12
-rw-r--r--  awx/conf/settings.py | 58
-rw-r--r--  awx/conf/signals.py | 7
-rw-r--r--  awx/conf/tests/functional/conftest.py | 6
-rw-r--r--  awx/conf/tests/functional/test_api.py | 249
-rw-r--r--  awx/conf/tests/test_env.py | 2
-rw-r--r--  awx/conf/tests/unit/test_fields.py | 66
-rw-r--r--  awx/conf/tests/unit/test_registry.py | 159
-rw-r--r--  awx/conf/tests/unit/test_settings.py | 239
-rw-r--r--  awx/conf/urls.py | 8
-rwxr-xr-x  awx/conf/utils.py | 5
-rw-r--r--  awx/conf/views.py | 33
-rw-r--r--  awx/main/access.py | 1002
-rw-r--r--  awx/main/analytics/broadcast_websocket.py | 48
-rw-r--r--  awx/main/analytics/collectors.py | 158
-rw-r--r--  awx/main/analytics/core.py | 46
-rw-r--r--  awx/main/analytics/metrics.py | 151
-rw-r--r--  awx/main/conf.py | 231
-rw-r--r--  awx/main/constants.py | 66
-rw-r--r--  awx/main/consumers.py | 70
-rw-r--r--  awx/main/credential_plugins/aim.py | 100
-rw-r--r--  awx/main/credential_plugins/azure_kv.py | 95
-rw-r--r--  awx/main/credential_plugins/centrify_vault.py | 163
-rw-r--r--  awx/main/credential_plugins/conjur.py | 94
-rw-r--r--  awx/main/credential_plugins/hashivault.py | 229
-rw-r--r--  awx/main/credential_plugins/plugin.py | 2
-rw-r--r--  awx/main/db/profiled_pg/base.py | 11
-rw-r--r--  awx/main/dispatch/__init__.py | 9
-rw-r--r--  awx/main/dispatch/control.py | 3
-rw-r--r--  awx/main/dispatch/periodic.py | 10
-rw-r--r--  awx/main/dispatch/pool.py | 44
-rw-r--r--  awx/main/dispatch/publish.py | 18
-rw-r--r--  awx/main/dispatch/reaper.py | 28
-rw-r--r--  awx/main/dispatch/worker/base.py | 7
-rw-r--r--  awx/main/dispatch/worker/callback.py | 28
-rw-r--r--  awx/main/dispatch/worker/task.py | 24
-rw-r--r--  awx/main/exceptions.py | 5
-rw-r--r--  awx/main/fields.py | 279
-rw-r--r--  awx/main/isolated/manager.py | 99
-rw-r--r--  awx/main/management/commands/bottleneck.py | 11
-rw-r--r--  awx/main/management/commands/callback_stats.py | 17
-rw-r--r--  awx/main/management/commands/check_license.py | 3
-rw-r--r--  awx/main/management/commands/check_migrations.py | 1
-rw-r--r--  awx/main/management/commands/cleanup_activitystream.py | 14
-rw-r--r--  awx/main/management/commands/cleanup_jobs.py | 70
-rw-r--r--  awx/main/management/commands/cleanup_sessions.py | 4
-rw-r--r--  awx/main/management/commands/cleanup_tokens.py | 4
-rw-r--r--  awx/main/management/commands/create_oauth2_token.py | 5
-rw-r--r--  awx/main/management/commands/create_preload_data.py | 64
-rw-r--r--  awx/main/management/commands/deprovision_instance.py | 9
-rw-r--r--  awx/main/management/commands/expire_sessions.py | 3
-rw-r--r--  awx/main/management/commands/gather_analytics.py | 20
-rw-r--r--  awx/main/management/commands/generate_isolated_key.py | 19
-rw-r--r--  awx/main/management/commands/graph_jobs.py | 27
-rw-r--r--  awx/main/management/commands/inventory_import.py | 331
-rw-r--r--  awx/main/management/commands/list_instances.py | 3
-rw-r--r--  awx/main/management/commands/profile_sql.py | 20
-rw-r--r--  awx/main/management/commands/provision_instance.py | 11
-rw-r--r--  awx/main/management/commands/regenerate_secret_key.py | 46
-rw-r--r--  awx/main/management/commands/register_queue.py | 27
-rw-r--r--  awx/main/management/commands/remove_from_queue.py | 9
-rw-r--r--  awx/main/management/commands/replay_job_events.py | 51
-rw-r--r--  awx/main/management/commands/revoke_oauth2_tokens.py | 3
-rw-r--r--  awx/main/management/commands/run_callback_receiver.py | 8
-rw-r--r--  awx/main/management/commands/run_dispatcher.py | 22
-rw-r--r--  awx/main/management/commands/run_wsbroadcast.py | 7
-rw-r--r--  awx/main/management/commands/stats.py | 11
-rw-r--r--  awx/main/management/commands/test_isolated_connection.py | 9
-rw-r--r--  awx/main/management/commands/unregister_queue.py | 6
-rw-r--r--  awx/main/management/commands/update_password.py | 6
-rw-r--r--  awx/main/managers.py | 44
-rw-r--r--  awx/main/middleware.py | 23
-rw-r--r--  awx/main/migrations/0001_initial.py | 1079
-rw-r--r--  awx/main/migrations/0002_squashed_v300_release.py | 426
-rw-r--r--  awx/main/migrations/0003_squashed_v300_v303_updates.py | 83
-rw-r--r--  awx/main/migrations/0004_squashed_v310_release.py | 242
-rw-r--r--  awx/main/migrations/0006_v320_release.py | 289
-rw-r--r--  awx/main/migrations/0008_v320_drop_v1_credential_fields.py | 15
-rw-r--r--  awx/main/migrations/0010_v322_add_ovirt4_tower_inventory.py | 42
-rw-r--r--  awx/main/migrations/0013_v330_multi_credential.py | 2
-rw-r--r--  awx/main/migrations/0014_v330_saved_launchtime_configs.py | 11
-rw-r--r--  awx/main/migrations/0016_v330_non_blank_workflow.py | 4
-rw-r--r--  awx/main/migrations/0017_v330_move_deprecated_stdout.py | 16
-rw-r--r--  awx/main/migrations/0018_v330_add_additional_stdout_events.py | 60
-rw-r--r--  awx/main/migrations/0020_v330_instancegroup_policies.py | 35
-rw-r--r--  awx/main/migrations/0021_v330_declare_new_rbac_roles.py | 100
-rw-r--r--  awx/main/migrations/0023_v330_inventory_multicred.py | 5
-rw-r--r--  awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py | 40
-rw-r--r--  awx/main/migrations/0026_v330_delete_authtoken.py | 1
-rw-r--r--  awx/main/migrations/0030_v330_modify_application.py | 8
-rw-r--r--  awx/main/migrations/0031_v330_encrypt_oauth2_secret.py | 4
-rw-r--r--  awx/main/migrations/0032_v330_polymorphic_delete.py | 9
-rw-r--r--  awx/main/migrations/0033_v330_oauth_help_text.py | 35
-rw-r--r--  awx/main/migrations/0035_v330_more_oauth2_help_text.py | 7
-rw-r--r--  awx/main/migrations/0036_v330_credtype_remove_become_methods.py | 1
-rw-r--r--  awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py | 2
-rw-r--r--  awx/main/migrations/0039_v330_custom_venv_help_text.py | 12
-rw-r--r--  awx/main/migrations/0041_v330_update_oauth_refreshtoken.py | 8
-rw-r--r--  awx/main/migrations/0042_v330_org_member_role_deparent.py | 23
-rw-r--r--  awx/main/migrations/0044_v330_add_inventory_update_inventory.py | 4
-rw-r--r--  awx/main/migrations/0046_v330_remove_client_credentials_grant.py | 6
-rw-r--r--  awx/main/migrations/0048_v330_django_created_modified_by_model_name.py | 234
-rw-r--r--  awx/main/migrations/0050_v340_drop_celery_tables.py | 20
-rw-r--r--  awx/main/migrations/0051_v340_job_slicing.py | 35
-rw-r--r--  awx/main/migrations/0053_v340_workflow_inventory.py | 14
-rw-r--r--  awx/main/migrations/0054_v340_workflow_convergence.py | 5
-rw-r--r--  awx/main/migrations/0055_v340_add_grafana_notification.py | 32
-rw-r--r--  awx/main/migrations/0056_v350_custom_venv_history.py | 8
-rw-r--r--  awx/main/migrations/0061_v350_track_native_credentialtype_source.py | 2
-rw-r--r--  awx/main/migrations/0065_v350_index_job_status.py | 17
-rw-r--r--  awx/main/migrations/0066_v350_inventorysource_custom_virtualenv.py | 4
-rw-r--r--  awx/main/migrations/0067_v350_credential_plugins.py | 54
-rw-r--r--  awx/main/migrations/0070_v350_gce_instance_id.py | 4
-rw-r--r--  awx/main/migrations/0072_v350_deprecate_fields.py | 84
-rw-r--r--  awx/main/migrations/0074_v360_migrate_instance_group_relations.py | 10
-rw-r--r--  awx/main/migrations/0078_v360_clear_sessions_tokens_jt.py | 24
-rw-r--r--  awx/main/migrations/0079_v360_rm_implicit_oauth2_apps.py | 6
-rw-r--r--  awx/main/migrations/0080_v360_replace_job_origin.py | 2
-rw-r--r--  awx/main/migrations/0081_v360_notify_on_start.py | 1
-rw-r--r--  awx/main/migrations/0083_v360_job_branch_override.py | 37
-rw-r--r--  awx/main/migrations/0084_v360_token_description.py | 1
-rw-r--r--  awx/main/migrations/0085_v360_add_notificationtemplate_messages.py | 42
-rw-r--r--  awx/main/migrations/0086_v360_workflow_approval.py | 96
-rw-r--r--  awx/main/migrations/0087_v360_update_credential_injector_help_text.py | 18
-rw-r--r--  awx/main/migrations/0088_v360_dashboard_optimizations.py | 17
-rw-r--r--  awx/main/migrations/0089_v360_new_job_event_types.py | 78
-rw-r--r--  awx/main/migrations/0090_v360_WFJT_prompts.py | 60
-rw-r--r--  awx/main/migrations/0091_v360_approval_node_notifications.py | 5
-rw-r--r--  awx/main/migrations/0092_v360_webhook_mixin.py | 44
-rw-r--r--  awx/main/migrations/0093_v360_personal_access_tokens.py | 14
-rw-r--r--  awx/main/migrations/0094_v360_webhook_mixin2.py | 26
-rw-r--r--  awx/main/migrations/0096_v360_container_groups.py | 21
-rw-r--r--  awx/main/migrations/0097_v360_workflowapproval_approved_or_denied_by.py | 9
-rw-r--r--  awx/main/migrations/0098_v360_rename_cyberark_aim_credential_type.py | 14
-rw-r--r--  awx/main/migrations/0101_v370_generate_new_uuids_for_iso_nodes.py | 4
-rw-r--r--  awx/main/migrations/0107_v370_workflow_convergence_api_toggle.py | 8
-rw-r--r--  awx/main/migrations/0108_v370_unifiedjob_dependencies_processed.py | 4
-rw-r--r--  awx/main/migrations/0109_v370_job_template_organization_field.py | 54
-rw-r--r--  awx/main/migrations/0112_v370_workflow_node_identifier.py | 17
-rw-r--r--  awx/main/migrations/0113_v370_event_bigint.py | 32
-rw-r--r--  awx/main/migrations/0114_v370_remove_deprecated_manual_inventory_sources.py | 42
-rw-r--r--  awx/main/migrations/0115_v370_schedule_set_null.py | 9
-rw-r--r--  awx/main/migrations/0116_v400_remove_hipchat_notifications.py | 34
-rw-r--r--  awx/main/migrations/0117_v400_remove_cloudforms_inventory.py | 36
-rw-r--r--  awx/main/migrations/0118_add_remote_archive_scm_type.py | 32
-rw-r--r--  awx/main/migrations/0119_inventory_plugins.py | 24
-rw-r--r--  awx/main/migrations/0120_galaxy_credentials.py | 22
-rw-r--r--  awx/main/migrations/0123_drop_hg_support.py | 18
-rw-r--r--  awx/main/migrations/0124_execution_environments.py | 86
-rw-r--r--  awx/main/migrations/0125_more_ee_modeling_changes.py | 31
-rw-r--r--  awx/main/migrations/0126_executionenvironment_container_options.py | 12
-rw-r--r--  awx/main/migrations/0127_reset_pod_spec_override.py | 5
-rw-r--r--  awx/main/migrations/0128_organiaztion_read_roles_ee_admin.py | 21
-rw-r--r--  awx/main/migrations/0129_unifiedjob_installed_collections.py | 4
-rw-r--r--  awx/main/migrations/0130_ee_polymorphic_set_null.py | 40
-rw-r--r--  awx/main/migrations/0131_undo_org_polymorphic_ee.py | 10
-rw-r--r--  awx/main/migrations/0134_unifiedjob_ansible_version.py | 4
-rw-r--r--  awx/main/migrations/__init__.py | 2
-rw-r--r--  awx/main/migrations/_create_system_jobs.py | 2
-rw-r--r--  awx/main/migrations/_credentialtypes.py | 21
-rw-r--r--  awx/main/migrations/_galaxy.py | 31
-rw-r--r--  awx/main/migrations/_hg_removal.py | 4
-rw-r--r--  awx/main/migrations/_inventory_source.py | 28
-rw-r--r--  awx/main/migrations/_inventory_source_vars.py | 124
-rw-r--r--  awx/main/migrations/_migration_utils.py | 4
-rw-r--r--  awx/main/migrations/_rbac.py | 71
-rw-r--r--  awx/main/migrations/_save_password_keys.py | 4
-rw-r--r--  awx/main/migrations/_squashed.py | 12
-rw-r--r--  awx/main/migrations/_squashed_30.py | 4
-rw-r--r--  awx/main/migrations/_squashed_31.py | 37
-rw-r--r--  awx/main/models/__init__.py | 116
-rw-r--r--  awx/main/models/activity_stream.py | 8
-rw-r--r--  awx/main/models/ad_hoc_commands.py | 41
-rw-r--r--  awx/main/models/base.py | 89
-rw-r--r--  awx/main/models/credential/__init__.py | 998
-rw-r--r--  awx/main/models/credential/injectors.py | 17
-rw-r--r--  awx/main/models/events.py | 119
-rw-r--r--  awx/main/models/execution_environments.py | 2
-rw-r--r--  awx/main/models/ha.py | 115
-rw-r--r--  awx/main/models/inventory.py | 378
-rw-r--r--  awx/main/models/jobs.py | 410
-rw-r--r--  awx/main/models/label.py | 19
-rw-r--r--  awx/main/models/mixins.py | 253
-rw-r--r--  awx/main/models/notifications.py | 327
-rw-r--r--  awx/main/models/oauth.py | 49
-rw-r--r--  awx/main/models/organization.py | 102
-rw-r--r--  awx/main/models/projects.py | 124
-rw-r--r--  awx/main/models/rbac.py | 108
-rw-r--r--  awx/main/models/schedules.py | 55
-rw-r--r--  awx/main/models/unified_jobs.py | 336
-rw-r--r--  awx/main/models/workflow.py | 265
-rw-r--r--  awx/main/notifications/base.py | 1
-rw-r--r--  awx/main/notifications/custom_notification_base.py | 24
-rw-r--r--  awx/main/notifications/email_backend.py | 38
-rw-r--r--  awx/main/notifications/grafana_backend.py | 41
-rw-r--r--  awx/main/notifications/irc_backend.py | 14
-rw-r--r--  awx/main/notifications/mattermost_backend.py | 14
-rw-r--r--  awx/main/notifications/pagerduty_backend.py | 33
-rw-r--r--  awx/main/notifications/rocketchat_backend.py | 16
-rw-r--r--  awx/main/notifications/slack_backend.py | 16
-rw-r--r--  awx/main/notifications/twilio_backend.py | 15
-rw-r--r--  awx/main/notifications/webhook_backend.py | 48
-rw-r--r--  awx/main/queue.py | 2
-rw-r--r--  awx/main/redact.py | 10
-rw-r--r--  awx/main/registrar.py | 7
-rw-r--r--  awx/main/routing.py | 10
-rw-r--r--  awx/main/scheduler/dag_simple.py | 23
-rw-r--r--  awx/main/scheduler/dag_workflow.py | 50
-rw-r--r--  awx/main/scheduler/kubernetes.py | 104
-rw-r--r--  awx/main/scheduler/task_manager.py | 185
-rw-r--r--  awx/main/scheduler/tasks.py | 1
-rw-r--r--  awx/main/signals.py | 156
-rw-r--r--  awx/main/tasks.py | 752
-rw-r--r--  awx/main/templatetags/swagger.py | 6
-rw-r--r--  awx/main/tests/URI.py | 8
-rw-r--r--  awx/main/tests/conftest.py | 26
-rw-r--r--  awx/main/tests/data/insights.py | 2
-rw-r--r--  awx/main/tests/docs/test_swagger_generation.py | 72
-rw-r--r--  awx/main/tests/factories/exc.py | 1
-rw-r--r--  awx/main/tests/factories/fixtures.py | 90
-rw-r--r--  awx/main/tests/factories/objects.py | 18
-rw-r--r--  awx/main/tests/factories/tower.py | 184
-rw-r--r--  awx/main/tests/functional/__init__.py | 11
-rw-r--r--  awx/main/tests/functional/analytics/test_collectors.py | 37
-rw-r--r--  awx/main/tests/functional/analytics/test_core.py | 5
-rw-r--r--  awx/main/tests/functional/analytics/test_counts.py | 4
-rw-r--r--  awx/main/tests/functional/analytics/test_metrics.py | 71
-rw-r--r--  awx/main/tests/functional/api/test_activity_streams.py | 29
-rw-r--r--  awx/main/tests/functional/api/test_adhoc.py | 5
-rw-r--r--  awx/main/tests/functional/api/test_auth.py | 2
-rw-r--r--  awx/main/tests/functional/api/test_create_attach_views.py | 24
-rw-r--r--  awx/main/tests/functional/api/test_credential.py | 511
-rw-r--r--  awx/main/tests/functional/api/test_credential_input_sources.py | 111
-rw-r--r--  awx/main/tests/functional/api/test_credential_type.py | 424
-rw-r--r--  awx/main/tests/functional/api/test_deprecated_credential_assignment.py | 51
-rw-r--r--  awx/main/tests/functional/api/test_events.py | 31
-rw-r--r--  awx/main/tests/functional/api/test_generic.py | 62
-rw-r--r--  awx/main/tests/functional/api/test_host_insights.py | 30
-rw-r--r--  awx/main/tests/functional/api/test_instance_group.py | 29
-rw-r--r--  awx/main/tests/functional/api/test_inventory.py | 407
-rw-r--r--  awx/main/tests/functional/api/test_job.py | 146
-rw-r--r--  awx/main/tests/functional/api/test_job_runtime_params.py | 222
-rw-r--r--  awx/main/tests/functional/api/test_job_template.py | 256
-rw-r--r--  awx/main/tests/functional/api/test_notifications.py | 40
-rw-r--r--  awx/main/tests/functional/api/test_oauth.py | 224
-rw-r--r--  awx/main/tests/functional/api/test_organization_counts.py | 49
-rw-r--r--  awx/main/tests/functional/api/test_organizations.py | 66
-rw-r--r--  awx/main/tests/functional/api/test_pagination.py | 2
-rw-r--r--  awx/main/tests/functional/api/test_project.py | 40
-rw-r--r--  awx/main/tests/functional/api/test_rbac_displays.py | 64
-rw-r--r--  awx/main/tests/functional/api/test_resource_access_lists.py | 3
-rw-r--r--  awx/main/tests/functional/api/test_role.py | 17
-rw-r--r--  awx/main/tests/functional/api/test_schedules.py | 125
-rw-r--r--  awx/main/tests/functional/api/test_script_endpoint.py | 8
-rw-r--r--  awx/main/tests/functional/api/test_search_filter.py | 2
-rw-r--r--  awx/main/tests/functional/api/test_settings.py | 190
-rw-r--r--  awx/main/tests/functional/api/test_survey_spec.py | 263
-rw-r--r--  awx/main/tests/functional/api/test_unified_job_template.py | 38
-rw-r--r--  awx/main/tests/functional/api/test_unified_jobs_stdout.py | 156
-rw-r--r--  awx/main/tests/functional/api/test_unified_jobs_view.py | 28
-rw-r--r--  awx/main/tests/functional/api/test_user.py | 29
-rw-r--r--  awx/main/tests/functional/api/test_webhooks.py | 82
-rw-r--r--  awx/main/tests/functional/api/test_workflow_node.py | 281
-rw-r--r--  awx/main/tests/functional/commands/test_cleanup_jobs.py | 44
-rw-r--r--  awx/main/tests/functional/commands/test_commands.py | 5
-rw-r--r--  awx/main/tests/functional/commands/test_inventory_import.py | 169
-rw-r--r--  awx/main/tests/functional/commands/test_oauth2_token_revoke.py | 23
-rw-r--r--  awx/main/tests/functional/commands/test_secret_key_regeneration.py | 24
-rw-r--r--  awx/main/tests/functional/conftest.py | 313
-rw-r--r--  awx/main/tests/functional/models/test_activity_stream.py | 59
-rw-r--r--  awx/main/tests/functional/models/test_context_managers.py | 8
-rw-r--r--  awx/main/tests/functional/models/test_events.py | 54
-rw-r--r--  awx/main/tests/functional/models/test_inventory.py | 145
-rw-r--r--  awx/main/tests/functional/models/test_job.py | 36
-rw-r--r--  awx/main/tests/functional/models/test_job_launch_config.py | 28
-rw-r--r--  awx/main/tests/functional/models/test_job_options.py | 15
-rw-r--r--  awx/main/tests/functional/models/test_notifications.py | 170
-rw-r--r--  awx/main/tests/functional/models/test_project.py | 18
-rw-r--r--  awx/main/tests/functional/models/test_schedule.py | 244
-rw-r--r--  awx/main/tests/functional/models/test_unified_job.py | 140
-rw-r--r--  awx/main/tests/functional/models/test_workflow.py | 105
-rw-r--r--  awx/main/tests/functional/task_management/test_capacity.py | 1
-rw-r--r--  awx/main/tests/functional/task_management/test_container_groups.py | 10
-rw-r--r--  awx/main/tests/functional/task_management/test_rampart_groups.py | 71
-rw-r--r--  awx/main/tests/functional/task_management/test_scheduler.py | 73
-rw-r--r--  awx/main/tests/functional/test_copy.py | 166
-rw-r--r--  awx/main/tests/functional/test_credential.py | 162
-rw-r--r--  awx/main/tests/functional/test_credential_plugins.py | 1
-rw-r--r--  awx/main/tests/functional/test_dispatch.py | 95
-rw-r--r--  awx/main/tests/functional/test_execution_environments.py | 9
-rw-r--r--  awx/main/tests/functional/test_fixture_factories.py | 43
-rw-r--r--  awx/main/tests/functional/test_galaxy_credential_migration.py | 18
-rw-r--r--  awx/main/tests/functional/test_instance_group_ordering.py | 62
-rw-r--r--  awx/main/tests/functional/test_instances.py | 40
-rw-r--r--  awx/main/tests/functional/test_inventory_source_injectors.py | 65
-rw-r--r--  awx/main/tests/functional/test_inventory_source_migration.py | 16
-rw-r--r--  awx/main/tests/functional/test_jobs.py | 54
-rw-r--r--  awx/main/tests/functional/test_labels.py | 7
-rw-r--r--  awx/main/tests/functional/test_ldap.py | 51
-rw-r--r--  awx/main/tests/functional/test_licenses.py | 19
-rw-r--r--  awx/main/tests/functional/test_named_url.py | 42
-rw-r--r--  awx/main/tests/functional/test_notifications.py | 94
-rw-r--r--  awx/main/tests/functional/test_projects.py | 350
-rw-r--r--  awx/main/tests/functional/test_python_requirements.py | 11
-rw-r--r--  awx/main/tests/functional/test_rbac_api.py | 23
-rw-r--r--  awx/main/tests/functional/test_rbac_core.py | 2
-rw-r--r--  awx/main/tests/functional/test_rbac_credential.py | 25
-rw-r--r--  awx/main/tests/functional/test_rbac_instance_groups.py | 3
-rw-r--r--  awx/main/tests/functional/test_rbac_inventory.py | 33
-rw-r--r--  awx/main/tests/functional/test_rbac_job.py | 107
-rw-r--r--  awx/main/tests/functional/test_rbac_job_start.py | 30
-rw-r--r--  awx/main/tests/functional/test_rbac_job_templates.py | 116
-rw-r--r--  awx/main/tests/functional/test_rbac_label.py | 4
-rw-r--r--  awx/main/tests/functional/test_rbac_migration.py | 48
-rw-r--r--  awx/main/tests/functional/test_rbac_notifications.py | 74
-rw-r--r--  awx/main/tests/functional/test_rbac_oauth.py | 214
-rw-r--r--  awx/main/tests/functional/test_rbac_role.py | 56
-rw-r--r--  awx/main/tests/functional/test_rbac_user.py | 28
-rw-r--r--  awx/main/tests/functional/test_rbac_workflow.py | 40
-rw-r--r--  awx/main/tests/functional/test_session.py | 53
-rw-r--r--  awx/main/tests/functional/test_tasks.py | 30
-rw-r--r--  awx/main/tests/functional/utils/test_common.py | 5
-rwxr-xr-x  awx/main/tests/manual/workflows/linear.py | 6
-rwxr-xr-x  awx/main/tests/manual/workflows/parallel.py | 10
-rw-r--r--  awx/main/tests/test_env.py | 2
-rw-r--r--  awx/main/tests/unit/analytics/test_broadcast_websocket.py | 22
-rw-r--r--  awx/main/tests/unit/api/serializers/conftest.py | 6
-rw-r--r--  awx/main/tests/unit/api/serializers/test_activity_stream_serializer.py | 10
-rw-r--r--  awx/main/tests/unit/api/serializers/test_inventory_serializers.py | 60
-rw-r--r--  awx/main/tests/unit/api/serializers/test_job_serializers.py | 122
-rw-r--r--  awx/main/tests/unit/api/serializers/test_job_template_serializers.py | 39
-rw-r--r--  awx/main/tests/unit/api/serializers/test_notification_template_serializers.py | 81
-rw-r--r--  awx/main/tests/unit/api/serializers/test_primary_key_related_field.py | 2
-rw-r--r--  awx/main/tests/unit/api/serializers/test_token_serializer.py | 8
-rw-r--r--  awx/main/tests/unit/api/serializers/test_unified_serializers.py | 35
-rw-r--r--  awx/main/tests/unit/api/serializers/test_workflow_serializers.py | 135
-rw-r--r--  awx/main/tests/unit/api/test_filters.py | 63
-rw-r--r--  awx/main/tests/unit/api/test_generics.py | 14
-rw-r--r--  awx/main/tests/unit/api/test_logger.py | 115
-rw-r--r--  awx/main/tests/unit/api/test_parsers.py | 5
-rw-r--r--  awx/main/tests/unit/api/test_views.py | 140
-rw-r--r--  awx/main/tests/unit/commands/test_inventory_import.py | 10
-rw-r--r--  awx/main/tests/unit/commands/test_replay_job_events.py | 43
-rw-r--r--  awx/main/tests/unit/conftest.py | 18
-rw-r--r--  awx/main/tests/unit/models/test_credential.py | 29
-rw-r--r--  awx/main/tests/unit/models/test_events.py | 60
-rw-r--r--  awx/main/tests/unit/models/test_ha.py | 42
-rw-r--r--  awx/main/tests/unit/models/test_inventory.py | 35
-rw-r--r--  awx/main/tests/unit/models/test_job_template_unit.py | 29
-rw-r--r--  awx/main/tests/unit/models/test_jobs.py | 11
-rw-r--r--  awx/main/tests/unit/models/test_label.py | 19
-rw-r--r--  awx/main/tests/unit/models/test_project.py | 1
-rw-r--r--  awx/main/tests/unit/models/test_survey_models.py | 260
-rw-r--r--  awx/main/tests/unit/models/test_system_jobs.py | 69
-rw-r--r--  awx/main/tests/unit/models/test_unified_job_unit.py | 48
-rw-r--r--  awx/main/tests/unit/models/test_workflow_unit.py | 53
-rw-r--r--  awx/main/tests/unit/notifications/test_grafana.py | 132
-rw-r--r--  awx/main/tests/unit/notifications/test_rocketchat.py | 60
-rw-r--r--  awx/main/tests/unit/scheduler/test_dag_simple.py | 1
-rw-r--r--  awx/main/tests/unit/scheduler/test_dag_workflow.py | 74
-rw-r--r--  awx/main/tests/unit/scheduler/test_kubernetes.py | 7
-rw-r--r--  awx/main/tests/unit/settings/test_defaults.py | 9
-rw-r--r--  awx/main/tests/unit/test_access.py | 93
-rw-r--r--  awx/main/tests/unit/test_capacity.py | 41
-rw-r--r--  awx/main/tests/unit/test_db.py | 16
-rw-r--r--  awx/main/tests/unit/test_fields.py | 272
-rw-r--r--  awx/main/tests/unit/test_redact.py | 54
-rw-r--r--  awx/main/tests/unit/test_settings.py | 1
-rw-r--r--  awx/main/tests/unit/test_tasks.py | 1062
-rw-r--r--  awx/main/tests/unit/test_validators.py | 44
-rw-r--r--  awx/main/tests/unit/test_views.py | 16
-rw-r--r--  awx/main/tests/unit/utils/test_common.py | 43
-rw-r--r--  awx/main/tests/unit/utils/test_encryption.py | 5
-rw-r--r--  awx/main/tests/unit/utils/test_filters.py | 282
-rw-r--r--  awx/main/tests/unit/utils/test_formatters.py | 4
-rw-r--r--  awx/main/tests/unit/utils/test_insights.py | 9
-rw-r--r--  awx/main/tests/unit/utils/test_mem_inventory.py | 24
-rw-r--r--  awx/main/tests/unit/utils/test_reload.py | 12
-rw-r--r--  awx/main/tests/unit/utils/test_safe_yaml.py | 12
-rw-r--r--  awx/main/utils/__init__.py | 8
-rw-r--r--  awx/main/utils/ansible.py | 14
-rw-r--r--  awx/main/utils/common.py | 300
-rw-r--r--  awx/main/utils/db.py | 18
-rw-r--r--  awx/main/utils/deletion.py | 38
-rw-r--r--  awx/main/utils/encryption.py | 49
-rw-r--r--  awx/main/utils/external_logging.py | 24
-rw-r--r--  awx/main/utils/filters.py | 48
-rw-r--r--  awx/main/utils/formatters.py | 50
-rw-r--r--  awx/main/utils/handlers.py | 16
-rw-r--r--  awx/main/utils/insights.py | 12
-rw-r--r--  awx/main/utils/licensing.py | 93
-rw-r--r--  awx/main/utils/mem_inventory.py | 69
-rw-r--r--  awx/main/utils/named_url_graph.py | 50
-rw-r--r--  awx/main/utils/polymorphic.py | 1
-rw-r--r--  awx/main/utils/profiling.py | 16
-rw-r--r--  awx/main/utils/reload.py | 18
-rw-r--r--  awx/main/utils/safe_yaml.py | 24
-rw-r--r--  awx/main/validators.py | 6
-rw-r--r--  awx/main/views.py | 2
-rw-r--r--  awx/main/wsbroadcast.py | 48
-rw-r--r--  awx/playbooks/action_plugins/insights.py | 24
-rw-r--r--  awx/playbooks/action_plugins/project_archive.py | 4
-rw-r--r--  awx/playbooks/library/insights.py | 4
-rw-r--r--  awx/playbooks/library/project_archive.py | 6
-rw-r--r--  awx/plugins/isolated/awx_capacity.py | 25
-rw-r--r--  awx/plugins/isolated/awx_isolated_cleanup.py | 8
-rwxr-xr-x  awx/plugins/isolated/mkfifo.py | 8
-rw-r--r--  awx/settings/defaults.py | 378
-rw-r--r--  awx/settings/development.py | 80
-rw-r--r--  awx/settings/development_quiet.py | 1
-rw-r--r--  awx/settings/production.py | 22
-rw-r--r--  awx/sso/backends.py | 108
-rw-r--r--  awx/sso/conf.py | 719
-rw-r--r--  awx/sso/fields.py | 224
-rw-r--r--  awx/sso/ldap_group_types.py | 8
-rw-r--r--  awx/sso/middleware.py | 1
-rw-r--r--  awx/sso/migrations/0001_initial.py | 9
-rw-r--r--  awx/sso/migrations/0002_expand_provider_options.py | 6
-rw-r--r--  awx/sso/models.py | 14
-rw-r--r--  awx/sso/pipeline.py | 37
-rw-r--r--  awx/sso/tests/functional/test_get_or_set_enterprise_user.py | 8
-rw-r--r--  awx/sso/tests/functional/test_ldap.py | 13
-rw-r--r--  awx/sso/tests/functional/test_pipeline.py | 67
-rw-r--r--  awx/sso/tests/test_env.py | 2
-rw-r--r--  awx/sso/tests/unit/test_fields.py | 204
-rw-r--r--  awx/sso/tests/unit/test_tacacsplus.py | 21
-rw-r--r--  awx/sso/urls.py | 7
-rw-r--r--  awx/sso/validators.py | 17
-rw-r--r--  awx/sso/views.py | 11
-rw-r--r--  awx/ui/__init__.py | 1
-rw-r--r--  awx/ui/apps.py | 1
-rw-r--r--  awx/ui/conf.py | 31
-rw-r--r--  awx/ui/context_processors.py | 4
-rw-r--r--  awx/ui/fields.py | 2
-rw-r--r--  awx/ui_next/apps.py | 1
-rw-r--r--  awx/ui_next/urls.py | 7
-rw-r--r--  awx/urls.py | 14
-rw-r--r--  awx/wsgi.py | 3
-rw-r--r--  awx_collection/plugins/doc_fragments/auth.py | 3
-rw-r--r--  awx_collection/plugins/doc_fragments/auth_legacy.py | 3
-rw-r--r--  awx_collection/plugins/doc_fragments/auth_plugin.py | 3
-rw-r--r--  awx_collection/plugins/inventory/tower.py | 14
-rw-r--r--  awx_collection/plugins/lookup/tower_api.py | 18
-rw-r--r--  awx_collection/plugins/lookup/tower_schedule_rrule.py | 4
-rw-r--r--  awx_collection/plugins/module_utils/tower_api.py | 91
-rw-r--r--  awx_collection/plugins/module_utils/tower_awxkit.py | 4
-rw-r--r--  awx_collection/plugins/module_utils/tower_legacy.py | 24
-rw-r--r--  awx_collection/plugins/module_utils/tower_module.py | 19
-rw-r--r--  awx_collection/plugins/modules/tower_ad_hoc_command.py | 34
-rw-r--r--  awx_collection/plugins/modules/tower_ad_hoc_command_cancel.py | 16
-rw-r--r--  awx_collection/plugins/modules/tower_ad_hoc_command_wait.py | 23
-rw-r--r--  awx_collection/plugins/modules/tower_application.py | 18
-rw-r--r--  awx_collection/plugins/modules/tower_credential.py | 41
-rw-r--r--  awx_collection/plugins/modules/tower_credential_input_source.py | 5
-rw-r--r--  awx_collection/plugins/modules/tower_credential_type.py | 14
-rw-r--r--  awx_collection/plugins/modules/tower_execution_environment.py | 13
-rw-r--r--  awx_collection/plugins/modules/tower_export.py | 6
-rw-r--r--  awx_collection/plugins/modules/tower_group.py | 26
-rw-r--r--  awx_collection/plugins/modules/tower_host.py | 11
-rw-r--r--  awx_collection/plugins/modules/tower_import.py | 10
-rw-r--r--  awx_collection/plugins/modules/tower_instance_group.py | 13
-rw-r--r--  awx_collection/plugins/modules/tower_inventory.py | 18
-rw-r--r--  awx_collection/plugins/modules/tower_inventory_source.py | 43
-rw-r--r--  awx_collection/plugins/modules/tower_inventory_source_update.py | 14
-rw-r--r--  awx_collection/plugins/modules/tower_job_cancel.py | 16
-rw-r--r--  awx_collection/plugins/modules/tower_job_launch.py | 34
-rw-r--r--  awx_collection/plugins/modules/tower_job_list.py | 7
-rw-r--r--  awx_collection/plugins/modules/tower_job_template.py | 85
-rw-r--r--  awx_collection/plugins/modules/tower_job_wait.py | 25
-rw-r--r--  awx_collection/plugins/modules/tower_label.py | 24
-rw-r--r--  awx_collection/plugins/modules/tower_license.py | 14
-rw-r--r--  awx_collection/plugins/modules/tower_meta.py | 17
-rw-r--r--  awx_collection/plugins/modules/tower_notification_template.py | 72
-rw-r--r--  awx_collection/plugins/modules/tower_organization.py | 11
-rw-r--r--  awx_collection/plugins/modules/tower_project.py | 35
-rw-r--r--  awx_collection/plugins/modules/tower_project_update.py | 12
-rw-r--r--  awx_collection/plugins/modules/tower_receive.py | 6
-rw-r--r--  awx_collection/plugins/modules/tower_role.py | 40
-rw-r--r--  awx_collection/plugins/modules/tower_schedule.py | 12
-rw-r--r--  awx_collection/plugins/modules/tower_send.py | 6
-rw-r--r--  awx_collection/plugins/modules/tower_settings.py | 19
-rw-r--r--  awx_collection/plugins/modules/tower_team.py | 16
-rw-r--r--  awx_collection/plugins/modules/tower_token.py | 32
-rw-r--r--  awx_collection/plugins/modules/tower_user.py | 5
-rw-r--r--  awx_collection/plugins/modules/tower_workflow_job_template.py | 49
-rw-r--r--  awx_collection/plugins/modules/tower_workflow_job_template_node.py | 39
-rw-r--r--  awx_collection/plugins/modules/tower_workflow_launch.py | 12
-rw-r--r--  awx_collection/plugins/modules/tower_workflow_template.py | 55
-rw-r--r--  awx_collection/setup.cfg | 3
-rw-r--r--  awx_collection/test/awx/conftest.py | 69
-rw-r--r--  awx_collection/test/awx/test_ad_hoc_wait.py | 29
-rw-r--r--  awx_collection/test/awx/test_application.py | 3
-rw-r--r--  awx_collection/test/awx/test_completeness.py | 148
-rw-r--r--  awx_collection/test/awx/test_credential.py | 103
-rw-r--r--  awx_collection/test/awx/test_credential_input_source.py | 243
-rw-r--r--  awx_collection/test/awx/test_credential_type.py | 53
-rw-r--r--  awx_collection/test/awx/test_group.py | 43
-rw-r--r--  awx_collection/test/awx/test_instance_group.py | 39
-rw-r--r--  awx_collection/test/awx/test_inventory.py | 51
-rw-r--r--  awx_collection/test/awx/test_inventory_source.py | 94
-rw-r--r--  awx_collection/test/awx/test_job.py | 36
-rw-r--r--  awx_collection/test/awx/test_job_template.py | 199
-rw-r--r--  awx_collection/test/awx/test_label.py | 19
-rw-r--r--  awx_collection/test/awx/test_module_utils.py | 53
-rw-r--r--  awx_collection/test/awx/test_notification_template.py | 142
-rw-r--r--  awx_collection/test/awx/test_organization.py | 12
-rw-r--r--  awx_collection/test/awx/test_project.py | 25
-rw-r--r--  awx_collection/test/awx/test_role.py | 43
-rw-r--r--  awx_collection/test/awx/test_schedule.py | 136
-rw-r--r--  awx_collection/test/awx/test_send_receive.py | 33
-rw-r--r--  awx_collection/test/awx/test_settings.py | 36
-rw-r--r--  awx_collection/test/awx/test_team.py | 33
-rw-r--r--  awx_collection/test/awx/test_token.py | 3
-rw-r--r--  awx_collection/test/awx/test_user.py | 34
-rw-r--r--  awx_collection/test/awx/test_workflow_job_template.py | 116
-rw-r--r--  awx_collection/test/awx/test_workflow_job_template_node.py | 99
-rw-r--r--  awx_collection/test/awx/test_workflow_template.py | 124
-rw-r--r--  awxkit/awxkit/__init__.py | 8
-rw-r--r--  awxkit/awxkit/api/__init__.py | 4
-rw-r--r--  awxkit/awxkit/api/client.py | 6
-rw-r--r--  awxkit/awxkit/api/mixins/has_copy.py | 1
-rw-r--r--  awxkit/awxkit/api/mixins/has_create.py | 18
-rw-r--r--  awxkit/awxkit/api/mixins/has_instance_groups.py | 1
-rw-r--r--  awxkit/awxkit/api/mixins/has_notifications.py | 18
-rw-r--r--  awxkit/awxkit/api/mixins/has_status.py | 13
-rw-r--r--  awxkit/awxkit/api/mixins/has_survey.py | 12
-rw-r--r--  awxkit/awxkit/api/mixins/has_variables.py | 1
-rw-r--r--  awxkit/awxkit/api/pages/__init__.py | 2
-rw-r--r--  awxkit/awxkit/api/pages/access_list.py | 21
-rw-r--r--  awxkit/awxkit/api/pages/activity_stream.py | 3
-rw-r--r--  awxkit/awxkit/api/pages/ad_hoc_commands.py | 49
-rw-r--r--  awxkit/awxkit/api/pages/api.py | 22
-rw-r--r--  awxkit/awxkit/api/pages/applications.py | 19
-rw-r--r--  awxkit/awxkit/api/pages/base.py | 48
-rw-r--r--  awxkit/awxkit/api/pages/config.py | 15
-rw-r--r--  awxkit/awxkit/api/pages/credential_input_sources.py | 3
-rw-r--r--  awxkit/awxkit/api/pages/credentials.py | 147
-rw-r--r--  awxkit/awxkit/api/pages/execution_environments.py | 9
-rw-r--r--  awxkit/awxkit/api/pages/instance_groups.py | 10
-rw-r--r--  awxkit/awxkit/api/pages/instances.py | 3
-rw-r--r--  awxkit/awxkit/api/pages/inventory.py | 379
-rw-r--r--  awxkit/awxkit/api/pages/job_templates.py | 112
-rw-r--r--  awxkit/awxkit/api/pages/jobs.py | 16
-rw-r--r--  awxkit/awxkit/api/pages/labels.py | 40
-rw-r--r--  awxkit/awxkit/api/pages/metrics.py | 7
-rw-r--r--  awxkit/awxkit/api/pages/notification_templates.py | 169
-rw-r--r--  awxkit/awxkit/api/pages/notifications.py | 11
-rw-r--r--  awxkit/awxkit/api/pages/organizations.py | 30
-rw-r--r--  awxkit/awxkit/api/pages/page.py | 114
-rw-r--r--  awxkit/awxkit/api/pages/projects.py | 89
-rw-r--r--  awxkit/awxkit/api/pages/roles.py | 12
-rw-r--r--  awxkit/awxkit/api/pages/schedules.py | 8
-rw-r--r--  awxkit/awxkit/api/pages/settings.py | 40
-rw-r--r--  awxkit/awxkit/api/pages/subscriptions.py | 1
-rw-r--r--  awxkit/awxkit/api/pages/survey_spec.py | 4
-rw-r--r--  awxkit/awxkit/api/pages/system_job_templates.py | 5
-rw-r--r--  awxkit/awxkit/api/pages/teams.py | 15
-rw-r--r--  awxkit/awxkit/api/pages/unified_job_templates.py | 31
-rw-r--r--  awxkit/awxkit/api/pages/unified_jobs.py | 24
-rw-r--r--  awxkit/awxkit/api/pages/users.py | 39
-rw-r--r--  awxkit/awxkit/api/pages/workflow_approvals.py | 1
-rw-r--r--  awxkit/awxkit/api/pages/workflow_job_nodes.py | 16
-rw-r--r--  awxkit/awxkit/api/pages/workflow_job_template_nodes.py | 88
-rw-r--r--  awxkit/awxkit/api/pages/workflow_job_templates.py | 18
-rw-r--r--  awxkit/awxkit/api/pages/workflow_jobs.py | 6
-rw-r--r--  awxkit/awxkit/api/registry.py | 4
-rw-r--r--  awxkit/awxkit/api/resources.py | 1
-rw-r--r--  awxkit/awxkit/api/utils.py | 8
-rw-r--r--  awxkit/awxkit/awx/inventory.py | 16
-rw-r--r--  awxkit/awxkit/awx/utils.py | 19
-rw-r--r--  awxkit/awxkit/cli/__init__.py | 9
-rwxr-xr-x  awxkit/awxkit/cli/client.py | 88
-rw-r--r--  awxkit/awxkit/cli/custom.py | 220
-rw-r--r--  awxkit/awxkit/cli/docs/source/conf.py | 4
-rw-r--r--  awxkit/awxkit/cli/format.py | 36
-rw-r--r--  awxkit/awxkit/cli/options.py | 63
-rw-r--r--  awxkit/awxkit/cli/resource.py | 46
-rw-r--r--  awxkit/awxkit/cli/sphinx.py | 12
-rw-r--r--  awxkit/awxkit/cli/stdout.py | 18
-rw-r--r--  awxkit/awxkit/cli/utils.py | 20
-rw-r--r--  awxkit/awxkit/exceptions.py | 2
-rwxr-xr-x  awxkit/awxkit/scripts/basic_session.py | 48
-rw-r--r--  awxkit/awxkit/utils/__init__.py | 44
-rw-r--r--  awxkit/awxkit/utils/toposort.py | 18
-rw-r--r--  awxkit/awxkit/words.py | 1718
-rw-r--r--  awxkit/awxkit/ws.py | 17
-rw-r--r--  awxkit/awxkit/yaml_file.py | 2
-rw-r--r--  awxkit/setup.py | 13
-rw-r--r--  awxkit/test/cli/test_client.py | 15
-rw-r--r--  awxkit/test/cli/test_config.py | 32
-rw-r--r--  awxkit/test/cli/test_format.py | 11
-rw-r--r--  awxkit/test/cli/test_options.py | 156
-rw-r--r--  awxkit/test/test_credentials.py | 37
-rw-r--r--  awxkit/test/test_dependency_resolver.py | 106
-rw-r--r--  awxkit/test/test_registry.py | 27
-rw-r--r--  awxkit/test/test_utils.py | 209
-rw-r--r--  awxkit/test/test_ws.py | 16
-rw-r--r--  requirements/requirements_dev.txt | 4
-rwxr-xr-x  setup.cfg | 21
-rw-r--r--  tools/ansible/roles/dockerfile/templates/Dockerfile.j2 | 2
-rw-r--r--  tox.ini | 6
671 files changed, 20548 insertions, 21934 deletions
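
The diffstat above is dominated by mechanical reformatting rather than behavioral change (20,548 insertions against 21,934 deletions across 671 files). A minimal sketch of the kind of rewrite involved, using illustrative bindings that are not taken from this diff:

    import logging

    logger = logging.getLogger(__name__)
    username, remote_addr = 'alice', '127.0.0.1'

    # Before: a hand-wrapped call with ad hoc continuation indentation.
    logger.info(
        "User {} logged in from {}".format(
            username, remote_addr
        )
    )

    # After black: the call collapses onto a single line because it fits
    # within the configured 160-character limit.
    logger.info("User {} logged in from {}".format(username, remote_addr))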
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 51d3334aa7..bf81f271c0 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -127,7 +127,7 @@ Fixes and Features for AWX will go through the Github pull request process. Subm
Here are a few things you can do to help the visibility of your change, and increase the likelihood that it will be accepted:
* No issues when running linters/code checkers
- * Python: flake8: `(container)/awx_devel$ make flake8`
+ * Python: black: `(container)/awx_devel$ make black`
* Javascript: JsHint: `(container)/awx_devel$ make jshint`
* No issues from unit tests
* Python: py.test: `(container)/awx_devel$ make test`
diff --git a/Makefile b/Makefile
index aaf1dc6d8f..0ff9f06045 100644
--- a/Makefile
+++ b/Makefile
@@ -271,20 +271,8 @@ jupyter:
reports:
mkdir -p $@
-pep8: reports
- @(set -o pipefail && $@ | tee reports/$@.report)
-
-flake8: reports
- @if [ "$(VENV_BASE)" ]; then \
- . $(VENV_BASE)/awx/bin/activate; \
- fi; \
- (set -o pipefail && $@ | tee reports/$@.report)
-
-pyflakes: reports
- @(set -o pipefail && $@ | tee reports/$@.report)
-
-pylint: reports
- @(set -o pipefail && $@ | reports/$@.report)
+black: reports
+ (set -o pipefail && $@ $(BLACK_ARGS) --skip-string-normalization --fast --line-length 160 awx awxkit awx_collection | tee reports/$@.report)
genschema: reports
$(MAKE) swagger PYTEST_ARGS="--genschema --create-db "
@@ -296,7 +284,7 @@ swagger: reports
fi; \
(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
-check: flake8 pep8 # pyflakes pylint
+check: black
awx-link:
[ -d "/awx_devel/awx.egg-info" ] || python3 /awx_devel/setup.py egg_info_dev
@@ -332,10 +320,7 @@ test_collection:
# Second we will load any libraries out of the virtualenv (if it's unspecified that should be ok because python should not load out of an empty directory)
# Finally we will add the system path so that the tests can find the ansible libraries
-flake8_collection:
- flake8 awx_collection/ # Different settings, in main exclude list
-
-test_collection_all: test_collection flake8_collection
+test_collection_all: test_collection
# WARNING: symlinking a collection is fundamentally unstable
# this is for rapid development iteration with playbooks, do not use with other test targets
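
The flags on the new `black` target map onto black's formatting options: `--line-length 160` widens the default 88-column limit, `--skip-string-normalization` leaves existing quote characters alone, and `--fast` skips black's AST-equivalence safety check. A rough equivalent of the first two through black's Python API, as a sketch only (it assumes a black release where `Mode` accepts these keyword arguments; `format_str` has no counterpart to `--fast`):

    import black

    source = "x = {'a': 1,'b': 2}\n"
    formatted = black.format_str(
        source,
        mode=black.Mode(
            line_length=160,             # --line-length 160
            string_normalization=False,  # --skip-string-normalization
        ),
    )
    print(formatted)  # x = {'a': 1, 'b': 2} -- spacing fixed, quotes untouched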
diff --git a/awx/__init__.py b/awx/__init__.py
index 5afb83d115..b9aa936785 100644
--- a/awx/__init__.py
+++ b/awx/__init__.py
@@ -15,9 +15,10 @@ __all__ = ['__version__']
# Check for the presence/absence of "devonly" module to determine if running
# from a source code checkout or release package.
try:
- import awx.devonly # noqa
+ import awx.devonly # noqa
+
MODE = 'development'
-except ImportError: # pragma: no cover
+except ImportError: # pragma: no cover
MODE = 'production'
@@ -25,6 +26,7 @@ import hashlib
try:
import django # noqa: F401
+
HAS_DJANGO = True
except ImportError:
HAS_DJANGO = False
@@ -40,6 +42,7 @@ if HAS_DJANGO is True:
try:
names_digest('foo', 'bar', 'baz', length=8)
except ValueError:
+
def names_digest(*args, length):
"""
Generate a 32-bit digest of a set of arguments that can be used to shorten
@@ -64,7 +67,7 @@ def find_commands(management_dir):
continue
elif f.endswith('.py') and f[:-3] not in commands:
commands.append(f[:-3])
- elif f.endswith('.pyc') and f[:-4] not in commands: # pragma: no cover
+ elif f.endswith('.pyc') and f[:-4] not in commands: # pragma: no cover
commands.append(f[:-4])
except OSError:
pass
@@ -75,6 +78,7 @@ def oauth2_getattribute(self, attr):
# Custom method to override
# oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__
from django.conf import settings
+
val = None
if 'migrate' not in sys.argv:
# certain Django OAuth Toolkit migrations actually reference
@@ -94,33 +98,38 @@ def prepare_env():
# Hide DeprecationWarnings when running in production. Need to first load
# settings to apply our filter after Django's own warnings filter.
from django.conf import settings
- if not settings.DEBUG: # pragma: no cover
+
+ if not settings.DEBUG: # pragma: no cover
warnings.simplefilter('ignore', DeprecationWarning)
# Monkeypatch Django find_commands to also work with .pyc files.
import django.core.management
+
django.core.management.find_commands = find_commands
# Monkeypatch Oauth2 toolkit settings class to check for settings
# in django.conf settings each time, not just once during import
import oauth2_provider.settings
+
oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__ = oauth2_getattribute
# Use the AWX_TEST_DATABASE_* environment variables to specify the test
# database settings to use when management command is run as an external
# program via unit tests.
- for opt in ('ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'): # pragma: no cover
+ for opt in ('ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'): # pragma: no cover
if os.environ.get('AWX_TEST_DATABASE_%s' % opt, None):
settings.DATABASES['default'][opt] = os.environ['AWX_TEST_DATABASE_%s' % opt]
# Disable capturing all SQL queries in memory when in DEBUG mode.
if settings.DEBUG and not getattr(settings, 'SQL_DEBUG', True):
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.utils import CursorWrapper
+
BaseDatabaseWrapper.make_debug_cursor = lambda self, cursor: CursorWrapper(cursor, self)
# Use the default devserver addr/port defined in settings for runserver.
default_addr = getattr(settings, 'DEVSERVER_DEFAULT_ADDR', '127.0.0.1')
default_port = getattr(settings, 'DEVSERVER_DEFAULT_PORT', 8000)
from django.core.management.commands import runserver as core_runserver
+
original_handle = core_runserver.Command.handle
def handle(self, *args, **options):
@@ -139,7 +148,8 @@ def manage():
# Now run the command (or display the version).
from django.conf import settings
from django.core.management import execute_from_command_line
- if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'): # pragma: no cover
+
+ if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'): # pragma: no cover
sys.stdout.write('%s\n' % __version__)
# If running as a user without permission to read settings, display an
# error message. Allow --help to still work.
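
Most of the churn in this file comes from a single black rule visible in the hunks above: an import statement is separated from the code that follows it by one blank line, even inside a function or `try` body. An illustrative example (not taken from the diff):

    def commands_dir_exists(path):
        import os

        # black inserts the blank line above, between the local import and
        # the first statement that uses it.
        return os.path.isdir(path)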
diff --git a/awx/api/authentication.py b/awx/api/authentication.py
index 0bd7f52f37..52b3462005 100644
--- a/awx/api/authentication.py
+++ b/awx/api/authentication.py
@@ -18,7 +18,6 @@ logger = logging.getLogger('awx.api.authentication')
class LoggedBasicAuthentication(authentication.BasicAuthentication):
-
def authenticate(self, request):
if not settings.AUTH_BASIC_ENABLED:
return
@@ -35,22 +34,18 @@ class LoggedBasicAuthentication(authentication.BasicAuthentication):
class SessionAuthentication(authentication.SessionAuthentication):
-
def authenticate_header(self, request):
return 'Session'
class LoggedOAuth2Authentication(OAuth2Authentication):
-
def authenticate(self, request):
ret = super(LoggedOAuth2Authentication, self).authenticate(request)
if ret:
user, token = ret
username = user.username if user else '<none>'
- logger.info(smart_text(
- u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format(
- username, request.method, request.path, token.pk
- )
- ))
+ logger.info(
+ smart_text(u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format(username, request.method, request.path, token.pk))
+ )
setattr(user, 'oauth_scopes', [x for x in token.scope.split() if x])
return ret
diff --git a/awx/api/conf.py b/awx/api/conf.py
index f7da952004..b71892d76e 100644
--- a/awx/api/conf.py
+++ b/awx/api/conf.py
@@ -38,16 +38,20 @@ register(
register(
'OAUTH2_PROVIDER',
field_class=OAuth2ProviderField,
- default={'ACCESS_TOKEN_EXPIRE_SECONDS': oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS,
- 'AUTHORIZATION_CODE_EXPIRE_SECONDS': oauth2_settings.AUTHORIZATION_CODE_EXPIRE_SECONDS,
- 'REFRESH_TOKEN_EXPIRE_SECONDS': oauth2_settings.REFRESH_TOKEN_EXPIRE_SECONDS},
+ default={
+ 'ACCESS_TOKEN_EXPIRE_SECONDS': oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS,
+ 'AUTHORIZATION_CODE_EXPIRE_SECONDS': oauth2_settings.AUTHORIZATION_CODE_EXPIRE_SECONDS,
+ 'REFRESH_TOKEN_EXPIRE_SECONDS': oauth2_settings.REFRESH_TOKEN_EXPIRE_SECONDS,
+ },
label=_('OAuth 2 Timeout Settings'),
- help_text=_('Dictionary for customizing OAuth 2 timeouts, available items are '
- '`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number '
- 'of seconds, `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
- 'authorization codes in the number of seconds, and `REFRESH_TOKEN_EXPIRE_SECONDS`, '
- 'the duration of refresh tokens, after expired access tokens, '
- 'in the number of seconds.'),
+ help_text=_(
+ 'Dictionary for customizing OAuth 2 timeouts, available items are '
+ '`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number '
+ 'of seconds, `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
+ 'authorization codes in the number of seconds, and `REFRESH_TOKEN_EXPIRE_SECONDS`, '
+ 'the duration of refresh tokens, after expired access tokens, '
+ 'in the number of seconds.'
+ ),
category=_('Authentication'),
category_slug='authentication',
unit=_('seconds'),
@@ -57,10 +61,12 @@ register(
field_class=fields.BooleanField,
default=False,
label=_('Allow External Users to Create OAuth2 Tokens'),
- help_text=_('For security reasons, users from external auth providers (LDAP, SAML, '
- 'SSO, Radius, and others) are not allowed to create OAuth2 tokens. '
- 'To change this behavior, enable this setting. Existing tokens will '
- 'not be deleted when this setting is toggled off.'),
+ help_text=_(
+ 'For security reasons, users from external auth providers (LDAP, SAML, '
+ 'SSO, Radius, and others) are not allowed to create OAuth2 tokens. '
+ 'To change this behavior, enable this setting. Existing tokens will '
+ 'not be deleted when this setting is toggled off.'
+ ),
category=_('Authentication'),
category_slug='authentication',
)
@@ -71,8 +77,7 @@ register(
required=False,
default='',
label=_('Login redirect override URL'),
- help_text=_('URL to which unauthorized users will be redirected to log in. '
- 'If blank, users will be sent to the Tower login page.'),
+ help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the Tower login page.'),
category=_('Authentication'),
category_slug='authentication',
)
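
The register(...) hunks show how black lays out a call that no longer fits on one line: every argument moves to its own line, a trailing comma is added, and the closing parenthesis returns to the indentation of the call itself. A self-contained illustration with a hypothetical register() stub (not AWX's):

    def register(setting, **kwargs):
        return setting, kwargs

    # A call whose arguments exceed the line length, as black formats it:
    register(
        'LOGIN_REDIRECT_OVERRIDE',
        required=False,
        default='',
        label='Login redirect override URL',
    )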
diff --git a/awx/api/exceptions.py b/awx/api/exceptions.py
index 7c7a182d06..8f2c079583 100644
--- a/awx/api/exceptions.py
+++ b/awx/api/exceptions.py
@@ -16,7 +16,4 @@ class ActiveJobConflict(ValidationError):
# turn everything in self.detail into string by using force_text.
# Declare detail afterwards circumvent this behavior.
super(ActiveJobConflict, self).__init__()
- self.detail = {
- "error": _("Resource is being used by running jobs."),
- "active_jobs": active_jobs
- }
+ self.detail = {"error": _("Resource is being used by running jobs."), "active_jobs": active_jobs}
diff --git a/awx/api/fields.py b/awx/api/fields.py
index ace0667a9a..6f288f2bce 100644
--- a/awx/api/fields.py
+++ b/awx/api/fields.py
@@ -16,10 +16,10 @@ __all__ = ['BooleanNullField', 'CharNullField', 'ChoiceNullField', 'VerbatimFiel
class NullFieldMixin(object):
- '''
+ """
Mixin to prevent shortcutting validation when we want to allow null input,
but coerce the resulting value to another type.
- '''
+ """
def validate_empty_values(self, data):
(is_empty_value, data) = super(NullFieldMixin, self).validate_empty_values(data)
@@ -29,18 +29,18 @@ class NullFieldMixin(object):
class BooleanNullField(NullFieldMixin, serializers.NullBooleanField):
- '''
+ """
Custom boolean field that allows null and empty string as False values.
- '''
+ """
def to_internal_value(self, data):
return bool(super(BooleanNullField, self).to_internal_value(data))
class CharNullField(NullFieldMixin, serializers.CharField):
- '''
+ """
Custom char field that allows null as input and coerces to an empty string.
- '''
+ """
def __init__(self, **kwargs):
kwargs['allow_null'] = True
@@ -51,9 +51,9 @@ class CharNullField(NullFieldMixin, serializers.CharField):
class ChoiceNullField(NullFieldMixin, serializers.ChoiceField):
- '''
+ """
Custom choice field that allows null as input and coerces to an empty string.
- '''
+ """
def __init__(self, **kwargs):
kwargs['allow_null'] = True
@@ -64,9 +64,9 @@ class ChoiceNullField(NullFieldMixin, serializers.ChoiceField):
class VerbatimField(serializers.Field):
- '''
+ """
Custom field that passes the value through without changes.
- '''
+ """
def to_internal_value(self, data):
return data
@@ -77,22 +77,19 @@ class VerbatimField(serializers.Field):
class OAuth2ProviderField(fields.DictField):
- default_error_messages = {
- 'invalid_key_names': _('Invalid key names: {invalid_key_names}'),
- }
+ default_error_messages = {'invalid_key_names': _('Invalid key names: {invalid_key_names}')}
valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS', 'REFRESH_TOKEN_EXPIRE_SECONDS'}
child = fields.IntegerField(min_value=1)
def to_internal_value(self, data):
data = super(OAuth2ProviderField, self).to_internal_value(data)
- invalid_flags = (set(data.keys()) - self.valid_key_names)
+ invalid_flags = set(data.keys()) - self.valid_key_names
if invalid_flags:
self.fail('invalid_key_names', invalid_key_names=', '.join(list(invalid_flags)))
return data
class DeprecatedCredentialField(serializers.IntegerField):
-
def __init__(self, **kwargs):
kwargs['allow_null'] = True
kwargs['default'] = None
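
Beyond the docstring change from '''-quotes to the """ form PEP 257 recommends, two smaller rules are visible in this file: black deletes the blank line that used to separate a class header from its first method, and it strips redundant parentheses from the right-hand side of assignments. Illustrative only:

    class Example:
        def invalid_keys(self, data, valid):
            # was: extra = (set(data) - set(valid))
            extra = set(data) - set(valid)
            return extra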
diff --git a/awx/api/filters.py b/awx/api/filters.py
index 6d51441c28..1146dad89d 100644
--- a/awx/api/filters.py
+++ b/awx/api/filters.py
@@ -27,9 +27,9 @@ from awx.main.utils.db import get_all_field_names
class TypeFilterBackend(BaseFilterBackend):
- '''
+ """
Filter on type field now returned with all objects.
- '''
+ """
def filter_queryset(self, request, queryset, view):
try:
@@ -64,7 +64,7 @@ class TypeFilterBackend(BaseFilterBackend):
def get_fields_from_path(model, path):
- '''
+ """
Given a Django ORM lookup path (possibly over multiple models)
Returns the fields in the line, and also the revised lookup path
ex., given
@@ -73,7 +73,7 @@ def get_fields_from_path(model, path):
returns tuple of fields traversed as well and a corrected path,
for special cases we do substitutions
([<IntegerField for timeout>], 'project__timeout')
- '''
+ """
# Store of all the fields used to detect repeats
field_list = []
new_parts = []
@@ -82,12 +82,9 @@ def get_fields_from_path(model, path):
raise ParseError(_('No related model for field {}.').format(name))
# HACK: Make project and inventory source filtering by old field names work for backwards compatibility.
if model._meta.object_name in ('Project', 'InventorySource'):
- name = {
- 'current_update': 'current_job',
- 'last_update': 'last_job',
- 'last_update_failed': 'last_job_failed',
- 'last_updated': 'last_job_run',
- }.get(name, name)
+ name = {'current_update': 'current_job', 'last_update': 'last_job', 'last_update_failed': 'last_job_failed', 'last_updated': 'last_job_run'}.get(
+ name, name
+ )
if name == 'type' and 'polymorphic_ctype' in get_all_field_names(model):
name = 'polymorphic_ctype'
@@ -121,28 +118,42 @@ def get_fields_from_path(model, path):
def get_field_from_path(model, path):
- '''
+ """
Given a Django ORM lookup path (possibly over multiple models)
Returns the last field in the line, and the revised lookup path
ex.
(<IntegerField for timeout>, 'project__timeout')
- '''
+ """
field_list, new_path = get_fields_from_path(model, path)
return (field_list[-1], new_path)
class FieldLookupBackend(BaseFilterBackend):
- '''
+ """
Filter using field lookups provided via query string parameters.
- '''
-
- RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by',
- 'search', 'type', 'host_filter', 'count_disabled', 'no_truncate')
-
- SUPPORTED_LOOKUPS = ('exact', 'iexact', 'contains', 'icontains',
- 'startswith', 'istartswith', 'endswith', 'iendswith',
- 'regex', 'iregex', 'gt', 'gte', 'lt', 'lte', 'in',
- 'isnull', 'search')
+ """
+
+ RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by', 'search', 'type', 'host_filter', 'count_disabled', 'no_truncate')
+
+ SUPPORTED_LOOKUPS = (
+ 'exact',
+ 'iexact',
+ 'contains',
+ 'icontains',
+ 'startswith',
+ 'istartswith',
+ 'endswith',
+ 'iendswith',
+ 'regex',
+ 'iregex',
+ 'gt',
+ 'gte',
+ 'lt',
+ 'lte',
+ 'in',
+ 'isnull',
+ 'search',
+ )
# A list of fields that we know can be filtered on without the possibility
# of introducing duplicates
@@ -189,10 +200,7 @@ class FieldLookupBackend(BaseFilterBackend):
try:
return self.to_python_related(value)
except ValueError:
- raise ParseError(_('Invalid {field_name} id: {field_id}').format(
- field_name=getattr(field, 'name', 'related field'),
- field_id=value)
- )
+ raise ParseError(_('Invalid {field_name} id: {field_id}').format(field_name=getattr(field, 'name', 'related field'), field_id=value))
else:
return field.to_python(value)
@@ -205,13 +213,13 @@ class FieldLookupBackend(BaseFilterBackend):
field_list, new_lookup = self.get_fields_from_lookup(model, lookup)
field = field_list[-1]
- needs_distinct = (not all(isinstance(f, self.NO_DUPLICATES_ALLOW_LIST) for f in field_list))
+ needs_distinct = not all(isinstance(f, self.NO_DUPLICATES_ALLOW_LIST) for f in field_list)
# Type names are stored without underscores internally, but are presented and
# and serialized over the API containing underscores so we remove `_`
# for polymorphic_ctype__model lookups.
if new_lookup.startswith('polymorphic_ctype__model'):
- value = value.replace('_','')
+ value = value.replace('_', '')
elif new_lookup.endswith('__isnull'):
value = to_python_boolean(value)
elif new_lookup.endswith('__in'):
@@ -329,24 +337,20 @@ class FieldLookupBackend(BaseFilterBackend):
args = []
for n, k, v in and_filters:
if n:
- args.append(~Q(**{k:v}))
+ args.append(~Q(**{k: v}))
else:
- args.append(Q(**{k:v}))
+ args.append(Q(**{k: v}))
for role_name in role_filters:
if not hasattr(queryset.model, 'accessible_pk_qs'):
- raise ParseError(_(
- 'Cannot apply role_level filter to this list because its model '
- 'does not use roles for access control.'))
- args.append(
- Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name))
- )
+ raise ParseError(_('Cannot apply role_level filter to this list because its model ' 'does not use roles for access control.'))
+ args.append(Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name)))
if or_filters:
q = Q()
- for n,k,v in or_filters:
+ for n, k, v in or_filters:
if n:
- q |= ~Q(**{k:v})
+ q |= ~Q(**{k: v})
else:
- q |= Q(**{k:v})
+ q |= Q(**{k: v})
args.append(q)
if search_filters and search_filter_relation == 'OR':
q = Q()
@@ -360,11 +364,11 @@ class FieldLookupBackend(BaseFilterBackend):
for constrain in constrains:
q_chain |= Q(**{constrain: term})
queryset = queryset.filter(q_chain)
- for n,k,v in chain_filters:
+ for n, k, v in chain_filters:
if n:
- q = ~Q(**{k:v})
+ q = ~Q(**{k: v})
else:
- q = Q(**{k:v})
+ q = Q(**{k: v})
queryset = queryset.filter(q)
queryset = queryset.filter(*args)
if needs_distinct:
@@ -377,9 +381,9 @@ class FieldLookupBackend(BaseFilterBackend):
class OrderByBackend(BaseFilterBackend):
- '''
+ """
Filter to apply ordering based on query string parameters.
- '''
+ """
def filter_queryset(self, request, queryset, view):
try:
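
The two tuples at the top of FieldLookupBackend show the same length rule cutting both ways: RESERVED_NAMES fits within 160 columns and is collapsed onto one line, while SUPPORTED_LOOKUPS does not and is exploded to one element per line with a trailing comma. With shorter, illustrative data (pretend the second tuple exceeded the limit):

    FITS = ('page', 'page_size', 'format')  # within the limit: one line

    # Over the limit: one element per line, trailing comma added.
    TOO_LONG = (
        'exact',
        'iexact',
        'contains',
    )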
diff --git a/awx/api/generics.py b/awx/api/generics.py
index ac9ab03907..c6be2ac740 100644
--- a/awx/api/generics.py
+++ b/awx/api/generics.py
@@ -35,55 +35,50 @@ from rest_framework.negotiation import DefaultContentNegotiation
# AWX
from awx.api.filters import FieldLookupBackend
-from awx.main.models import (
- UnifiedJob, UnifiedJobTemplate, User, Role, Credential,
- WorkflowJobTemplateNode, WorkflowApprovalTemplate
-)
+from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
from awx.main.access import access_registry
-from awx.main.utils import (
- camelcase_to_underscore,
- get_search_fields,
- getattrd,
- get_object_or_400,
- decrypt_field,
- get_awx_version,
-)
+from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
from awx.main.utils.db import get_all_field_names
from awx.main.views import ApiErrorView
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer
from awx.api.versioning import URLPathVersioning
from awx.api.metadata import SublistAttachDetatchMetadata, Metadata
-__all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
- 'ListCreateAPIView', 'SubListAPIView', 'SubListCreateAPIView',
- 'SubListDestroyAPIView',
- 'SubListCreateAttachDetachAPIView', 'RetrieveAPIView',
- 'RetrieveUpdateAPIView', 'RetrieveDestroyAPIView',
- 'RetrieveUpdateDestroyAPIView',
- 'SubDetailAPIView',
- 'ResourceAccessList',
- 'ParentMixin',
- 'DeleteLastUnattachLabelMixin',
- 'SubListAttachDetachAPIView',
- 'CopyAPIView', 'BaseUsersList',]
+__all__ = [
+ 'APIView',
+ 'GenericAPIView',
+ 'ListAPIView',
+ 'SimpleListAPIView',
+ 'ListCreateAPIView',
+ 'SubListAPIView',
+ 'SubListCreateAPIView',
+ 'SubListDestroyAPIView',
+ 'SubListCreateAttachDetachAPIView',
+ 'RetrieveAPIView',
+ 'RetrieveUpdateAPIView',
+ 'RetrieveDestroyAPIView',
+ 'RetrieveUpdateDestroyAPIView',
+ 'SubDetailAPIView',
+ 'ResourceAccessList',
+ 'ParentMixin',
+ 'DeleteLastUnattachLabelMixin',
+ 'SubListAttachDetachAPIView',
+ 'CopyAPIView',
+ 'BaseUsersList',
+]
logger = logging.getLogger('awx.api.generics')
analytics_logger = logging.getLogger('awx.analytics.performance')
class LoggedLoginView(auth_views.LoginView):
-
def get(self, request, *args, **kwargs):
        # The django.contrib.auth login form doesn't perform the content
# negotiation we've come to expect from DRF; add in code to catch
# situations where Accept != text/html (or */*) and reply with
# an HTTP 406
try:
- DefaultContentNegotiation().select_renderer(
- request,
- [StaticHTMLRenderer],
- 'html'
- )
+ DefaultContentNegotiation().select_renderer(request, [StaticHTMLRenderer], 'html')
except NotAcceptable:
resp = Response(status=status.HTTP_406_NOT_ACCEPTABLE)
resp.accepted_renderer = StaticHTMLRenderer()
@@ -96,7 +91,7 @@ class LoggedLoginView(auth_views.LoginView):
ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
current_user = getattr(request, 'user', None)
if request.user.is_authenticated:
- logger.info(smart_text(u"User {} logged in from {}".format(self.request.user.username,request.META.get('REMOTE_ADDR', None))))
+ logger.info(smart_text(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
ret.set_cookie('userLoggedIn', 'true')
current_user = UserSerializer(self.request.user)
current_user = smart_text(JSONRenderer().render(current_user.data))
@@ -106,29 +101,27 @@ class LoggedLoginView(auth_views.LoginView):
return ret
else:
if 'username' in self.request.POST:
- logger.warn(smart_text(u"Login failed for user {} from {}".format(self.request.POST.get('username'),request.META.get('REMOTE_ADDR', None))))
+ logger.warn(smart_text(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
ret.status_code = 401
return ret
class LoggedLogoutView(auth_views.LogoutView):
-
def dispatch(self, request, *args, **kwargs):
original_user = getattr(request, 'user', None)
ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
current_user = getattr(request, 'user', None)
ret.set_cookie('userLoggedIn', 'false')
- if (not current_user or not getattr(current_user, 'pk', True)) \
- and current_user != original_user:
+ if (not current_user or not getattr(current_user, 'pk', True)) and current_user != original_user:
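+            # Only log when the request actually went from an authenticated
+            # user to an anonymous one.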
logger.info("User {} logged out.".format(original_user.username))
return ret
def get_view_description(view, html=False):
- '''Wrapper around REST framework get_view_description() to continue
+ """Wrapper around REST framework get_view_description() to continue
to support our historical div.
- '''
+ """
desc = views.get_view_description(view, html=html)
if html:
desc = '<div class="description">%s</div>' % desc
@@ -138,6 +131,7 @@ def get_view_description(view, html=False):
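+# The swagger-based AutoSchema is only wired up under the development
+# settings module; every other configuration keeps DRF's stock schema.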
def get_default_schema():
if settings.SETTINGS_MODULE == 'awx.settings.development':
from awx.api.swagger import AutoSchema
+
return AutoSchema()
else:
return views.APIView.schema
@@ -149,21 +143,23 @@ class APIView(views.APIView):
versioning_class = URLPathVersioning
def initialize_request(self, request, *args, **kwargs):
- '''
+ """
Store the Django REST Framework Request object as an attribute on the
normal Django request, store time the request started.
- '''
+ """
self.time_started = time.time()
if getattr(settings, 'SQL_DEBUG', False):
self.queries_before = len(connection.queries)
# If there are any custom headers in REMOTE_HOST_HEADERS, make sure
# they respect the allowed proxy list
- if all([
- settings.PROXY_IP_ALLOWED_LIST,
- request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
- request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST
- ]):
+ if all(
+ [
+ settings.PROXY_IP_ALLOWED_LIST,
+ request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
+ request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
+ ]
+ ):
for custom_header in settings.REMOTE_HOST_HEADERS:
if custom_header.startswith('HTTP_'):
request.environ.pop(custom_header, None)
@@ -178,17 +174,19 @@ class APIView(views.APIView):
request.drf_request_user = None
self.__init_request_error__ = exc
except UnsupportedMediaType as exc:
- exc.detail = _('You did not use correct Content-Type in your HTTP request. '
- 'If you are using our REST API, the Content-Type must be application/json')
+ exc.detail = _(
+ 'You did not use correct Content-Type in your HTTP request. ' 'If you are using our REST API, the Content-Type must be application/json'
+ )
self.__init_request_error__ = exc
return drf_request
def finalize_response(self, request, response, *args, **kwargs):
- '''
+ """
Log warning for 400 requests. Add header with elapsed time.
- '''
+ """
from awx.main.utils import get_licenser
from awx.main.utils.licensing import OpenLicense
+
#
# If the URL was rewritten, and we get a 404, we should entirely
# replace the view in the request context with an ApiErrorView()
@@ -212,8 +210,12 @@ class APIView(views.APIView):
return response
if response.status_code >= 400:
- status_msg = "status %s received by user %s attempting to access %s from %s" % \
- (response.status_code, request.user, request.path, request.META.get('REMOTE_ADDR', None))
+ status_msg = "status %s received by user %s attempting to access %s from %s" % (
+ response.status_code,
+ request.user,
+ request.path,
+ request.META.get('REMOTE_ADDR', None),
+ )
if hasattr(self, '__init_request_error__'):
response = self.handle_exception(self.__init_request_error__)
if response.status_code == 401:
@@ -225,7 +227,7 @@ class APIView(views.APIView):
time_started = getattr(self, 'time_started', None)
response['X-API-Product-Version'] = get_awx_version()
response['X-API-Product-Name'] = 'AWX' if isinstance(get_licenser(), OpenLicense) else 'Red Hat Ansible Tower'
-
+
response['X-API-Node'] = settings.CLUSTER_HOST_ID
if time_started:
time_elapsed = time.time() - self.time_started
@@ -311,18 +313,12 @@ class APIView(views.APIView):
return data
def determine_version(self, request, *args, **kwargs):
- return (
- getattr(request, 'version', None),
- getattr(request, 'versioning_scheme', None),
- )
+ return (getattr(request, 'version', None), getattr(request, 'versioning_scheme', None))
def dispatch(self, request, *args, **kwargs):
if self.versioning_class is not None:
scheme = self.versioning_class()
- request.version, request.versioning_scheme = (
- scheme.determine_version(request, *args, **kwargs),
- scheme
- )
+ request.version, request.versioning_scheme = (scheme.determine_version(request, *args, **kwargs), scheme)
if 'version' in kwargs:
kwargs.pop('version')
return super(APIView, self).dispatch(request, *args, **kwargs)
@@ -378,25 +374,22 @@ class GenericAPIView(generics.GenericAPIView, APIView):
d = super(GenericAPIView, self).get_description_context()
if hasattr(self.model, "_meta"):
if hasattr(self.model._meta, "verbose_name"):
- d.update({
- 'model_verbose_name': smart_text(self.model._meta.verbose_name),
- 'model_verbose_name_plural': smart_text(self.model._meta.verbose_name_plural),
- })
+ d.update(
+ {
+ 'model_verbose_name': smart_text(self.model._meta.verbose_name),
+ 'model_verbose_name_plural': smart_text(self.model._meta.verbose_name_plural),
+ }
+ )
serializer = self.get_serializer()
metadata = self.metadata_class()
metadata.request = self.request
- for method, key in [
- ('GET', 'serializer_fields'),
- ('POST', 'serializer_create_fields'),
- ('PUT', 'serializer_update_fields')
- ]:
+ for method, key in [('GET', 'serializer_fields'), ('POST', 'serializer_create_fields'), ('PUT', 'serializer_update_fields')]:
d[key] = metadata.get_serializer_info(serializer, method=method)
d['settings'] = settings
return d
class SimpleListAPIView(generics.ListAPIView, GenericAPIView):
-
def get_queryset(self):
return self.request.user.get_queryset(self.model)
@@ -413,9 +406,7 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
else:
order_field = 'name'
d = super(ListAPIView, self).get_description_context()
- d.update({
- 'order_field': order_field,
- })
+ d.update({'order_field': order_field})
return d
@property
@@ -426,9 +417,13 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
def related_search_fields(self):
def skip_related_name(name):
return (
- name is None or name.endswith('_role') or name.startswith('_') or
- name.startswith('deprecated_') or name.endswith('_set') or
- name == 'polymorphic_ctype')
+ name is None
+ or name.endswith('_role')
+ or name.startswith('_')
+ or name.startswith('deprecated_')
+ or name.endswith('_set')
+ or name == 'polymorphic_ctype'
+ )
fields = set([])
for field in self.model._meta.fields:
@@ -482,9 +477,7 @@ class ParentMixin(object):
def get_parent_object(self):
if self.parent_object is not None:
return self.parent_object
- parent_filter = {
- self.lookup_field: self.kwargs.get(self.lookup_field, None),
- }
+ parent_filter = {self.lookup_field: self.kwargs.get(self.lookup_field, None)}
self.parent_object = get_object_or_404(self.parent_model, **parent_filter)
return self.parent_object
@@ -513,10 +506,12 @@ class SubListAPIView(ParentMixin, ListAPIView):
def get_description_context(self):
d = super(SubListAPIView, self).get_description_context()
- d.update({
- 'parent_model_verbose_name': smart_text(self.parent_model._meta.verbose_name),
- 'parent_model_verbose_name_plural': smart_text(self.parent_model._meta.verbose_name_plural),
- })
+ d.update(
+ {
+ 'parent_model_verbose_name': smart_text(self.parent_model._meta.verbose_name),
+ 'parent_model_verbose_name_plural': smart_text(self.parent_model._meta.verbose_name_plural),
+ }
+ )
return d
def get_queryset(self):
@@ -531,7 +526,6 @@ class SubListAPIView(ParentMixin, ListAPIView):
class DestroyAPIView(generics.DestroyAPIView):
-
def has_delete_permission(self, obj):
return self.request.user.can_access(self.model, 'delete', obj)
@@ -545,12 +539,12 @@ class SubListDestroyAPIView(DestroyAPIView, SubListAPIView):
"""
Concrete view for deleting everything related by `relationship`.
"""
+
check_sub_obj_permission = True
def destroy(self, request, *args, **kwargs):
instance_list = self.get_queryset()
- if (not self.check_sub_obj_permission and
- not request.user.can_access(self.parent_model, 'delete', self.get_parent_object())):
+ if not self.check_sub_obj_permission and not request.user.can_access(self.parent_model, 'delete', self.get_parent_object()):
raise PermissionDenied()
self.perform_list_destroy(instance_list)
return Response(status=status.HTTP_204_NO_CONTENT)
@@ -574,9 +568,7 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
def get_description_context(self):
d = super(SubListCreateAPIView, self).get_description_context()
- d.update({
- 'parent_key': getattr(self, 'parent_key', None),
- })
+ d.update({'parent_key': getattr(self, 'parent_key', None)})
return d
def get_queryset(self):
@@ -610,8 +602,7 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
# attempt to deserialize the object
serializer = self.get_serializer(data=data)
if not serializer.is_valid():
- return Response(serializer.errors,
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# Verify we have permission to add the object as given.
if not request.user.can_access(self.model, 'add', serializer.validated_data):
@@ -635,9 +626,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
def get_description_context(self):
d = super(SubListCreateAttachDetachAPIView, self).get_description_context()
- d.update({
- "has_attach": True,
- })
+ d.update({"has_attach": True})
return d
def attach_validate(self, request):
@@ -675,9 +664,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
sub = get_object_or_400(self.model, pk=sub_id)
# Verify we have permission to attach.
- if not request.user.can_access(self.parent_model, 'attach', parent, sub,
- self.relationship, data,
- skip_sub_obj_read_check=created):
+ if not request.user.can_access(self.parent_model, 'attach', parent, sub, self.relationship, data, skip_sub_obj_read_check=created):
raise PermissionDenied()
# Verify that the relationship to be added is valid.
@@ -716,8 +703,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
relationship = getattrd(parent, self.relationship)
sub = get_object_or_400(self.model, pk=sub_id)
- if not request.user.can_access(self.parent_model, 'unattach', parent,
- sub, self.relationship, request.data):
+ if not request.user.can_access(self.parent_model, 'unattach', parent, sub, self.relationship, request.data):
raise PermissionDenied()
if parent_key:
@@ -735,28 +721,24 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
def post(self, request, *args, **kwargs):
if not isinstance(request.data, dict):
- return Response('invalid type for post data',
- status=status.HTTP_400_BAD_REQUEST)
+ return Response('invalid type for post data', status=status.HTTP_400_BAD_REQUEST)
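+        # A payload of {"id": <pk>} attaches the named object; a payload that
+        # also carries a "disassociate" key detaches it instead.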
if 'disassociate' in request.data:
return self.unattach(request, *args, **kwargs)
else:
return self.attach(request, *args, **kwargs)
-
class SubListAttachDetachAPIView(SubListCreateAttachDetachAPIView):
- '''
+ """
Derived version of SubListCreateAttachDetachAPIView that prohibits creation
- '''
+ """
+
metadata_class = SublistAttachDetatchMetadata
def post(self, request, *args, **kwargs):
sub_id = request.data.get('id', None)
if not sub_id:
- return Response(
- dict(msg=_("{} 'id' field is missing.".format(
- self.model._meta.verbose_name.title()))),
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(dict(msg=_("{} 'id' field is missing.".format(self.model._meta.verbose_name.title()))), status=status.HTTP_400_BAD_REQUEST)
return super(SubListAttachDetachAPIView, self).post(request, *args, **kwargs)
def update_raw_data(self, data):
@@ -768,11 +750,11 @@ class SubListAttachDetachAPIView(SubListCreateAttachDetachAPIView):
class DeleteLastUnattachLabelMixin(object):
- '''
+ """
Models for which you want the last instance to be deleted from the database
when the last disassociate is called should inherit from this class. Further,
    the model should implement is_detached().
- '''
+ """
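+    # For example, Label implements is_detached(); when the last disassociate
+    # leaves a label unreferenced, the unattach below also deletes the row
+    # rather than leaving it orphaned.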
def unattach(self, request, *args, **kwargs):
(sub_id, res) = super(DeleteLastUnattachLabelMixin, self).unattach_validate(request)
@@ -798,7 +780,6 @@ class RetrieveAPIView(generics.RetrieveAPIView, GenericAPIView):
class RetrieveUpdateAPIView(RetrieveAPIView, generics.RetrieveUpdateAPIView):
-
def update(self, request, *args, **kwargs):
self.update_filter(request, *args, **kwargs)
return super(RetrieveUpdateAPIView, self).update(request, *args, **kwargs)
@@ -839,6 +820,7 @@ class ResourceAccessList(ParentMixin, ListAPIView):
def trigger_delayed_deep_copy(*args, **kwargs):
from awx.main.tasks import deep_copy_model_obj
+
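+    # Fire the background deep-copy only after the surrounding transaction
+    # commits, so the worker never races an uncommitted object.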
connection.on_commit(lambda: deep_copy_model_obj.delay(*args, **kwargs))
@@ -869,8 +851,7 @@ class CopyAPIView(GenericAPIView):
field_val[secret] = decrypt_field(obj, secret)
elif isinstance(field_val, dict):
for sub_field in field_val:
- if isinstance(sub_field, str) \
- and isinstance(field_val[sub_field], str):
+ if isinstance(sub_field, str) and isinstance(field_val[sub_field], str):
field_val[sub_field] = decrypt_field(obj, field_name, sub_field)
elif isinstance(field_val, str):
try:
@@ -882,15 +863,11 @@ class CopyAPIView(GenericAPIView):
def _build_create_dict(self, obj):
ret = {}
if self.copy_return_serializer_class:
- all_fields = Metadata().get_serializer_info(
- self._get_copy_return_serializer(), method='POST'
- )
+ all_fields = Metadata().get_serializer_info(self._get_copy_return_serializer(), method='POST')
for field_name, field_info in all_fields.items():
if not hasattr(obj, field_name) or field_info.get('read_only', True):
continue
- ret[field_name] = CopyAPIView._decrypt_model_field_if_needed(
- obj, field_name, getattr(obj, field_name)
- )
+ ret[field_name] = CopyAPIView._decrypt_model_field_if_needed(obj, field_name, getattr(obj, field_name))
return ret
@staticmethod
@@ -908,9 +885,11 @@ class CopyAPIView(GenericAPIView):
except AttributeError:
continue
# Adjust copy blocked fields here.
- if field.name in fields_to_discard or field.name in [
- 'id', 'pk', 'polymorphic_ctype', 'unifiedjobtemplate_ptr', 'created_by', 'modified_by'
- ] or field.name.endswith('_role'):
+ if (
+ field.name in fields_to_discard
+ or field.name in ['id', 'pk', 'polymorphic_ctype', 'unifiedjobtemplate_ptr', 'created_by', 'modified_by']
+ or field.name.endswith('_role')
+ ):
create_kwargs.pop(field.name, None)
continue
if field.one_to_many:
@@ -926,33 +905,24 @@ class CopyAPIView(GenericAPIView):
elif field.name == 'name' and not old_parent:
create_kwargs[field.name] = copy_name or field_val + ' copy'
elif field.name in fields_to_preserve:
- create_kwargs[field.name] = CopyAPIView._decrypt_model_field_if_needed(
- obj, field.name, field_val
- )
+ create_kwargs[field.name] = CopyAPIView._decrypt_model_field_if_needed(obj, field.name, field_val)
# WorkflowJobTemplateNodes that represent an approval are *special*;
# when we copy them, we actually want to *copy* the UJT they point at
# rather than share the template reference between nodes in disparate
# workflows
- if (
- isinstance(obj, WorkflowJobTemplateNode) and
- isinstance(getattr(obj, 'unified_job_template'), WorkflowApprovalTemplate)
- ):
- new_approval_template, sub_objs = CopyAPIView.copy_model_obj(
- None, None, WorkflowApprovalTemplate,
- obj.unified_job_template, creater
- )
+ if isinstance(obj, WorkflowJobTemplateNode) and isinstance(getattr(obj, 'unified_job_template'), WorkflowApprovalTemplate):
+ new_approval_template, sub_objs = CopyAPIView.copy_model_obj(None, None, WorkflowApprovalTemplate, obj.unified_job_template, creater)
create_kwargs['unified_job_template'] = new_approval_template
new_obj = model.objects.create(**create_kwargs)
- logger.debug('Deep copy: Created new object {}({})'.format(
- new_obj, model
- ))
+ logger.debug('Deep copy: Created new object {}({})'.format(new_obj, model))
        # Need to save separately because django-crum's get_current_user would
        # not work properly in a non-request-response-cycle context.
new_obj.created_by = creater
new_obj.save()
from awx.main.signals import disable_activity_stream
+
with disable_activity_stream():
for m2m in m2m_to_preserve:
for related_obj in m2m_to_preserve[m2m].all():
@@ -978,8 +948,7 @@ class CopyAPIView(GenericAPIView):
for key in create_kwargs:
create_kwargs[key] = getattr(create_kwargs[key], 'pk', None) or create_kwargs[key]
try:
- can_copy = request.user.can_access(self.model, 'add', create_kwargs) and \
- request.user.can_access(self.model, 'copy_related', obj)
+ can_copy = request.user.can_access(self.model, 'add', create_kwargs) and request.user.can_access(self.model, 'copy_related', obj)
except PermissionDenied:
return Response({'can_copy': False})
return Response({'can_copy': can_copy})
@@ -998,8 +967,7 @@ class CopyAPIView(GenericAPIView):
if not serializer.is_valid():
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
new_obj, sub_objs = CopyAPIView.copy_model_obj(
- None, None, self.model, obj, request.user, create_kwargs=create_kwargs,
- copy_name=serializer.validated_data.get('name', '')
+ None, None, self.model, obj, request.user, create_kwargs=create_kwargs, copy_name=serializer.validated_data.get('name', '')
)
if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all():
new_obj.admin_role.members.add(request.user)
@@ -1011,13 +979,9 @@ class CopyAPIView(GenericAPIView):
cache.set(key, sub_objs, timeout=3600)
permission_check_func = None
if hasattr(type(self), 'deep_copy_permission_check_func'):
- permission_check_func = (
- type(self).__module__, type(self).__name__, 'deep_copy_permission_check_func'
- )
+ permission_check_func = (type(self).__module__, type(self).__name__, 'deep_copy_permission_check_func')
trigger_delayed_deep_copy(
- self.model.__module__, self.model.__name__,
- obj.pk, new_obj.pk, request.user.pk, key,
- permission_check_func=permission_check_func
+ self.model.__module__, self.model.__name__, obj.pk, new_obj.pk, request.user.pk, key, permission_check_func=permission_check_func
)
serializer = self._get_copy_return_serializer(new_obj)
headers = {'Location': new_obj.get_absolute_url(request=request)}
@@ -1026,7 +990,7 @@ class CopyAPIView(GenericAPIView):
class BaseUsersList(SubListCreateAttachDetachAPIView):
def post(self, request, *args, **kwargs):
- ret = super(BaseUsersList, self).post( request, *args, **kwargs)
+ ret = super(BaseUsersList, self).post(request, *args, **kwargs)
if ret.status_code != 201:
return ret
try:
diff --git a/awx/api/metadata.py b/awx/api/metadata.py
index dedeeba8fb..c00e2946d8 100644
--- a/awx/api/metadata.py
+++ b/awx/api/metadata.py
@@ -28,18 +28,23 @@ from awx.main.tasks import AWXReceptorJob
class Metadata(metadata.SimpleMetadata):
-
def get_field_info(self, field):
field_info = OrderedDict()
field_info['type'] = self.label_lookup[field]
field_info['required'] = getattr(field, 'required', False)
text_attrs = [
- 'read_only', 'label', 'help_text',
- 'min_length', 'max_length',
- 'min_value', 'max_value',
- 'category', 'category_slug',
- 'defined_in_file', 'unit',
+ 'read_only',
+ 'label',
+ 'help_text',
+ 'min_length',
+ 'max_length',
+ 'min_value',
+ 'max_value',
+ 'category',
+ 'category_slug',
+ 'defined_in_file',
+ 'unit',
]
for attr in text_attrs:
@@ -61,8 +66,9 @@ class Metadata(metadata.SimpleMetadata):
'type': _('Data type for this {}.'),
'url': _('URL for this {}.'),
'related': _('Data structure with URLs of related resources.'),
- 'summary_fields': _('Data structure with name/description for related resources. '
- 'The output for some objects may be limited for performance reasons.'),
+ 'summary_fields': _(
+ 'Data structure with name/description for related resources. ' 'The output for some objects may be limited for performance reasons.'
+ ),
'created': _('Timestamp when this {} was created.'),
'modified': _('Timestamp when this {} was last modified.'),
}
@@ -101,9 +107,7 @@ class Metadata(metadata.SimpleMetadata):
field_info['children'] = self.get_serializer_info(field)
if not isinstance(field, (RelatedField, ManyRelatedField)) and hasattr(field, 'choices'):
- choices = [
- (choice_value, choice_name) for choice_value, choice_name in field.choices.items()
- ]
+ choices = [(choice_value, choice_name) for choice_value, choice_name in field.choices.items()]
if not any(choice in ('', None) for choice, _ in choices):
if field.allow_blank:
choices = [("", "---------")] + choices
@@ -131,7 +135,6 @@ class Metadata(metadata.SimpleMetadata):
for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
field_info[notification_type_name] = notification_type_class.default_messages
-
# Update type of fields returned...
model_field = None
if serializer and hasattr(serializer, 'Meta') and hasattr(serializer.Meta, 'model'):
@@ -149,22 +152,19 @@ class Metadata(metadata.SimpleMetadata):
field_info['type'] = 'integer'
elif field.field_name in ('created', 'modified'):
field_info['type'] = 'datetime'
- elif (
- RelatedField in field.__class__.__bases__ or
- isinstance(model_field, ForeignKey)
- ):
+ elif RelatedField in field.__class__.__bases__ or isinstance(model_field, ForeignKey):
field_info['type'] = 'id'
elif (
- isinstance(field, JSONField) or
- isinstance(model_field, JSONField) or
- isinstance(field, DRFJSONField) or
- isinstance(getattr(field, 'model_field', None), JSONField) or
- field.field_name == 'credential_passwords'
+ isinstance(field, JSONField)
+ or isinstance(model_field, JSONField)
+ or isinstance(field, DRFJSONField)
+ or isinstance(getattr(field, 'model_field', None), JSONField)
+ or field.field_name == 'credential_passwords'
):
field_info['type'] = 'json'
elif (
- isinstance(field, ManyRelatedField) and
- field.field_name == 'credentials'
+ isinstance(field, ManyRelatedField)
+ and field.field_name == 'credentials'
# launch-time credentials
):
field_info['type'] = 'list_of_ids'
@@ -175,10 +175,7 @@ class Metadata(metadata.SimpleMetadata):
def get_serializer_info(self, serializer, method=None):
filterer = getattr(serializer, 'filter_field_metadata', lambda fields, method: fields)
- return filterer(
- super(Metadata, self).get_serializer_info(serializer),
- method
- )
+ return filterer(super(Metadata, self).get_serializer_info(serializer), method)
def determine_actions(self, request, view):
# Add field information for GET requests (so field names/labels are
@@ -274,6 +271,7 @@ class Metadata(metadata.SimpleMetadata):
metadata['object_roles'] = roles
from rest_framework import generics
+
if isinstance(view, generics.ListAPIView) and hasattr(view, 'paginator'):
metadata['max_page_size'] = view.paginator.max_page_size
@@ -293,7 +291,6 @@ class RoleMetadata(Metadata):
class SublistAttachDetatchMetadata(Metadata):
-
def determine_actions(self, request, view):
actions = super(SublistAttachDetatchMetadata, self).determine_actions(request, view)
method = 'POST'
diff --git a/awx/api/metrics.py b/awx/api/metrics.py
index 27552e4a4e..5ed3dcabef 100644
--- a/awx/api/metrics.py
+++ b/awx/api/metrics.py
@@ -3,13 +3,9 @@
from django.conf.urls import url
-from awx.api.views import (
- MetricsView
-)
+from awx.api.views import MetricsView
-urls = [
- url(r'^$', MetricsView.as_view(), name='metrics_view'),
-]
+urls = [url(r'^$', MetricsView.as_view(), name='metrics_view')]
__all__ = ['urls']
diff --git a/awx/api/pagination.py b/awx/api/pagination.py
index b462a3a2ed..f9e99c335c 100644
--- a/awx/api/pagination.py
+++ b/awx/api/pagination.py
@@ -10,7 +10,6 @@ from rest_framework.utils.urls import replace_query_param
class DisabledPaginator(DjangoPaginator):
-
@property
def num_pages(self):
return 1
@@ -49,8 +48,7 @@ class Pagination(pagination.PageNumberPagination):
def get_html_context(self):
context = super().get_html_context()
- context['page_links'] = [pl._replace(url=self.cap_page_size(pl.url))
- for pl in context['page_links']]
+ context['page_links'] = [pl._replace(url=self.cap_page_size(pl.url)) for pl in context['page_links']]
return context
diff --git a/awx/api/permissions.py b/awx/api/permissions.py
index ecaabc4b91..96cd3a5c1d 100644
--- a/awx/api/permissions.py
+++ b/awx/api/permissions.py
@@ -15,16 +15,25 @@ from awx.main.utils import get_object_or_400
logger = logging.getLogger('awx.api.permissions')
-__all__ = ['ModelAccessPermission', 'JobTemplateCallbackPermission', 'VariableDataPermission',
- 'TaskPermission', 'ProjectUpdatePermission', 'InventoryInventorySourcesUpdatePermission',
- 'UserPermission', 'IsSuperUser', 'InstanceGroupTowerPermission', 'WorkflowApprovalPermission']
+__all__ = [
+ 'ModelAccessPermission',
+ 'JobTemplateCallbackPermission',
+ 'VariableDataPermission',
+ 'TaskPermission',
+ 'ProjectUpdatePermission',
+ 'InventoryInventorySourcesUpdatePermission',
+ 'UserPermission',
+ 'IsSuperUser',
+ 'InstanceGroupTowerPermission',
+ 'WorkflowApprovalPermission',
+]
class ModelAccessPermission(permissions.BasePermission):
- '''
+ """
Default permissions class to check user access based on the model and
request method, optionally verifying the request data.
- '''
+ """
def check_options_permissions(self, request, view, obj=None):
return self.check_get_permissions(request, view, obj)
@@ -35,8 +44,7 @@ class ModelAccessPermission(permissions.BasePermission):
def check_get_permissions(self, request, view, obj=None):
if hasattr(view, 'parent_model'):
parent_obj = view.get_parent_object()
- if not check_user_access(request.user, view.parent_model, 'read',
- parent_obj):
+ if not check_user_access(request.user, view.parent_model, 'read', parent_obj):
return False
if not obj:
return True
@@ -45,8 +53,7 @@ class ModelAccessPermission(permissions.BasePermission):
def check_post_permissions(self, request, view, obj=None):
if hasattr(view, 'parent_model'):
parent_obj = view.get_parent_object()
- if not check_user_access(request.user, view.parent_model, 'read',
- parent_obj):
+ if not check_user_access(request.user, view.parent_model, 'read', parent_obj):
return False
if hasattr(view, 'parent_key'):
if not check_user_access(request.user, view.model, 'add', {view.parent_key: parent_obj}):
@@ -60,10 +67,7 @@ class ModelAccessPermission(permissions.BasePermission):
extra_kwargs = {}
if view.obj_permission_type == 'admin':
extra_kwargs['data'] = {}
- return check_user_access(
- request.user, view.model, view.obj_permission_type, obj,
- **extra_kwargs
- )
+ return check_user_access(request.user, view.model, view.obj_permission_type, obj, **extra_kwargs)
else:
if obj:
return True
@@ -74,8 +78,7 @@ class ModelAccessPermission(permissions.BasePermission):
# FIXME: For some reason this needs to return True
# because it is first called with obj=None?
return True
- return check_user_access(request.user, view.model, 'change', obj,
- request.data)
+ return check_user_access(request.user, view.model, 'change', obj, request.data)
def check_patch_permissions(self, request, view, obj=None):
return self.check_put_permissions(request, view, obj)
@@ -89,10 +92,10 @@ class ModelAccessPermission(permissions.BasePermission):
return check_user_access(request.user, view.model, 'delete', obj)
def check_permissions(self, request, view, obj=None):
- '''
+ """
Perform basic permissions checking before delegating to the appropriate
method based on the request method.
- '''
+ """
# Don't allow anonymous users. 401, not 403, hence no raised exception.
if not request.user or request.user.is_anonymous:
@@ -117,9 +120,7 @@ class ModelAccessPermission(permissions.BasePermission):
return result
def has_permission(self, request, view, obj=None):
- logger.debug('has_permission(user=%s method=%s data=%r, %s, %r)',
- request.user, request.method, request.data,
- view.__class__.__name__, obj)
+ logger.debug('has_permission(user=%s method=%s data=%r, %s, %r)', request.user, request.method, request.data, view.__class__.__name__, obj)
try:
response = self.check_permissions(request, view, obj)
except Exception as e:
@@ -134,10 +135,10 @@ class ModelAccessPermission(permissions.BasePermission):
class JobTemplateCallbackPermission(ModelAccessPermission):
- '''
+ """
Permission check used by job template callback view for requests from
    ephemeral hosts.
- '''
+ """
def has_permission(self, request, view, obj=None):
# If another authentication method was used and it's not a POST, return
@@ -160,18 +161,16 @@ class JobTemplateCallbackPermission(ModelAccessPermission):
class VariableDataPermission(ModelAccessPermission):
-
def check_put_permissions(self, request, view, obj=None):
if not obj:
return True
- return check_user_access(request.user, view.model, 'change', obj,
- dict(variables=request.data))
+ return check_user_access(request.user, view.model, 'change', obj, dict(variables=request.data))
class TaskPermission(ModelAccessPermission):
- '''
+ """
Permission checks used for API callbacks from running a task.
- '''
+ """
def has_permission(self, request, view, obj=None):
# If another authentication method was used other than the one for
@@ -182,8 +181,7 @@ class TaskPermission(ModelAccessPermission):
# Verify that the ID present in the auth token is for a valid, active
# unified job.
try:
- unified_job = UnifiedJob.objects.get(status='running',
- pk=int(request.auth.split('-')[0]))
+ unified_job = UnifiedJob.objects.get(status='running', pk=int(request.auth.split('-')[0]))
except (UnifiedJob.DoesNotExist, TypeError):
return False
@@ -197,10 +195,10 @@ class TaskPermission(ModelAccessPermission):
class WorkflowApprovalPermission(ModelAccessPermission):
- '''
+ """
Permission check used by workflow `approval` and `deny` views to determine
    who has access to approve and deny paused workflow nodes.
- '''
+ """
def check_post_permissions(self, request, view, obj=None):
approval = get_object_or_400(view.model, pk=view.kwargs['pk'])
@@ -208,9 +206,10 @@ class WorkflowApprovalPermission(ModelAccessPermission):
class ProjectUpdatePermission(ModelAccessPermission):
- '''
+ """
Permission check used by ProjectUpdateView to determine who can update projects
- '''
+ """
+
def check_get_permissions(self, request, view, obj=None):
project = get_object_or_400(view.model, pk=view.kwargs['pk'])
return check_user_access(request.user, view.model, 'read', project)
diff --git a/awx/api/renderers.py b/awx/api/renderers.py
index 92d59e2c7b..48cba6cf5c 100644
--- a/awx/api/renderers.py
+++ b/awx/api/renderers.py
@@ -11,7 +11,6 @@ from rest_framework.utils import encoders
class SurrogateEncoder(encoders.JSONEncoder):
-
def encode(self, obj):
ret = super(SurrogateEncoder, self).encode(obj)
try:
@@ -28,9 +27,9 @@ class DefaultJSONRenderer(renderers.JSONRenderer):
class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
- '''
+ """
Customizations to the default browsable API renderer.
- '''
+ """
def get_default_renderer(self, view):
renderer = super(BrowsableAPIRenderer, self).get_default_renderer(view)
@@ -48,9 +47,7 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
# see: https://github.com/ansible/awx/issues/3108
# https://code.djangoproject.com/ticket/28121
return data
- return super(BrowsableAPIRenderer, self).get_content(renderer, data,
- accepted_media_type,
- renderer_context)
+ return super(BrowsableAPIRenderer, self).get_content(renderer, data, accepted_media_type, renderer_context)
def get_context(self, data, accepted_media_type, renderer_context):
# Store the associated response status to know how to populate the raw
@@ -125,18 +122,13 @@ class AnsiDownloadRenderer(PlainTextRenderer):
class PrometheusJSONRenderer(renderers.JSONRenderer):
-
def render(self, data, accepted_media_type=None, renderer_context=None):
if isinstance(data, dict):
# HTTP errors are {'detail': ErrorDetail(string='...', code=...)}
- return super(PrometheusJSONRenderer, self).render(
- data, accepted_media_type, renderer_context
- )
+ return super(PrometheusJSONRenderer, self).render(data, accepted_media_type, renderer_context)
parsed_metrics = text_string_to_metric_families(data)
data = {}
for family in parsed_metrics:
for sample in family.samples:
data[sample[0]] = {"labels": sample[1], "value": sample[2]}
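+                # e.g. the sample 'awx_system_info{install_uuid="abc"} 1.0' is
+                # exposed as {'awx_system_info': {'labels': {'install_uuid': 'abc'}, 'value': 1.0}}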
- return super(PrometheusJSONRenderer, self).render(
- data, accepted_media_type, renderer_context
- )
+ return super(PrometheusJSONRenderer, self).render(data, accepted_media_type, renderer_context)
diff --git a/awx/api/serializers.py b/awx/api/serializers.py
index b1c561b976..9a6f0a086b 100644
--- a/awx/api/serializers.py
+++ b/awx/api/serializers.py
@@ -43,33 +43,71 @@ from polymorphic.models import PolymorphicModel
# AWX
from awx.main.access import get_user_capabilities
-from awx.main.constants import (
- SCHEDULEABLE_PROVIDERS,
- ACTIVE_STATES,
- CENSOR_VALUE,
-)
+from awx.main.constants import SCHEDULEABLE_PROVIDERS, ACTIVE_STATES, CENSOR_VALUE
from awx.main.models import (
- ActivityStream, AdHocCommand, AdHocCommandEvent, Credential, CredentialInputSource,
- CredentialType, CustomInventoryScript, ExecutionEnvironment, Group, Host, Instance,
- InstanceGroup, Inventory, InventorySource, InventoryUpdate,
- InventoryUpdateEvent, Job, JobEvent, JobHostSummary, JobLaunchConfig,
- JobNotificationMixin, JobTemplate, Label, Notification, NotificationTemplate,
- OAuth2AccessToken, OAuth2Application, Organization, Project,
- ProjectUpdate, ProjectUpdateEvent, RefreshToken, Role, Schedule,
- SystemJob, SystemJobEvent, SystemJobTemplate, Team, UnifiedJob,
- UnifiedJobTemplate, WorkflowApproval, WorkflowApprovalTemplate, WorkflowJob,
- WorkflowJobNode, WorkflowJobTemplate, WorkflowJobTemplateNode, StdoutMaxBytesExceeded
+ ActivityStream,
+ AdHocCommand,
+ AdHocCommandEvent,
+ Credential,
+ CredentialInputSource,
+ CredentialType,
+ CustomInventoryScript,
+ ExecutionEnvironment,
+ Group,
+ Host,
+ Instance,
+ InstanceGroup,
+ Inventory,
+ InventorySource,
+ InventoryUpdate,
+ InventoryUpdateEvent,
+ Job,
+ JobEvent,
+ JobHostSummary,
+ JobLaunchConfig,
+ JobNotificationMixin,
+ JobTemplate,
+ Label,
+ Notification,
+ NotificationTemplate,
+ OAuth2AccessToken,
+ OAuth2Application,
+ Organization,
+ Project,
+ ProjectUpdate,
+ ProjectUpdateEvent,
+ RefreshToken,
+ Role,
+ Schedule,
+ SystemJob,
+ SystemJobEvent,
+ SystemJobTemplate,
+ Team,
+ UnifiedJob,
+ UnifiedJobTemplate,
+ WorkflowApproval,
+ WorkflowApprovalTemplate,
+ WorkflowJob,
+ WorkflowJobNode,
+ WorkflowJobTemplate,
+ WorkflowJobTemplateNode,
+ StdoutMaxBytesExceeded,
)
from awx.main.models.base import VERBOSITY_CHOICES, NEW_JOB_TYPE_CHOICES
-from awx.main.models.rbac import (
- get_roles_on_resource, role_summary_fields_generator
-)
+from awx.main.models.rbac import get_roles_on_resource, role_summary_fields_generator
from awx.main.fields import ImplicitRoleField, JSONBField
from awx.main.utils import (
- get_type_for_model, get_model_for_type,
- camelcase_to_underscore, getattrd, parse_yaml_or_json,
- has_model_field_prefetched, extract_ansible_vars, encrypt_dict,
- prefetch_page_capabilities, get_external_account, truncate_stdout,
+ get_type_for_model,
+ get_model_for_type,
+ camelcase_to_underscore,
+ getattrd,
+ parse_yaml_or_json,
+ has_model_field_prefetched,
+ extract_ansible_vars,
+ encrypt_dict,
+ prefetch_page_capabilities,
+ get_external_account,
+ truncate_stdout,
)
from awx.main.utils.filters import SmartFilter
from awx.main.utils.named_url_graph import reset_counters
@@ -78,13 +116,12 @@ from awx.main.redact import UriCleaner, REPLACE_STR
from awx.main.validators import vars_validate_or_raise
from awx.api.versioning import reverse
-from awx.api.fields import (BooleanNullField, CharNullField, ChoiceNullField,
- VerbatimField, DeprecatedCredentialField)
+from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, VerbatimField, DeprecatedCredentialField
logger = logging.getLogger('awx.api.serializers')
# Fields that should be summarized regardless of object type.
-DEFAULT_SUMMARY_FIELDS = ('id', 'name', 'description')# , 'created_by', 'modified_by')#, 'type')
+DEFAULT_SUMMARY_FIELDS = ('id', 'name', 'description') # , 'created_by', 'modified_by')#, 'type')
# Keys are fields (foreign keys) where, if found on an instance, summary info
# should be added to the serialized data. Values are a tuple of field names on
@@ -95,23 +132,26 @@ SUMMARIZABLE_FK_FIELDS = {
'user': ('id', 'username', 'first_name', 'last_name'),
'application': ('id', 'name'),
'team': DEFAULT_SUMMARY_FIELDS,
- 'inventory': DEFAULT_SUMMARY_FIELDS + ('has_active_failures',
- 'total_hosts',
- 'hosts_with_active_failures',
- 'total_groups',
- 'has_inventory_sources',
- 'total_inventory_sources',
- 'inventory_sources_with_failures',
- 'organization_id',
- 'kind',
- 'insights_credential_id',),
+ 'inventory': DEFAULT_SUMMARY_FIELDS
+ + (
+ 'has_active_failures',
+ 'total_hosts',
+ 'hosts_with_active_failures',
+ 'total_groups',
+ 'has_inventory_sources',
+ 'total_inventory_sources',
+ 'inventory_sources_with_failures',
+ 'organization_id',
+ 'kind',
+ 'insights_credential_id',
+ ),
'host': DEFAULT_SUMMARY_FIELDS,
'group': DEFAULT_SUMMARY_FIELDS,
'default_environment': DEFAULT_SUMMARY_FIELDS + ('image',),
'execution_environment': DEFAULT_SUMMARY_FIELDS + ('image',),
'project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'),
'source_project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'),
- 'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed',),
+ 'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed'),
'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'kubernetes', 'credential_type_id'),
'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed', 'type', 'canceled_on'),
'job_template': DEFAULT_SUMMARY_FIELDS,
@@ -142,20 +182,18 @@ SUMMARIZABLE_FK_FIELDS = {
def reverse_gfk(content_object, request):
- '''
+ """
Computes a reverse for a GenericForeignKey field.
Returns a dictionary of the form
{ '<type>': reverse(<type detail>) }
for example
{ 'organization': '/api/v2/organizations/1/' }
- '''
+ """
if content_object is None or not hasattr(content_object, 'get_absolute_url'):
return {}
- return {
- camelcase_to_underscore(content_object.__class__.__name__): content_object.get_absolute_url(request=request)
- }
+ return {camelcase_to_underscore(content_object.__class__.__name__): content_object.get_absolute_url(request=request)}
class CopySerializer(serializers.Serializer):
@@ -167,15 +205,12 @@ class CopySerializer(serializers.Serializer):
view = self.context.get('view', None)
obj = view.get_object()
if name == obj.name:
- raise serializers.ValidationError(_(
- 'The original object is already named {}, a copy from'
- ' it cannot have the same name.'.format(name)
- ))
+ raise serializers.ValidationError(_('The original object is already named {}, a copy from' ' it cannot have the same name.'.format(name)))
return attrs
class BaseSerializerMetaclass(serializers.SerializerMetaclass):
- '''
+ """
Custom metaclass to enable attribute inheritance from Meta objects on
serializer base classes.
@@ -215,7 +250,7 @@ class BaseSerializerMetaclass(serializers.SerializerMetaclass):
# Extra field kwargs cannot be removed in subclasses, only replaced.
- '''
+ """
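+    # e.g. a subclass Meta can declare fields = ('*', '-description', 'foo')
+    # to inherit every field from its bases, drop 'description', and add 'foo'.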
@staticmethod
def _is_list_of_strings(x):
@@ -223,7 +258,7 @@ class BaseSerializerMetaclass(serializers.SerializerMetaclass):
@staticmethod
def _is_extra_kwargs(x):
- return isinstance(x, dict) and all([isinstance(k, str) and isinstance(v, dict) for k,v in x.items()])
+ return isinstance(x, dict) and all([isinstance(k, str) and isinstance(v, dict) for k, v in x.items()])
@classmethod
def _update_meta(cls, base, meta, other=None):
@@ -237,12 +272,12 @@ class BaseSerializerMetaclass(serializers.SerializerMetaclass):
meta_val = meta_val or []
new_vals = []
except_vals = []
- if base: # Merge values from all bases.
+ if base: # Merge values from all bases.
new_vals.extend([x for x in meta_val])
for v in val:
- if not base and v == '*': # Inherit all values from previous base(es).
+ if not base and v == '*': # Inherit all values from previous base(es).
new_vals.extend([x for x in meta_val])
- elif not base and v.startswith('-'): # Except these values.
+ elif not base and v.startswith('-'): # Except these values.
except_vals.append(v[1:])
else:
new_vals.append(v)
@@ -256,9 +291,9 @@ class BaseSerializerMetaclass(serializers.SerializerMetaclass):
meta_val = meta_val or {}
new_val = {}
if base:
- for k,v in meta_val.items():
+ for k, v in meta_val.items():
new_val[k] = copy.deepcopy(v)
- for k,v in val.items():
+ for k, v in val.items():
new_val.setdefault(k, {}).update(copy.deepcopy(v))
val = new_val
# Any other values are copied in case they are mutable objects.
@@ -276,22 +311,20 @@ class BaseSerializerMetaclass(serializers.SerializerMetaclass):
class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetaclass):
-
class Meta:
- fields = ('id', 'type', 'url', 'related', 'summary_fields', 'created',
- 'modified', 'name', 'description')
+ fields = ('id', 'type', 'url', 'related', 'summary_fields', 'created', 'modified', 'name', 'description')
summary_fields = ()
summarizable_fields = ()
# add the URL and related resources
- type = serializers.SerializerMethodField()
- url = serializers.SerializerMethodField()
- related = serializers.SerializerMethodField('_get_related')
+ type = serializers.SerializerMethodField()
+ url = serializers.SerializerMethodField()
+ related = serializers.SerializerMethodField('_get_related')
summary_fields = serializers.SerializerMethodField('_get_summary_fields')
# make certain fields read only
- created = serializers.SerializerMethodField()
- modified = serializers.SerializerMethodField()
+ created = serializers.SerializerMethodField()
+ modified = serializers.SerializerMethodField()
def __init__(self, *args, **kwargs):
super(BaseSerializer, self).__init__(*args, **kwargs)
@@ -322,7 +355,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
'system_job': _('Management Job'),
'workflow_job': _('Workflow Job'),
'workflow_job_template': _('Workflow Template'),
- 'job_template': _('Job Template')
+ 'job_template': _('Job Template'),
}
choices = []
for t in self.get_types():
@@ -358,12 +391,9 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
def get_related(self, obj):
res = OrderedDict()
view = self.context.get('view', None)
- if view and (hasattr(view, 'retrieve') or view.request.method == 'POST') and \
- type(obj) in settings.NAMED_URL_GRAPH:
+ if view and (hasattr(view, 'retrieve') or view.request.method == 'POST') and type(obj) in settings.NAMED_URL_GRAPH:
original_url = self.get_url(obj)
- res['named_url'] = self._generate_named_url(
- original_url, obj, settings.NAMED_URL_GRAPH[type(obj)]
- )
+ res['named_url'] = self._generate_named_url(original_url, obj, settings.NAMED_URL_GRAPH[type(obj)])
if getattr(obj, 'created_by', None):
res['created_by'] = self.reverse('api:user_detail', kwargs={'pk': obj.created_by.pk})
if getattr(obj, 'modified_by', None):
@@ -383,8 +413,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
# because it results in additional queries.
if fk == 'job' and isinstance(obj, UnifiedJob):
continue
- if fk == 'project' and (isinstance(obj, InventorySource) or
- isinstance(obj, Project)):
+ if fk == 'project' and (isinstance(obj, InventorySource) or isinstance(obj, Project)):
continue
try:
@@ -459,14 +488,11 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
else:
model = self.Meta.model
prefetch_list = self.capabilities_prefetch
- self.context['capability_map'] = prefetch_page_capabilities(
- model, qs, prefetch_list, view.request.user
- )
+ self.context['capability_map'] = prefetch_page_capabilities(model, qs, prefetch_list, view.request.user)
if obj.id in self.context['capability_map']:
capabilities_cache = self.context['capability_map'][obj.id]
return get_user_capabilities(
- view.request.user, obj, method_list=self.show_capabilities, parent_obj=parent_obj,
- capabilities_cache=capabilities_cache
+ view.request.user, obj, method_list=self.show_capabilities, parent_obj=parent_obj, capabilities_cache=capabilities_cache
)
else:
# Contextual information to produce user_capabilities doesn't exist
@@ -485,7 +511,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
if obj is None:
return None
elif isinstance(obj, User):
- return obj.last_login # Not actually exposed for User.
+ return obj.last_login # Not actually exposed for User.
elif hasattr(obj, 'modified'):
return obj.modified
return None
@@ -543,10 +569,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
if isinstance(validator, validators.UniqueValidator):
unique_error_message = model_field.error_messages.get('unique', None)
if unique_error_message:
- unique_error_message = unique_error_message % {
- 'model_name': capfirst(opts.verbose_name),
- 'field_label': capfirst(model_field.verbose_name),
- }
+ unique_error_message = unique_error_message % {'model_name': capfirst(opts.verbose_name), 'field_label': capfirst(model_field.verbose_name)}
validator.message = unique_error_message
return field_class, field_kwargs
@@ -594,7 +617,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
# do any validation implemented on the model class.
exclusions = self.get_validation_exclusions(self.instance)
obj = self.instance or self.Meta.model()
- for k,v in attrs.items():
+ for k, v in attrs.items():
if k not in exclusions:
setattr(obj, k, v)
obj.full_clean(exclude=exclusions)
@@ -611,7 +634,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
# error message; here we preserve field-specific errors raised from
# the model's full_clean method.
d = exc.update_error_dict({})
- for k,v in d.items():
+ for k, v in d.items():
v = v if isinstance(v, list) else [v]
v2 = []
for e in v:
@@ -648,8 +671,7 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
class Meta:
model = UnifiedJobTemplate
- fields = ('*', 'last_job_run', 'last_job_failed',
- 'next_job_run', 'status', 'execution_environment')
+ fields = ('*', 'last_job_run', 'last_job_failed', 'next_job_run', 'status', 'execution_environment')
def get_related(self, obj):
res = super(UnifiedJobTemplateSerializer, self).get_related(obj)
@@ -660,13 +682,12 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
if obj.next_schedule:
res['next_schedule'] = obj.next_schedule.get_absolute_url(request=self.context.get('request'))
if obj.execution_environment_id:
- res['execution_environment'] = self.reverse('api:execution_environment_detail',
- kwargs={'pk': obj.execution_environment_id})
+ res['execution_environment'] = self.reverse('api:execution_environment_detail', kwargs={'pk': obj.execution_environment_id})
return res
def get_types(self):
if type(self) is UnifiedJobTemplateSerializer:
- return ['project', 'inventory_source', 'job_template', 'system_job_template', 'workflow_job_template',]
+ return ['project', 'inventory_source', 'job_template', 'system_job_template', 'workflow_job_template']
else:
return super(UnifiedJobTemplateSerializer, self).get_types()
@@ -708,33 +729,39 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
class UnifiedJobSerializer(BaseSerializer):
show_capabilities = ['start', 'delete']
event_processing_finished = serializers.BooleanField(
- help_text=_('Indicates whether all of the events generated by this '
- 'unified job have been saved to the database.'),
- read_only=True
+ help_text=_('Indicates whether all of the events generated by this ' 'unified job have been saved to the database.'), read_only=True
)
class Meta:
model = UnifiedJob
- fields = ('*', 'unified_job_template', 'launch_type', 'status',
- 'execution_environment',
- 'failed', 'started', 'finished', 'canceled_on', 'elapsed', 'job_args',
- 'job_cwd', 'job_env', 'job_explanation',
- 'execution_node', 'controller_node',
- 'result_traceback', 'event_processing_finished')
+ fields = (
+ '*',
+ 'unified_job_template',
+ 'launch_type',
+ 'status',
+ 'execution_environment',
+ 'failed',
+ 'started',
+ 'finished',
+ 'canceled_on',
+ 'elapsed',
+ 'job_args',
+ 'job_cwd',
+ 'job_env',
+ 'job_explanation',
+ 'execution_node',
+ 'controller_node',
+ 'result_traceback',
+ 'event_processing_finished',
+ )
extra_kwargs = {
- 'unified_job_template': {
- 'source': 'unified_job_template_id',
- 'label': 'unified job template',
- },
- 'job_env': {
- 'read_only': True,
- 'label': 'job_env',
- }
+ 'unified_job_template': {'source': 'unified_job_template_id', 'label': 'unified job template'},
+ 'job_env': {'read_only': True, 'label': 'job_env'},
}
def get_types(self):
if type(self) is UnifiedJobSerializer:
- return ['project_update', 'inventory_update', 'job', 'ad_hoc_command', 'system_job', 'workflow_job',]
+ return ['project_update', 'inventory_update', 'job', 'ad_hoc_command', 'system_job', 'workflow_job']
else:
return super(UnifiedJobSerializer, self).get_types()
@@ -755,8 +782,7 @@ class UnifiedJobSerializer(BaseSerializer):
if obj.workflow_job_id:
res['source_workflow_job'] = self.reverse('api:workflow_job_detail', kwargs={'pk': obj.workflow_job_id})
if obj.execution_environment_id:
- res['execution_environment'] = self.reverse('api:execution_environment_detail',
- kwargs={'pk': obj.execution_environment_id})
+ res['execution_environment'] = self.reverse('api:execution_environment_detail', kwargs={'pk': obj.execution_environment_id})
return res
def get_summary_fields(self, obj):
@@ -819,7 +845,6 @@ class UnifiedJobSerializer(BaseSerializer):
class UnifiedJobListSerializer(UnifiedJobSerializer):
-
class Meta:
fields = ('*', '-job_args', '-job_cwd', '-job_env', '-result_traceback', '-event_processing_finished')
@@ -882,8 +907,7 @@ class UnifiedJobStdoutSerializer(UnifiedJobSerializer):
class UserSerializer(BaseSerializer):
- password = serializers.CharField(required=False, default='', write_only=True,
- help_text=_('Write-only field used to change the password.'))
+ password = serializers.CharField(required=False, default='', write_only=True, help_text=_('Write-only field used to change the password.'))
ldap_dn = serializers.CharField(source='profile.ldap_dn', read_only=True)
external_account = serializers.SerializerMethodField(help_text=_('Set if the account is managed by an external service'))
is_system_auditor = serializers.BooleanField(default=False)
@@ -891,12 +915,23 @@ class UserSerializer(BaseSerializer):
class Meta:
model = User
- fields = ('*', '-name', '-description', '-modified',
- 'username', 'first_name', 'last_name',
- 'email', 'is_superuser', 'is_system_auditor', 'password', 'ldap_dn', 'last_login', 'external_account')
- extra_kwargs = {
- 'last_login': {'read_only': True}
- }
+ fields = (
+ '*',
+ '-name',
+ '-description',
+ '-modified',
+ 'username',
+ 'first_name',
+ 'last_name',
+ 'email',
+ 'is_superuser',
+ 'is_system_auditor',
+ 'password',
+ 'ldap_dn',
+ 'last_login',
+ 'external_account',
+ )
+ extra_kwargs = {'last_login': {'read_only': True}}
def to_representation(self, obj):
ret = super(UserSerializer, self).to_representation(obj)
@@ -924,14 +959,15 @@ class UserSerializer(BaseSerializer):
new_password = None
except AttributeError:
pass
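+        # Accounts backed by an external identity provider (social or
+        # enterprise auth) never keep a local password, so any password
+        # submitted for them is discarded below.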
- if (getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None) or
- getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None) or
- getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None) or
- getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None) or
- getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)) and obj.social_auth.all():
+ if (
+ getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None)
+ or getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None)
+ or getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None)
+ or getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None)
+ or getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)
+ ) and obj.social_auth.all():
new_password = None
- if (getattr(settings, 'RADIUS_SERVER', None) or
- getattr(settings, 'TACACSPLUS_HOST', None)) and obj.enterprise_auth.all():
+ if (getattr(settings, 'RADIUS_SERVER', None) or getattr(settings, 'TACACSPLUS_HOST', None)) and obj.enterprise_auth.all():
new_password = None
if new_password:
obj.set_password(new_password)
@@ -969,19 +1005,21 @@ class UserSerializer(BaseSerializer):
def get_related(self, obj):
res = super(UserSerializer, self).get_related(obj)
- res.update(dict(
- teams = self.reverse('api:user_teams_list', kwargs={'pk': obj.pk}),
- organizations = self.reverse('api:user_organizations_list', kwargs={'pk': obj.pk}),
- admin_of_organizations = self.reverse('api:user_admin_of_organizations_list', kwargs={'pk': obj.pk}),
- projects = self.reverse('api:user_projects_list', kwargs={'pk': obj.pk}),
- credentials = self.reverse('api:user_credentials_list', kwargs={'pk': obj.pk}),
- roles = self.reverse('api:user_roles_list', kwargs={'pk': obj.pk}),
- activity_stream = self.reverse('api:user_activity_stream_list', kwargs={'pk': obj.pk}),
- access_list = self.reverse('api:user_access_list', kwargs={'pk': obj.pk}),
- tokens = self.reverse('api:o_auth2_token_list', kwargs={'pk': obj.pk}),
- authorized_tokens = self.reverse('api:user_authorized_token_list', kwargs={'pk': obj.pk}),
- personal_tokens = self.reverse('api:user_personal_token_list', kwargs={'pk': obj.pk}),
- ))
+ res.update(
+ dict(
+ teams=self.reverse('api:user_teams_list', kwargs={'pk': obj.pk}),
+ organizations=self.reverse('api:user_organizations_list', kwargs={'pk': obj.pk}),
+ admin_of_organizations=self.reverse('api:user_admin_of_organizations_list', kwargs={'pk': obj.pk}),
+ projects=self.reverse('api:user_projects_list', kwargs={'pk': obj.pk}),
+ credentials=self.reverse('api:user_credentials_list', kwargs={'pk': obj.pk}),
+ roles=self.reverse('api:user_roles_list', kwargs={'pk': obj.pk}),
+ activity_stream=self.reverse('api:user_activity_stream_list', kwargs={'pk': obj.pk}),
+ access_list=self.reverse('api:user_access_list', kwargs={'pk': obj.pk}),
+ tokens=self.reverse('api:o_auth2_token_list', kwargs={'pk': obj.pk}),
+ authorized_tokens=self.reverse('api:user_authorized_token_list', kwargs={'pk': obj.pk}),
+ personal_tokens=self.reverse('api:user_personal_token_list', kwargs={'pk': obj.pk}),
+ )
+ )
return res
def _validate_ldap_managed_field(self, value, field_name):
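[Note on the hunk above] The `get_related` rewrite is mostly a whitespace fix: PEP 8 (E251) forbids spaces around `=` in keyword arguments, so `teams = self.reverse(...)` inside the `dict(...)` call becomes `teams=self.reverse(...)`, and the nested call is re-indented one bracket level at a time. A sketch with a stand-in helper (not the AWX implementation):

def reverse(name, kwargs=None):
    # Stand-in for the serializer's URL helper.
    return '/api/v2/{}/{}/'.format(name, (kwargs or {}).get('pk', ''))

# Before: spaces around '=' (flagged as E251).
res = dict(teams = reverse('user_teams_list', kwargs={'pk': 1}))

# After black: no spaces around '=' in keyword arguments.
res = dict(teams=reverse('user_teams_list', kwargs={'pk': 1}))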
@@ -1021,6 +1059,7 @@ class UserActivityStreamSerializer(UserSerializer):
so by excluding it from fields here we avoid duplication, which
would carry some unintended consequences.
"""
+
class Meta:
model = User
fields = ('*', '-is_system_auditor')
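[Note on the hunk above] The lone added line here is black inserting an empty line between a class docstring and the first statement of the body, its consistent convention:

class Example:
    """One-line summary of the class."""

    some_attribute = True  # black separates the docstring from the body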
@@ -1034,15 +1073,9 @@ class BaseOAuth2TokenSerializer(BaseSerializer):
class Meta:
model = OAuth2AccessToken
- fields = (
- '*', '-name', 'description', 'user', 'token', 'refresh_token',
- 'application', 'expires', 'scope',
- )
+ fields = ('*', '-name', 'description', 'user', 'token', 'refresh_token', 'application', 'expires', 'scope')
read_only_fields = ('user', 'token', 'expires', 'refresh_token')
- extra_kwargs = {
- 'scope': {'allow_null': False, 'required': False},
- 'user': {'allow_null': False, 'required': True}
- }
+ extra_kwargs = {'scope': {'allow_null': False, 'required': False}, 'user': {'allow_null': False, 'required': True}}
def get_token(self, obj):
request = self.context.get('request', None)
@@ -1071,12 +1104,8 @@ class BaseOAuth2TokenSerializer(BaseSerializer):
if obj.user:
ret['user'] = self.reverse('api:user_detail', kwargs={'pk': obj.user.pk})
if obj.application:
- ret['application'] = self.reverse(
- 'api:o_auth2_application_detail', kwargs={'pk': obj.application.pk}
- )
- ret['activity_stream'] = self.reverse(
- 'api:o_auth2_token_activity_stream_list', kwargs={'pk': obj.pk}
- )
+ ret['application'] = self.reverse('api:o_auth2_application_detail', kwargs={'pk': obj.application.pk})
+ ret['activity_stream'] = self.reverse('api:o_auth2_token_activity_stream_list', kwargs={'pk': obj.pk})
return ret
def _is_valid_scope(self, value):
@@ -1092,9 +1121,7 @@ class BaseOAuth2TokenSerializer(BaseSerializer):
def validate_scope(self, value):
if not self._is_valid_scope(value):
- raise serializers.ValidationError(_(
- 'Must be a simple space-separated string with allowed scopes {}.'
- ).format(self.ALLOWED_SCOPES))
+ raise serializers.ValidationError(_('Must be a simple space-separated string with allowed scopes {}.').format(self.ALLOWED_SCOPES))
return value
def create(self, validated_data):
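[Note on the hunk above] Here a hand-wrapped `ValidationError(...)` call is joined onto a single line roughly 140 characters wide. Stock black wraps at 88 characters, so the project has evidently raised the limit (rewrapped lines throughout this diff stay under roughly 160 characters); that reading is an inference from the output, not something the commit states. The joining rule itself, sketched:

ALLOWED_SCOPES = ['read', 'write']

# Hand-wrapped call, pre-black:
err = ValueError(
    'Must be a simple space-separated string with allowed scopes {}.'.format(ALLOWED_SCOPES)
)

# black joins it, because the whole call fits within the configured limit:
err = ValueError('Must be a simple space-separated string with allowed scopes {}.'.format(ALLOWED_SCOPES))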
@@ -1106,70 +1133,50 @@ class BaseOAuth2TokenSerializer(BaseSerializer):
class UserAuthorizedTokenSerializer(BaseOAuth2TokenSerializer):
-
class Meta:
extra_kwargs = {
'scope': {'allow_null': False, 'required': False},
'user': {'allow_null': False, 'required': True},
- 'application': {'allow_null': False, 'required': True}
+ 'application': {'allow_null': False, 'required': True},
}
def create(self, validated_data):
current_user = self.context['request'].user
validated_data['token'] = generate_token()
- validated_data['expires'] = now() + timedelta(
- seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS']
- )
+ validated_data['expires'] = now() + timedelta(seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS'])
obj = super(UserAuthorizedTokenSerializer, self).create(validated_data)
obj.save()
if obj.application:
- RefreshToken.objects.create(
- user=current_user,
- token=generate_token(),
- application=obj.application,
- access_token=obj
- )
+ RefreshToken.objects.create(user=current_user, token=generate_token(), application=obj.application, access_token=obj)
return obj
class OAuth2TokenSerializer(BaseOAuth2TokenSerializer):
-
def create(self, validated_data):
current_user = self.context['request'].user
validated_data['token'] = generate_token()
- validated_data['expires'] = now() + timedelta(
- seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS']
- )
+ validated_data['expires'] = now() + timedelta(seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS'])
obj = super(OAuth2TokenSerializer, self).create(validated_data)
if obj.application and obj.application.user:
obj.user = obj.application.user
obj.save()
if obj.application:
- RefreshToken.objects.create(
- user=current_user,
- token=generate_token(),
- application=obj.application,
- access_token=obj
- )
+ RefreshToken.objects.create(user=current_user, token=generate_token(), application=obj.application, access_token=obj)
return obj
class OAuth2TokenDetailSerializer(OAuth2TokenSerializer):
-
class Meta:
read_only_fields = ('*', 'user', 'application')
class UserPersonalTokenSerializer(BaseOAuth2TokenSerializer):
-
class Meta:
read_only_fields = ('user', 'token', 'expires', 'application')
def create(self, validated_data):
validated_data['token'] = generate_token()
- validated_data['expires'] = now() + timedelta(
- seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS']
- )
+ validated_data['expires'] = now() + timedelta(seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS'])
validated_data['application'] = None
obj = super(UserPersonalTokenSerializer, self).create(validated_data)
obj.save()
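[Note on the hunks above] The lone removed lines under `UserAuthorizedTokenSerializer`, `OAuth2TokenSerializer`, `OAuth2TokenDetailSerializer`, and `UserPersonalTokenSerializer` are all the same change: black deletes empty lines that sit directly after a `class ...:` header, so the body starts immediately:

# Before black:
class TokenSerializer:

    expires_in = 3600

# After black: no blank line between the class header and its first statement.
class TokenSerializer:
    expires_in = 3600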
@@ -1183,8 +1190,16 @@ class OAuth2ApplicationSerializer(BaseSerializer):
class Meta:
model = OAuth2Application
fields = (
- '*', 'description', '-user', 'client_id', 'client_secret', 'client_type',
- 'redirect_uris', 'authorization_grant_type', 'skip_authorization', 'organization'
+ '*',
+ 'description',
+ '-user',
+ 'client_id',
+ 'client_secret',
+ 'client_type',
+ 'redirect_uris',
+ 'authorization_grant_type',
+ 'skip_authorization',
+ 'organization',
)
read_only_fields = ('client_id', 'client_secret')
read_only_on_update_fields = ('user', 'authorization_grant_type')
@@ -1192,18 +1207,10 @@ class OAuth2ApplicationSerializer(BaseSerializer):
'user': {'allow_null': True, 'required': False},
'organization': {'allow_null': False},
'authorization_grant_type': {'allow_null': False, 'label': _('Authorization Grant Type')},
- 'client_secret': {
- 'label': _('Client Secret')
- },
- 'client_type': {
- 'label': _('Client Type')
- },
- 'redirect_uris': {
- 'label': _('Redirect URIs')
- },
- 'skip_authorization': {
- 'label': _('Skip Authorization')
- },
+ 'client_secret': {'label': _('Client Secret')},
+ 'client_type': {'label': _('Client Type')},
+ 'redirect_uris': {'label': _('Redirect URIs')},
+ 'skip_authorization': {'label': _('Skip Authorization')},
}
def to_representation(self, obj):
@@ -1217,12 +1224,12 @@ class OAuth2ApplicationSerializer(BaseSerializer):
def get_related(self, obj):
res = super(OAuth2ApplicationSerializer, self).get_related(obj)
- res.update(dict(
- tokens = self.reverse('api:o_auth2_application_token_list', kwargs={'pk': obj.pk}),
- activity_stream = self.reverse(
- 'api:o_auth2_application_activity_stream_list', kwargs={'pk': obj.pk}
+ res.update(
+ dict(
+ tokens=self.reverse('api:o_auth2_application_token_list', kwargs={'pk': obj.pk}),
+ activity_stream=self.reverse('api:o_auth2_application_activity_stream_list', kwargs={'pk': obj.pk}),
)
- ))
+ )
return res
def get_modified(self, obj):
@@ -1252,36 +1259,35 @@ class OrganizationSerializer(BaseSerializer):
class Meta:
model = Organization
- fields = ('*', 'max_hosts', 'custom_virtualenv', 'default_environment',)
- read_only_fields = ('*', 'custom_virtualenv',)
+ fields = ('*', 'max_hosts', 'custom_virtualenv', 'default_environment')
+ read_only_fields = ('*', 'custom_virtualenv')
def get_related(self, obj):
res = super(OrganizationSerializer, self).get_related(obj)
res.update(
- execution_environments = self.reverse('api:organization_execution_environments_list', kwargs={'pk': obj.pk}),
- projects = self.reverse('api:organization_projects_list', kwargs={'pk': obj.pk}),
- inventories = self.reverse('api:organization_inventories_list', kwargs={'pk': obj.pk}),
- job_templates = self.reverse('api:organization_job_templates_list', kwargs={'pk': obj.pk}),
- workflow_job_templates = self.reverse('api:organization_workflow_job_templates_list', kwargs={'pk': obj.pk}),
- users = self.reverse('api:organization_users_list', kwargs={'pk': obj.pk}),
- admins = self.reverse('api:organization_admins_list', kwargs={'pk': obj.pk}),
- teams = self.reverse('api:organization_teams_list', kwargs={'pk': obj.pk}),
- credentials = self.reverse('api:organization_credential_list', kwargs={'pk': obj.pk}),
- applications = self.reverse('api:organization_applications_list', kwargs={'pk': obj.pk}),
- activity_stream = self.reverse('api:organization_activity_stream_list', kwargs={'pk': obj.pk}),
- notification_templates = self.reverse('api:organization_notification_templates_list', kwargs={'pk': obj.pk}),
- notification_templates_started = self.reverse('api:organization_notification_templates_started_list', kwargs={'pk': obj.pk}),
- notification_templates_success = self.reverse('api:organization_notification_templates_success_list', kwargs={'pk': obj.pk}),
- notification_templates_error = self.reverse('api:organization_notification_templates_error_list', kwargs={'pk': obj.pk}),
- notification_templates_approvals = self.reverse('api:organization_notification_templates_approvals_list', kwargs={'pk': obj.pk}),
- object_roles = self.reverse('api:organization_object_roles_list', kwargs={'pk': obj.pk}),
- access_list = self.reverse('api:organization_access_list', kwargs={'pk': obj.pk}),
- instance_groups = self.reverse('api:organization_instance_groups_list', kwargs={'pk': obj.pk}),
- galaxy_credentials = self.reverse('api:organization_galaxy_credentials_list', kwargs={'pk': obj.pk}),
+ execution_environments=self.reverse('api:organization_execution_environments_list', kwargs={'pk': obj.pk}),
+ projects=self.reverse('api:organization_projects_list', kwargs={'pk': obj.pk}),
+ inventories=self.reverse('api:organization_inventories_list', kwargs={'pk': obj.pk}),
+ job_templates=self.reverse('api:organization_job_templates_list', kwargs={'pk': obj.pk}),
+ workflow_job_templates=self.reverse('api:organization_workflow_job_templates_list', kwargs={'pk': obj.pk}),
+ users=self.reverse('api:organization_users_list', kwargs={'pk': obj.pk}),
+ admins=self.reverse('api:organization_admins_list', kwargs={'pk': obj.pk}),
+ teams=self.reverse('api:organization_teams_list', kwargs={'pk': obj.pk}),
+ credentials=self.reverse('api:organization_credential_list', kwargs={'pk': obj.pk}),
+ applications=self.reverse('api:organization_applications_list', kwargs={'pk': obj.pk}),
+ activity_stream=self.reverse('api:organization_activity_stream_list', kwargs={'pk': obj.pk}),
+ notification_templates=self.reverse('api:organization_notification_templates_list', kwargs={'pk': obj.pk}),
+ notification_templates_started=self.reverse('api:organization_notification_templates_started_list', kwargs={'pk': obj.pk}),
+ notification_templates_success=self.reverse('api:organization_notification_templates_success_list', kwargs={'pk': obj.pk}),
+ notification_templates_error=self.reverse('api:organization_notification_templates_error_list', kwargs={'pk': obj.pk}),
+ notification_templates_approvals=self.reverse('api:organization_notification_templates_approvals_list', kwargs={'pk': obj.pk}),
+ object_roles=self.reverse('api:organization_object_roles_list', kwargs={'pk': obj.pk}),
+ access_list=self.reverse('api:organization_access_list', kwargs={'pk': obj.pk}),
+ instance_groups=self.reverse('api:organization_instance_groups_list', kwargs={'pk': obj.pk}),
+ galaxy_credentials=self.reverse('api:organization_galaxy_credentials_list', kwargs={'pk': obj.pk}),
)
if obj.default_environment:
- res['default_environment'] = self.reverse('api:execution_environment_detail',
- kwargs={'pk': obj.default_environment_id})
+ res['default_environment'] = self.reverse('api:execution_environment_detail', kwargs={'pk': obj.default_environment_id})
return res
def get_summary_fields(self, obj):
@@ -1289,16 +1295,14 @@ class OrganizationSerializer(BaseSerializer):
counts_dict = self.context.get('related_field_counts', None)
if counts_dict is not None and summary_dict is not None:
if obj.id not in counts_dict:
- summary_dict['related_field_counts'] = {
- 'inventories': 0, 'teams': 0, 'users': 0,
- 'job_templates': 0, 'admins': 0, 'projects': 0}
+ summary_dict['related_field_counts'] = {'inventories': 0, 'teams': 0, 'users': 0, 'job_templates': 0, 'admins': 0, 'projects': 0}
else:
summary_dict['related_field_counts'] = counts_dict[obj.id]
# Organization participation roles (admin, member) can't be assigned
# to a team. This provides a hint to the ui so it can know to not
# display these roles for team role selection.
- for key in ('admin_role', 'member_role',):
+ for key in ('admin_role', 'member_role'):
if key in summary_dict.get('object_roles', {}):
summary_dict['object_roles'][key]['user_only'] = True
@@ -1320,16 +1324,25 @@ class OrganizationSerializer(BaseSerializer):
class ProjectOptionsSerializer(BaseSerializer):
-
class Meta:
- fields = ('*', 'local_path', 'scm_type', 'scm_url', 'scm_branch', 'scm_refspec',
- 'scm_clean', 'scm_delete_on_update', 'credential', 'timeout', 'scm_revision')
+ fields = (
+ '*',
+ 'local_path',
+ 'scm_type',
+ 'scm_url',
+ 'scm_branch',
+ 'scm_refspec',
+ 'scm_clean',
+ 'scm_delete_on_update',
+ 'credential',
+ 'timeout',
+ 'scm_revision',
+ )
def get_related(self, obj):
res = super(ProjectOptionsSerializer, self).get_related(obj)
if obj.credential:
- res['credential'] = self.reverse('api:credential_detail',
- kwargs={'pk': obj.credential.pk})
+ res['credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.credential.pk})
return res
def validate(self, attrs):
@@ -1379,8 +1392,7 @@ class ExecutionEnvironmentSerializer(BaseSerializer):
if obj.organization:
res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
if obj.credential:
- res['credential'] = self.reverse('api:credential_detail',
- kwargs={'pk': obj.credential.pk})
+ res['credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.credential.pk})
return res
@@ -1390,50 +1402,54 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
last_update_failed = serializers.BooleanField(read_only=True)
last_updated = serializers.DateTimeField(read_only=True)
show_capabilities = ['start', 'schedule', 'edit', 'delete', 'copy']
- capabilities_prefetch = [
- 'admin', 'update',
- {'copy': 'organization.project_admin'}
- ]
+ capabilities_prefetch = ['admin', 'update', {'copy': 'organization.project_admin'}]
class Meta:
model = Project
- fields = ('*', '-execution_environment', 'organization', 'scm_update_on_launch',
- 'scm_update_cache_timeout', 'allow_override', 'custom_virtualenv', 'default_environment') + \
- ('last_update_failed', 'last_updated') # Backwards compatibility
- read_only_fields = ('*', 'custom_virtualenv',)
+ fields = (
+ '*',
+ '-execution_environment',
+ 'organization',
+ 'scm_update_on_launch',
+ 'scm_update_cache_timeout',
+ 'allow_override',
+ 'custom_virtualenv',
+ 'default_environment',
+ ) + (
+ 'last_update_failed',
+ 'last_updated',
+ ) # Backwards compatibility
+ read_only_fields = ('*', 'custom_virtualenv')
def get_related(self, obj):
res = super(ProjectSerializer, self).get_related(obj)
- res.update(dict(
- teams = self.reverse('api:project_teams_list', kwargs={'pk': obj.pk}),
- playbooks = self.reverse('api:project_playbooks', kwargs={'pk': obj.pk}),
- inventory_files = self.reverse('api:project_inventories', kwargs={'pk': obj.pk}),
- update = self.reverse('api:project_update_view', kwargs={'pk': obj.pk}),
- project_updates = self.reverse('api:project_updates_list', kwargs={'pk': obj.pk}),
- scm_inventory_sources = self.reverse('api:project_scm_inventory_sources', kwargs={'pk': obj.pk}),
- schedules = self.reverse('api:project_schedules_list', kwargs={'pk': obj.pk}),
- activity_stream = self.reverse('api:project_activity_stream_list', kwargs={'pk': obj.pk}),
- notification_templates_started = self.reverse('api:project_notification_templates_started_list', kwargs={'pk': obj.pk}),
- notification_templates_success = self.reverse('api:project_notification_templates_success_list', kwargs={'pk': obj.pk}),
- notification_templates_error = self.reverse('api:project_notification_templates_error_list', kwargs={'pk': obj.pk}),
- access_list = self.reverse('api:project_access_list', kwargs={'pk': obj.pk}),
- object_roles = self.reverse('api:project_object_roles_list', kwargs={'pk': obj.pk}),
- copy = self.reverse('api:project_copy', kwargs={'pk': obj.pk})
-
- ))
+ res.update(
+ dict(
+ teams=self.reverse('api:project_teams_list', kwargs={'pk': obj.pk}),
+ playbooks=self.reverse('api:project_playbooks', kwargs={'pk': obj.pk}),
+ inventory_files=self.reverse('api:project_inventories', kwargs={'pk': obj.pk}),
+ update=self.reverse('api:project_update_view', kwargs={'pk': obj.pk}),
+ project_updates=self.reverse('api:project_updates_list', kwargs={'pk': obj.pk}),
+ scm_inventory_sources=self.reverse('api:project_scm_inventory_sources', kwargs={'pk': obj.pk}),
+ schedules=self.reverse('api:project_schedules_list', kwargs={'pk': obj.pk}),
+ activity_stream=self.reverse('api:project_activity_stream_list', kwargs={'pk': obj.pk}),
+ notification_templates_started=self.reverse('api:project_notification_templates_started_list', kwargs={'pk': obj.pk}),
+ notification_templates_success=self.reverse('api:project_notification_templates_success_list', kwargs={'pk': obj.pk}),
+ notification_templates_error=self.reverse('api:project_notification_templates_error_list', kwargs={'pk': obj.pk}),
+ access_list=self.reverse('api:project_access_list', kwargs={'pk': obj.pk}),
+ object_roles=self.reverse('api:project_object_roles_list', kwargs={'pk': obj.pk}),
+ copy=self.reverse('api:project_copy', kwargs={'pk': obj.pk}),
+ )
+ )
if obj.organization:
- res['organization'] = self.reverse('api:organization_detail',
- kwargs={'pk': obj.organization.pk})
+ res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
if obj.default_environment:
- res['default_environment'] = self.reverse('api:execution_environment_detail',
- kwargs={'pk': obj.default_environment_id})
+ res['default_environment'] = self.reverse('api:execution_environment_detail', kwargs={'pk': obj.default_environment_id})
# Backwards compatibility.
if obj.current_update:
- res['current_update'] = self.reverse('api:project_update_detail',
- kwargs={'pk': obj.current_update.pk})
+ res['current_update'] = self.reverse('api:project_update_detail', kwargs={'pk': obj.current_update.pk})
if obj.last_update:
- res['last_update'] = self.reverse('api:project_update_detail',
- kwargs={'pk': obj.last_update.pk})
+ res['last_update'] = self.reverse('api:project_update_detail', kwargs={'pk': obj.last_update.pk})
return res
def to_representation(self, obj):
@@ -1450,16 +1466,18 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
# case where user is turning off this project setting
if self.instance.allow_override and not attrs['allow_override']:
used_by = set(
- JobTemplate.objects.filter(
- models.Q(project=self.instance),
- models.Q(ask_scm_branch_on_launch=True) | ~models.Q(scm_branch="")
- ).values_list('pk', flat=True)
+ JobTemplate.objects.filter(models.Q(project=self.instance), models.Q(ask_scm_branch_on_launch=True) | ~models.Q(scm_branch="")).values_list(
+ 'pk', flat=True
+ )
)
if used_by:
- raise serializers.ValidationError({
- 'allow_override': _('One or more job templates depend on branch override behavior for this project (ids: {}).').format(
- ' '.join([str(pk) for pk in used_by])
- )})
+ raise serializers.ValidationError(
+ {
+ 'allow_override': _('One or more job templates depend on branch override behavior for this project (ids: {}).').format(
+ ' '.join([str(pk) for pk in used_by])
+ )
+ }
+ )
if get_field_from_model_or_attrs('scm_type') == '':
for fd in ('scm_update_on_launch', 'scm_delete_on_update', 'scm_clean'):
@@ -1488,9 +1506,7 @@ class ProjectPlaybooksSerializer(ProjectSerializer):
class ProjectInventoriesSerializer(ProjectSerializer):
- inventory_files = serializers.ReadOnlyField(help_text=_(
- 'Array of inventory files and directories available within this project, '
- 'not comprehensive.'))
+ inventory_files = serializers.ReadOnlyField(help_text=_('Array of inventory files and directories available within this project, ' 'not comprehensive.'))
class Meta:
model = Project
@@ -1512,7 +1528,6 @@ class ProjectUpdateViewSerializer(ProjectSerializer):
class ProjectUpdateSerializer(UnifiedJobSerializer, ProjectOptionsSerializer):
-
class Meta:
model = ProjectUpdate
fields = ('*', 'project', 'job_type', 'job_tags', '-controller_node')
@@ -1520,32 +1535,28 @@ class ProjectUpdateSerializer(UnifiedJobSerializer, ProjectOptionsSerializer):
def get_related(self, obj):
res = super(ProjectUpdateSerializer, self).get_related(obj)
try:
- res.update(dict(
- project = self.reverse('api:project_detail', kwargs={'pk': obj.project.pk}),
- ))
+ res.update(dict(project=self.reverse('api:project_detail', kwargs={'pk': obj.project.pk})))
except ObjectDoesNotExist:
pass
- res.update(dict(
- cancel = self.reverse('api:project_update_cancel', kwargs={'pk': obj.pk}),
- scm_inventory_updates = self.reverse('api:project_update_scm_inventory_updates', kwargs={'pk': obj.pk}),
- notifications = self.reverse('api:project_update_notifications_list', kwargs={'pk': obj.pk}),
- events = self.reverse('api:project_update_events_list', kwargs={'pk': obj.pk}),
- ))
+ res.update(
+ dict(
+ cancel=self.reverse('api:project_update_cancel', kwargs={'pk': obj.pk}),
+ scm_inventory_updates=self.reverse('api:project_update_scm_inventory_updates', kwargs={'pk': obj.pk}),
+ notifications=self.reverse('api:project_update_notifications_list', kwargs={'pk': obj.pk}),
+ events=self.reverse('api:project_update_events_list', kwargs={'pk': obj.pk}),
+ )
+ )
return res
class ProjectUpdateDetailSerializer(ProjectUpdateSerializer):
- host_status_counts = serializers.SerializerMethodField(
- help_text=_('A count of hosts uniquely assigned to each status.'),
- )
- playbook_counts = serializers.SerializerMethodField(
- help_text=_('A count of all plays and tasks for the job run.'),
- )
+ host_status_counts = serializers.SerializerMethodField(help_text=_('A count of hosts uniquely assigned to each status.'))
+ playbook_counts = serializers.SerializerMethodField(help_text=_('A count of all plays and tasks for the job run.'))
class Meta:
model = ProjectUpdate
- fields = ('*', 'host_status_counts', 'playbook_counts',)
+ fields = ('*', 'host_status_counts', 'playbook_counts')
def get_playbook_counts(self, obj):
task_count = obj.project_update_events.filter(event='playbook_on_task_start').count()
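[Note on the hunk above] Note the trailing comma in `('*', 'host_status_counts', 'playbook_counts',)` disappearing as the tuple collapses to one line. Current black treats a pre-existing trailing comma as "magic" and keeps such a collection exploded; since this diff collapses these tuples instead, the formatter was presumably run at a version or in a mode without that behavior — an inference from the diff, not stated in the commit. The observable rule:

# Before: single-line tuple with a redundant trailing comma.
fields = ('*', 'host_status_counts', 'playbook_counts',)

# After: the comma is dropped once the tuple fits on one line.
fields = ('*', 'host_status_counts', 'playbook_counts')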
@@ -1565,7 +1576,6 @@ class ProjectUpdateDetailSerializer(ProjectUpdateSerializer):
class ProjectUpdateListSerializer(ProjectUpdateSerializer, UnifiedJobListSerializer):
-
class Meta:
model = ProjectUpdate
fields = ('*', '-controller_node') # field removal undone by UJ serializer
@@ -1580,46 +1590,54 @@ class ProjectUpdateCancelSerializer(ProjectUpdateSerializer):
class BaseSerializerWithVariables(BaseSerializer):
-
def validate_variables(self, value):
return vars_validate_or_raise(value)
class InventorySerializer(BaseSerializerWithVariables):
show_capabilities = ['edit', 'delete', 'adhoc', 'copy']
- capabilities_prefetch = [
- 'admin', 'adhoc',
- {'copy': 'organization.inventory_admin'}
- ]
-
+ capabilities_prefetch = ['admin', 'adhoc', {'copy': 'organization.inventory_admin'}]
class Meta:
model = Inventory
- fields = ('*', 'organization', 'kind', 'host_filter', 'variables', 'has_active_failures',
- 'total_hosts', 'hosts_with_active_failures', 'total_groups',
- 'has_inventory_sources', 'total_inventory_sources',
- 'inventory_sources_with_failures', 'insights_credential',
- 'pending_deletion',)
+ fields = (
+ '*',
+ 'organization',
+ 'kind',
+ 'host_filter',
+ 'variables',
+ 'has_active_failures',
+ 'total_hosts',
+ 'hosts_with_active_failures',
+ 'total_groups',
+ 'has_inventory_sources',
+ 'total_inventory_sources',
+ 'inventory_sources_with_failures',
+ 'insights_credential',
+ 'pending_deletion',
+ )
def get_related(self, obj):
res = super(InventorySerializer, self).get_related(obj)
- res.update(dict(
- hosts = self.reverse('api:inventory_hosts_list', kwargs={'pk': obj.pk}),
- groups = self.reverse('api:inventory_groups_list', kwargs={'pk': obj.pk}),
- root_groups = self.reverse('api:inventory_root_groups_list', kwargs={'pk': obj.pk}),
- variable_data = self.reverse('api:inventory_variable_data', kwargs={'pk': obj.pk}),
- script = self.reverse('api:inventory_script_view', kwargs={'pk': obj.pk}),
- tree = self.reverse('api:inventory_tree_view', kwargs={'pk': obj.pk}),
- inventory_sources = self.reverse('api:inventory_inventory_sources_list', kwargs={'pk': obj.pk}),
- update_inventory_sources = self.reverse('api:inventory_inventory_sources_update', kwargs={'pk': obj.pk}),
- activity_stream = self.reverse('api:inventory_activity_stream_list', kwargs={'pk': obj.pk}),
- job_templates = self.reverse('api:inventory_job_template_list', kwargs={'pk': obj.pk}),
- ad_hoc_commands = self.reverse('api:inventory_ad_hoc_commands_list', kwargs={'pk': obj.pk}),
- access_list = self.reverse('api:inventory_access_list', kwargs={'pk': obj.pk}),
- object_roles = self.reverse('api:inventory_object_roles_list', kwargs={'pk': obj.pk}),
- instance_groups = self.reverse('api:inventory_instance_groups_list', kwargs={'pk': obj.pk}),
- copy = self.reverse('api:inventory_copy', kwargs={'pk': obj.pk})
- ))
+ res.update(
+ dict(
+ hosts=self.reverse('api:inventory_hosts_list', kwargs={'pk': obj.pk}),
+ groups=self.reverse('api:inventory_groups_list', kwargs={'pk': obj.pk}),
+ root_groups=self.reverse('api:inventory_root_groups_list', kwargs={'pk': obj.pk}),
+ variable_data=self.reverse('api:inventory_variable_data', kwargs={'pk': obj.pk}),
+ script=self.reverse('api:inventory_script_view', kwargs={'pk': obj.pk}),
+ tree=self.reverse('api:inventory_tree_view', kwargs={'pk': obj.pk}),
+ inventory_sources=self.reverse('api:inventory_inventory_sources_list', kwargs={'pk': obj.pk}),
+ update_inventory_sources=self.reverse('api:inventory_inventory_sources_update', kwargs={'pk': obj.pk}),
+ activity_stream=self.reverse('api:inventory_activity_stream_list', kwargs={'pk': obj.pk}),
+ job_templates=self.reverse('api:inventory_job_template_list', kwargs={'pk': obj.pk}),
+ ad_hoc_commands=self.reverse('api:inventory_ad_hoc_commands_list', kwargs={'pk': obj.pk}),
+ access_list=self.reverse('api:inventory_access_list', kwargs={'pk': obj.pk}),
+ object_roles=self.reverse('api:inventory_object_roles_list', kwargs={'pk': obj.pk}),
+ instance_groups=self.reverse('api:inventory_instance_groups_list', kwargs={'pk': obj.pk}),
+ copy=self.reverse('api:inventory_copy', kwargs={'pk': obj.pk}),
+ )
+ )
if obj.insights_credential:
res['insights_credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.insights_credential.pk})
if obj.organization:
@@ -1640,13 +1658,8 @@ class InventorySerializer(BaseSerializerWithVariables):
# __exact is allowed
continue
match = '__{}'.format(match)
- if re.match(
- 'ansible_facts[^=]+{}='.format(match),
- host_filter
- ):
- raise models.base.ValidationError({
- 'host_filter': 'ansible_facts does not support searching with {}'.format(match)
- })
+ if re.match('ansible_facts[^=]+{}='.format(match), host_filter):
+ raise models.base.ValidationError({'host_filter': 'ansible_facts does not support searching with {}'.format(match)})
SmartFilter().query_from_string(host_filter)
except RuntimeError as e:
raise models.base.ValidationError(str(e))
@@ -1666,13 +1679,11 @@ class InventorySerializer(BaseSerializerWithVariables):
host_filter = self.instance.host_filter
if kind == 'smart' and not host_filter:
- raise serializers.ValidationError({'host_filter': _(
- 'Smart inventories must specify host_filter')})
+ raise serializers.ValidationError({'host_filter': _('Smart inventories must specify host_filter')})
return super(InventorySerializer, self).validate(attrs)
class InventoryScriptSerializer(InventorySerializer):
-
class Meta:
fields = ()
@@ -1686,11 +1697,20 @@ class HostSerializer(BaseSerializerWithVariables):
class Meta:
model = Host
- fields = ('*', 'inventory', 'enabled', 'instance_id', 'variables',
- 'has_active_failures', 'has_inventory_sources', 'last_job',
- 'last_job_host_summary', 'insights_system_id', 'ansible_facts_modified',)
- read_only_fields = ('last_job', 'last_job_host_summary', 'insights_system_id',
- 'ansible_facts_modified',)
+ fields = (
+ '*',
+ 'inventory',
+ 'enabled',
+ 'instance_id',
+ 'variables',
+ 'has_active_failures',
+ 'has_inventory_sources',
+ 'last_job',
+ 'last_job_host_summary',
+ 'insights_system_id',
+ 'ansible_facts_modified',
+ )
+ read_only_fields = ('last_job', 'last_job_host_summary', 'insights_system_id', 'ansible_facts_modified')
def build_relational_field(self, field_name, relation_info):
field_class, field_kwargs = super(HostSerializer, self).build_relational_field(field_name, relation_info)
@@ -1702,20 +1722,22 @@ class HostSerializer(BaseSerializerWithVariables):
def get_related(self, obj):
res = super(HostSerializer, self).get_related(obj)
- res.update(dict(
- variable_data = self.reverse('api:host_variable_data', kwargs={'pk': obj.pk}),
- groups = self.reverse('api:host_groups_list', kwargs={'pk': obj.pk}),
- all_groups = self.reverse('api:host_all_groups_list', kwargs={'pk': obj.pk}),
- job_events = self.reverse('api:host_job_events_list', kwargs={'pk': obj.pk}),
- job_host_summaries = self.reverse('api:host_job_host_summaries_list', kwargs={'pk': obj.pk}),
- activity_stream = self.reverse('api:host_activity_stream_list', kwargs={'pk': obj.pk}),
- inventory_sources = self.reverse('api:host_inventory_sources_list', kwargs={'pk': obj.pk}),
- smart_inventories = self.reverse('api:host_smart_inventories_list', kwargs={'pk': obj.pk}),
- ad_hoc_commands = self.reverse('api:host_ad_hoc_commands_list', kwargs={'pk': obj.pk}),
- ad_hoc_command_events = self.reverse('api:host_ad_hoc_command_events_list', kwargs={'pk': obj.pk}),
- insights = self.reverse('api:host_insights', kwargs={'pk': obj.pk}),
- ansible_facts = self.reverse('api:host_ansible_facts_detail', kwargs={'pk': obj.pk}),
- ))
+ res.update(
+ dict(
+ variable_data=self.reverse('api:host_variable_data', kwargs={'pk': obj.pk}),
+ groups=self.reverse('api:host_groups_list', kwargs={'pk': obj.pk}),
+ all_groups=self.reverse('api:host_all_groups_list', kwargs={'pk': obj.pk}),
+ job_events=self.reverse('api:host_job_events_list', kwargs={'pk': obj.pk}),
+ job_host_summaries=self.reverse('api:host_job_host_summaries_list', kwargs={'pk': obj.pk}),
+ activity_stream=self.reverse('api:host_activity_stream_list', kwargs={'pk': obj.pk}),
+ inventory_sources=self.reverse('api:host_inventory_sources_list', kwargs={'pk': obj.pk}),
+ smart_inventories=self.reverse('api:host_smart_inventories_list', kwargs={'pk': obj.pk}),
+ ad_hoc_commands=self.reverse('api:host_ad_hoc_commands_list', kwargs={'pk': obj.pk}),
+ ad_hoc_command_events=self.reverse('api:host_ad_hoc_command_events_list', kwargs={'pk': obj.pk}),
+ insights=self.reverse('api:host_insights', kwargs={'pk': obj.pk}),
+ ansible_facts=self.reverse('api:host_ansible_facts_detail', kwargs={'pk': obj.pk}),
+ )
+ )
if obj.inventory:
res['inventory'] = self.reverse('api:inventory_detail', kwargs={'pk': obj.inventory.pk})
if obj.last_job:
@@ -1737,16 +1759,19 @@ class HostSerializer(BaseSerializerWithVariables):
group_list = [{'id': g.id, 'name': g.name} for g in obj.groups.all().order_by('id')[:5]]
group_cnt = obj.groups.count()
d.setdefault('groups', {'count': group_cnt, 'results': group_list})
- d.setdefault('recent_jobs', [{
- 'id': j.job.id,
- 'name': j.job.job_template.name if j.job.job_template is not None else "",
- 'type': j.job.job_type_name,
- 'status': j.job.status,
- 'finished': j.job.finished,
- } for j in obj.job_host_summaries.select_related('job__job_template').order_by('-created').defer(
- 'job__extra_vars',
- 'job__artifacts',
- )[:5]])
+ d.setdefault(
+ 'recent_jobs',
+ [
+ {
+ 'id': j.job.id,
+ 'name': j.job.job_template.name if j.job.job_template is not None else "",
+ 'type': j.job.job_type_name,
+ 'status': j.job.status,
+ 'finished': j.job.finished,
+ }
+ for j in obj.job_host_summaries.select_related('job__job_template').order_by('-created').defer('job__extra_vars', 'job__artifacts')[:5]
+ ],
+ )
return d
def _get_host_port_from_name(self, name):
@@ -1805,9 +1830,7 @@ class HostSerializer(BaseSerializerWithVariables):
return ret
def get_has_active_failures(self, obj):
- return bool(
- obj.last_job_host_summary and obj.last_job_host_summary.failed
- )
+ return bool(obj.last_job_host_summary and obj.last_job_host_summary.failed)
def get_has_inventory_sources(self, obj):
return obj.inventory_sources.exists()
@@ -1839,18 +1862,20 @@ class GroupSerializer(BaseSerializerWithVariables):
def get_related(self, obj):
res = super(GroupSerializer, self).get_related(obj)
- res.update(dict(
- variable_data = self.reverse('api:group_variable_data', kwargs={'pk': obj.pk}),
- hosts = self.reverse('api:group_hosts_list', kwargs={'pk': obj.pk}),
- potential_children = self.reverse('api:group_potential_children_list', kwargs={'pk': obj.pk}),
- children = self.reverse('api:group_children_list', kwargs={'pk': obj.pk}),
- all_hosts = self.reverse('api:group_all_hosts_list', kwargs={'pk': obj.pk}),
- job_events = self.reverse('api:group_job_events_list', kwargs={'pk': obj.pk}),
- job_host_summaries = self.reverse('api:group_job_host_summaries_list', kwargs={'pk': obj.pk}),
- activity_stream = self.reverse('api:group_activity_stream_list', kwargs={'pk': obj.pk}),
- inventory_sources = self.reverse('api:group_inventory_sources_list', kwargs={'pk': obj.pk}),
- ad_hoc_commands = self.reverse('api:group_ad_hoc_commands_list', kwargs={'pk': obj.pk}),
- ))
+ res.update(
+ dict(
+ variable_data=self.reverse('api:group_variable_data', kwargs={'pk': obj.pk}),
+ hosts=self.reverse('api:group_hosts_list', kwargs={'pk': obj.pk}),
+ potential_children=self.reverse('api:group_potential_children_list', kwargs={'pk': obj.pk}),
+ children=self.reverse('api:group_children_list', kwargs={'pk': obj.pk}),
+ all_hosts=self.reverse('api:group_all_hosts_list', kwargs={'pk': obj.pk}),
+ job_events=self.reverse('api:group_job_events_list', kwargs={'pk': obj.pk}),
+ job_host_summaries=self.reverse('api:group_job_host_summaries_list', kwargs={'pk': obj.pk}),
+ activity_stream=self.reverse('api:group_activity_stream_list', kwargs={'pk': obj.pk}),
+ inventory_sources=self.reverse('api:group_inventory_sources_list', kwargs={'pk': obj.pk}),
+ ad_hoc_commands=self.reverse('api:group_ad_hoc_commands_list', kwargs={'pk': obj.pk}),
+ )
+ )
if obj.inventory:
res['inventory'] = self.reverse('api:inventory_detail', kwargs={'pk': obj.inventory.pk})
return res
@@ -1897,7 +1922,6 @@ class GroupTreeSerializer(GroupSerializer):
class BaseVariableDataSerializer(BaseSerializer):
-
class Meta:
fields = ('variables',)
@@ -1913,19 +1937,16 @@ class BaseVariableDataSerializer(BaseSerializer):
class InventoryVariableDataSerializer(BaseVariableDataSerializer):
-
class Meta:
model = Inventory
class HostVariableDataSerializer(BaseVariableDataSerializer):
-
class Meta:
model = Host
class GroupVariableDataSerializer(BaseVariableDataSerializer):
-
class Meta:
model = Group
@@ -1934,9 +1955,7 @@ class CustomInventoryScriptSerializer(BaseSerializer):
script = serializers.CharField(trim_whitespace=False)
show_capabilities = ['edit', 'delete', 'copy']
- capabilities_prefetch = [
- {'edit': 'admin'}
- ]
+ capabilities_prefetch = [{'edit': 'admin'}]
class Meta:
model = CustomInventoryScript
@@ -1952,19 +1971,23 @@ class CustomInventoryScriptSerializer(BaseSerializer):
if obj is None:
return ret
request = self.context.get('request', None)
- if request.user not in obj.admin_role and \
- not request.user.is_superuser and \
- not request.user.is_system_auditor and \
- not (obj.organization is not None and request.user in obj.organization.auditor_role):
+ if (
+ request.user not in obj.admin_role
+ and not request.user.is_superuser
+ and not request.user.is_system_auditor
+ and not (obj.organization is not None and request.user in obj.organization.auditor_role)
+ ):
ret['script'] = None
return ret
def get_related(self, obj):
res = super(CustomInventoryScriptSerializer, self).get_related(obj)
- res.update(dict(
- object_roles = self.reverse('api:inventory_script_object_roles_list', kwargs={'pk': obj.pk}),
- copy = self.reverse('api:inventory_script_copy', kwargs={'pk': obj.pk}),
- ))
+ res.update(
+ dict(
+ object_roles=self.reverse('api:inventory_script_object_roles_list', kwargs={'pk': obj.pk}),
+ copy=self.reverse('api:inventory_script_copy', kwargs={'pk': obj.pk}),
+ )
+ )
if obj.organization:
res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
@@ -1972,21 +1995,31 @@ class CustomInventoryScriptSerializer(BaseSerializer):
class InventorySourceOptionsSerializer(BaseSerializer):
- credential = DeprecatedCredentialField(
- help_text=_('Cloud credential to use for inventory updates.')
- )
+ credential = DeprecatedCredentialField(help_text=_('Cloud credential to use for inventory updates.'))
class Meta:
- fields = ('*', 'source', 'source_path', 'source_script', 'source_vars', 'credential',
- 'enabled_var', 'enabled_value', 'host_filter', 'overwrite', 'overwrite_vars',
- 'custom_virtualenv', 'timeout', 'verbosity')
- read_only_fields = ('*', 'custom_virtualenv',)
+ fields = (
+ '*',
+ 'source',
+ 'source_path',
+ 'source_script',
+ 'source_vars',
+ 'credential',
+ 'enabled_var',
+ 'enabled_value',
+ 'host_filter',
+ 'overwrite',
+ 'overwrite_vars',
+ 'custom_virtualenv',
+ 'timeout',
+ 'verbosity',
+ )
+ read_only_fields = ('*', 'custom_virtualenv')
def get_related(self, obj):
res = super(InventorySourceOptionsSerializer, self).get_related(obj)
if obj.credential: # TODO: remove when 'credential' field is removed
- res['credential'] = self.reverse('api:credential_detail',
- kwargs={'pk': obj.credential})
+ res['credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.credential})
if obj.source_script:
res['source_script'] = self.reverse('api:inventory_script_detail', kwargs={'pk': obj.source_script.pk})
return res
@@ -2033,10 +2066,7 @@ class InventorySourceOptionsSerializer(BaseSerializer):
if 'credential' in summary_fields:
cred = obj.get_cloud_credential()
if cred:
- summarized_cred = {
- 'id': cred.id, 'name': cred.name, 'description': cred.description,
- 'kind': cred.kind, 'cloud': True
- }
+ summarized_cred = {'id': cred.id, 'name': cred.name, 'description': cred.description, 'kind': cred.kind, 'cloud': True}
summary_fields['credential'] = summarized_cred
all_creds.append(summarized_cred)
summary_fields['credential']['credential_type_id'] = cred.credential_type_id
@@ -2052,44 +2082,40 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
last_update_failed = serializers.BooleanField(read_only=True)
last_updated = serializers.DateTimeField(read_only=True)
show_capabilities = ['start', 'schedule', 'edit', 'delete']
- capabilities_prefetch = [
- {'admin': 'inventory.admin'},
- {'start': 'inventory.update'}
- ]
+ capabilities_prefetch = [{'admin': 'inventory.admin'}, {'start': 'inventory.update'}]
class Meta:
model = InventorySource
- fields = ('*', 'name', 'inventory', 'update_on_launch', 'update_cache_timeout',
- 'source_project', 'update_on_project_update') + \
- ('last_update_failed', 'last_updated') # Backwards compatibility.
- extra_kwargs = {
- 'inventory': {'required': True}
- }
+ fields = ('*', 'name', 'inventory', 'update_on_launch', 'update_cache_timeout', 'source_project', 'update_on_project_update') + (
+ 'last_update_failed',
+ 'last_updated',
+ ) # Backwards compatibility.
+ extra_kwargs = {'inventory': {'required': True}}
def get_related(self, obj):
res = super(InventorySourceSerializer, self).get_related(obj)
- res.update(dict(
- update = self.reverse('api:inventory_source_update_view', kwargs={'pk': obj.pk}),
- inventory_updates = self.reverse('api:inventory_source_updates_list', kwargs={'pk': obj.pk}),
- schedules = self.reverse('api:inventory_source_schedules_list', kwargs={'pk': obj.pk}),
- activity_stream = self.reverse('api:inventory_source_activity_stream_list', kwargs={'pk': obj.pk}),
- hosts = self.reverse('api:inventory_source_hosts_list', kwargs={'pk': obj.pk}),
- groups = self.reverse('api:inventory_source_groups_list', kwargs={'pk': obj.pk}),
- notification_templates_started = self.reverse('api:inventory_source_notification_templates_started_list', kwargs={'pk': obj.pk}),
- notification_templates_success = self.reverse('api:inventory_source_notification_templates_success_list', kwargs={'pk': obj.pk}),
- notification_templates_error = self.reverse('api:inventory_source_notification_templates_error_list', kwargs={'pk': obj.pk}),
- ))
+ res.update(
+ dict(
+ update=self.reverse('api:inventory_source_update_view', kwargs={'pk': obj.pk}),
+ inventory_updates=self.reverse('api:inventory_source_updates_list', kwargs={'pk': obj.pk}),
+ schedules=self.reverse('api:inventory_source_schedules_list', kwargs={'pk': obj.pk}),
+ activity_stream=self.reverse('api:inventory_source_activity_stream_list', kwargs={'pk': obj.pk}),
+ hosts=self.reverse('api:inventory_source_hosts_list', kwargs={'pk': obj.pk}),
+ groups=self.reverse('api:inventory_source_groups_list', kwargs={'pk': obj.pk}),
+ notification_templates_started=self.reverse('api:inventory_source_notification_templates_started_list', kwargs={'pk': obj.pk}),
+ notification_templates_success=self.reverse('api:inventory_source_notification_templates_success_list', kwargs={'pk': obj.pk}),
+ notification_templates_error=self.reverse('api:inventory_source_notification_templates_error_list', kwargs={'pk': obj.pk}),
+ )
+ )
if obj.inventory:
res['inventory'] = self.reverse('api:inventory_detail', kwargs={'pk': obj.inventory.pk})
if obj.source_project_id is not None:
res['source_project'] = self.reverse('api:project_detail', kwargs={'pk': obj.source_project.pk})
# Backwards compatibility.
if obj.current_update:
- res['current_update'] = self.reverse('api:inventory_update_detail',
- kwargs={'pk': obj.current_update.pk})
+ res['current_update'] = self.reverse('api:inventory_update_detail', kwargs={'pk': obj.current_update.pk})
if obj.last_update:
- res['last_update'] = self.reverse('api:inventory_update_detail',
- kwargs={'pk': obj.last_update.pk})
+ res['last_update'] = self.reverse('api:inventory_update_detail', kwargs={'pk': obj.last_update.pk})
else:
res['credentials'] = self.reverse('api:inventory_source_credentials_list', kwargs={'pk': obj.pk})
return res
@@ -2175,20 +2201,12 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
return attrs.get(fd, self.instance and getattr(self.instance, fd) or None)
if get_field_from_model_or_attrs('source') == 'scm':
- if (('source' in attrs or 'source_project' in attrs) and
- get_field_from_model_or_attrs('source_project') is None):
- raise serializers.ValidationError(
- {"source_project": _("Project required for scm type sources.")}
- )
+ if ('source' in attrs or 'source_project' in attrs) and get_field_from_model_or_attrs('source_project') is None:
+ raise serializers.ValidationError({"source_project": _("Project required for scm type sources.")})
else:
- redundant_scm_fields = list(filter(
- lambda x: attrs.get(x, None),
- ['source_project', 'source_path', 'update_on_project_update']
- ))
+ redundant_scm_fields = list(filter(lambda x: attrs.get(x, None), ['source_project', 'source_path', 'update_on_project_update']))
if redundant_scm_fields:
- raise serializers.ValidationError(
- {"detail": _("Cannot set %s if not SCM type." % ' '.join(redundant_scm_fields))}
- )
+ raise serializers.ValidationError({"detail": _("Cannot set %s if not SCM type." % ' '.join(redundant_scm_fields))})
attrs = super(InventorySourceSerializer, self).validate(attrs)
@@ -2201,10 +2219,7 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
view = self.context.get('view', None)
if (not view) or (not view.request) or (view.request.user not in cred.use_role):
raise PermissionDenied()
- cred_error = InventorySource.cloud_credential_validation(
- get_field_from_model_or_attrs('source'),
- cred
- )
+ cred_error = InventorySource.cloud_credential_validation(get_field_from_model_or_attrs('source'), cred)
if cred_error:
raise serializers.ValidationError({"credential": cred_error})
@@ -2225,27 +2240,32 @@ class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSeri
class Meta:
model = InventoryUpdate
- fields = ('*', 'inventory', 'inventory_source', 'license_error', 'org_host_limit_error',
- 'source_project_update', 'custom_virtualenv', '-controller_node',)
+ fields = (
+ '*',
+ 'inventory',
+ 'inventory_source',
+ 'license_error',
+ 'org_host_limit_error',
+ 'source_project_update',
+ 'custom_virtualenv',
+ '-controller_node',
+ )
def get_related(self, obj):
res = super(InventoryUpdateSerializer, self).get_related(obj)
try:
- res.update(dict(
- inventory_source = self.reverse(
- 'api:inventory_source_detail', kwargs={'pk': obj.inventory_source.pk}
- ),
- ))
+ res.update(dict(inventory_source=self.reverse('api:inventory_source_detail', kwargs={'pk': obj.inventory_source.pk})))
except ObjectDoesNotExist:
pass
- res.update(dict(
- cancel = self.reverse('api:inventory_update_cancel', kwargs={'pk': obj.pk}),
- notifications = self.reverse('api:inventory_update_notifications_list', kwargs={'pk': obj.pk}),
- events = self.reverse('api:inventory_update_events_list', kwargs={'pk': obj.pk}),
- ))
+ res.update(
+ dict(
+ cancel=self.reverse('api:inventory_update_cancel', kwargs={'pk': obj.pk}),
+ notifications=self.reverse('api:inventory_update_notifications_list', kwargs={'pk': obj.pk}),
+ events=self.reverse('api:inventory_update_events_list', kwargs={'pk': obj.pk}),
+ )
+ )
if obj.source_project_update_id:
- res['source_project_update'] = self.reverse('api:project_update_detail',
- kwargs={'pk': obj.source_project_update.pk})
+ res['source_project_update'] = self.reverse('api:project_update_detail', kwargs={'pk': obj.source_project_update.pk})
if obj.inventory:
res['inventory'] = self.reverse('api:inventory_detail', kwargs={'pk': obj.inventory.pk})
@@ -2256,14 +2276,11 @@ class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSeri
class InventoryUpdateDetailSerializer(InventoryUpdateSerializer):
- source_project = serializers.SerializerMethodField(
- help_text=_('The project used for this job.'),
- method_name='get_source_project_id'
- )
+ source_project = serializers.SerializerMethodField(help_text=_('The project used for this job.'), method_name='get_source_project_id')
class Meta:
model = InventoryUpdate
- fields = ('*', 'source_project',)
+ fields = ('*', 'source_project')
def get_source_project(self, obj):
return getattrd(obj, 'source_project_update.unified_job_template', None)
@@ -2297,14 +2314,13 @@ class InventoryUpdateDetailSerializer(InventoryUpdateSerializer):
'name': cred.name,
'description': cred.description,
'kind': cred.kind,
- 'cloud': cred.credential_type.kind == 'cloud'
+ 'cloud': cred.credential_type.kind == 'cloud',
}
return summary_fields
class InventoryUpdateListSerializer(InventoryUpdateSerializer, UnifiedJobListSerializer):
-
class Meta:
model = InventoryUpdate
fields = ('*', '-controller_node') # field removal undone by UJ serializer
@@ -2327,17 +2343,19 @@ class TeamSerializer(BaseSerializer):
def get_related(self, obj):
res = super(TeamSerializer, self).get_related(obj)
- res.update(dict(
- projects = self.reverse('api:team_projects_list', kwargs={'pk': obj.pk}),
- users = self.reverse('api:team_users_list', kwargs={'pk': obj.pk}),
- credentials = self.reverse('api:team_credentials_list', kwargs={'pk': obj.pk}),
- roles = self.reverse('api:team_roles_list', kwargs={'pk': obj.pk}),
- object_roles = self.reverse('api:team_object_roles_list', kwargs={'pk': obj.pk}),
- activity_stream = self.reverse('api:team_activity_stream_list', kwargs={'pk': obj.pk}),
- access_list = self.reverse('api:team_access_list', kwargs={'pk': obj.pk}),
- ))
+ res.update(
+ dict(
+ projects=self.reverse('api:team_projects_list', kwargs={'pk': obj.pk}),
+ users=self.reverse('api:team_users_list', kwargs={'pk': obj.pk}),
+ credentials=self.reverse('api:team_credentials_list', kwargs={'pk': obj.pk}),
+ roles=self.reverse('api:team_roles_list', kwargs={'pk': obj.pk}),
+ object_roles=self.reverse('api:team_object_roles_list', kwargs={'pk': obj.pk}),
+ activity_stream=self.reverse('api:team_activity_stream_list', kwargs={'pk': obj.pk}),
+ access_list=self.reverse('api:team_access_list', kwargs={'pk': obj.pk}),
+ )
+ )
if obj.organization:
- res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
+ res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
return res
def to_representation(self, obj):
@@ -2348,7 +2366,6 @@ class TeamSerializer(BaseSerializer):
class RoleSerializer(BaseSerializer):
-
class Meta:
model = Role
fields = ('*', '-created', '-modified')
@@ -2393,7 +2410,7 @@ class ResourceAccessListElementSerializer(UserSerializer):
show_capabilities = [] # Clear fields from UserSerializer parent class
def to_representation(self, user):
- '''
+ """
With this method we derive "direct" and "indirect" access lists. Contained
in the direct access list are all the roles the user is a member of, and
all of the roles that are directly granted to any teams that the user is a
@@ -2402,7 +2419,7 @@ class ResourceAccessListElementSerializer(UserSerializer):
The indirect access list is a list of all of the roles that the user is
a member of that are ancestors of any roles that grant permissions to
the resource.
- '''
+ """
ret = super(ResourceAccessListElementSerializer, self).to_representation(user)
obj = self.context['view'].get_parent_object()
if self.context['view'].request is not None:
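[Note on the hunk above] Two string-quoting details are visible here and throughout the diff: the `to_representation` docstring switches from `'''` to the triple double quotes PEP 257 recommends, while ordinary single-quoted literals are left alone everywhere — stock black rewrites those to double quotes, so the run was presumably configured to skip string normalization (again an inference from the output, not stated in the commit). For example:

def to_representation(user):
    """Summarize a user; docstrings use triple double quotes, per PEP 257."""
    return {'username': user}  # ordinary 'single-quoted' literals stay single-quoted here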
@@ -2414,7 +2431,7 @@ class ResourceAccessListElementSerializer(UserSerializer):
ret['summary_fields'] = {}
def format_role_perm(role):
- role_dict = { 'id': role.id, 'name': role.name, 'description': role.description}
+ role_dict = {'id': role.id, 'name': role.name, 'description': role.description}
try:
role_dict['resource_name'] = role.content_object.name
role_dict['resource_type'] = get_type_for_model(role.content_type.model_class())
@@ -2422,12 +2439,13 @@ class ResourceAccessListElementSerializer(UserSerializer):
except AttributeError:
pass
if role.content_type is not None:
- role_dict['user_capabilities'] = {'unattach': requesting_user.can_access(
- Role, 'unattach', role, user, 'members', data={}, skip_sub_obj_read_check=False)}
+ role_dict['user_capabilities'] = {
+ 'unattach': requesting_user.can_access(Role, 'unattach', role, user, 'members', data={}, skip_sub_obj_read_check=False)
+ }
else:
# Singleton roles should not be managed from this view, as per copy/edit rework spec
role_dict['user_capabilities'] = {'unattach': False}
- return { 'role': role_dict, 'descendant_roles': get_roles_on_resource(obj, role)}
+ return {'role': role_dict, 'descendant_roles': get_roles_on_resource(obj, role)}
def format_team_role_perm(naive_team_role, permissive_role_ids):
ret = []
@@ -2447,12 +2465,13 @@ class ResourceAccessListElementSerializer(UserSerializer):
role_dict['resource_name'] = role.content_object.name
role_dict['resource_type'] = get_type_for_model(role.content_type.model_class())
role_dict['related'] = reverse_gfk(role.content_object, self.context.get('request'))
- role_dict['user_capabilities'] = {'unattach': requesting_user.can_access(
- Role, 'unattach', role, team_role, 'parents', data={}, skip_sub_obj_read_check=False)}
+ role_dict['user_capabilities'] = {
+ 'unattach': requesting_user.can_access(Role, 'unattach', role, team_role, 'parents', data={}, skip_sub_obj_read_check=False)
+ }
else:
# Singleton roles should not be managed from this view, as per copy/edit rework spec
role_dict['user_capabilities'] = {'unattach': False}
- ret.append({ 'role': role_dict, 'descendant_roles': get_roles_on_resource(obj, team_role)})
+ ret.append({'role': role_dict, 'descendant_roles': get_roles_on_resource(obj, team_role)})
return ret
team_content_type = ContentType.objects.get_for_model(Team)
@@ -2461,42 +2480,33 @@ class ResourceAccessListElementSerializer(UserSerializer):
direct_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('id', flat=True)
all_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('ancestors__id', flat=True)
- direct_access_roles = user.roles \
- .filter(id__in=direct_permissive_role_ids).all()
+ direct_access_roles = user.roles.filter(id__in=direct_permissive_role_ids).all()
- direct_team_roles = Role.objects \
- .filter(content_type=team_content_type,
- members=user,
- children__in=direct_permissive_role_ids)
+ direct_team_roles = Role.objects.filter(content_type=team_content_type, members=user, children__in=direct_permissive_role_ids)
if content_type == team_content_type:
# When looking at the access list for a team, exclude the entries
# for that team. This exists primarily so we don't list the read role
# as a direct role when a user is a member or admin of a team
- direct_team_roles = direct_team_roles.exclude(
- children__content_type=team_content_type,
- children__object_id=obj.id
- )
-
+ direct_team_roles = direct_team_roles.exclude(children__content_type=team_content_type, children__object_id=obj.id)
- indirect_team_roles = Role.objects \
- .filter(content_type=team_content_type,
- members=user,
- children__in=all_permissive_role_ids) \
- .exclude(id__in=direct_team_roles)
+ indirect_team_roles = Role.objects.filter(content_type=team_content_type, members=user, children__in=all_permissive_role_ids).exclude(
+ id__in=direct_team_roles
+ )
- indirect_access_roles = user.roles \
- .filter(id__in=all_permissive_role_ids) \
- .exclude(id__in=direct_permissive_role_ids) \
- .exclude(id__in=direct_team_roles) \
- .exclude(id__in=indirect_team_roles)
+ indirect_access_roles = (
+ user.roles.filter(id__in=all_permissive_role_ids)
+ .exclude(id__in=direct_permissive_role_ids)
+ .exclude(id__in=direct_team_roles)
+ .exclude(id__in=indirect_team_roles)
+ )
- ret['summary_fields']['direct_access'] \
- = [format_role_perm(r) for r in direct_access_roles.distinct()] \
- + [y for x in (format_team_role_perm(r, direct_permissive_role_ids) for r in direct_team_roles.distinct()) for y in x] \
+ ret['summary_fields']['direct_access'] = (
+ [format_role_perm(r) for r in direct_access_roles.distinct()]
+ + [y for x in (format_team_role_perm(r, direct_permissive_role_ids) for r in direct_team_roles.distinct()) for y in x]
+ [y for x in (format_team_role_perm(r, all_permissive_role_ids) for r in indirect_team_roles.distinct()) for y in x]
+ )
- ret['summary_fields']['indirect_access'] \
- = [format_role_perm(r) for r in indirect_access_roles.distinct()]
+ ret['summary_fields']['indirect_access'] = [format_role_perm(r) for r in indirect_access_roles.distinct()]
return ret
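[Note on the hunk above] This hunk is the clearest illustration in the section: every backslash-continued statement — the chained `user.roles.filter(...).exclude(...)` querysets, the multi-line `if` condition earlier in `CustomInventoryScriptSerializer`, and the `summary_fields` list concatenations — is rewritten as a parenthesized expression, because black never emits backslash continuations. A sketch of the transformation on a plain list pipeline:

values = [3, 1, 2]

# Before: backslash continuations.
result = sorted(values) \
    + [10] \
    + [20]

# After black: one parenthesized expression, broken before each operator.
result = (
    sorted(values)
    + [10]
    + [20]
)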
@@ -2507,14 +2517,11 @@ class CredentialTypeSerializer(BaseSerializer):
class Meta:
model = CredentialType
- fields = ('*', 'kind', 'namespace', 'name', 'managed_by_tower', 'inputs',
- 'injectors')
+ fields = ('*', 'kind', 'namespace', 'name', 'managed_by_tower', 'inputs', 'injectors')
def validate(self, attrs):
if self.instance and self.instance.managed_by_tower:
- raise PermissionDenied(
- detail=_("Modifications not allowed for managed credential types")
- )
+ raise PermissionDenied(detail=_("Modifications not allowed for managed credential types"))
old_inputs = {}
if self.instance:
@@ -2524,14 +2531,10 @@ class CredentialTypeSerializer(BaseSerializer):
if self.instance and self.instance.credentials.exists():
if 'inputs' in attrs and old_inputs != self.instance.inputs:
- raise PermissionDenied(
- detail= _("Modifications to inputs are not allowed for credential types that are in use")
- )
+ raise PermissionDenied(detail=_("Modifications to inputs are not allowed for credential types that are in use"))
if 'kind' in attrs and attrs['kind'] not in ('cloud', 'net'):
- raise serializers.ValidationError({
- "kind": _("Must be 'cloud' or 'net', not %s") % attrs['kind']
- })
+ raise serializers.ValidationError({"kind": _("Must be 'cloud' or 'net', not %s") % attrs['kind']})
fields = attrs.get('inputs', {}).get('fields', [])
for field in fields:
@@ -2542,14 +2545,8 @@ class CredentialTypeSerializer(BaseSerializer):
def get_related(self, obj):
res = super(CredentialTypeSerializer, self).get_related(obj)
- res['credentials'] = self.reverse(
- 'api:credential_type_credential_list',
- kwargs={'pk': obj.pk}
- )
- res['activity_stream'] = self.reverse(
- 'api:credential_type_activity_stream_list',
- kwargs={'pk': obj.pk}
- )
+ res['credentials'] = self.reverse('api:credential_type_credential_list', kwargs={'pk': obj.pk})
+ res['activity_stream'] = self.reverse('api:credential_type_activity_stream_list', kwargs={'pk': obj.pk})
return res
def to_representation(self, data):
@@ -2568,10 +2565,7 @@ class CredentialTypeSerializer(BaseSerializer):
# API-created/modified CredentialType kinds are limited to
# `cloud` and `net`
if method in ('PUT', 'POST'):
- fields['kind']['choices'] = list(filter(
- lambda choice: choice[0] in ('cloud', 'net'),
- fields['kind']['choices']
- ))
+ fields['kind']['choices'] = list(filter(lambda choice: choice[0] in ('cloud', 'net'), fields['kind']['choices']))
return fields
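The effect of the choices filter above, sketched standalone; the choice tuples are illustrative values, not the full CredentialType kind list:

choices = [('ssh', 'Machine'), ('cloud', 'Cloud'), ('net', 'Network'), ('vault', 'Vault')]
writable = list(filter(lambda choice: choice[0] in ('cloud', 'net'), choices))
assert writable == [('cloud', 'Cloud'), ('net', 'Network')]  # PUT/POST only see these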
@@ -2583,11 +2577,7 @@ class CredentialSerializer(BaseSerializer):
class Meta:
model = Credential
fields = ('*', 'organization', 'credential_type', 'managed_by_tower', 'inputs', 'kind', 'cloud', 'kubernetes')
- extra_kwargs = {
- 'credential_type': {
- 'label': _('Credential Type'),
- },
- }
+ extra_kwargs = {'credential_type': {'label': _('Credential Type')}}
def to_representation(self, data):
value = super(CredentialSerializer, self).to_representation(data)
@@ -2602,20 +2592,22 @@ class CredentialSerializer(BaseSerializer):
if obj.organization:
res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
- res.update(dict(
- activity_stream = self.reverse('api:credential_activity_stream_list', kwargs={'pk': obj.pk}),
- access_list = self.reverse('api:credential_access_list', kwargs={'pk': obj.pk}),
- object_roles = self.reverse('api:credential_object_roles_list', kwargs={'pk': obj.pk}),
- owner_users = self.reverse('api:credential_owner_users_list', kwargs={'pk': obj.pk}),
- owner_teams = self.reverse('api:credential_owner_teams_list', kwargs={'pk': obj.pk}),
- copy = self.reverse('api:credential_copy', kwargs={'pk': obj.pk}),
- input_sources = self.reverse('api:credential_input_source_sublist', kwargs={'pk': obj.pk}),
- credential_type = self.reverse('api:credential_type_detail', kwargs={'pk': obj.credential_type.pk}),
- ))
+ res.update(
+ dict(
+ activity_stream=self.reverse('api:credential_activity_stream_list', kwargs={'pk': obj.pk}),
+ access_list=self.reverse('api:credential_access_list', kwargs={'pk': obj.pk}),
+ object_roles=self.reverse('api:credential_object_roles_list', kwargs={'pk': obj.pk}),
+ owner_users=self.reverse('api:credential_owner_users_list', kwargs={'pk': obj.pk}),
+ owner_teams=self.reverse('api:credential_owner_teams_list', kwargs={'pk': obj.pk}),
+ copy=self.reverse('api:credential_copy', kwargs={'pk': obj.pk}),
+ input_sources=self.reverse('api:credential_input_source_sublist', kwargs={'pk': obj.pk}),
+ credential_type=self.reverse('api:credential_type_detail', kwargs={'pk': obj.credential_type.pk}),
+ )
+ )
parents = [role for role in obj.admin_role.parents.all() if role.object_id is not None]
if parents:
- res.update({parents[0].content_type.name:parents[0].content_object.get_absolute_url(self.context.get('request'))})
+ res.update({parents[0].content_type.name: parents[0].content_object.get_absolute_url(self.context.get('request'))})
elif len(obj.admin_role.members.all()) > 0:
user = obj.admin_role.members.all()[0]
res.update({'user': self.reverse('api:user_detail', kwargs={'pk': user.pk})})
@@ -2627,30 +2619,32 @@ class CredentialSerializer(BaseSerializer):
summary_dict['owners'] = []
for user in obj.admin_role.members.all():
- summary_dict['owners'].append({
- 'id': user.pk,
- 'type': 'user',
- 'name': user.username,
- 'description': ' '.join([user.first_name, user.last_name]),
- 'url': self.reverse('api:user_detail', kwargs={'pk': user.pk}),
- })
+ summary_dict['owners'].append(
+ {
+ 'id': user.pk,
+ 'type': 'user',
+ 'name': user.username,
+ 'description': ' '.join([user.first_name, user.last_name]),
+ 'url': self.reverse('api:user_detail', kwargs={'pk': user.pk}),
+ }
+ )
for parent in [role for role in obj.admin_role.parents.all() if role.object_id is not None]:
- summary_dict['owners'].append({
- 'id': parent.content_object.pk,
- 'type': camelcase_to_underscore(parent.content_object.__class__.__name__),
- 'name': parent.content_object.name,
- 'description': parent.content_object.description,
- 'url': parent.content_object.get_absolute_url(self.context.get('request')),
- })
+ summary_dict['owners'].append(
+ {
+ 'id': parent.content_object.pk,
+ 'type': camelcase_to_underscore(parent.content_object.__class__.__name__),
+ 'name': parent.content_object.name,
+ 'description': parent.content_object.description,
+ 'url': parent.content_object.get_absolute_url(self.context.get('request')),
+ }
+ )
return summary_dict
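For reference, the shape of one summary_fields['owners'] entry built above, sketched with invented literal values; team or organization owners use the same keys with their content type name underscored:

owner_entry = {
    'id': 7,
    'type': 'user',
    'name': 'alice',
    'description': 'Alice Example',
    'url': '/api/v2/users/7/',
}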
def validate(self, attrs):
if self.instance and self.instance.managed_by_tower:
- raise PermissionDenied(
- detail=_("Modifications not allowed for managed credentials")
- )
+ raise PermissionDenied(detail=_("Modifications not allowed for managed credentials"))
return super(CredentialSerializer, self).validate(attrs)
def get_validation_exclusions(self, obj=None):
@@ -2661,14 +2655,8 @@ class CredentialSerializer(BaseSerializer):
return ret
def validate_organization(self, org):
- if (
- self.instance and
- self.instance.credential_type.kind == 'galaxy' and
- org is None
- ):
- raise serializers.ValidationError(_(
- "Galaxy credentials must be owned by an Organization."
- ))
+ if self.instance and self.instance.credential_type.kind == 'galaxy' and org is None:
+ raise serializers.ValidationError(_("Galaxy credentials must be owned by an Organization."))
return org
def validate_credential_type(self, credential_type):
@@ -2680,12 +2668,11 @@ class CredentialSerializer(BaseSerializer):
'unifiedjobtemplates',
'projects',
'projectupdates',
- 'workflowjobnodes'
+ 'workflowjobnodes',
):
if getattr(self.instance, related_objects).count() > 0:
raise ValidationError(
- _('You cannot change the credential type of the credential, as it may break the functionality'
- ' of the resources using it.'),
+ _('You cannot change the credential type of the credential, as it may break the functionality' ' of the resources using it.')
)
return credential_type
@@ -2695,19 +2682,27 @@ class CredentialSerializerCreate(CredentialSerializer):
user = serializers.PrimaryKeyRelatedField(
queryset=User.objects.all(),
- required=False, default=None, write_only=True, allow_null=True,
- help_text=_('Write-only field used to add user to owner role. If provided, '
- 'do not give either team or organization. Only valid for creation.'))
+ required=False,
+ default=None,
+ write_only=True,
+ allow_null=True,
+ help_text=_('Write-only field used to add user to owner role. If provided, ' 'do not give either team or organization. Only valid for creation.'),
+ )
team = serializers.PrimaryKeyRelatedField(
queryset=Team.objects.all(),
- required=False, default=None, write_only=True, allow_null=True,
- help_text=_('Write-only field used to add team to owner role. If provided, '
- 'do not give either user or organization. Only valid for creation.'))
+ required=False,
+ default=None,
+ write_only=True,
+ allow_null=True,
+ help_text=_('Write-only field used to add team to owner role. If provided, ' 'do not give either user or organization. Only valid for creation.'),
+ )
organization = serializers.PrimaryKeyRelatedField(
queryset=Organization.objects.all(),
- required=False, default=None, allow_null=True,
- help_text=_('Inherit permissions from organization roles. If provided on creation, '
- 'do not give either user or team.'))
+ required=False,
+ default=None,
+ allow_null=True,
+ help_text=_('Inherit permissions from organization roles. If provided on creation, ' 'do not give either user or team.'),
+ )
class Meta:
model = Credential
@@ -2727,22 +2722,15 @@ class CredentialSerializerCreate(CredentialSerializer):
if len(owner_fields) > 1:
received = ", ".join(sorted(owner_fields))
- raise serializers.ValidationError({"detail": _(
- "Only one of 'user', 'team', or 'organization' should be provided, "
- "received {} fields.".format(received)
- )})
+ raise serializers.ValidationError(
+ {"detail": _("Only one of 'user', 'team', or 'organization' should be provided, " "received {} fields.".format(received))}
+ )
if attrs.get('team'):
attrs['organization'] = attrs['team'].organization
- if (
- 'credential_type' in attrs and
- attrs['credential_type'].kind == 'galaxy' and
- list(owner_fields) != ['organization']
- ):
- raise serializers.ValidationError({"organization": _(
- "Galaxy credentials must be owned by an Organization."
- )})
+ if 'credential_type' in attrs and attrs['credential_type'].kind == 'galaxy' and list(owner_fields) != ['organization']:
+ raise serializers.ValidationError({"organization": _("Galaxy credentials must be owned by an Organization.")})
return super(CredentialSerializerCreate, self).validate(attrs)
@@ -2767,19 +2755,8 @@ class CredentialInputSourceSerializer(BaseSerializer):
class Meta:
model = CredentialInputSource
- fields = (
- '*',
- 'input_field_name',
- 'metadata',
- 'target_credential',
- 'source_credential',
- '-name',
- )
- extra_kwargs = {
- 'input_field_name': {'required': True},
- 'target_credential': {'required': True},
- 'source_credential': {'required': True},
- }
+ fields = ('*', 'input_field_name', 'metadata', 'target_credential', 'source_credential', '-name')
+ extra_kwargs = {'input_field_name': {'required': True}, 'target_credential': {'required': True}, 'source_credential': {'required': True}}
def get_related(self, obj):
res = super(CredentialInputSourceSerializer, self).get_related(obj)
@@ -2789,28 +2766,24 @@ class CredentialInputSourceSerializer(BaseSerializer):
class UserCredentialSerializerCreate(CredentialSerializerCreate):
-
class Meta:
model = Credential
fields = ('*', '-team', '-organization')
class TeamCredentialSerializerCreate(CredentialSerializerCreate):
-
class Meta:
model = Credential
fields = ('*', '-user', '-organization')
class OrganizationCredentialSerializerCreate(CredentialSerializerCreate):
-
class Meta:
model = Credential
fields = ('*', '-user', '-team')
class LabelsListMixin(object):
-
def _summary_field_labels(self, obj):
label_list = [{'id': x.id, 'name': x.name} for x in obj.labels.all()[:10]]
if has_model_field_prefetched(obj, 'labels'):
@@ -2829,12 +2802,26 @@ class LabelsListMixin(object):
class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
-
class Meta:
- fields = ('*', 'job_type', 'inventory', 'project', 'playbook', 'scm_branch',
- 'forks', 'limit', 'verbosity', 'extra_vars', 'job_tags',
- 'force_handlers', 'skip_tags', 'start_at_task', 'timeout',
- 'use_fact_cache', 'organization',)
+ fields = (
+ '*',
+ 'job_type',
+ 'inventory',
+ 'project',
+ 'playbook',
+ 'scm_branch',
+ 'forks',
+ 'limit',
+ 'verbosity',
+ 'extra_vars',
+ 'job_tags',
+ 'force_handlers',
+ 'skip_tags',
+ 'start_at_task',
+ 'timeout',
+ 'use_fact_cache',
+ 'organization',
+ )
read_only_fields = ('organization',)
def get_related(self, obj):
@@ -2853,10 +2840,7 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
if obj.organization_id:
res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization_id})
if isinstance(obj, UnifiedJobTemplate):
- res['credentials'] = self.reverse(
- 'api:job_template_credentials_list',
- kwargs={'pk': obj.pk}
- )
+ res['credentials'] = self.reverse('api:job_template_credentials_list', kwargs={'pk': obj.pk})
elif isinstance(obj, UnifiedJob):
res['credentials'] = self.reverse('api:job_credentials_list', kwargs={'pk': obj.pk})
@@ -2879,16 +2863,12 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
project = attrs.get('project', self.instance.project if self.instance else None)
playbook = attrs.get('playbook', self.instance and self.instance.playbook or '')
scm_branch = attrs.get('scm_branch', self.instance.scm_branch if self.instance else None)
- ask_scm_branch_on_launch = attrs.get(
- 'ask_scm_branch_on_launch', self.instance.ask_scm_branch_on_launch if self.instance else None)
+ ask_scm_branch_on_launch = attrs.get('ask_scm_branch_on_launch', self.instance.ask_scm_branch_on_launch if self.instance else None)
if not project:
raise serializers.ValidationError({'project': _('This field is required.')})
playbook_not_found = bool(
- (
- project and project.scm_type and (not project.allow_override) and
- playbook and force_text(playbook) not in project.playbook_files
- ) or
- (project and not project.scm_type and playbook and force_text(playbook) not in project.playbooks) # manual
+ (project and project.scm_type and (not project.allow_override) and playbook and force_text(playbook) not in project.playbook_files)
+ or (project and not project.scm_type and playbook and force_text(playbook) not in project.playbooks) # manual
)
if playbook_not_found:
raise serializers.ValidationError({'playbook': _('Playbook not found for project.')})
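The reformatted boolean above encodes two cases; a standalone sketch with plain arguments standing in for the Project model fields:

def playbook_not_found(scm_type, allow_override, playbook, known_playbooks):
    if not playbook:
        return False
    if scm_type and not allow_override:
        return playbook not in known_playbooks  # SCM project with a pinned branch
    if not scm_type:
        return playbook not in known_playbooks  # manual project
    return False  # SCM project allowing branch override: defer the check to launch

assert playbook_not_found('git', False, 'site.yml', ['deploy.yml'])
assert not playbook_not_found('git', True, 'site.yml', ['deploy.yml'])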
@@ -2904,9 +2884,9 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
class JobTemplateMixin(object):
- '''
+ """
Provide recent jobs and survey details in summary_fields
- '''
+ """
def _recent_jobs(self, obj):
# Exclude "joblets", jobs that ran as part of a sliced workflow job
@@ -2914,11 +2894,17 @@ class JobTemplateMixin(object):
# Would like to apply an .only, but does not play well with non_polymorphic
# .only('id', 'status', 'finished', 'polymorphic_ctype_id')
optimized_qs = uj_qs.non_polymorphic()
- return [{
- 'id': x.id, 'status': x.status, 'finished': x.finished, 'canceled_on': x.canceled_on,
- # Make type consistent with API top-level key, for instance workflow_job
- 'type': x.job_type_name
- } for x in optimized_qs[:10]]
+ return [
+ {
+ 'id': x.id,
+ 'status': x.status,
+ 'finished': x.finished,
+ 'canceled_on': x.canceled_on,
+ # Make type consistent with API top-level key, for instance workflow_job
+ 'type': x.job_type_name,
+ }
+ for x in optimized_qs[:10]
+ ]
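The shape of one entry the comprehension above produces, with invented values; 'type' mirrors the API top-level key, for instance workflow_job:

recent_job = {
    'id': 42,
    'status': 'successful',
    'finished': '2021-03-19T17:44:51Z',  # a datetime in practice
    'canceled_on': None,
    'type': 'workflow_job',
}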
def get_summary_fields(self, obj):
d = super(JobTemplateMixin, self).get_summary_fields(obj)
@@ -2933,9 +2919,7 @@ class JobTemplateMixin(object):
if webhook_credential:
if webhook_credential.credential_type.kind != 'token':
- raise serializers.ValidationError({
- 'webhook_credential': _("Must be a Personal Access Token."),
- })
+ raise serializers.ValidationError({'webhook_credential': _("Must be a Personal Access Token.")})
msg = {'webhook_credential': _("Must match the selected webhook service.")}
if webhook_service:
@@ -2949,24 +2933,35 @@ class JobTemplateMixin(object):
class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobOptionsSerializer):
show_capabilities = ['start', 'schedule', 'copy', 'edit', 'delete']
- capabilities_prefetch = [
- 'admin', 'execute',
- {'copy': ['project.use', 'inventory.use']}
- ]
+ capabilities_prefetch = ['admin', 'execute', {'copy': ['project.use', 'inventory.use']}]
status = serializers.ChoiceField(choices=JobTemplate.JOB_TEMPLATE_STATUS_CHOICES, read_only=True, required=False)
class Meta:
model = JobTemplate
fields = (
- '*', 'host_config_key', 'ask_scm_branch_on_launch', 'ask_diff_mode_on_launch',
- 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch',
- 'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch',
- 'ask_inventory_on_launch', 'ask_credential_on_launch', 'survey_enabled',
- 'become_enabled', 'diff_mode', 'allow_simultaneous', 'custom_virtualenv',
- 'job_slice_count', 'webhook_service', 'webhook_credential',
+ '*',
+ 'host_config_key',
+ 'ask_scm_branch_on_launch',
+ 'ask_diff_mode_on_launch',
+ 'ask_variables_on_launch',
+ 'ask_limit_on_launch',
+ 'ask_tags_on_launch',
+ 'ask_skip_tags_on_launch',
+ 'ask_job_type_on_launch',
+ 'ask_verbosity_on_launch',
+ 'ask_inventory_on_launch',
+ 'ask_credential_on_launch',
+ 'survey_enabled',
+ 'become_enabled',
+ 'diff_mode',
+ 'allow_simultaneous',
+ 'custom_virtualenv',
+ 'job_slice_count',
+ 'webhook_service',
+ 'webhook_credential',
)
- read_only_fields = ('*', 'custom_virtualenv',)
+ read_only_fields = ('*', 'custom_virtualenv')
def get_related(self, obj):
res = super(JobTemplateSerializer, self).get_related(obj)
@@ -2977,14 +2972,14 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
launch=self.reverse('api:job_template_launch', kwargs={'pk': obj.pk}),
webhook_key=self.reverse('api:webhook_key', kwargs={'model_kwarg': 'job_templates', 'pk': obj.pk}),
webhook_receiver=(
- self.reverse('api:webhook_receiver_{}'.format(obj.webhook_service),
- kwargs={'model_kwarg': 'job_templates', 'pk': obj.pk})
- if obj.webhook_service else ''
+ self.reverse('api:webhook_receiver_{}'.format(obj.webhook_service), kwargs={'model_kwarg': 'job_templates', 'pk': obj.pk})
+ if obj.webhook_service
+ else ''
),
notification_templates_started=self.reverse('api:job_template_notification_templates_started_list', kwargs={'pk': obj.pk}),
notification_templates_success=self.reverse('api:job_template_notification_templates_success_list', kwargs={'pk': obj.pk}),
notification_templates_error=self.reverse('api:job_template_notification_templates_error_list', kwargs={'pk': obj.pk}),
- access_list=self.reverse('api:job_template_access_list', kwargs={'pk': obj.pk}),
+ access_list=self.reverse('api:job_template_access_list', kwargs={'pk': obj.pk}),
survey_spec=self.reverse('api:job_template_survey_spec', kwargs={'pk': obj.pk}),
labels=self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk}),
object_roles=self.reverse('api:job_template_object_roles_list', kwargs={'pk': obj.pk}),
@@ -2995,7 +2990,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
if obj.host_config_key:
res['callback'] = self.reverse('api:job_template_callback', kwargs={'pk': obj.pk})
if obj.organization_id:
- res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization_id})
+ res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization_id})
if obj.webhook_credential_id:
res['webhook_credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.webhook_credential_id})
return res
@@ -3008,9 +3003,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
project = get_field_from_model_or_attrs('project')
if get_field_from_model_or_attrs('host_config_key') and not inventory:
- raise serializers.ValidationError({'host_config_key': _(
- "Cannot enable provisioning callback without an inventory set."
- )})
+ raise serializers.ValidationError({'host_config_key': _("Cannot enable provisioning callback without an inventory set.")})
prompting_error_message = _("Must either set a default value or ask to prompt on launch.")
if project is None:
@@ -3034,7 +3027,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
'name': cred.name,
'description': cred.description,
'kind': cred.kind,
- 'cloud': cred.credential_type.kind == 'cloud'
+ 'cloud': cred.credential_type.kind == 'cloud',
}
all_creds.append(summarized_cred)
summary_fields['credentials'] = all_creds
@@ -3042,9 +3035,9 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
class JobTemplateWithSpecSerializer(JobTemplateSerializer):
- '''
+ """
Used for activity stream entries.
- '''
+ """
class Meta:
model = JobTemplate
@@ -3059,34 +3052,43 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
class Meta:
model = Job
fields = (
- '*', 'job_template', 'passwords_needed_to_start', 'allow_simultaneous',
- 'artifacts', 'scm_revision', 'instance_group', 'diff_mode', 'job_slice_number',
- 'job_slice_count', 'webhook_service', 'webhook_credential', 'webhook_guid',
+ '*',
+ 'job_template',
+ 'passwords_needed_to_start',
+ 'allow_simultaneous',
+ 'artifacts',
+ 'scm_revision',
+ 'instance_group',
+ 'diff_mode',
+ 'job_slice_number',
+ 'job_slice_count',
+ 'webhook_service',
+ 'webhook_credential',
+ 'webhook_guid',
)
def get_related(self, obj):
res = super(JobSerializer, self).get_related(obj)
- res.update(dict(
- job_events = self.reverse('api:job_job_events_list', kwargs={'pk': obj.pk}),
- job_host_summaries = self.reverse('api:job_job_host_summaries_list', kwargs={'pk': obj.pk}),
- activity_stream = self.reverse('api:job_activity_stream_list', kwargs={'pk': obj.pk}),
- notifications = self.reverse('api:job_notifications_list', kwargs={'pk': obj.pk}),
- labels = self.reverse('api:job_label_list', kwargs={'pk': obj.pk}),
- create_schedule = self.reverse('api:job_create_schedule', kwargs={'pk': obj.pk}),
- ))
+ res.update(
+ dict(
+ job_events=self.reverse('api:job_job_events_list', kwargs={'pk': obj.pk}),
+ job_host_summaries=self.reverse('api:job_job_host_summaries_list', kwargs={'pk': obj.pk}),
+ activity_stream=self.reverse('api:job_activity_stream_list', kwargs={'pk': obj.pk}),
+ notifications=self.reverse('api:job_notifications_list', kwargs={'pk': obj.pk}),
+ labels=self.reverse('api:job_label_list', kwargs={'pk': obj.pk}),
+ create_schedule=self.reverse('api:job_create_schedule', kwargs={'pk': obj.pk}),
+ )
+ )
try:
if obj.job_template:
- res['job_template'] = self.reverse('api:job_template_detail',
- kwargs={'pk': obj.job_template.pk})
+ res['job_template'] = self.reverse('api:job_template_detail', kwargs={'pk': obj.job_template.pk})
except ObjectDoesNotExist:
setattr(obj, 'job_template', None)
if obj.can_cancel or True:
res['cancel'] = self.reverse('api:job_cancel', kwargs={'pk': obj.pk})
try:
if obj.project_update:
- res['project_update'] = self.reverse(
- 'api:project_update_detail', kwargs={'pk': obj.project_update.pk}
- )
+ res['project_update'] = self.reverse('api:project_update_detail', kwargs={'pk': obj.project_update.pk})
except ObjectDoesNotExist:
pass
res['relaunch'] = self.reverse('api:job_relaunch', kwargs={'pk': obj.pk})
@@ -3118,7 +3120,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
'name': cred.name,
'description': cred.description,
'kind': cred.kind,
- 'cloud': cred.credential_type.kind == 'cloud'
+ 'cloud': cred.credential_type.kind == 'cloud',
}
all_creds.append(summarized_cred)
summary_fields['credentials'] = all_creds
@@ -3127,12 +3129,8 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
class JobDetailSerializer(JobSerializer):
- host_status_counts = serializers.SerializerMethodField(
- help_text=_('A count of hosts uniquely assigned to each status.'),
- )
- playbook_counts = serializers.SerializerMethodField(
- help_text=_('A count of all plays and tasks for the job run.'),
- )
+ host_status_counts = serializers.SerializerMethodField(help_text=_('A count of hosts uniquely assigned to each status.'))
+ playbook_counts = serializers.SerializerMethodField(help_text=_('A count of all plays and tasks for the job run.'))
custom_virtualenv = serializers.ReadOnlyField()
class Meta:
@@ -3170,26 +3168,23 @@ class JobRelaunchSerializer(BaseSerializer):
passwords_needed_to_start = serializers.SerializerMethodField()
retry_counts = serializers.SerializerMethodField()
hosts = serializers.ChoiceField(
- required=False, allow_null=True, default='all',
- choices=[
- ('all', _('No change to job limit')),
- ('failed', _('All failed and unreachable hosts'))
- ],
- write_only=True
+ required=False,
+ allow_null=True,
+ default='all',
+ choices=[('all', _('No change to job limit')), ('failed', _('All failed and unreachable hosts'))],
+ write_only=True,
)
credential_passwords = VerbatimField(required=True, write_only=True)
class Meta:
model = Job
- fields = ('passwords_needed_to_start', 'retry_counts', 'hosts', 'credential_passwords',)
+ fields = ('passwords_needed_to_start', 'retry_counts', 'hosts', 'credential_passwords')
def validate_credential_passwords(self, value):
pnts = self.instance.passwords_needed_to_start
missing = set(pnts) - set(key for key in value if value[key])
if missing:
- raise serializers.ValidationError(_(
- 'Missing passwords needed to start: {}'.format(', '.join(missing))
- ))
+ raise serializers.ValidationError(_('Missing passwords needed to start: {}'.format(', '.join(missing))))
return value
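The set arithmetic above treats blank answers as missing; a standalone sketch with illustrative data:

pnts = ['ssh_password', 'become_password']            # passwords_needed_to_start
value = {'ssh_password': 'hunter2', 'become_password': ''}
missing = set(pnts) - set(key for key in value if value[key])
assert missing == {'become_password'}                 # empty strings still count as missing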
def to_representation(self, obj):
@@ -3235,12 +3230,12 @@ class JobCreateScheduleSerializer(BaseSerializer):
class Meta:
model = Job
- fields = ('can_schedule', 'prompts',)
+ fields = ('can_schedule', 'prompts')
def get_can_schedule(self, obj):
- '''
+ """
Need both a job template and job prompts to schedule
- '''
+ """
return obj.can_schedule
@staticmethod
@@ -3265,17 +3260,25 @@ class JobCreateScheduleSerializer(BaseSerializer):
class AdHocCommandSerializer(UnifiedJobSerializer):
-
class Meta:
model = AdHocCommand
- fields = ('*', 'job_type', 'inventory', 'limit', 'credential',
- 'module_name', 'module_args', 'forks', 'verbosity', 'extra_vars',
- 'become_enabled', 'diff_mode', '-unified_job_template', '-description')
- extra_kwargs = {
- 'name': {
- 'read_only': True,
- },
- }
+ fields = (
+ '*',
+ 'job_type',
+ 'inventory',
+ 'limit',
+ 'credential',
+ 'module_name',
+ 'module_args',
+ 'forks',
+ 'verbosity',
+ 'extra_vars',
+ 'become_enabled',
+ 'diff_mode',
+ '-unified_job_template',
+ '-description',
+ )
+ extra_kwargs = {'name': {'read_only': True}}
def get_field_names(self, declared_fields, info):
field_names = super(AdHocCommandSerializer, self).get_field_names(declared_fields, info)
@@ -3303,11 +3306,13 @@ class AdHocCommandSerializer(UnifiedJobSerializer):
res['inventory'] = self.reverse('api:inventory_detail', kwargs={'pk': obj.inventory_id})
if obj.credential_id:
res['credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.credential_id})
- res.update(dict(
- events = self.reverse('api:ad_hoc_command_ad_hoc_command_events_list', kwargs={'pk': obj.pk}),
- activity_stream = self.reverse('api:ad_hoc_command_activity_stream_list', kwargs={'pk': obj.pk}),
- notifications = self.reverse('api:ad_hoc_command_notifications_list', kwargs={'pk': obj.pk}),
- ))
+ res.update(
+ dict(
+ events=self.reverse('api:ad_hoc_command_ad_hoc_command_events_list', kwargs={'pk': obj.pk}),
+ activity_stream=self.reverse('api:ad_hoc_command_activity_stream_list', kwargs={'pk': obj.pk}),
+ notifications=self.reverse('api:ad_hoc_command_notifications_list', kwargs={'pk': obj.pk}),
+ )
+ )
res['cancel'] = self.reverse('api:ad_hoc_command_cancel', kwargs={'pk': obj.pk})
res['relaunch'] = self.reverse('api:ad_hoc_command_relaunch', kwargs={'pk': obj.pk})
return res
@@ -3331,21 +3336,17 @@ class AdHocCommandSerializer(UnifiedJobSerializer):
def validate_extra_vars(self, value):
redacted_extra_vars, removed_vars = extract_ansible_vars(value)
if removed_vars:
- raise serializers.ValidationError(_(
- "{} are prohibited from use in ad hoc commands."
- ).format(", ".join(sorted(removed_vars, reverse=True))))
+ raise serializers.ValidationError(_("{} are prohibited from use in ad hoc commands.").format(", ".join(sorted(removed_vars, reverse=True))))
return vars_validate_or_raise(value)
class AdHocCommandDetailSerializer(AdHocCommandSerializer):
- host_status_counts = serializers.SerializerMethodField(
- help_text=_('A count of hosts uniquely assigned to each status.'),
- )
+ host_status_counts = serializers.SerializerMethodField(help_text=_('A count of hosts uniquely assigned to each status.'))
class Meta:
model = AdHocCommand
- fields = ('*', 'host_status_counts',)
+ fields = ('*', 'host_status_counts')
def get_host_status_counts(self, obj):
try:
@@ -3365,7 +3366,6 @@ class AdHocCommandCancelSerializer(AdHocCommandSerializer):
class AdHocCommandRelaunchSerializer(AdHocCommandSerializer):
-
class Meta:
fields = ()
@@ -3377,22 +3377,22 @@ class AdHocCommandRelaunchSerializer(AdHocCommandSerializer):
class SystemJobTemplateSerializer(UnifiedJobTemplateSerializer):
-
class Meta:
model = SystemJobTemplate
- fields = ('*', 'job_type',)
+ fields = ('*', 'job_type')
def get_related(self, obj):
res = super(SystemJobTemplateSerializer, self).get_related(obj)
- res.update(dict(
- jobs = self.reverse('api:system_job_template_jobs_list', kwargs={'pk': obj.pk}),
- schedules = self.reverse('api:system_job_template_schedules_list', kwargs={'pk': obj.pk}),
- launch = self.reverse('api:system_job_template_launch', kwargs={'pk': obj.pk}),
- notification_templates_started = self.reverse('api:system_job_template_notification_templates_started_list', kwargs={'pk': obj.pk}),
- notification_templates_success = self.reverse('api:system_job_template_notification_templates_success_list', kwargs={'pk': obj.pk}),
- notification_templates_error = self.reverse('api:system_job_template_notification_templates_error_list', kwargs={'pk': obj.pk}),
-
- ))
+ res.update(
+ dict(
+ jobs=self.reverse('api:system_job_template_jobs_list', kwargs={'pk': obj.pk}),
+ schedules=self.reverse('api:system_job_template_schedules_list', kwargs={'pk': obj.pk}),
+ launch=self.reverse('api:system_job_template_launch', kwargs={'pk': obj.pk}),
+ notification_templates_started=self.reverse('api:system_job_template_notification_templates_started_list', kwargs={'pk': obj.pk}),
+ notification_templates_success=self.reverse('api:system_job_template_notification_templates_success_list', kwargs={'pk': obj.pk}),
+ notification_templates_error=self.reverse('api:system_job_template_notification_templates_error_list', kwargs={'pk': obj.pk}),
+ )
+ )
return res
@@ -3402,13 +3402,12 @@ class SystemJobSerializer(UnifiedJobSerializer):
class Meta:
model = SystemJob
- fields = ('*', 'system_job_template', 'job_type', 'extra_vars', 'result_stdout', '-controller_node',)
+ fields = ('*', 'system_job_template', 'job_type', 'extra_vars', 'result_stdout', '-controller_node')
def get_related(self, obj):
res = super(SystemJobSerializer, self).get_related(obj)
if obj.system_job_template:
- res['system_job_template'] = self.reverse('api:system_job_template_detail',
- kwargs={'pk': obj.system_job_template.pk})
+ res['system_job_template'] = self.reverse('api:system_job_template_detail', kwargs={'pk': obj.system_job_template.pk})
res['notifications'] = self.reverse('api:system_job_notifications_list', kwargs={'pk': obj.pk})
if obj.can_cancel or True:
res['cancel'] = self.reverse('api:system_job_cancel', kwargs={'pk': obj.pk})
@@ -3419,10 +3418,8 @@ class SystemJobSerializer(UnifiedJobSerializer):
try:
return obj.result_stdout
except StdoutMaxBytesExceeded as e:
- return _(
- "Standard Output too large to display ({text_size} bytes), "
- "only download supported for sizes over {supported_size} bytes.").format(
- text_size=e.total, supported_size=e.supported
+ return _("Standard Output too large to display ({text_size} bytes), " "only download supported for sizes over {supported_size} bytes.").format(
+ text_size=e.total, supported_size=e.supported
)
@@ -3436,56 +3433,59 @@ class SystemJobCancelSerializer(SystemJobSerializer):
class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJobTemplateSerializer):
show_capabilities = ['start', 'schedule', 'edit', 'copy', 'delete']
- capabilities_prefetch = [
- 'admin', 'execute',
- {'copy': 'organization.workflow_admin'}
- ]
+ capabilities_prefetch = ['admin', 'execute', {'copy': 'organization.workflow_admin'}]
limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
scm_branch = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
class Meta:
model = WorkflowJobTemplate
fields = (
- '*', 'extra_vars', 'organization', 'survey_enabled', 'allow_simultaneous',
- 'ask_variables_on_launch', 'inventory', 'limit', 'scm_branch',
- 'ask_inventory_on_launch', 'ask_scm_branch_on_launch', 'ask_limit_on_launch',
- 'webhook_service', 'webhook_credential',
+ '*',
+ 'extra_vars',
+ 'organization',
+ 'survey_enabled',
+ 'allow_simultaneous',
+ 'ask_variables_on_launch',
+ 'inventory',
+ 'limit',
+ 'scm_branch',
+ 'ask_inventory_on_launch',
+ 'ask_scm_branch_on_launch',
+ 'ask_limit_on_launch',
+ 'webhook_service',
+ 'webhook_credential',
)
def get_related(self, obj):
res = super(WorkflowJobTemplateSerializer, self).get_related(obj)
res.update(
- workflow_jobs = self.reverse('api:workflow_job_template_jobs_list', kwargs={'pk': obj.pk}),
- schedules = self.reverse('api:workflow_job_template_schedules_list', kwargs={'pk': obj.pk}),
- launch = self.reverse('api:workflow_job_template_launch', kwargs={'pk': obj.pk}),
+ workflow_jobs=self.reverse('api:workflow_job_template_jobs_list', kwargs={'pk': obj.pk}),
+ schedules=self.reverse('api:workflow_job_template_schedules_list', kwargs={'pk': obj.pk}),
+ launch=self.reverse('api:workflow_job_template_launch', kwargs={'pk': obj.pk}),
webhook_key=self.reverse('api:webhook_key', kwargs={'model_kwarg': 'workflow_job_templates', 'pk': obj.pk}),
webhook_receiver=(
- self.reverse('api:webhook_receiver_{}'.format(obj.webhook_service),
- kwargs={'model_kwarg': 'workflow_job_templates', 'pk': obj.pk})
- if obj.webhook_service else ''
+ self.reverse('api:webhook_receiver_{}'.format(obj.webhook_service), kwargs={'model_kwarg': 'workflow_job_templates', 'pk': obj.pk})
+ if obj.webhook_service
+ else ''
),
- workflow_nodes = self.reverse('api:workflow_job_template_workflow_nodes_list', kwargs={'pk': obj.pk}),
- labels = self.reverse('api:workflow_job_template_label_list', kwargs={'pk': obj.pk}),
- activity_stream = self.reverse('api:workflow_job_template_activity_stream_list', kwargs={'pk': obj.pk}),
- notification_templates_started = self.reverse('api:workflow_job_template_notification_templates_started_list', kwargs={'pk': obj.pk}),
- notification_templates_success = self.reverse('api:workflow_job_template_notification_templates_success_list', kwargs={'pk': obj.pk}),
- notification_templates_error = self.reverse('api:workflow_job_template_notification_templates_error_list', kwargs={'pk': obj.pk}),
- notification_templates_approvals = self.reverse('api:workflow_job_template_notification_templates_approvals_list', kwargs={'pk': obj.pk}),
- access_list = self.reverse('api:workflow_job_template_access_list', kwargs={'pk': obj.pk}),
- object_roles = self.reverse('api:workflow_job_template_object_roles_list', kwargs={'pk': obj.pk}),
- survey_spec = self.reverse('api:workflow_job_template_survey_spec', kwargs={'pk': obj.pk}),
- copy = self.reverse('api:workflow_job_template_copy', kwargs={'pk': obj.pk}),
+ workflow_nodes=self.reverse('api:workflow_job_template_workflow_nodes_list', kwargs={'pk': obj.pk}),
+ labels=self.reverse('api:workflow_job_template_label_list', kwargs={'pk': obj.pk}),
+ activity_stream=self.reverse('api:workflow_job_template_activity_stream_list', kwargs={'pk': obj.pk}),
+ notification_templates_started=self.reverse('api:workflow_job_template_notification_templates_started_list', kwargs={'pk': obj.pk}),
+ notification_templates_success=self.reverse('api:workflow_job_template_notification_templates_success_list', kwargs={'pk': obj.pk}),
+ notification_templates_error=self.reverse('api:workflow_job_template_notification_templates_error_list', kwargs={'pk': obj.pk}),
+ notification_templates_approvals=self.reverse('api:workflow_job_template_notification_templates_approvals_list', kwargs={'pk': obj.pk}),
+ access_list=self.reverse('api:workflow_job_template_access_list', kwargs={'pk': obj.pk}),
+ object_roles=self.reverse('api:workflow_job_template_object_roles_list', kwargs={'pk': obj.pk}),
+ survey_spec=self.reverse('api:workflow_job_template_survey_spec', kwargs={'pk': obj.pk}),
+ copy=self.reverse('api:workflow_job_template_copy', kwargs={'pk': obj.pk}),
)
if obj.organization:
- res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
+ res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
if obj.webhook_credential_id:
res['webhook_credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.webhook_credential_id})
if obj.inventory_id:
- res['inventory'] = self.reverse(
- 'api:inventory_detail', kwargs={
- 'pk': obj.inventory_id
- }
- )
+ res['inventory'] = self.reverse('api:inventory_detail', kwargs={'pk': obj.inventory_id})
return res
def validate_extra_vars(self, value):
@@ -3509,9 +3509,9 @@ class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJo
class WorkflowJobTemplateWithSpecSerializer(WorkflowJobTemplateSerializer):
- '''
+ """
Used for activity stream entries.
- '''
+ """
class Meta:
model = WorkflowJobTemplate
@@ -3525,16 +3525,27 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
class Meta:
model = WorkflowJob
fields = (
- '*', 'workflow_job_template', 'extra_vars', 'allow_simultaneous', 'job_template',
- 'is_sliced_job', '-execution_node', '-event_processing_finished', '-controller_node',
- 'inventory', 'limit', 'scm_branch', 'webhook_service', 'webhook_credential', 'webhook_guid',
+ '*',
+ 'workflow_job_template',
+ 'extra_vars',
+ 'allow_simultaneous',
+ 'job_template',
+ 'is_sliced_job',
+ '-execution_node',
+ '-event_processing_finished',
+ '-controller_node',
+ 'inventory',
+ 'limit',
+ 'scm_branch',
+ 'webhook_service',
+ 'webhook_credential',
+ 'webhook_guid',
)
def get_related(self, obj):
res = super(WorkflowJobSerializer, self).get_related(obj)
if obj.workflow_job_template:
- res['workflow_job_template'] = self.reverse('api:workflow_job_template_detail',
- kwargs={'pk': obj.workflow_job_template.pk})
+ res['workflow_job_template'] = self.reverse('api:workflow_job_template_detail', kwargs={'pk': obj.workflow_job_template.pk})
res['notifications'] = self.reverse('api:workflow_job_notifications_list', kwargs={'pk': obj.pk})
if obj.job_template_id:
res['job_template'] = self.reverse('api:job_template_detail', kwargs={'pk': obj.job_template_id})
@@ -3556,9 +3567,8 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer):
-
class Meta:
- fields = ('*', '-execution_node', '-controller_node',)
+ fields = ('*', '-execution_node', '-controller_node')
class WorkflowJobCancelSerializer(WorkflowJobSerializer):
@@ -3570,7 +3580,6 @@ class WorkflowJobCancelSerializer(WorkflowJobSerializer):
class WorkflowApprovalViewSerializer(UnifiedJobSerializer):
-
class Meta:
model = WorkflowApproval
fields = []
@@ -3584,7 +3593,7 @@ class WorkflowApprovalSerializer(UnifiedJobSerializer):
class Meta:
model = WorkflowApproval
- fields = ('*', '-controller_node', '-execution_node', 'can_approve_or_deny', 'approval_expiration', 'timed_out',)
+ fields = ('*', '-controller_node', '-execution_node', 'can_approve_or_deny', 'approval_expiration', 'timed_out')
def get_approval_expiration(self, obj):
if obj.status != 'pending' or obj.timeout == 0:
@@ -3600,8 +3609,7 @@ class WorkflowApprovalSerializer(UnifiedJobSerializer):
res = super(WorkflowApprovalSerializer, self).get_related(obj)
if obj.workflow_approval_template:
- res['workflow_approval_template'] = self.reverse('api:workflow_approval_template_detail',
- kwargs={'pk': obj.workflow_approval_template.pk})
+ res['workflow_approval_template'] = self.reverse('api:workflow_approval_template_detail', kwargs={'pk': obj.workflow_approval_template.pk})
res['approve'] = self.reverse('api:workflow_approval_approve', kwargs={'pk': obj.pk})
res['deny'] = self.reverse('api:workflow_approval_deny', kwargs={'pk': obj.pk})
if obj.approved_or_denied_by:
@@ -3615,56 +3623,60 @@ class WorkflowApprovalActivityStreamSerializer(WorkflowApprovalSerializer):
However, when we generate an activity stream record, we *want* to record
these types of changes. This serializer allows us to do so.
"""
+
status = serializers.ChoiceField(choices=JobTemplate.JOB_TEMPLATE_STATUS_CHOICES)
timed_out = serializers.BooleanField()
-
class WorkflowApprovalListSerializer(WorkflowApprovalSerializer, UnifiedJobListSerializer):
-
class Meta:
- fields = ('*', '-controller_node', '-execution_node', 'can_approve_or_deny', 'approval_expiration', 'timed_out',)
+ fields = ('*', '-controller_node', '-execution_node', 'can_approve_or_deny', 'approval_expiration', 'timed_out')
class WorkflowApprovalTemplateSerializer(UnifiedJobTemplateSerializer):
-
class Meta:
model = WorkflowApprovalTemplate
- fields = ('*', 'timeout', 'name',)
+ fields = ('*', 'timeout', 'name')
def get_related(self, obj):
res = super(WorkflowApprovalTemplateSerializer, self).get_related(obj)
if 'last_job' in res:
del res['last_job']
- res.update(jobs = self.reverse('api:workflow_approval_template_jobs_list', kwargs={'pk': obj.pk}))
+ res.update(jobs=self.reverse('api:workflow_approval_template_jobs_list', kwargs={'pk': obj.pk}))
return res
class LaunchConfigurationBaseSerializer(BaseSerializer):
scm_branch = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
- job_type = serializers.ChoiceField(allow_blank=True, allow_null=True, required=False, default=None,
- choices=NEW_JOB_TYPE_CHOICES)
+ job_type = serializers.ChoiceField(allow_blank=True, allow_null=True, required=False, default=None, choices=NEW_JOB_TYPE_CHOICES)
job_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
skip_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
diff_mode = serializers.NullBooleanField(required=False, default=None)
- verbosity = serializers.ChoiceField(allow_null=True, required=False, default=None,
- choices=VERBOSITY_CHOICES)
+ verbosity = serializers.ChoiceField(allow_null=True, required=False, default=None, choices=VERBOSITY_CHOICES)
exclude_errors = ()
class Meta:
- fields = ('*', 'extra_data', 'inventory', # Saved launch-time config fields
- 'scm_branch', 'job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags', 'diff_mode', 'verbosity')
+ fields = (
+ '*',
+ 'extra_data',
+ 'inventory', # Saved launch-time config fields
+ 'scm_branch',
+ 'job_type',
+ 'job_tags',
+ 'skip_tags',
+ 'limit',
+ 'skip_tags',
+ 'diff_mode',
+ 'verbosity',
+ )
def get_related(self, obj):
res = super(LaunchConfigurationBaseSerializer, self).get_related(obj)
if obj.inventory_id:
res['inventory'] = self.reverse('api:inventory_detail', kwargs={'pk': obj.inventory_id})
- res['credentials'] = self.reverse(
- 'api:{}_credentials_list'.format(get_type_for_model(self.Meta.model)),
- kwargs={'pk': obj.pk}
- )
+ res['credentials'] = self.reverse('api:{}_credentials_list'.format(get_type_for_model(self.Meta.model)), kwargs={'pk': obj.pk})
return res
def _build_mock_obj(self, attrs):
@@ -3738,15 +3750,13 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
# questions not done here or on launch, but doing so could
# leak info about values, so it should not be added
if not ('default' in element and element['default']):
- raise serializers.ValidationError(
- {"extra_data": _('Provided variable {} has no database value to replace with.').format(key)})
+ raise serializers.ValidationError({"extra_data": _('Provided variable {} has no database value to replace with.').format(key)})
else:
attrs['extra_data'][key] = db_extra_data[key]
# Build unsaved version of this config, use it to detect prompts errors
mock_obj = self._build_mock_obj(attrs)
- accepted, rejected, errors = ujt._accept_or_ignore_job_kwargs(
- _exclude_errors=self.exclude_errors, **mock_obj.prompts_dict())
+ accepted, rejected, errors = ujt._accept_or_ignore_job_kwargs(_exclude_errors=self.exclude_errors, **mock_obj.prompts_dict())
# Remove all unprocessed $encrypted$ strings, indicating default usage
if 'extra_data' in attrs and password_dict:
@@ -3756,9 +3766,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
attrs['extra_data'].pop(key)
attrs.get('survey_passwords', {}).pop(key, None)
else:
- errors.setdefault('extra_vars', []).append(
- _('"$encrypted$ is a reserved keyword, may not be used for {}."'.format(key))
- )
+ errors.setdefault('extra_vars', []).append(_('"$encrypted$ is a reserved keyword, may not be used for {}."'.format(key)))
# Launch configs call extra_vars extra_data for historical reasons
if 'extra_vars' in errors:
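How the '$encrypted$' sentinel is consumed above, sketched as a standalone function; scrub_encrypted and password_keys are illustrative stand-ins for the serializer state, not AWX API:

def scrub_encrypted(extra_data, password_keys):
    errors = []
    for key, value in list(extra_data.items()):
        if value == '$encrypted$':
            if key in password_keys:
                extra_data.pop(key)   # fall back to the stored survey answer
            else:
                errors.append(key)    # reserved keyword used for a non-password var
    return errors

data = {'token': '$encrypted$', 'region': '$encrypted$'}
assert scrub_encrypted(data, {'token'}) == ['region']
assert data == {'region': '$encrypted$'}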
@@ -3781,9 +3789,21 @@ class WorkflowJobTemplateNodeSerializer(LaunchConfigurationBaseSerializer):
class Meta:
model = WorkflowJobTemplateNode
- fields = ('*', 'workflow_job_template', '-name', '-description', 'id', 'url', 'related',
- 'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes', 'all_parents_must_converge',
- 'identifier',)
+ fields = (
+ '*',
+ 'workflow_job_template',
+ '-name',
+ '-description',
+ 'id',
+ 'url',
+ 'related',
+ 'unified_job_template',
+ 'success_nodes',
+ 'failure_nodes',
+ 'always_nodes',
+ 'all_parents_must_converge',
+ 'identifier',
+ )
def get_related(self, obj):
res = super(WorkflowJobTemplateNodeSerializer, self).get_related(obj)
@@ -3821,9 +3841,23 @@ class WorkflowJobNodeSerializer(LaunchConfigurationBaseSerializer):
class Meta:
model = WorkflowJobNode
- fields = ('*', 'job', 'workflow_job', '-name', '-description', 'id', 'url', 'related',
- 'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',
- 'all_parents_must_converge', 'do_not_run', 'identifier')
+ fields = (
+ '*',
+ 'job',
+ 'workflow_job',
+ '-name',
+ '-description',
+ 'id',
+ 'url',
+ 'related',
+ 'unified_job_template',
+ 'success_nodes',
+ 'failure_nodes',
+ 'always_nodes',
+ 'all_parents_must_converge',
+ 'do_not_run',
+ 'identifier',
+ )
def get_related(self, obj):
res = super(WorkflowJobNodeSerializer, self).get_related(obj)
@@ -3854,13 +3888,14 @@ class WorkflowJobNodeDetailSerializer(WorkflowJobNodeSerializer):
class WorkflowJobTemplateNodeDetailSerializer(WorkflowJobTemplateNodeSerializer):
- '''
+ """
Influence the api browser sample data to not include workflow_job_template
when editing a WorkflowNode.
Note: I was not able to accomplish this through the use of extra_kwargs.
Maybe something to do with workflow_job_template being a relational field?
- '''
+ """
+
def build_relational_field(self, field_name, relation_info):
field_class, field_kwargs = super(WorkflowJobTemplateNodeDetailSerializer, self).build_relational_field(field_name, relation_info)
if self.instance and field_name == 'workflow_job_template':
@@ -3870,10 +3905,9 @@ class WorkflowJobTemplateNodeDetailSerializer(WorkflowJobTemplateNodeSerializer)
class WorkflowJobTemplateNodeCreateApprovalSerializer(BaseSerializer):
-
class Meta:
model = WorkflowApprovalTemplate
- fields = ('timeout', 'name', 'description',)
+ fields = ('timeout', 'name', 'description')
def to_representation(self, obj):
return {}
@@ -3888,28 +3922,37 @@ class AdHocCommandListSerializer(AdHocCommandSerializer, UnifiedJobListSerialize
class SystemJobListSerializer(SystemJobSerializer, UnifiedJobListSerializer):
-
class Meta:
model = SystemJob
fields = ('*', '-controller_node') # field removal undone by UJ serializer
class JobHostSummarySerializer(BaseSerializer):
-
class Meta:
model = JobHostSummary
- fields = ('*', '-name', '-description', 'job', 'host', 'host_name', 'changed',
- 'dark', 'failures', 'ok', 'processed', 'skipped', 'failed',
- 'ignored', 'rescued')
+ fields = (
+ '*',
+ '-name',
+ '-description',
+ 'job',
+ 'host',
+ 'host_name',
+ 'changed',
+ 'dark',
+ 'failures',
+ 'ok',
+ 'processed',
+ 'skipped',
+ 'failed',
+ 'ignored',
+ 'rescued',
+ )
def get_related(self, obj):
res = super(JobHostSummarySerializer, self).get_related(obj)
- res.update(dict(
- job=self.reverse('api:job_detail', kwargs={'pk': obj.job.pk})))
+ res.update(dict(job=self.reverse('api:job_detail', kwargs={'pk': obj.job.pk})))
if obj.host is not None:
- res.update(dict(
- host=self.reverse('api:host_detail', kwargs={'pk': obj.host.pk})
- ))
+ res.update(dict(host=self.reverse('api:host_detail', kwargs={'pk': obj.host.pk})))
return res
def get_summary_fields(self, obj):
@@ -3929,17 +3972,35 @@ class JobEventSerializer(BaseSerializer):
class Meta:
model = JobEvent
- fields = ('*', '-name', '-description', 'job', 'event', 'counter',
- 'event_display', 'event_data', 'event_level', 'failed',
- 'changed', 'uuid', 'parent_uuid', 'host', 'host_name',
- 'playbook', 'play', 'task', 'role', 'stdout', 'start_line', 'end_line',
- 'verbosity')
+ fields = (
+ '*',
+ '-name',
+ '-description',
+ 'job',
+ 'event',
+ 'counter',
+ 'event_display',
+ 'event_data',
+ 'event_level',
+ 'failed',
+ 'changed',
+ 'uuid',
+ 'parent_uuid',
+ 'host',
+ 'host_name',
+ 'playbook',
+ 'play',
+ 'task',
+ 'role',
+ 'stdout',
+ 'start_line',
+ 'end_line',
+ 'verbosity',
+ )
def get_related(self, obj):
res = super(JobEventSerializer, self).get_related(obj)
- res.update(dict(
- job = self.reverse('api:job_detail', kwargs={'pk': obj.job_id}),
- ))
+ res.update(dict(job=self.reverse('api:job_detail', kwargs={'pk': obj.job_id})))
res['children'] = self.reverse('api:job_event_children_list', kwargs={'pk': obj.pk})
if obj.host_id:
res['host'] = self.reverse('api:host_detail', kwargs={'pk': obj.host_id})
@@ -3974,14 +4035,11 @@ class ProjectUpdateEventSerializer(JobEventSerializer):
class Meta:
model = ProjectUpdateEvent
- fields = ('*', '-name', '-description', '-job', '-job_id',
- '-parent_uuid', '-parent', '-host', 'project_update')
+ fields = ('*', '-name', '-description', '-job', '-job_id', '-parent_uuid', '-parent', '-host', 'project_update')
def get_related(self, obj):
res = super(JobEventSerializer, self).get_related(obj)
- res['project_update'] = self.reverse(
- 'api:project_update_detail', kwargs={'pk': obj.project_update_id}
- )
+ res['project_update'] = self.reverse('api:project_update_detail', kwargs={'pk': obj.project_update_id})
return res
def get_stdout(self, obj):
@@ -3995,11 +4053,7 @@ class ProjectUpdateEventSerializer(JobEventSerializer):
# of these types of events
if obj.event_data.get('task_action') in ('git', 'svn'):
try:
- return json.loads(
- UriCleaner.remove_sensitive(
- json.dumps(obj.event_data)
- )
- )
+ return json.loads(UriCleaner.remove_sensitive(json.dumps(obj.event_data)))
except Exception:
logger.exception("Failed to sanitize event_data")
return {}
@@ -4013,16 +4067,29 @@ class AdHocCommandEventSerializer(BaseSerializer):
class Meta:
model = AdHocCommandEvent
- fields = ('*', '-name', '-description', 'ad_hoc_command', 'event',
- 'counter', 'event_display', 'event_data', 'failed',
- 'changed', 'uuid', 'host', 'host_name', 'stdout',
- 'start_line', 'end_line', 'verbosity')
+ fields = (
+ '*',
+ '-name',
+ '-description',
+ 'ad_hoc_command',
+ 'event',
+ 'counter',
+ 'event_display',
+ 'event_data',
+ 'failed',
+ 'changed',
+ 'uuid',
+ 'host',
+ 'host_name',
+ 'stdout',
+ 'start_line',
+ 'end_line',
+ 'verbosity',
+ )
def get_related(self, obj):
res = super(AdHocCommandEventSerializer, self).get_related(obj)
- res.update(dict(
- ad_hoc_command = self.reverse('api:ad_hoc_command_detail', kwargs={'pk': obj.ad_hoc_command_id}),
- ))
+ res.update(dict(ad_hoc_command=self.reverse('api:ad_hoc_command_detail', kwargs={'pk': obj.ad_hoc_command_id})))
if obj.host:
res['host'] = self.reverse('api:host_detail', kwargs={'pk': obj.host.pk})
return res
@@ -4039,32 +4106,24 @@ class AdHocCommandEventSerializer(BaseSerializer):
class InventoryUpdateEventSerializer(AdHocCommandEventSerializer):
-
class Meta:
model = InventoryUpdateEvent
- fields = ('*', '-name', '-description', '-ad_hoc_command', '-host',
- '-host_name', 'inventory_update')
+ fields = ('*', '-name', '-description', '-ad_hoc_command', '-host', '-host_name', 'inventory_update')
def get_related(self, obj):
res = super(AdHocCommandEventSerializer, self).get_related(obj)
- res['inventory_update'] = self.reverse(
- 'api:inventory_update_detail', kwargs={'pk': obj.inventory_update_id}
- )
+ res['inventory_update'] = self.reverse('api:inventory_update_detail', kwargs={'pk': obj.inventory_update_id})
return res
class SystemJobEventSerializer(AdHocCommandEventSerializer):
-
class Meta:
model = SystemJobEvent
- fields = ('*', '-name', '-description', '-ad_hoc_command', '-host',
- '-host_name', 'system_job')
+ fields = ('*', '-name', '-description', '-ad_hoc_command', '-host', '-host_name', 'system_job')
def get_related(self, obj):
res = super(AdHocCommandEventSerializer, self).get_related(obj)
- res['system_job'] = self.reverse(
- 'api:system_job_detail', kwargs={'pk': obj.system_job_id}
- )
+ res['system_job'] = self.reverse('api:system_job_detail', kwargs={'pk': obj.system_job_id})
return res
@@ -4082,14 +4141,8 @@ class JobLaunchSerializer(BaseSerializer):
# Accepted on launch fields
extra_vars = serializers.JSONField(required=False, write_only=True)
- inventory = serializers.PrimaryKeyRelatedField(
- queryset=Inventory.objects.all(),
- required=False, write_only=True
- )
- credentials = serializers.PrimaryKeyRelatedField(
- many=True, queryset=Credential.objects.all(),
- required=False, write_only=True
- )
+ inventory = serializers.PrimaryKeyRelatedField(queryset=Inventory.objects.all(), required=False, write_only=True)
+ credentials = serializers.PrimaryKeyRelatedField(many=True, queryset=Credential.objects.all(), required=False, write_only=True)
credential_passwords = VerbatimField(required=False, write_only=True)
scm_branch = serializers.CharField(required=False, write_only=True, allow_blank=True)
diff_mode = serializers.BooleanField(required=False, write_only=True)
@@ -4101,19 +4154,50 @@ class JobLaunchSerializer(BaseSerializer):
class Meta:
model = JobTemplate
- fields = ('can_start_without_user_input', 'passwords_needed_to_start',
- 'extra_vars', 'inventory', 'scm_branch', 'limit', 'job_tags', 'skip_tags', 'job_type', 'verbosity', 'diff_mode',
- 'credentials', 'credential_passwords',
- 'ask_scm_branch_on_launch', 'ask_variables_on_launch', 'ask_tags_on_launch',
- 'ask_diff_mode_on_launch', 'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_limit_on_launch',
- 'ask_verbosity_on_launch', 'ask_inventory_on_launch', 'ask_credential_on_launch',
- 'survey_enabled', 'variables_needed_to_start', 'credential_needed_to_start',
- 'inventory_needed_to_start', 'job_template_data', 'defaults', 'verbosity')
+ fields = (
+ 'can_start_without_user_input',
+ 'passwords_needed_to_start',
+ 'extra_vars',
+ 'inventory',
+ 'scm_branch',
+ 'limit',
+ 'job_tags',
+ 'skip_tags',
+ 'job_type',
+ 'verbosity',
+ 'diff_mode',
+ 'credentials',
+ 'credential_passwords',
+ 'ask_scm_branch_on_launch',
+ 'ask_variables_on_launch',
+ 'ask_tags_on_launch',
+ 'ask_diff_mode_on_launch',
+ 'ask_skip_tags_on_launch',
+ 'ask_job_type_on_launch',
+ 'ask_limit_on_launch',
+ 'ask_verbosity_on_launch',
+ 'ask_inventory_on_launch',
+ 'ask_credential_on_launch',
+ 'survey_enabled',
+ 'variables_needed_to_start',
+ 'credential_needed_to_start',
+ 'inventory_needed_to_start',
+ 'job_template_data',
+ 'defaults',
+ 'verbosity',
+ )
read_only_fields = (
'ask_scm_branch_on_launch',
- 'ask_diff_mode_on_launch', 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch',
- 'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch',
- 'ask_inventory_on_launch', 'ask_credential_on_launch',)
+ 'ask_diff_mode_on_launch',
+ 'ask_variables_on_launch',
+ 'ask_limit_on_launch',
+ 'ask_tags_on_launch',
+ 'ask_skip_tags_on_launch',
+ 'ask_job_type_on_launch',
+ 'ask_verbosity_on_launch',
+ 'ask_inventory_on_launch',
+ 'ask_credential_on_launch',
+ )
def get_credential_needed_to_start(self, obj):
return False
@@ -4130,17 +4214,10 @@ class JobLaunchSerializer(BaseSerializer):
defaults_dict = {}
for field_name in JobTemplate.get_ask_mapping().keys():
if field_name == 'inventory':
- defaults_dict[field_name] = dict(
- name=getattrd(obj, '%s.name' % field_name, None),
- id=getattrd(obj, '%s.pk' % field_name, None))
+ defaults_dict[field_name] = dict(name=getattrd(obj, '%s.name' % field_name, None), id=getattrd(obj, '%s.pk' % field_name, None))
elif field_name == 'credentials':
for cred in obj.credentials.all():
- cred_dict = dict(
- id=cred.id,
- name=cred.name,
- credential_type=cred.credential_type.pk,
- passwords_needed=cred.passwords_needed
- )
+ cred_dict = dict(id=cred.id, name=cred.name, credential_type=cred.credential_type.pk, passwords_needed=cred.passwords_needed)
if cred.credential_type.managed_by_tower and 'vault_id' in cred.credential_type.defined_fields:
cred_dict['vault_id'] = cred.get_input('vault_id', default=None)
defaults_dict.setdefault(field_name, []).append(cred_dict)
@@ -4157,9 +4234,7 @@ class JobLaunchSerializer(BaseSerializer):
def validate(self, attrs):
template = self.context.get('template')
- accepted, rejected, errors = template._accept_or_ignore_job_kwargs(
- _exclude_errors=['prompts'], # make several error types non-blocking
- **attrs)
+ accepted, rejected, errors = template._accept_or_ignore_job_kwargs(_exclude_errors=['prompts'], **attrs) # make several error types non-blocking
self._ignored_fields = rejected
# Basic validation - cannot run a playbook without a playbook
@@ -4179,14 +4254,9 @@ class JobLaunchSerializer(BaseSerializer):
distinct_cred_kinds = []
for cred in accepted.get('credentials', []):
if cred.unique_hash() in distinct_cred_kinds:
- errors.setdefault('credentials', []).append(_(
- 'Cannot assign multiple {} credentials.'
- ).format(cred.unique_hash(display=True)))
- if cred.credential_type.kind not in ('ssh', 'vault', 'cloud',
- 'net', 'kubernetes'):
- errors.setdefault('credentials', []).append(_(
- 'Cannot assign a Credential of kind `{}`'
- ).format(cred.credential_type.kind))
+ errors.setdefault('credentials', []).append(_('Cannot assign multiple {} credentials.').format(cred.unique_hash(display=True)))
+ if cred.credential_type.kind not in ('ssh', 'vault', 'cloud', 'net', 'kubernetes'):
+ errors.setdefault('credentials', []).append(_('Cannot assign a Credential of kind `{}`').format(cred.credential_type.kind))
distinct_cred_kinds.append(cred.unique_hash())
# Prohibit removing credentials from the JT list (unsupported for now)
@@ -4197,18 +4267,16 @@ class JobLaunchSerializer(BaseSerializer):
for cred in removed_creds:
if cred.unique_hash() in provided_mapping.keys():
continue # User replaced credential with new of same type
- errors.setdefault('credentials', []).append(_(
- 'Removing {} credential at launch time without replacement is not supported. '
- 'Provided list lacked credential(s): {}.'
- ).format(cred.unique_hash(display=True), ', '.join([str(c) for c in removed_creds])))
+ errors.setdefault('credentials', []).append(
+ _('Removing {} credential at launch time without replacement is not supported. ' 'Provided list lacked credential(s): {}.').format(
+ cred.unique_hash(display=True), ', '.join([str(c) for c in removed_creds])
+ )
+ )
# verify that credentials (either provided or existing) don't
# require launch-time passwords that have not been provided
if 'credentials' in accepted:
- launch_credentials = Credential.unique_dict(
- list(template_credentials.all()) +
- list(accepted['credentials'])
- ).values()
+ launch_credentials = Credential.unique_dict(list(template_credentials.all()) + list(accepted['credentials'])).values()
else:
launch_credentials = template_credentials
passwords = attrs.get('credential_passwords', {}) # get from original attrs
@@ -4246,22 +4314,31 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
variables_needed_to_start = serializers.ReadOnlyField()
survey_enabled = serializers.SerializerMethodField()
extra_vars = VerbatimField(required=False, write_only=True)
- inventory = serializers.PrimaryKeyRelatedField(
- queryset=Inventory.objects.all(),
- required=False, write_only=True
- )
+ inventory = serializers.PrimaryKeyRelatedField(queryset=Inventory.objects.all(), required=False, write_only=True)
limit = serializers.CharField(required=False, write_only=True, allow_blank=True)
scm_branch = serializers.CharField(required=False, write_only=True, allow_blank=True)
workflow_job_template_data = serializers.SerializerMethodField()
class Meta:
model = WorkflowJobTemplate
- fields = ('ask_inventory_on_launch', 'ask_limit_on_launch', 'ask_scm_branch_on_launch',
- 'can_start_without_user_input', 'defaults', 'extra_vars',
- 'inventory', 'limit', 'scm_branch',
- 'survey_enabled', 'variables_needed_to_start',
- 'node_templates_missing', 'node_prompts_rejected',
- 'workflow_job_template_data', 'survey_enabled', 'ask_variables_on_launch')
+ fields = (
+ 'ask_inventory_on_launch',
+ 'ask_limit_on_launch',
+ 'ask_scm_branch_on_launch',
+ 'can_start_without_user_input',
+ 'defaults',
+ 'extra_vars',
+ 'inventory',
+ 'limit',
+ 'scm_branch',
+ 'survey_enabled',
+ 'variables_needed_to_start',
+ 'node_templates_missing',
+ 'node_prompts_rejected',
+ 'workflow_job_template_data',
+ 'survey_enabled',
+ 'ask_variables_on_launch',
+ )
read_only_fields = ('ask_inventory_on_launch', 'ask_variables_on_launch')
def get_survey_enabled(self, obj):
@@ -4273,9 +4350,7 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
defaults_dict = {}
for field_name in WorkflowJobTemplate.get_ask_mapping().keys():
if field_name == 'inventory':
- defaults_dict[field_name] = dict(
- name=getattrd(obj, '%s.name' % field_name, None),
- id=getattrd(obj, '%s.pk' % field_name, None))
+ defaults_dict[field_name] = dict(name=getattrd(obj, '%s.name' % field_name, None), id=getattrd(obj, '%s.pk' % field_name, None))
else:
defaults_dict[field_name] = getattr(obj, field_name)
return defaults_dict
@@ -4318,13 +4393,7 @@ class NotificationTemplateSerializer(BaseSerializer):
model = NotificationTemplate
fields = ('*', 'organization', 'notification_type', 'notification_configuration', 'messages')
-
- type_map = {"string": (str,),
- "int": (int,),
- "bool": (bool,),
- "list": (list,),
- "password": (str,),
- "object": (dict, OrderedDict)}
+ type_map = {"string": (str,), "int": (int,), "bool": (bool,), "list": (list,), "password": (str,), "object": (dict, OrderedDict)}
def to_representation(self, obj):
ret = super(NotificationTemplateSerializer, self).to_representation(obj)
@@ -4334,11 +4403,13 @@ class NotificationTemplateSerializer(BaseSerializer):
def get_related(self, obj):
res = super(NotificationTemplateSerializer, self).get_related(obj)
- res.update(dict(
- test = self.reverse('api:notification_template_test', kwargs={'pk': obj.pk}),
- notifications = self.reverse('api:notification_template_notification_list', kwargs={'pk': obj.pk}),
- copy = self.reverse('api:notification_template_copy', kwargs={'pk': obj.pk}),
- ))
+ res.update(
+ dict(
+ test=self.reverse('api:notification_template_test', kwargs={'pk': obj.pk}),
+ notifications=self.reverse('api:notification_template_notification_list', kwargs={'pk': obj.pk}),
+ copy=self.reverse('api:notification_template_copy', kwargs={'pk': obj.pk}),
+ )
+ )
if obj.organization:
res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
return res
@@ -4392,8 +4463,9 @@ class NotificationTemplateSerializer(BaseSerializer):
if event == 'workflow_approval':
for subevent in event_messages:
if subevent not in ('running', 'approved', 'timed_out', 'denied'):
- error_list.append(_("Workflow Approval event '{}' invalid, must be one of "
- "'running', 'approved', 'timed_out', or 'denied'").format(subevent))
+ error_list.append(
+ _("Workflow Approval event '{}' invalid, must be one of " "'running', 'approved', 'timed_out', or 'denied'").format(subevent)
+ )
continue
subevent_messages = event_messages[subevent]
if subevent_messages is None:
@@ -4450,8 +4522,9 @@ class NotificationTemplateSerializer(BaseSerializer):
try:
potential_body = json.loads(body)
if not isinstance(potential_body, dict):
- error_list.append(_("Webhook body for '{}' should be a json dictionary. Found type '{}'."
- .format(event, type(potential_body).__name__)))
+ error_list.append(
+ _("Webhook body for '{}' should be a json dictionary. Found type '{}'.".format(event, type(potential_body).__name__))
+ )
except json.JSONDecodeError as exc:
error_list.append(_("Webhook body for '{}' is not a valid json dictionary ({}).".format(event, exc)))
@@ -4508,8 +4581,7 @@ class NotificationTemplateSerializer(BaseSerializer):
error_list.append(_("Missing required fields for Notification Configuration: {}.").format(missing_fields))
if incorrect_type_fields:
for type_field_error in incorrect_type_fields:
- error_list.append(_("Configuration field '{}' incorrect type, expected {}.").format(type_field_error[0],
- type_field_error[1]))
+ error_list.append(_("Configuration field '{}' incorrect type, expected {}.").format(type_field_error[0], type_field_error[1]))
if error_list:
raise serializers.ValidationError(error_list)
@@ -4524,14 +4596,23 @@ class NotificationTemplateSerializer(BaseSerializer):
class NotificationSerializer(BaseSerializer):
- body = serializers.SerializerMethodField(
- help_text=_('Notification body')
- )
+ body = serializers.SerializerMethodField(help_text=_('Notification body'))
class Meta:
model = Notification
- fields = ('*', '-name', '-description', 'notification_template', 'error', 'status', 'notifications_sent',
- 'notification_type', 'recipients', 'subject', 'body')
+ fields = (
+ '*',
+ '-name',
+ '-description',
+ 'notification_template',
+ 'error',
+ 'status',
+ 'notifications_sent',
+ 'notification_type',
+ 'recipients',
+ 'subject',
+ 'body',
+ )
def get_body(self, obj):
if obj.notification_type in ('webhook', 'pagerduty'):
@@ -4550,9 +4631,7 @@ class NotificationSerializer(BaseSerializer):
def get_related(self, obj):
res = super(NotificationSerializer, self).get_related(obj)
- res.update(dict(
- notification_template = self.reverse('api:notification_template_detail', kwargs={'pk': obj.notification_template.pk}),
- ))
+ res.update(dict(notification_template=self.reverse('api:notification_template_detail', kwargs={'pk': obj.notification_template.pk})))
return res
def to_representation(self, obj):
@@ -4566,7 +4645,6 @@ class NotificationSerializer(BaseSerializer):
class LabelSerializer(BaseSerializer):
-
class Meta:
model = Label
fields = ('*', '-description', 'organization')
@@ -4579,7 +4657,6 @@ class LabelSerializer(BaseSerializer):
class SchedulePreviewSerializer(BaseSerializer):
-
class Meta:
model = Schedule
fields = ('rrule',)
@@ -4638,6 +4715,7 @@ class SchedulePreviewSerializer(BaseSerializer):
Schedule.rrulestr(rrule_value)
except Exception as e:
import traceback
+
logger.error(traceback.format_exc())
raise serializers.ValidationError(_("rrule parsing failed validation: {}").format(e))
return value
@@ -4651,8 +4729,7 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
class Meta:
model = Schedule
- fields = ('*', 'unified_job_template', 'enabled', 'dtstart', 'dtend', 'rrule', 'next_run', 'timezone',
- 'until')
+ fields = ('*', 'unified_job_template', 'enabled', 'dtstart', 'dtend', 'rrule', 'next_run', 'timezone', 'until')
def get_timezone(self, obj):
return obj.timezone
@@ -4662,9 +4739,7 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
def get_related(self, obj):
res = super(ScheduleSerializer, self).get_related(obj)
- res.update(dict(
- unified_jobs = self.reverse('api:schedule_unified_jobs_list', kwargs={'pk': obj.pk}),
- ))
+ res.update(dict(unified_jobs=self.reverse('api:schedule_unified_jobs_list', kwargs={'pk': obj.pk})))
if obj.unified_job_template:
res['unified_job_template'] = obj.unified_job_template.get_absolute_url(self.context.get('request'))
try:
@@ -4705,9 +4780,12 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
elif type(value) == Project and value.scm_type == '':
raise serializers.ValidationError(_('Manual Project cannot have a schedule set.'))
elif type(value) == InventorySource and value.source == 'scm' and value.update_on_project_update:
- raise serializers.ValidationError(_(
- 'Inventory sources with `update_on_project_update` cannot be scheduled. '
- 'Schedule its source project `{}` instead.'.format(value.source_project.name)))
+ raise serializers.ValidationError(
+ _(
+ 'Inventory sources with `update_on_project_update` cannot be scheduled. '
+ 'Schedule its source project `{}` instead.'.format(value.source_project.name)
+ )
+ )
return value
@@ -4715,22 +4793,35 @@ class InstanceSerializer(BaseSerializer):
consumed_capacity = serializers.SerializerMethodField()
percent_capacity_remaining = serializers.SerializerMethodField()
- jobs_running = serializers.IntegerField(
- help_text=_('Count of jobs in the running or waiting state that '
- 'are targeted for this instance'),
- read_only=True
- )
- jobs_total = serializers.IntegerField(
- help_text=_('Count of all jobs that target this instance'),
- read_only=True
- )
+ jobs_running = serializers.IntegerField(help_text=_('Count of jobs in the running or waiting state that ' 'are targeted for this instance'), read_only=True)
+ jobs_total = serializers.IntegerField(help_text=_('Count of all jobs that target this instance'), read_only=True)
class Meta:
model = Instance
read_only_fields = ('uuid', 'hostname', 'version')
- fields = ("id", "type", "url", "related", "uuid", "hostname", "created", "modified", 'capacity_adjustment',
- "version", "capacity", "consumed_capacity", "percent_capacity_remaining", "jobs_running", "jobs_total",
- "cpu", "memory", "cpu_capacity", "mem_capacity", "enabled", "managed_by_policy")
+ fields = (
+ "id",
+ "type",
+ "url",
+ "related",
+ "uuid",
+ "hostname",
+ "created",
+ "modified",
+ 'capacity_adjustment',
+ "version",
+ "capacity",
+ "consumed_capacity",
+ "percent_capacity_remaining",
+ "jobs_running",
+ "jobs_total",
+ "cpu",
+ "memory",
+ "cpu_capacity",
+ "mem_capacity",
+ "enabled",
+ "managed_by_policy",
+ )
def get_related(self, obj):
res = super(InstanceSerializer, self).get_related(obj)
@@ -4756,57 +4847,72 @@ class InstanceGroupSerializer(BaseSerializer):
consumed_capacity = serializers.SerializerMethodField()
percent_capacity_remaining = serializers.SerializerMethodField()
jobs_running = serializers.IntegerField(
- help_text=_('Count of jobs in the running or waiting state that '
- 'are targeted for this instance group'),
- read_only=True
- )
- jobs_total = serializers.IntegerField(
- help_text=_('Count of all jobs that target this instance group'),
- read_only=True
+ help_text=_('Count of jobs in the running or waiting state that ' 'are targeted for this instance group'), read_only=True
)
+ jobs_total = serializers.IntegerField(help_text=_('Count of all jobs that target this instance group'), read_only=True)
instances = serializers.SerializerMethodField()
- is_controller = serializers.BooleanField(
- help_text=_('Indicates whether instance group controls any other group'),
- read_only=True
- )
+ is_controller = serializers.BooleanField(help_text=_('Indicates whether instance group controls any other group'), read_only=True)
is_isolated = serializers.BooleanField(
- help_text=_('Indicates whether instances in this group are isolated.'
- 'Isolated groups have a designated controller group.'),
- read_only=True
+ help_text=_('Indicates whether instances in this group are isolated.' 'Isolated groups have a designated controller group.'), read_only=True
)
is_container_group = serializers.BooleanField(
required=False,
- help_text=_('Indicates whether instances in this group are containerized.'
- 'Containerized groups have a designated Openshift or Kubernetes cluster.')
+ help_text=_('Indicates whether instances in this group are containerized.' 'Containerized groups have a designated Openshift or Kubernetes cluster.'),
)
# NOTE: help_text is duplicated from field definitions, no obvious way of
# both defining field details here and also getting the field's help_text
policy_instance_percentage = serializers.IntegerField(
- default=0, min_value=0, max_value=100, required=False, initial=0,
+ default=0,
+ min_value=0,
+ max_value=100,
+ required=False,
+ initial=0,
label=_('Policy Instance Percentage'),
- help_text=_("Minimum percentage of all instances that will be automatically assigned to "
- "this group when new instances come online.")
+ help_text=_("Minimum percentage of all instances that will be automatically assigned to " "this group when new instances come online."),
)
policy_instance_minimum = serializers.IntegerField(
- default=0, min_value=0, required=False, initial=0,
+ default=0,
+ min_value=0,
+ required=False,
+ initial=0,
label=_('Policy Instance Minimum'),
- help_text=_("Static minimum number of Instances that will be automatically assign to "
- "this group when new instances come online.")
+ help_text=_("Static minimum number of Instances that will be automatically assign to " "this group when new instances come online."),
)
policy_instance_list = serializers.ListField(
- child=serializers.CharField(), required=False,
+ child=serializers.CharField(),
+ required=False,
label=_('Policy Instance List'),
- help_text=_("List of exact-match Instances that will be assigned to this group")
+ help_text=_("List of exact-match Instances that will be assigned to this group"),
)
class Meta:
model = InstanceGroup
- fields = ("id", "type", "url", "related", "name", "created", "modified",
- "capacity", "committed_capacity", "consumed_capacity",
- "percent_capacity_remaining", "jobs_running", "jobs_total",
- "instances", "controller", "is_controller", "is_isolated", "is_container_group", "credential",
- "policy_instance_percentage", "policy_instance_minimum", "policy_instance_list",
- "pod_spec_override", "summary_fields")
+ fields = (
+ "id",
+ "type",
+ "url",
+ "related",
+ "name",
+ "created",
+ "modified",
+ "capacity",
+ "committed_capacity",
+ "consumed_capacity",
+ "percent_capacity_remaining",
+ "jobs_running",
+ "jobs_total",
+ "instances",
+ "controller",
+ "is_controller",
+ "is_isolated",
+ "is_container_group",
+ "credential",
+ "policy_instance_percentage",
+ "policy_instance_minimum",
+ "policy_instance_list",
+ "pod_spec_override",
+ "summary_fields",
+ )
def get_related(self, obj):
res = super(InstanceGroupSerializer, self).get_related(obj)
@@ -4857,8 +4963,7 @@ class InstanceGroupSerializer(BaseSerializer):
attrs = super(InstanceGroupSerializer, self).validate(attrs)
if attrs.get('credential') and not attrs.get('is_container_group'):
- raise serializers.ValidationError({'is_container_group': _(
- 'is_container_group must be True when associating a credential to an Instance Group')})
+ raise serializers.ValidationError({'is_container_group': _('is_container_group must be True when associating a credential to an Instance Group')})
return attrs
@@ -4869,8 +4974,7 @@ class InstanceGroupSerializer(BaseSerializer):
jobs_qs = UnifiedJob.objects.filter(status__in=('running', 'waiting'))
if self.parent: # Is ListView:
ig_qs = self.parent.instance
- self.context['capacity_map'] = InstanceGroup.objects.capacity_values(
- qs=ig_qs, tasks=jobs_qs, breakdown=True)
+ self.context['capacity_map'] = InstanceGroup.objects.capacity_values(qs=ig_qs, tasks=jobs_qs, breakdown=True)
return self.context['capacity_map']
def get_consumed_capacity(self, obj):
@@ -4886,9 +4990,7 @@ class InstanceGroupSerializer(BaseSerializer):
if consumed >= obj.capacity:
return 0.0
else:
- return float("{0:.2f}".format(
- ((float(obj.capacity) - float(consumed)) / (float(obj.capacity))) * 100)
- )
+ return float("{0:.2f}".format(((float(obj.capacity) - float(consumed)) / (float(obj.capacity))) * 100))
def get_instances(self, obj):
return obj.instances.count()
@@ -4897,10 +4999,8 @@ class InstanceGroupSerializer(BaseSerializer):
class ActivityStreamSerializer(BaseSerializer):
changes = serializers.SerializerMethodField()
- object_association = serializers.SerializerMethodField(
- help_text=_("When present, shows the field name of the role or relationship that changed."))
- object_type = serializers.SerializerMethodField(
- help_text=_("When present, shows the model on which the role or relationship was defined."))
+ object_association = serializers.SerializerMethodField(help_text=_("When present, shows the field name of the role or relationship that changed."))
+ object_type = serializers.SerializerMethodField(help_text=_("When present, shows the model on which the role or relationship was defined."))
@cached_property
def _local_summarizable_fk_fields(self):
@@ -4926,8 +5026,21 @@ class ActivityStreamSerializer(BaseSerializer):
class Meta:
model = ActivityStream
- fields = ('*', '-name', '-description', '-created', '-modified', 'timestamp', 'operation',
- 'changes', 'object1', 'object2', 'object_association', 'action_node', 'object_type')
+ fields = (
+ '*',
+ '-name',
+ '-description',
+ '-created',
+ '-modified',
+ 'timestamp',
+ 'operation',
+ 'changes',
+ 'object1',
+ 'object2',
+ 'object_association',
+ 'action_node',
+ 'object_type',
+ )
def get_fields(self):
ret = super(ActivityStreamSerializer, self).get_fields()
@@ -4935,11 +5048,15 @@ class ActivityStreamSerializer(BaseSerializer):
if key == 'changes':
field.help_text = _('A summary of the new and changed values when an object is created, updated, or deleted')
if key == 'object1':
- field.help_text = _('For create, update, and delete events this is the object type that was affected. '
- 'For associate and disassociate events this is the object type associated or disassociated with object2.')
+ field.help_text = _(
+ 'For create, update, and delete events this is the object type that was affected. '
+ 'For associate and disassociate events this is the object type associated or disassociated with object2.'
+ )
if key == 'object2':
- field.help_text = _('Unpopulated for create, update, and delete events. For associate and disassociate '
- 'events this is the object type that object1 is being associated with.')
+ field.help_text = _(
+ 'Unpopulated for create, update, and delete events. For associate and disassociate '
+ 'events this is the object type that object1 is being associated with.'
+ )
if key == 'operation':
field.help_text = _('The action taken with respect to the given object(s).')
return ret
@@ -5010,10 +5127,7 @@ class ActivityStreamSerializer(BaseSerializer):
if fk == 'schedule':
data['unified_job_template'] = item.unified_job_template.get_absolute_url(self.context.get('request'))
if obj.setting and obj.setting.get('category', None):
- data['setting'] = self.reverse(
- 'api:setting_singleton_detail',
- kwargs={'category_slug': obj.setting['category']}
- )
+ data['setting'] = self.reverse('api:setting_singleton_detail', kwargs={'category_slug': obj.setting['category']})
return data
def _get_related_objects(self, obj, fk):
@@ -5027,11 +5141,13 @@ class ActivityStreamSerializer(BaseSerializer):
return related_manager.all()
def _summarize_parent_ujt(self, obj, fk, summary_fields):
- summary_keys = {'job': 'job_template',
- 'workflow_job_template_node': 'workflow_job_template',
- 'workflow_approval_template': 'workflow_job_template',
- 'workflow_approval': 'workflow_job',
- 'schedule': 'unified_job_template'}
+ summary_keys = {
+ 'job': 'job_template',
+ 'workflow_job_template_node': 'workflow_job_template',
+ 'workflow_approval_template': 'workflow_job_template',
+ 'workflow_approval': 'workflow_job',
+ 'schedule': 'unified_job_template',
+ }
if fk not in summary_keys:
return
related_obj = getattr(obj, summary_keys[fk], None)
@@ -5065,10 +5181,7 @@ class ActivityStreamSerializer(BaseSerializer):
except ObjectDoesNotExist:
pass
if obj.actor is not None:
- summary_fields['actor'] = dict(id = obj.actor.id,
- username = obj.actor.username,
- first_name = obj.actor.first_name,
- last_name = obj.actor.last_name)
+ summary_fields['actor'] = dict(id=obj.actor.id, username=obj.actor.username, first_name=obj.actor.first_name, last_name=obj.actor.last_name)
elif obj.deleted_actor:
summary_fields['actor'] = obj.deleted_actor.copy()
summary_fields['actor']['id'] = None
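Note: nearly all of the churn in the serializers.py section above follows one rule, the configured line length. Collections and argument lists that fit within the width are joined onto a single line; those that do not are exploded one element per line, gaining a trailing comma. The reflowed strings keep their single quotes, and lists that previously ended in a trailing comma (see label.py further down) are still collapsed, which suggests the commit ran black with string normalization and magic-trailing-comma handling disabled; those options, the width of 160, and the black package itself are all assumptions in the sketch below, not facts stated by the commit. The mechanical explosion also makes pre-existing duplicates easy to spot: 'verbosity' appears twice in JobLaunchSerializer's fields tuple and 'survey_enabled' twice in WorkflowJobLaunchSerializer's.

# Illustrative sketch, not part of the commit: the width-driven join/explode
# behavior seen throughout this diff. The Mode options are assumptions
# inferred from the output above (160 columns, single quotes preserved,
# trailing commas not treated as "magic") and need a recent black release.
import black

mode = black.Mode(line_length=160, string_normalization=False, magic_trailing_comma=False)

short = "fields = ('*', 'organization', 'notification_type', 'messages',)\n"
long = (
    "fields = ('can_start_without_user_input', 'passwords_needed_to_start', 'extra_vars', "
    "'inventory', 'scm_branch', 'limit', 'job_tags', 'skip_tags', 'job_type', 'verbosity', "
    "'diff_mode', 'credentials', 'credential_passwords', 'ask_scm_branch_on_launch')\n"
)

print(black.format_str(short, mode=mode))  # fits: joined onto one line
print(black.format_str(long, mode=mode))   # overflows: exploded, one field per line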
diff --git a/awx/api/swagger.py b/awx/api/swagger.py
index fd54928251..b7a23a9f6d 100644
--- a/awx/api/swagger.py
+++ b/awx/api/swagger.py
@@ -14,7 +14,6 @@ from rest_framework_swagger import renderers
class SuperUserSchemaGenerator(SchemaGenerator):
-
def has_view_permissions(self, path, method, view):
#
# Generate the Swagger schema as if you were a superuser and
@@ -25,17 +24,17 @@ class SuperUserSchemaGenerator(SchemaGenerator):
class AutoSchema(DRFAuthSchema):
-
def get_link(self, path, method, base_url):
link = super(AutoSchema, self).get_link(path, method, base_url)
try:
serializer = self.view.get_serializer()
except Exception:
serializer = None
- warnings.warn('{}.get_serializer() raised an exception during '
- 'schema generation. Serializer fields will not be '
- 'generated for {} {}.'
- .format(self.view.__class__.__name__, method, path))
+ warnings.warn(
+ '{}.get_serializer() raised an exception during '
+ 'schema generation. Serializer fields will not be '
+ 'generated for {} {}.'.format(self.view.__class__.__name__, method, path)
+ )
link.__dict__['deprecated'] = getattr(self.view, 'deprecated', False)
@@ -43,9 +42,7 @@ class AutoSchema(DRFAuthSchema):
if hasattr(self.view, 'swagger_topic'):
link.__dict__['topic'] = str(self.view.swagger_topic).title()
elif serializer and hasattr(serializer, 'Meta'):
- link.__dict__['topic'] = str(
- serializer.Meta.model._meta.verbose_name_plural
- ).title()
+ link.__dict__['topic'] = str(serializer.Meta.model._meta.verbose_name_plural).title()
elif hasattr(self.view, 'model'):
link.__dict__['topic'] = str(self.view.model._meta.verbose_name_plural).title()
else:
@@ -62,18 +59,10 @@ class SwaggerSchemaView(APIView):
_ignore_model_permissions = True
exclude_from_schema = True
permission_classes = [AllowAny]
- renderer_classes = [
- CoreJSONRenderer,
- renderers.OpenAPIRenderer,
- renderers.SwaggerUIRenderer
- ]
+ renderer_classes = [CoreJSONRenderer, renderers.OpenAPIRenderer, renderers.SwaggerUIRenderer]
def get(self, request):
- generator = SuperUserSchemaGenerator(
- title='Ansible Tower API',
- patterns=None,
- urlconf=None
- )
+ generator = SuperUserSchemaGenerator(title='Ansible Tower API', patterns=None, urlconf=None)
schema = generator.get_schema(request=request)
# python core-api doesn't support the deprecation yet, so track it
# ourselves and return it in a response header
@@ -103,11 +92,6 @@ class SwaggerSchemaView(APIView):
schema._data[topic]._data[path] = node
if not schema:
- raise exceptions.ValidationError(
- 'The schema generator did not return a schema Document'
- )
+ raise exceptions.ValidationError('The schema generator did not return a schema Document')
- return Response(
- schema,
- headers={'X-Deprecated-Paths': json.dumps(_deprecated)}
- )
+ return Response(schema, headers={'X-Deprecated-Paths': json.dumps(_deprecated)})
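Note: one thing black deliberately leaves alone is implicit string concatenation. The wrapped warning in AutoSchema.get_link above, like the long help_text strings in the serializers.py section, is re-wrapped as adjacent literals rather than merged into one string. That keeps the reformat mechanical, but it also preserves pre-existing glitches such as the missing separator space in the is_isolated and is_container_group help texts. A pure-Python illustration:

# The two adjacent literals below are copied from the is_isolated help_text
# in the InstanceGroupSerializer hunk further up. The compiler concatenates
# them at compile time; black re-wraps the pair but never merges it.
text = 'Indicates whether instances in this group are isolated.' 'Isolated groups have a designated controller group.'

# Note the missing space after the period -- a pre-existing wording bug that
# the reformatting preserves rather than introduces.
assert 'isolated.Isolated' in text
print(text)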
diff --git a/awx/api/urls/activity_stream.py b/awx/api/urls/activity_stream.py
index cfca532970..907f856aa8 100644
--- a/awx/api/urls/activity_stream.py
+++ b/awx/api/urls/activity_stream.py
@@ -3,10 +3,7 @@
from django.conf.urls import url
-from awx.api.views import (
- ActivityStreamList,
- ActivityStreamDetail,
-)
+from awx.api.views import ActivityStreamList, ActivityStreamDetail
urls = [
diff --git a/awx/api/urls/ad_hoc_command_event.py b/awx/api/urls/ad_hoc_command_event.py
index 21af85d2ad..20d3dc024d 100644
--- a/awx/api/urls/ad_hoc_command_event.py
+++ b/awx/api/urls/ad_hoc_command_event.py
@@ -3,10 +3,7 @@
from django.conf.urls import url
-from awx.api.views import (
- AdHocCommandEventList,
- AdHocCommandEventDetail,
-)
+from awx.api.views import AdHocCommandEventList, AdHocCommandEventDetail
urls = [
diff --git a/awx/api/urls/credential_input_source.py b/awx/api/urls/credential_input_source.py
index 5f660dfdf8..6b365e5840 100644
--- a/awx/api/urls/credential_input_source.py
+++ b/awx/api/urls/credential_input_source.py
@@ -3,10 +3,7 @@
from django.conf.urls import url
-from awx.api.views import (
- CredentialInputSourceDetail,
- CredentialInputSourceList,
-)
+from awx.api.views import CredentialInputSourceDetail, CredentialInputSourceList
urls = [
diff --git a/awx/api/urls/credential_type.py b/awx/api/urls/credential_type.py
index 5fa033fd33..ab799c8c54 100644
--- a/awx/api/urls/credential_type.py
+++ b/awx/api/urls/credential_type.py
@@ -3,13 +3,7 @@
from django.conf.urls import url
-from awx.api.views import (
- CredentialTypeList,
- CredentialTypeDetail,
- CredentialTypeCredentialList,
- CredentialTypeActivityStreamList,
- CredentialTypeExternalTest,
-)
+from awx.api.views import CredentialTypeList, CredentialTypeDetail, CredentialTypeCredentialList, CredentialTypeActivityStreamList, CredentialTypeExternalTest
urls = [
diff --git a/awx/api/urls/instance.py b/awx/api/urls/instance.py
index 5ad8bda712..abff37c5d9 100644
--- a/awx/api/urls/instance.py
+++ b/awx/api/urls/instance.py
@@ -3,20 +3,14 @@
from django.conf.urls import url
-from awx.api.views import (
- InstanceList,
- InstanceDetail,
- InstanceUnifiedJobsList,
- InstanceInstanceGroupsList,
-)
+from awx.api.views import InstanceList, InstanceDetail, InstanceUnifiedJobsList, InstanceInstanceGroupsList
urls = [
url(r'^$', InstanceList.as_view(), name='instance_list'),
url(r'^(?P<pk>[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'),
url(r'^(?P<pk>[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'),
- url(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(),
- name='instance_instance_groups_list'),
+ url(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'),
]
__all__ = ['urls']
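Note: the instance.py hunk shows the width threshold directly. The instance_groups route fits within the wide configured line, so its two-line spelling is joined, while the longer notification-template routes in inventory_source.py, job_template.py, and the other url modules below overflow and are exploded one argument per line with a trailing comma added. A sketch of the same call at two widths, under the assumed Mode options:

# Sketch: one of the joined routes above, rendered at the assumed project
# width (160) and at black's default width (88). The first fits on one line;
# the second overflows, so black explodes it one argument per line and adds
# a trailing comma, exactly like the longer routes in the sections below.
import black

call = "url(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list')\n"
for width in (160, 88):
    mode = black.Mode(line_length=width, string_normalization=False, magic_trailing_comma=False)
    print(black.format_str(call, mode=mode))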
diff --git a/awx/api/urls/instance_group.py b/awx/api/urls/instance_group.py
index 58976103a5..3bb9eabefc 100644
--- a/awx/api/urls/instance_group.py
+++ b/awx/api/urls/instance_group.py
@@ -3,12 +3,7 @@
from django.conf.urls import url
-from awx.api.views import (
- InstanceGroupList,
- InstanceGroupDetail,
- InstanceGroupUnifiedJobsList,
- InstanceGroupInstanceList,
-)
+from awx.api.views import InstanceGroupList, InstanceGroupDetail, InstanceGroupUnifiedJobsList, InstanceGroupInstanceList
urls = [
diff --git a/awx/api/urls/inventory_script.py b/awx/api/urls/inventory_script.py
index 03852e78cb..a3e72e27f6 100644
--- a/awx/api/urls/inventory_script.py
+++ b/awx/api/urls/inventory_script.py
@@ -3,12 +3,7 @@
from django.conf.urls import url
-from awx.api.views import (
- InventoryScriptList,
- InventoryScriptDetail,
- InventoryScriptObjectRolesList,
- InventoryScriptCopy,
-)
+from awx.api.views import InventoryScriptList, InventoryScriptDetail, InventoryScriptObjectRolesList, InventoryScriptCopy
urls = [
diff --git a/awx/api/urls/inventory_source.py b/awx/api/urls/inventory_source.py
index 736797c5a7..02cf30c408 100644
--- a/awx/api/urls/inventory_source.py
+++ b/awx/api/urls/inventory_source.py
@@ -29,12 +29,21 @@ urls = [
url(r'^(?P<pk>[0-9]+)/credentials/$', InventorySourceCredentialsList.as_view(), name='inventory_source_credentials_list'),
url(r'^(?P<pk>[0-9]+)/groups/$', InventorySourceGroupsList.as_view(), name='inventory_source_groups_list'),
url(r'^(?P<pk>[0-9]+)/hosts/$', InventorySourceHostsList.as_view(), name='inventory_source_hosts_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', InventorySourceNotificationTemplatesStartedList.as_view(),
- name='inventory_source_notification_templates_started_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', InventorySourceNotificationTemplatesErrorList.as_view(),
- name='inventory_source_notification_templates_error_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', InventorySourceNotificationTemplatesSuccessList.as_view(),
- name='inventory_source_notification_templates_success_list'),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_started/$',
+ InventorySourceNotificationTemplatesStartedList.as_view(),
+ name='inventory_source_notification_templates_started_list',
+ ),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_error/$',
+ InventorySourceNotificationTemplatesErrorList.as_view(),
+ name='inventory_source_notification_templates_error_list',
+ ),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_success/$',
+ InventorySourceNotificationTemplatesSuccessList.as_view(),
+ name='inventory_source_notification_templates_success_list',
+ ),
]
__all__ = ['urls']
diff --git a/awx/api/urls/job_event.py b/awx/api/urls/job_event.py
index b91c6731a6..96f5146555 100644
--- a/awx/api/urls/job_event.py
+++ b/awx/api/urls/job_event.py
@@ -3,12 +3,7 @@
from django.conf.urls import url
-from awx.api.views import (
- JobEventList,
- JobEventDetail,
- JobEventChildrenList,
- JobEventHostsList,
-)
+from awx.api.views import JobEventList, JobEventDetail, JobEventChildrenList, JobEventHostsList
urls = [
diff --git a/awx/api/urls/job_host_summary.py b/awx/api/urls/job_host_summary.py
index 808511e178..8252a982d0 100644
--- a/awx/api/urls/job_host_summary.py
+++ b/awx/api/urls/job_host_summary.py
@@ -3,13 +3,9 @@
from django.conf.urls import url
-from awx.api.views import (
- JobHostSummaryDetail,
-)
+from awx.api.views import JobHostSummaryDetail
-urls = [
- url(r'^(?P<pk>[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail'),
-]
+urls = [url(r'^(?P<pk>[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail')]
__all__ = ['urls']
diff --git a/awx/api/urls/job_template.py b/awx/api/urls/job_template.py
index 77252eb7e3..967f821fd1 100644
--- a/awx/api/urls/job_template.py
+++ b/awx/api/urls/job_template.py
@@ -34,12 +34,21 @@ urls = [
url(r'^(?P<pk>[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'),
url(r'^(?P<pk>[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', JobTemplateActivityStreamList.as_view(), name='job_template_activity_stream_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', JobTemplateNotificationTemplatesStartedList.as_view(),
- name='job_template_notification_templates_started_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', JobTemplateNotificationTemplatesErrorList.as_view(),
- name='job_template_notification_templates_error_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', JobTemplateNotificationTemplatesSuccessList.as_view(),
- name='job_template_notification_templates_success_list'),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_started/$',
+ JobTemplateNotificationTemplatesStartedList.as_view(),
+ name='job_template_notification_templates_started_list',
+ ),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_error/$',
+ JobTemplateNotificationTemplatesErrorList.as_view(),
+ name='job_template_notification_templates_error_list',
+ ),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_success/$',
+ JobTemplateNotificationTemplatesSuccessList.as_view(),
+ name='job_template_notification_templates_success_list',
+ ),
url(r'^(?P<pk>[0-9]+)/instance_groups/$', JobTemplateInstanceGroupsList.as_view(), name='job_template_instance_groups_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'),
diff --git a/awx/api/urls/label.py b/awx/api/urls/label.py
index 60d70a5bd1..368fca7aa8 100644
--- a/awx/api/urls/label.py
+++ b/awx/api/urls/label.py
@@ -3,15 +3,9 @@
from django.conf.urls import url
-from awx.api.views import (
- LabelList,
- LabelDetail,
-)
+from awx.api.views import LabelList, LabelDetail
-urls = [
- url(r'^$', LabelList.as_view(), name='label_list'),
- url(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail'),
-]
+urls = [url(r'^$', LabelList.as_view(), name='label_list'), url(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail')]
__all__ = ['urls']
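Note: the label.py hunk collapses a list that previously ended in a trailing comma onto one line, consistent with the magic-trailing-comma handling being skipped, as assumed earlier. Either way the rewrite is behavior-preserving: black's default safe mode re-parses its output and refuses to emit anything whose AST differs from the input. The same check can be done by hand for this hunk with nothing but the standard library:

import ast

# The deleted multi-line list and the single-line replacement from the
# label.py hunk above, compared as raw source so no awx imports are needed.
before = """
urls = [
    url(r'^$', LabelList.as_view(), name='label_list'),
    url(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail'),
]
"""
after = "urls = [url(r'^$', LabelList.as_view(), name='label_list'), url(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail')]"

# Equal dumps mean the two spellings compile to the same program.
assert ast.dump(ast.parse(before)) == ast.dump(ast.parse(after))
print('ASTs match')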
diff --git a/awx/api/urls/notification.py b/awx/api/urls/notification.py
index 12089afdaa..960318ee15 100644
--- a/awx/api/urls/notification.py
+++ b/awx/api/urls/notification.py
@@ -3,15 +3,9 @@
from django.conf.urls import url
-from awx.api.views import (
- NotificationList,
- NotificationDetail,
-)
+from awx.api.views import NotificationList, NotificationDetail
-urls = [
- url(r'^$', NotificationList.as_view(), name='notification_list'),
- url(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail'),
-]
+urls = [url(r'^$', NotificationList.as_view(), name='notification_list'), url(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail')]
__all__ = ['urls']
diff --git a/awx/api/urls/oauth2.py b/awx/api/urls/oauth2.py
index 6e9eea3d9f..720ba2416f 100644
--- a/awx/api/urls/oauth2.py
+++ b/awx/api/urls/oauth2.py
@@ -16,32 +16,12 @@ from awx.api.views import (
urls = [
url(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'),
- url(
- r'^applications/(?P<pk>[0-9]+)/$',
- OAuth2ApplicationDetail.as_view(),
- name='o_auth2_application_detail'
- ),
- url(
- r'^applications/(?P<pk>[0-9]+)/tokens/$',
- ApplicationOAuth2TokenList.as_view(),
- name='o_auth2_application_token_list'
- ),
- url(
- r'^applications/(?P<pk>[0-9]+)/activity_stream/$',
- OAuth2ApplicationActivityStreamList.as_view(),
- name='o_auth2_application_activity_stream_list'
- ),
+ url(r'^applications/(?P<pk>[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'),
+ url(r'^applications/(?P<pk>[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='o_auth2_application_token_list'),
+ url(r'^applications/(?P<pk>[0-9]+)/activity_stream/$', OAuth2ApplicationActivityStreamList.as_view(), name='o_auth2_application_activity_stream_list'),
url(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'),
- url(
- r'^tokens/(?P<pk>[0-9]+)/$',
- OAuth2TokenDetail.as_view(),
- name='o_auth2_token_detail'
- ),
- url(
- r'^tokens/(?P<pk>[0-9]+)/activity_stream/$',
- OAuth2TokenActivityStreamList.as_view(),
- name='o_auth2_token_activity_stream_list'
- ),
+ url(r'^tokens/(?P<pk>[0-9]+)/$', OAuth2TokenDetail.as_view(), name='o_auth2_token_detail'),
+ url(r'^tokens/(?P<pk>[0-9]+)/activity_stream/$', OAuth2TokenActivityStreamList.as_view(), name='o_auth2_token_activity_stream_list'),
]
__all__ = ['urls']
diff --git a/awx/api/urls/oauth2_root.py b/awx/api/urls/oauth2_root.py
index 1ddfb5320b..61e1e15850 100644
--- a/awx/api/urls/oauth2_root.py
+++ b/awx/api/urls/oauth2_root.py
@@ -10,13 +10,10 @@ from oauthlib import oauth2
from oauth2_provider import views
from awx.main.models import RefreshToken
-from awx.api.views import (
- ApiOAuthAuthorizationRootView,
-)
+from awx.api.views import ApiOAuthAuthorizationRootView
class TokenView(views.TokenView):
-
def create_token_response(self, request):
# Django OAuth2 Toolkit has a bug whereby refresh tokens are *never*
# properly expired (ugh):
@@ -26,9 +23,7 @@ class TokenView(views.TokenView):
# This code detects and auto-expires them on refresh grant
# requests.
if request.POST.get('grant_type') == 'refresh_token' and 'refresh_token' in request.POST:
- refresh_token = RefreshToken.objects.filter(
- token=request.POST['refresh_token']
- ).first()
+ refresh_token = RefreshToken.objects.filter(token=request.POST['refresh_token']).first()
if refresh_token:
expire_seconds = settings.OAUTH2_PROVIDER.get('REFRESH_TOKEN_EXPIRE_SECONDS', 0)
if refresh_token.created + timedelta(seconds=expire_seconds) < now():
diff --git a/awx/api/urls/organization.py b/awx/api/urls/organization.py
index 9d8fecf4bc..9eac94da48 100644
--- a/awx/api/urls/organization.py
+++ b/awx/api/urls/organization.py
@@ -43,14 +43,26 @@ urls = [
url(r'^(?P<pk>[0-9]+)/credentials/$', OrganizationCredentialList.as_view(), name='organization_credential_list'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', OrganizationActivityStreamList.as_view(), name='organization_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates/$', OrganizationNotificationTemplatesList.as_view(), name='organization_notification_templates_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', OrganizationNotificationTemplatesStartedList.as_view(),
- name='organization_notification_templates_started_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', OrganizationNotificationTemplatesErrorList.as_view(),
- name='organization_notification_templates_error_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', OrganizationNotificationTemplatesSuccessList.as_view(),
- name='organization_notification_templates_success_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_approvals/$', OrganizationNotificationTemplatesApprovalList.as_view(),
- name='organization_notification_templates_approvals_list'),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_started/$',
+ OrganizationNotificationTemplatesStartedList.as_view(),
+ name='organization_notification_templates_started_list',
+ ),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_error/$',
+ OrganizationNotificationTemplatesErrorList.as_view(),
+ name='organization_notification_templates_error_list',
+ ),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_success/$',
+ OrganizationNotificationTemplatesSuccessList.as_view(),
+ name='organization_notification_templates_success_list',
+ ),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_approvals/$',
+ OrganizationNotificationTemplatesApprovalList.as_view(),
+ name='organization_notification_templates_approvals_list',
+ ),
url(r'^(?P<pk>[0-9]+)/instance_groups/$', OrganizationInstanceGroupsList.as_view(), name='organization_instance_groups_list'),
url(r'^(?P<pk>[0-9]+)/galaxy_credentials/$', OrganizationGalaxyCredentialsList.as_view(), name='organization_galaxy_credentials_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'),
diff --git a/awx/api/urls/project.py b/awx/api/urls/project.py
index c0909873df..ea356a651b 100644
--- a/awx/api/urls/project.py
+++ b/awx/api/urls/project.py
@@ -35,10 +35,16 @@ urls = [
url(r'^(?P<pk>[0-9]+)/activity_stream/$', ProjectActivityStreamList.as_view(), name='project_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/schedules/$', ProjectSchedulesList.as_view(), name='project_schedules_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', ProjectNotificationTemplatesErrorList.as_view(), name='project_notification_templates_error_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', ProjectNotificationTemplatesSuccessList.as_view(),
- name='project_notification_templates_success_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', ProjectNotificationTemplatesStartedList.as_view(),
- name='project_notification_templates_started_list'),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_success/$',
+ ProjectNotificationTemplatesSuccessList.as_view(),
+ name='project_notification_templates_success_list',
+ ),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_started/$',
+ ProjectNotificationTemplatesStartedList.as_view(),
+ name='project_notification_templates_started_list',
+ ),
url(r'^(?P<pk>[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'),
diff --git a/awx/api/urls/role.py b/awx/api/urls/role.py
index f404aa6640..2b6aed19b5 100644
--- a/awx/api/urls/role.py
+++ b/awx/api/urls/role.py
@@ -3,14 +3,7 @@
from django.conf.urls import url
-from awx.api.views import (
- RoleList,
- RoleDetail,
- RoleUsersList,
- RoleTeamsList,
- RoleParentsList,
- RoleChildrenList,
-)
+from awx.api.views import RoleList, RoleDetail, RoleUsersList, RoleTeamsList, RoleParentsList, RoleChildrenList
urls = [
diff --git a/awx/api/urls/schedule.py b/awx/api/urls/schedule.py
index edd5724356..c3c91f7ae0 100644
--- a/awx/api/urls/schedule.py
+++ b/awx/api/urls/schedule.py
@@ -3,12 +3,7 @@
from django.conf.urls import url
-from awx.api.views import (
- ScheduleList,
- ScheduleDetail,
- ScheduleUnifiedJobsList,
- ScheduleCredentialsList,
-)
+from awx.api.views import ScheduleList, ScheduleDetail, ScheduleUnifiedJobsList, ScheduleCredentialsList
urls = [
diff --git a/awx/api/urls/system_job.py b/awx/api/urls/system_job.py
index b95d1d7329..8b060a2d85 100644
--- a/awx/api/urls/system_job.py
+++ b/awx/api/urls/system_job.py
@@ -3,13 +3,7 @@
from django.conf.urls import url
-from awx.api.views import (
- SystemJobList,
- SystemJobDetail,
- SystemJobCancel,
- SystemJobNotificationsList,
- SystemJobEventsList
-)
+from awx.api.views import SystemJobList, SystemJobDetail, SystemJobCancel, SystemJobNotificationsList, SystemJobEventsList
urls = [
diff --git a/awx/api/urls/system_job_template.py b/awx/api/urls/system_job_template.py
index 396271417d..532d35d97a 100644
--- a/awx/api/urls/system_job_template.py
+++ b/awx/api/urls/system_job_template.py
@@ -21,12 +21,21 @@ urls = [
url(r'^(?P<pk>[0-9]+)/launch/$', SystemJobTemplateLaunch.as_view(), name='system_job_template_launch'),
url(r'^(?P<pk>[0-9]+)/jobs/$', SystemJobTemplateJobsList.as_view(), name='system_job_template_jobs_list'),
url(r'^(?P<pk>[0-9]+)/schedules/$', SystemJobTemplateSchedulesList.as_view(), name='system_job_template_schedules_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', SystemJobTemplateNotificationTemplatesStartedList.as_view(),
- name='system_job_template_notification_templates_started_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', SystemJobTemplateNotificationTemplatesErrorList.as_view(),
- name='system_job_template_notification_templates_error_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', SystemJobTemplateNotificationTemplatesSuccessList.as_view(),
- name='system_job_template_notification_templates_success_list'),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_started/$',
+ SystemJobTemplateNotificationTemplatesStartedList.as_view(),
+ name='system_job_template_notification_templates_started_list',
+ ),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_error/$',
+ SystemJobTemplateNotificationTemplatesErrorList.as_view(),
+ name='system_job_template_notification_templates_error_list',
+ ),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_success/$',
+ SystemJobTemplateNotificationTemplatesSuccessList.as_view(),
+ name='system_job_template_notification_templates_success_list',
+ ),
]
__all__ = ['urls']
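Note: a property worth calling out for a tree-wide mechanical commit like this one is idempotence. Formatting black's own output is a no-op, so once the url modules above are converted, re-running the tool (for example as a lint check) produces no further churn. A minimal sketch, same assumed Mode options as earlier:

# Sketch: black is idempotent -- formatting already-formatted code changes
# nothing, which is what makes a one-time reformat commit safe to land.
import black

mode = black.Mode(line_length=160, string_normalization=False, magic_trailing_comma=False)
once = black.format_str("urls = [url(r'^$', SystemJobList.as_view(), name='system_job_list'),]\n", mode=mode)
assert black.format_str(once, mode=mode) == once
print(once, end='')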
diff --git a/awx/api/urls/urls.py b/awx/api/urls/urls.py
index 2beeb47a47..1df8641e5d 100644
--- a/awx/api/urls/urls.py
+++ b/awx/api/urls/urls.py
@@ -5,10 +5,7 @@ from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.conf.urls import include, url
-from awx.api.generics import (
- LoggedLoginView,
- LoggedLogoutView,
-)
+from awx.api.generics import LoggedLoginView, LoggedLogoutView
from awx.api.views import (
ApiRootView,
ApiV2RootView,
@@ -33,9 +30,7 @@ from awx.api.views import (
OAuth2ApplicationDetail,
)
-from awx.api.views.metrics import (
- MetricsView,
-)
+from awx.api.views.metrics import MetricsView
from .organization import urls as organization_urls
from .user import urls as user_urls
@@ -146,17 +141,11 @@ app_name = 'api'
urlpatterns = [
url(r'^$', ApiRootView.as_view(), name='api_root_view'),
url(r'^(?P<version>(v2))/', include(v2_urls)),
- url(r'^login/$', LoggedLoginView.as_view(
- template_name='rest_framework/login.html',
- extra_context={'inside_login_context': True}
- ), name='login'),
- url(r'^logout/$', LoggedLogoutView.as_view(
- next_page='/api/', redirect_field_name='next'
- ), name='logout'),
+ url(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'),
+ url(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'),
url(r'^o/', include(oauth2_root_urls)),
]
if settings.SETTINGS_MODULE == 'awx.settings.development':
from awx.api.swagger import SwaggerSchemaView
- urlpatterns += [
- url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view'),
- ]
+
+ urlpatterns += [url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view')]
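Note: besides joining the LoggedLoginView and LoggedLogoutView routes, the urls.py hunk shows black inserting a blank line between the development-only import and the statement that follows it; black separates imports from subsequent code even inside an indented suite (the same thing happens after the local import traceback in the SchedulePreviewSerializer hunk further up). A sketch of the transformation, under the Mode assumptions used earlier:

# Sketch: the pre-commit spelling of the development-only swagger route.
# black joins the list onto one line and adds a blank line after the nested
# import; it never executes the code, so the undefined names `settings`,
# `urlpatterns`, and `url` are harmless here.
import black

mode = black.Mode(line_length=160, string_normalization=False, magic_trailing_comma=False)
src = (
    "if settings.SETTINGS_MODULE == 'awx.settings.development':\n"
    "    from awx.api.swagger import SwaggerSchemaView\n"
    "    urlpatterns += [\n"
    "        url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view'),\n"
    "    ]\n"
)
print(black.format_str(src, mode=mode))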
diff --git a/awx/api/urls/user.py b/awx/api/urls/user.py
index ca8d531f46..340c428ba5 100644
--- a/awx/api/urls/user.py
+++ b/awx/api/urls/user.py
@@ -20,7 +20,7 @@ from awx.api.views import (
UserAuthorizedTokenList,
)
-urls = [
+urls = [
url(r'^$', UserList.as_view(), name='user_list'),
url(r'^(?P<pk>[0-9]+)/$', UserDetail.as_view(), name='user_detail'),
url(r'^(?P<pk>[0-9]+)/teams/$', UserTeamsList.as_view(), name='user_teams_list'),
@@ -35,7 +35,6 @@ urls = [
url(r'^(?P<pk>[0-9]+)/tokens/$', OAuth2UserTokenList.as_view(), name='o_auth2_token_list'),
url(r'^(?P<pk>[0-9]+)/authorized_tokens/$', UserAuthorizedTokenList.as_view(), name='user_authorized_token_list'),
url(r'^(?P<pk>[0-9]+)/personal_tokens/$', UserPersonalTokenList.as_view(), name='user_personal_token_list'),
-
-]
+]
__all__ = ['urls']
diff --git a/awx/api/urls/webhooks.py b/awx/api/urls/webhooks.py
index 1a168d3baa..f6739a5df9 100644
--- a/awx/api/urls/webhooks.py
+++ b/awx/api/urls/webhooks.py
@@ -1,10 +1,6 @@
from django.conf.urls import url
-from awx.api.views import (
- WebhookKeyView,
- GithubWebhookReceiver,
- GitlabWebhookReceiver,
-)
+from awx.api.views import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver
urlpatterns = [
diff --git a/awx/api/urls/workflow_approval.py b/awx/api/urls/workflow_approval.py
index dc58da1d3a..a3c6454af1 100644
--- a/awx/api/urls/workflow_approval.py
+++ b/awx/api/urls/workflow_approval.py
@@ -3,12 +3,7 @@
from django.conf.urls import url
-from awx.api.views import (
- WorkflowApprovalList,
- WorkflowApprovalDetail,
- WorkflowApprovalApprove,
- WorkflowApprovalDeny,
-)
+from awx.api.views import WorkflowApprovalList, WorkflowApprovalDetail, WorkflowApprovalApprove, WorkflowApprovalDeny
urls = [
diff --git a/awx/api/urls/workflow_approval_template.py b/awx/api/urls/workflow_approval_template.py
index 8a22ee83b3..f49929b283 100644
--- a/awx/api/urls/workflow_approval_template.py
+++ b/awx/api/urls/workflow_approval_template.py
@@ -3,10 +3,7 @@
from django.conf.urls import url
-from awx.api.views import (
- WorkflowApprovalTemplateDetail,
- WorkflowApprovalTemplateJobsList,
-)
+from awx.api.views import WorkflowApprovalTemplateDetail, WorkflowApprovalTemplateJobsList
urls = [
diff --git a/awx/api/urls/workflow_job_template.py b/awx/api/urls/workflow_job_template.py
index b9deda499a..90b3c043fc 100644
--- a/awx/api/urls/workflow_job_template.py
+++ b/awx/api/urls/workflow_job_template.py
@@ -33,14 +33,26 @@ urls = [
url(r'^(?P<pk>[0-9]+)/survey_spec/$', WorkflowJobTemplateSurveySpec.as_view(), name='workflow_job_template_survey_spec'),
url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', WorkflowJobTemplateWorkflowNodesList.as_view(), name='workflow_job_template_workflow_nodes_list'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', WorkflowJobTemplateActivityStreamList.as_view(), name='workflow_job_template_activity_stream_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', WorkflowJobTemplateNotificationTemplatesStartedList.as_view(),
- name='workflow_job_template_notification_templates_started_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', WorkflowJobTemplateNotificationTemplatesErrorList.as_view(),
- name='workflow_job_template_notification_templates_error_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', WorkflowJobTemplateNotificationTemplatesSuccessList.as_view(),
- name='workflow_job_template_notification_templates_success_list'),
- url(r'^(?P<pk>[0-9]+)/notification_templates_approvals/$', WorkflowJobTemplateNotificationTemplatesApprovalList.as_view(),
- name='workflow_job_template_notification_templates_approvals_list'),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_started/$',
+ WorkflowJobTemplateNotificationTemplatesStartedList.as_view(),
+ name='workflow_job_template_notification_templates_started_list',
+ ),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_error/$',
+ WorkflowJobTemplateNotificationTemplatesErrorList.as_view(),
+ name='workflow_job_template_notification_templates_error_list',
+ ),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_success/$',
+ WorkflowJobTemplateNotificationTemplatesSuccessList.as_view(),
+ name='workflow_job_template_notification_templates_success_list',
+ ),
+ url(
+ r'^(?P<pk>[0-9]+)/notification_templates_approvals/$',
+ WorkflowJobTemplateNotificationTemplatesApprovalList.as_view(),
+ name='workflow_job_template_notification_templates_approvals_list',
+ ),
url(r'^(?P<pk>[0-9]+)/access_list/$', WorkflowJobTemplateAccessList.as_view(), name='workflow_job_template_access_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', WorkflowJobTemplateObjectRolesList.as_view(), name='workflow_job_template_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobTemplateLabelList.as_view(), name='workflow_job_template_label_list'),
diff --git a/awx/api/versioning.py b/awx/api/versioning.py
index 3ad9638832..9fc57ac71e 100644
--- a/awx/api/versioning.py
+++ b/awx/api/versioning.py
@@ -40,13 +40,10 @@ def reverse(viewname, args=None, kwargs=None, request=None, format=None, **extra
class URLPathVersioning(BaseVersioning):
-
def reverse(self, viewname, args=None, kwargs=None, request=None, format=None, **extra):
if request.version is not None:
kwargs = {} if (kwargs is None) else kwargs
kwargs[self.version_param] = request.version
request = None
- return super(BaseVersioning, self).reverse(
- viewname, args, kwargs, request, format, **extra
- )
+ return super(BaseVersioning, self).reverse(viewname, args, kwargs, request, format, **extra)
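Note: two smaller normalizations recur in this section and in the views/__init__.py section that follows. The blank line many classes carried directly under their class statement is removed (SuperUserSchemaGenerator, AutoSchema, TokenView, URLPathVersioning, LabelSerializer, SchedulePreviewSerializer), and inline comments such as the # noqa markers gain black's standard two spaces of padding; the views section also shows a ''' docstring flipped to """. A combined sketch of the first two, same assumed Mode options:

# Sketch: the blank line under the class statement is dropped and the inline
# comment is padded to two spaces before the '#'. BaseVersioning need not be
# importable -- black formats source text without executing it.
import black

mode = black.Mode(line_length=160, string_normalization=False, magic_trailing_comma=False)
src = (
    "class URLPathVersioning(BaseVersioning):\n"
    "\n"
    "    def reverse(self): # noqa\n"
    "        return None\n"
)
print(black.format_str(src, mode=mode))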
diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py
index 2d29519de8..fa175eff7b 100644
--- a/awx/api/views/__init__.py
+++ b/awx/api/views/__init__.py
@@ -65,13 +65,25 @@ from wsgiref.util import FileWrapper
from awx.main.tasks import send_notifications, update_inventory_computed_fields
from awx.main.access import get_user_queryset, HostAccess
from awx.api.generics import (
- APIView, BaseUsersList, CopyAPIView, DeleteLastUnattachLabelMixin,
- GenericAPIView, ListAPIView, ListCreateAPIView,
- ResourceAccessList, RetrieveAPIView, RetrieveDestroyAPIView,
- RetrieveUpdateAPIView, RetrieveUpdateDestroyAPIView, SimpleListAPIView,
- SubDetailAPIView, SubListAPIView, SubListAttachDetachAPIView,
- SubListCreateAPIView, SubListCreateAttachDetachAPIView,
- SubListDestroyAPIView
+ APIView,
+ BaseUsersList,
+ CopyAPIView,
+ DeleteLastUnattachLabelMixin,
+ GenericAPIView,
+ ListAPIView,
+ ListCreateAPIView,
+ ResourceAccessList,
+ RetrieveAPIView,
+ RetrieveDestroyAPIView,
+ RetrieveUpdateAPIView,
+ RetrieveUpdateDestroyAPIView,
+ SimpleListAPIView,
+ SubDetailAPIView,
+ SubListAPIView,
+ SubListAttachDetachAPIView,
+ SubListCreateAPIView,
+ SubListCreateAttachDetachAPIView,
+ SubListDestroyAPIView,
)
from awx.api.versioning import reverse
from awx.main import models
@@ -84,17 +96,21 @@ from awx.main.utils import (
get_pk_from_dict,
schedule_task_manager,
ignore_inventory_computed_fields,
- set_environ
+ set_environ,
)
from awx.main.utils.encryption import encrypt_value
from awx.main.utils.filters import SmartFilter
from awx.main.utils.insights import filter_insights_api_response
from awx.main.redact import UriCleaner
from awx.api.permissions import (
- JobTemplateCallbackPermission, TaskPermission, ProjectUpdatePermission,
- InventoryInventorySourcesUpdatePermission, UserPermission,
- InstanceGroupTowerPermission, VariableDataPermission,
- WorkflowApprovalPermission
+ JobTemplateCallbackPermission,
+ TaskPermission,
+ ProjectUpdatePermission,
+ InventoryInventorySourcesUpdatePermission,
+ UserPermission,
+ InstanceGroupTowerPermission,
+ VariableDataPermission,
+ WorkflowApprovalPermission,
)
from awx.api import renderers
from awx.api import serializers
@@ -102,11 +118,14 @@ from awx.api.metadata import RoleMetadata
from awx.main.constants import ACTIVE_STATES
from awx.main.scheduler.dag_workflow import WorkflowDAG
from awx.api.views.mixin import (
- ControlledByScmMixin, InstanceGroupMembershipMixin,
- OrganizationCountsMixin, RelatedJobsPreventDeleteMixin,
- UnifiedJobDeletionMixin, NoTruncateMixin,
+ ControlledByScmMixin,
+ InstanceGroupMembershipMixin,
+ OrganizationCountsMixin,
+ RelatedJobsPreventDeleteMixin,
+ UnifiedJobDeletionMixin,
+ NoTruncateMixin,
)
-from awx.api.views.organization import ( # noqa
+from awx.api.views.organization import ( # noqa
OrganizationList,
OrganizationDetail,
OrganizationInventoriesList,
@@ -129,7 +148,7 @@ from awx.api.views.organization import ( # noqa
OrganizationAccessList,
OrganizationObjectRolesList,
)
-from awx.api.views.inventory import ( # noqa
+from awx.api.views.inventory import ( # noqa
InventoryList,
InventoryDetail,
InventoryUpdateEventsList,
@@ -146,7 +165,7 @@ from awx.api.views.inventory import ( # noqa
InventoryJobTemplateList,
InventoryCopy,
)
-from awx.api.views.root import ( # noqa
+from awx.api.views.root import ( # noqa
ApiRootView,
ApiOAuthAuthorizationRootView,
ApiVersionRootView,
@@ -156,20 +175,16 @@ from awx.api.views.root import ( # noqa
ApiV2SubscriptionView,
ApiV2AttachView,
)
-from awx.api.views.webhooks import ( # noqa
- WebhookKeyView,
- GithubWebhookReceiver,
- GitlabWebhookReceiver,
-)
+from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver # noqa
logger = logging.getLogger('awx.api.views')
def api_exception_handler(exc, context):
- '''
+ """
Override default API exception handler to catch IntegrityError exceptions.
- '''
+ """
if isinstance(exc, IntegrityError):
exc = ParseError(exc.args[0])
if isinstance(exc, FieldError):
@@ -204,40 +219,46 @@ class DashboardView(APIView):
user_inventory_external = user_inventory.filter(has_inventory_sources=True)
# if there are *zero* inventories, this aggregrate query will be None, fall back to 0
failed_inventory = user_inventory.aggregate(Sum('inventory_sources_with_failures'))['inventory_sources_with_failures__sum'] or 0
- data['inventories'] = {'url': reverse('api:inventory_list', request=request),
- 'total': user_inventory.count(),
- 'total_with_inventory_source': user_inventory_external.count(),
- 'job_failed': inventory_with_failed_hosts.count(),
- 'inventory_failed': failed_inventory}
+ data['inventories'] = {
+ 'url': reverse('api:inventory_list', request=request),
+ 'total': user_inventory.count(),
+ 'total_with_inventory_source': user_inventory_external.count(),
+ 'job_failed': inventory_with_failed_hosts.count(),
+ 'inventory_failed': failed_inventory,
+ }
user_inventory_sources = get_user_queryset(request.user, models.InventorySource)
ec2_inventory_sources = user_inventory_sources.filter(source='ec2')
ec2_inventory_failed = ec2_inventory_sources.filter(status='failed')
data['inventory_sources'] = {}
- data['inventory_sources']['ec2'] = {'url': reverse('api:inventory_source_list', request=request) + "?source=ec2",
- 'failures_url': reverse('api:inventory_source_list', request=request) + "?source=ec2&status=failed",
- 'label': 'Amazon EC2',
- 'total': ec2_inventory_sources.count(),
- 'failed': ec2_inventory_failed.count()}
+ data['inventory_sources']['ec2'] = {
+ 'url': reverse('api:inventory_source_list', request=request) + "?source=ec2",
+ 'failures_url': reverse('api:inventory_source_list', request=request) + "?source=ec2&status=failed",
+ 'label': 'Amazon EC2',
+ 'total': ec2_inventory_sources.count(),
+ 'failed': ec2_inventory_failed.count(),
+ }
user_groups = get_user_queryset(request.user, models.Group)
groups_inventory_failed = models.Group.objects.filter(inventory_sources__last_job_failed=True).count()
- data['groups'] = {'url': reverse('api:group_list', request=request),
- 'total': user_groups.count(),
- 'inventory_failed': groups_inventory_failed}
+ data['groups'] = {'url': reverse('api:group_list', request=request), 'total': user_groups.count(), 'inventory_failed': groups_inventory_failed}
user_hosts = get_user_queryset(request.user, models.Host)
user_hosts_failed = user_hosts.filter(last_job_host_summary__failed=True)
- data['hosts'] = {'url': reverse('api:host_list', request=request),
- 'failures_url': reverse('api:host_list', request=request) + "?last_job_host_summary__failed=True",
- 'total': user_hosts.count(),
- 'failed': user_hosts_failed.count()}
+ data['hosts'] = {
+ 'url': reverse('api:host_list', request=request),
+ 'failures_url': reverse('api:host_list', request=request) + "?last_job_host_summary__failed=True",
+ 'total': user_hosts.count(),
+ 'failed': user_hosts_failed.count(),
+ }
user_projects = get_user_queryset(request.user, models.Project)
user_projects_failed = user_projects.filter(last_job_failed=True)
- data['projects'] = {'url': reverse('api:project_list', request=request),
- 'failures_url': reverse('api:project_list', request=request) + "?last_job_failed=True",
- 'total': user_projects.count(),
- 'failed': user_projects_failed.count()}
+ data['projects'] = {
+ 'url': reverse('api:project_list', request=request),
+ 'failures_url': reverse('api:project_list', request=request) + "?last_job_failed=True",
+ 'total': user_projects.count(),
+ 'failed': user_projects_failed.count(),
+ }
git_projects = user_projects.filter(scm_type='git')
git_failed_projects = git_projects.filter(last_job_failed=True)
@@ -246,37 +267,38 @@ class DashboardView(APIView):
archive_projects = user_projects.filter(scm_type='archive')
archive_failed_projects = archive_projects.filter(last_job_failed=True)
data['scm_types'] = {}
- data['scm_types']['git'] = {'url': reverse('api:project_list', request=request) + "?scm_type=git",
- 'label': 'Git',
- 'failures_url': reverse('api:project_list', request=request) + "?scm_type=git&last_job_failed=True",
- 'total': git_projects.count(),
- 'failed': git_failed_projects.count()}
- data['scm_types']['svn'] = {'url': reverse('api:project_list', request=request) + "?scm_type=svn",
- 'label': 'Subversion',
- 'failures_url': reverse('api:project_list', request=request) + "?scm_type=svn&last_job_failed=True",
- 'total': svn_projects.count(),
- 'failed': svn_failed_projects.count()}
- data['scm_types']['archive'] = {'url': reverse('api:project_list', request=request) + "?scm_type=archive",
- 'label': 'Remote Archive',
- 'failures_url': reverse('api:project_list', request=request) + "?scm_type=archive&last_job_failed=True",
- 'total': archive_projects.count(),
- 'failed': archive_failed_projects.count()}
+ data['scm_types']['git'] = {
+ 'url': reverse('api:project_list', request=request) + "?scm_type=git",
+ 'label': 'Git',
+ 'failures_url': reverse('api:project_list', request=request) + "?scm_type=git&last_job_failed=True",
+ 'total': git_projects.count(),
+ 'failed': git_failed_projects.count(),
+ }
+ data['scm_types']['svn'] = {
+ 'url': reverse('api:project_list', request=request) + "?scm_type=svn",
+ 'label': 'Subversion',
+ 'failures_url': reverse('api:project_list', request=request) + "?scm_type=svn&last_job_failed=True",
+ 'total': svn_projects.count(),
+ 'failed': svn_failed_projects.count(),
+ }
+ data['scm_types']['archive'] = {
+ 'url': reverse('api:project_list', request=request) + "?scm_type=archive",
+ 'label': 'Remote Archive',
+ 'failures_url': reverse('api:project_list', request=request) + "?scm_type=archive&last_job_failed=True",
+ 'total': archive_projects.count(),
+ 'failed': archive_failed_projects.count(),
+ }
user_list = get_user_queryset(request.user, models.User)
team_list = get_user_queryset(request.user, models.Team)
credential_list = get_user_queryset(request.user, models.Credential)
job_template_list = get_user_queryset(request.user, models.JobTemplate)
organization_list = get_user_queryset(request.user, models.Organization)
- data['users'] = {'url': reverse('api:user_list', request=request),
- 'total': user_list.count()}
- data['organizations'] = {'url': reverse('api:organization_list', request=request),
- 'total': organization_list.count()}
- data['teams'] = {'url': reverse('api:team_list', request=request),
- 'total': team_list.count()}
- data['credentials'] = {'url': reverse('api:credential_list', request=request),
- 'total': credential_list.count()}
- data['job_templates'] = {'url': reverse('api:job_template_list', request=request),
- 'total': job_template_list.count()}
+ data['users'] = {'url': reverse('api:user_list', request=request), 'total': user_list.count()}
+ data['organizations'] = {'url': reverse('api:organization_list', request=request), 'total': organization_list.count()}
+ data['teams'] = {'url': reverse('api:team_list', request=request), 'total': team_list.count()}
+ data['credentials'] = {'url': reverse('api:credential_list', request=request), 'total': credential_list.count()}
+ data['job_templates'] = {'url': reverse('api:job_template_list', request=request), 'total': job_template_list.count()}
return Response(data)
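The DashboardView hunk shows how black treats literals relative to the configured line length, which is evidently generous here (the longest reflowed lines run to roughly 160 characters): a dict too long for one line is exploded one key per line with a trailing comma, while one short enough, like data['users'], is collapsed onto a single line. A sketch under that assumption, with made-up values:

    # When a dict exceeds the limit, black explodes it: one key per
    # line, closed with a trailing comma
    data = {
        'url': '/api/v2/inventories/',
        'total': 42,
        'total_with_inventory_source': 7,
        'job_failed': 3,
        'inventory_failed': 1,
    }

    # When it fits, black collapses it and drops the trailing comma
    data['users'] = {'url': '/api/v2/users/', 'total': 10}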
@@ -325,11 +347,9 @@ class DashboardJobsGraphView(APIView):
dashboard_data = {"jobs": {"successful": [], "failed": []}}
for element in success_qss.time_series(end_date, start_date, interval=interval):
- dashboard_data['jobs']['successful'].append([time.mktime(element[0].timetuple()),
- element[1]])
+ dashboard_data['jobs']['successful'].append([time.mktime(element[0].timetuple()), element[1]])
for element in failed_qss.time_series(end_date, start_date, interval=interval):
- dashboard_data['jobs']['failed'].append([time.mktime(element[0].timetuple()),
- element[1]])
+ dashboard_data['jobs']['failed'].append([time.mktime(element[0].timetuple()), element[1]])
return Response(dashboard_data)
@@ -347,7 +367,6 @@ class InstanceDetail(RetrieveUpdateAPIView):
model = models.Instance
serializer_class = serializers.InstanceSerializer
-
def update(self, request, *args, **kwargs):
r = super(InstanceDetail, self).update(request, *args, **kwargs)
if status.is_success(r.status_code):
@@ -467,10 +486,7 @@ class SchedulePreview(GenericAPIView):
continue
schedule.append(event)
- return Response({
- 'local': schedule,
- 'utc': [s.astimezone(pytz.utc) for s in schedule]
- })
+ return Response({'local': schedule, 'utc': [s.astimezone(pytz.utc) for s in schedule]})
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -479,10 +495,7 @@ class ScheduleZoneInfo(APIView):
swagger_topic = 'System Configuration'
def get(self, request):
- zones = [
- {'name': zone}
- for zone in models.Schedule.get_zoneinfo()
- ]
+ zones = [{'name': zone} for zone in models.Schedule.get_zoneinfo()]
return Response(zones)
@@ -501,19 +514,18 @@ class LaunchConfigCredentialsBase(SubListAttachDetachAPIView):
if self.relationship not in ask_mapping:
return {"msg": _("Related template cannot accept {} on launch.").format(self.relationship)}
elif sub.passwords_needed:
- return {"msg": _("Credential that requires user input on launch "
- "cannot be used in saved launch configuration.")}
+ return {"msg": _("Credential that requires user input on launch " "cannot be used in saved launch configuration.")}
ask_field_name = ask_mapping[self.relationship]
if not getattr(parent.unified_job_template, ask_field_name):
return {"msg": _("Related template is not configured to accept credentials on launch.")}
elif sub.unique_hash() in [cred.unique_hash() for cred in parent.credentials.all()]:
- return {"msg": _("This launch configuration already provides a {credential_type} credential.").format(
- credential_type=sub.unique_hash(display=True))}
+ return {
+ "msg": _("This launch configuration already provides a {credential_type} credential.").format(credential_type=sub.unique_hash(display=True))
+ }
elif sub.pk in parent.unified_job_template.credentials.values_list('pk', flat=True):
- return {"msg": _("Related template already uses {credential_type} credential.").format(
- credential_type=sub.name)}
+ return {"msg": _("Related template already uses {credential_type} credential.").format(credential_type=sub.name)}
# None means there were no validation errors
return None
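One artifact of the mechanical reflow, visible in is_valid_relation above, is that black never merges adjacent string literals: a translatable message that had been wrapped across two lines ends up as an implicit concatenation on a single line, quote break and all. Joining such literals is a manual follow-up; the formatter will not do it. For example:

    # What black produces from a previously wrapped translatable string:
    msg = "Credential that requires user input on launch " "cannot be used in saved launch configuration."

    # Equivalent, and what a later hand-cleanup pass would write:
    msg = "Credential that requires user input on launch cannot be used in saved launch configuration."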
@@ -542,6 +554,7 @@ class AuthView(APIView):
def get(self, request):
from rest_framework.reverse import reverse
+
data = OrderedDict()
err_backend, err_message = request.session.get('social_auth_error', (None, None))
auth_backends = list(load_backends(settings.AUTHENTICATION_BACKENDS, force_load=True).items())
@@ -550,10 +563,7 @@ class AuthView(APIView):
for name, backend in auth_backends:
login_url = reverse('social:begin', args=(name,))
complete_url = request.build_absolute_uri(reverse('social:complete', args=(name,)))
- backend_data = {
- 'login_url': login_url,
- 'complete_url': complete_url,
- }
+ backend_data = {'login_url': login_url, 'complete_url': complete_url}
if name == 'saml':
backend_data['metadata_url'] = reverse('sso:saml_metadata')
for idp in sorted(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.keys()):
@@ -597,8 +607,8 @@ class TeamRolesList(SubListAttachDetachAPIView):
serializer_class = serializers.RoleSerializerWithParentAccess
metadata_class = RoleMetadata
parent_model = models.Team
- relationship='member_role.children'
- search_fields = ('role_field', 'content_type__model',)
+ relationship = 'member_role.children'
+ search_fields = ('role_field', 'content_type__model')
def get_queryset(self):
team = get_object_or_404(models.Team, pk=self.kwargs['pk'])
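The TeamRolesList hunk bundles three small PEP 8 fixes that recur throughout the diff: spaces around attribute assignments (relationship='...' becomes relationship = '...'), two spaces before inline comments, and removal of the trailing comma from tuples that stay on one line. That last point also shows this run does not treat a pre-existing trailing comma as "magic"; whether that comes from the black version in use or an explicit option is not visible in the diff itself. Schematically:

    # Before
    relationship='member_role.children'
    search_fields = ('role_field', 'content_type__model',)
    model = 'User' # one space before an inline comment

    # After black
    relationship = 'member_role.children'
    search_fields = ('role_field', 'content_type__model')
    model = 'User'  # two spaces before an inline comment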
@@ -636,7 +646,7 @@ class TeamObjectRolesList(SubListAPIView):
model = models.Role
serializer_class = serializers.RoleSerializer
parent_model = models.Team
- search_fields = ('role_field', 'content_type__model',)
+ search_fields = ('role_field', 'content_type__model')
def get_queryset(self):
po = self.get_parent_object()
@@ -655,10 +665,7 @@ class TeamProjectsList(SubListAPIView):
self.check_parent_access(team)
model_ct = ContentType.objects.get_for_model(self.model)
parent_ct = ContentType.objects.get_for_model(self.parent_model)
- proj_roles = models.Role.objects.filter(
- Q(ancestors__content_type=parent_ct) & Q(ancestors__object_id=team.pk),
- content_type=model_ct
- )
+ proj_roles = models.Role.objects.filter(Q(ancestors__content_type=parent_ct) & Q(ancestors__object_id=team.pk), content_type=model_ct)
return self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=[t.content_object.pk for t in proj_roles])
@@ -675,14 +682,16 @@ class TeamActivityStreamList(SubListAPIView):
self.check_parent_access(parent)
qs = self.request.user.get_queryset(self.model)
- return qs.filter(Q(team=parent) |
- Q(project__in=models.Project.accessible_objects(parent, 'read_role')) |
- Q(credential__in=models.Credential.accessible_objects(parent, 'read_role')))
+ return qs.filter(
+ Q(team=parent)
+ | Q(project__in=models.Project.accessible_objects(parent, 'read_role'))
+ | Q(credential__in=models.Credential.accessible_objects(parent, 'read_role'))
+ )
class TeamAccessList(ResourceAccessList):
- model = models.User # needs to be User for AccessLists's
+    model = models.User  # needs to be User for AccessLists
parent_model = models.Team
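When a boolean expression must wrap, black breaks before the binary operator, so each continuation line leads with | (or and/or), as in the TeamActivityStreamList queryset above; this matches PEP 8's current recommendation to break before operators. A small illustration with Django Q objects (the function name is illustrative):

    from django.db.models import Q

    def team_stream_filter(parent, projects, credentials):
        # After black, each continuation line starts with the operator
        return (
            Q(team=parent)
            | Q(project__in=projects)
            | Q(credential__in=credentials)
        )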
@@ -711,7 +720,7 @@ class ExecutionEnvironmentJobTemplateList(SubListAPIView):
class ExecutionEnvironmentCopy(CopyAPIView):
-
+
model = models.ExecutionEnvironment
copy_return_serializer_class = serializers.ExecutionEnvironmentSerializer
@@ -856,13 +865,9 @@ class ProjectUpdateView(RetrieveAPIView):
else:
data = OrderedDict()
data['project_update'] = project_update.id
- data.update(
- serializers.ProjectUpdateSerializer(project_update, context=self.get_serializer_context()).to_representation(project_update)
- )
+ data.update(serializers.ProjectUpdateSerializer(project_update, context=self.get_serializer_context()).to_representation(project_update))
headers = {'Location': project_update.get_absolute_url(request=request)}
- return Response(data,
- headers=headers,
- status=status.HTTP_202_ACCEPTED)
+ return Response(data, headers=headers, status=status.HTTP_202_ACCEPTED)
else:
return self.http_method_not_allowed(request, *args, **kwargs)
@@ -928,7 +933,7 @@ class ProjectUpdateNotificationsList(SubListAPIView):
serializer_class = serializers.NotificationSerializer
parent_model = models.ProjectUpdate
relationship = 'notifications'
- search_fields = ('subject', 'notification_type', 'body',)
+ search_fields = ('subject', 'notification_type', 'body')
class ProjectUpdateScmInventoryUpdates(SubListAPIView):
@@ -943,7 +948,7 @@ class ProjectUpdateScmInventoryUpdates(SubListAPIView):
class ProjectAccessList(ResourceAccessList):
- model = models.User # needs to be User for AccessLists's
+    model = models.User  # needs to be User for AccessLists
parent_model = models.Project
@@ -952,7 +957,7 @@ class ProjectObjectRolesList(SubListAPIView):
model = models.Role
serializer_class = serializers.RoleSerializer
parent_model = models.Project
- search_fields = ('role_field', 'content_type__model',)
+ search_fields = ('role_field', 'content_type__model')
def get_queryset(self):
po = self.get_parent_object()
@@ -1121,8 +1126,7 @@ class UserTeamsList(SubListAPIView):
u = get_object_or_404(models.User, pk=self.kwargs['pk'])
if not self.request.user.can_access(models.User, 'read', u):
raise PermissionDenied()
- return models.Team.accessible_objects(self.request.user, 'read_role').filter(
- Q(member_role__members=u) | Q(admin_role__members=u)).distinct()
+ return models.Team.accessible_objects(self.request.user, 'read_role').filter(Q(member_role__members=u) | Q(admin_role__members=u)).distinct()
class UserRolesList(SubListAttachDetachAPIView):
@@ -1131,9 +1135,9 @@ class UserRolesList(SubListAttachDetachAPIView):
serializer_class = serializers.RoleSerializerWithParentAccess
metadata_class = RoleMetadata
parent_model = models.User
- relationship='roles'
+ relationship = 'roles'
permission_classes = (IsAuthenticated,)
- search_fields = ('role_field', 'content_type__model',)
+ search_fields = ('role_field', 'content_type__model')
def get_queryset(self):
u = get_object_or_404(models.User, pk=self.kwargs['pk'])
@@ -1141,9 +1145,7 @@ class UserRolesList(SubListAttachDetachAPIView):
raise PermissionDenied()
content_type = ContentType.objects.get_for_model(models.User)
- return models.Role.filter_visible_roles(
- self.request.user, u.roles.all()
- ).exclude(content_type=content_type, object_id=u.id)
+ return models.Role.filter_visible_roles(self.request.user, u.roles.all()).exclude(content_type=content_type, object_id=u.id)
def post(self, request, *args, **kwargs):
sub_id = request.data.get('id', None)
@@ -1163,7 +1165,6 @@ class UserRolesList(SubListAttachDetachAPIView):
data = dict(msg=_("You cannot grant private credential access to another user"))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
-
return super(UserRolesList, self).post(request, *args, **kwargs)
def check_parent_access(self, parent=None):
@@ -1270,7 +1271,7 @@ class UserDetail(RetrieveUpdateDestroyAPIView):
class UserAccessList(ResourceAccessList):
- model = models.User # needs to be User for AccessLists's
+    model = models.User  # needs to be User for AccessLists
parent_model = models.User
@@ -1407,7 +1408,6 @@ class CredentialDetail(RetrieveUpdateDestroyAPIView):
return super(CredentialDetail, self).destroy(request, *args, **kwargs)
-
class CredentialActivityStreamList(SubListAPIView):
model = models.ActivityStream
@@ -1419,7 +1419,7 @@ class CredentialActivityStreamList(SubListAPIView):
class CredentialAccessList(ResourceAccessList):
- model = models.User # needs to be User for AccessLists's
+    model = models.User  # needs to be User for AccessLists
parent_model = models.Credential
@@ -1428,7 +1428,7 @@ class CredentialObjectRolesList(SubListAPIView):
model = models.Role
serializer_class = serializers.RoleSerializer
parent_model = models.Credential
- search_fields = ('role_field', 'content_type__model',)
+ search_fields = ('role_field', 'content_type__model')
def get_queryset(self):
po = self.get_parent_object()
@@ -1473,10 +1473,7 @@ class CredentialExternalTest(SubDetailAPIView):
message = exc.__class__.__name__
args = getattr(exc, 'args', [])
for a in args:
- if isinstance(
- getattr(a, 'reason', None),
- ConnectTimeoutError
- ):
+ if isinstance(getattr(a, 'reason', None), ConnectTimeoutError):
message = str(a.reason)
return Response({'inputs': message}, status=status.HTTP_400_BAD_REQUEST)
@@ -1533,16 +1530,12 @@ class CredentialTypeExternalTest(SubDetailAPIView):
message = exc.__class__.__name__
args = getattr(exc, 'args', [])
for a in args:
- if isinstance(
- getattr(a, 'reason', None),
- ConnectTimeoutError
- ):
+ if isinstance(getattr(a, 'reason', None), ConnectTimeoutError):
message = str(a.reason)
return Response({'inputs': message}, status=status.HTTP_400_BAD_REQUEST)
class HostRelatedSearchMixin(object):
-
@property
def related_search_fields(self):
# Edge-case handle: https://github.com/ansible/ansible-tower/issues/7712
@@ -1580,8 +1573,7 @@ class HostDetail(RelatedJobsPreventDeleteMixin, ControlledByScmMixin, RetrieveUp
def delete(self, request, *args, **kwargs):
if self.get_object().inventory.pending_deletion:
- return Response({"error": _("The inventory for this host is already being deleted.")},
- status=status.HTTP_400_BAD_REQUEST)
+ return Response({"error": _("The inventory for this host is already being deleted.")}, status=status.HTTP_400_BAD_REQUEST)
return super(HostDetail, self).delete(request, *args, **kwargs)
@@ -1704,22 +1696,18 @@ class HostInsights(GenericAPIView):
raise BadGateway(_('Unknown exception {} while trying to GET {}').format(e, url))
if res.status_code == 401:
- raise BadGateway(
- _('Unauthorized access. Please check your Insights Credential username and password.'))
+ raise BadGateway(_('Unauthorized access. Please check your Insights Credential username and password.'))
elif res.status_code != 200:
raise BadGateway(
- _(
- 'Failed to access the Insights API at URL {}.'
- ' Server responded with {} status code and message {}'
- ).format(url, res.status_code, res.content)
+ _('Failed to access the Insights API at URL {}.' ' Server responded with {} status code and message {}').format(
+ url, res.status_code, res.content
+ )
)
try:
return res.json()
except ValueError:
- raise BadGateway(
- _('Expected JSON response from Insights at URL {}'
- ' but instead got {}').format(url, res.content))
+ raise BadGateway(_('Expected JSON response from Insights at URL {}' ' but instead got {}').format(url, res.content))
def _get_session(self, username, password):
session = requests.Session()
@@ -1727,29 +1715,23 @@ class HostInsights(GenericAPIView):
return session
-
def _get_platform_info(self, host, session, headers):
- url = '{}/api/inventory/v1/hosts?insights_id={}'.format(
- settings.INSIGHTS_URL_BASE, host.insights_system_id)
+ url = '{}/api/inventory/v1/hosts?insights_id={}'.format(settings.INSIGHTS_URL_BASE, host.insights_system_id)
res = self._call_insights_api(url, session, headers)
try:
res['results'][0]['id']
except (IndexError, KeyError):
- raise NotFound(
- _('Could not translate Insights system ID {}'
- ' into an Insights platform ID.').format(host.insights_system_id))
+ raise NotFound(_('Could not translate Insights system ID {}' ' into an Insights platform ID.').format(host.insights_system_id))
return res['results'][0]
def _get_reports(self, platform_id, session, headers):
- url = '{}/api/insights/v1/system/{}/reports/'.format(
- settings.INSIGHTS_URL_BASE, platform_id)
+ url = '{}/api/insights/v1/system/{}/reports/'.format(settings.INSIGHTS_URL_BASE, platform_id)
return self._call_insights_api(url, session, headers)
def _get_remediations(self, platform_id, session, headers):
- url = '{}/api/remediations/v1/remediations?system={}'.format(
- settings.INSIGHTS_URL_BASE, platform_id)
+ url = '{}/api/remediations/v1/remediations?system={}'.format(settings.INSIGHTS_URL_BASE, platform_id)
remediations = []
@@ -1768,27 +1750,19 @@ class HostInsights(GenericAPIView):
reports = self._get_reports(platform_id, session, headers)
remediations = self._get_remediations(platform_id, session, headers)
- return {
- 'insights_content': filter_insights_api_response(platform_info, reports, remediations)
- }
+ return {'insights_content': filter_insights_api_response(platform_info, reports, remediations)}
def get(self, request, *args, **kwargs):
host = self.get_object()
cred = None
if host.insights_system_id is None:
- return Response(
- dict(error=_('This host is not recognized as an Insights host.')),
- status=status.HTTP_404_NOT_FOUND
- )
+ return Response(dict(error=_('This host is not recognized as an Insights host.')), status=status.HTTP_404_NOT_FOUND)
if host.inventory and host.inventory.insights_credential:
cred = host.inventory.insights_credential
else:
- return Response(
- dict(error=_('The Insights Credential for "{}" was not found.').format(host.inventory.name)),
- status=status.HTTP_404_NOT_FOUND
- )
+ return Response(dict(error=_('The Insights Credential for "{}" was not found.').format(host.inventory.name)), status=status.HTTP_404_NOT_FOUND)
username = cred.get_input('username', default='')
password = cred.get_input('password', default='')
@@ -1812,7 +1786,7 @@ class GroupList(ListCreateAPIView):
class EnforceParentRelationshipMixin(object):
- '''
+ """
    Useful when you have a self-referring ManyToManyRelationship.
* Tower uses a shallow (2-deep only) url pattern. For example:
@@ -1823,7 +1797,8 @@ class EnforceParentRelationshipMixin(object):
    and you would create child_child_models off of /api/v2/child_model/87/child_child_model_set
    Now, when creating a child_child_model related to child_model, you still want to
    link the child_child_model to parent_model. That's what this class is for.
- '''
+ """
+
enforce_parent_relationship = ''
def update_raw_data(self, data):
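Black also normalizes vertical whitespace: it puts exactly one blank line between a class docstring and the first statement of the body (the blank line added after the EnforceParentRelationshipMixin docstring above) and strips the stray blank lines several of these views carried right after a class header or a def signature (the lone removed lines scattered through this diff). A sketch with a hypothetical class standing in for the real one:

    class EnforceExample:
        """Hypothetical mixin standing in for the real one."""

        enforce_parent_relationship = ''  # blank line above enforced by black

        def update_raw_data(self, data):
            # no stray blank line allowed between the signature and this body
            return data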
@@ -1886,9 +1861,7 @@ class GroupPotentialChildrenList(SubListAPIView):
return qs.exclude(pk__in=except_pks)
-class GroupHostsList(HostRelatedSearchMixin,
- ControlledByScmMixin,
- SubListCreateAttachDetachAPIView):
+class GroupHostsList(HostRelatedSearchMixin, ControlledByScmMixin, SubListCreateAttachDetachAPIView):
''' the list of hosts directly below a group '''
model = models.Host
@@ -1905,10 +1878,9 @@ class GroupHostsList(HostRelatedSearchMixin,
# Inject parent group inventory ID into new host data.
request.data['inventory'] = parent_group.inventory_id
existing_hosts = models.Host.objects.filter(inventory=parent_group.inventory, name=request.data.get('name', ''))
- if existing_hosts.count() > 0 and ('variables' not in request.data or
- request.data['variables'] == '' or
- request.data['variables'] == '{}' or
- request.data['variables'] == '---'):
+ if existing_hosts.count() > 0 and (
+ 'variables' not in request.data or request.data['variables'] == '' or request.data['variables'] == '{}' or request.data['variables'] == '---'
+ ):
request.data['id'] = existing_hosts[0].id
return self.attach(request, *args, **kwargs)
return super(GroupHostsList, self).create(request, *args, **kwargs)
@@ -1925,7 +1897,7 @@ class GroupAllHostsList(HostRelatedSearchMixin, SubListAPIView):
def get_queryset(self):
parent = self.get_parent_object()
self.check_parent_access(parent)
- qs = self.request.user.get_queryset(self.model).distinct() # need distinct for '&' operator
+ qs = self.request.user.get_queryset(self.model).distinct() # need distinct for '&' operator
sublist_qs = parent.all_hosts.distinct()
return qs & sublist_qs
@@ -1986,7 +1958,7 @@ class InventoryRootGroupsList(SubListCreateAttachDetachAPIView):
def get_queryset(self):
parent = self.get_parent_object()
self.check_parent_access(parent)
- qs = self.request.user.get_queryset(self.model).distinct() # need distinct for '&' operator
+ qs = self.request.user.get_queryset(self.model).distinct() # need distinct for '&' operator
return qs & parent.root_groups
@@ -2044,12 +2016,7 @@ class InventoryScriptView(RetrieveAPIView):
hosts_q['enabled'] = True
host = get_object_or_404(obj.hosts, **hosts_q)
return Response(host.variables_dict)
- return Response(obj.get_script_data(
- hostvars=hostvars,
- towervars=towervars,
- show_all=show_all,
- slice_number=slice_number, slice_count=slice_count
- ))
+ return Response(obj.get_script_data(hostvars=hostvars, towervars=towervars, show_all=show_all, slice_number=slice_number, slice_count=slice_count))
class InventoryTreeView(RetrieveAPIView):
@@ -2078,8 +2045,7 @@ class InventoryTreeView(RetrieveAPIView):
all_group_data_map = dict((x['id'], x) for x in all_group_data)
tree_data = [all_group_data_map[x] for x in root_group_pks]
for group_data in tree_data:
- self._populate_group_children(group_data, all_group_data_map,
- group_children_map)
+ self._populate_group_children(group_data, all_group_data_map, group_children_map)
return Response(tree_data)
@@ -2108,8 +2074,7 @@ class InventoryInventorySourcesUpdate(RetrieveAPIView):
inventory = self.get_object()
update_data = []
for inventory_source in inventory.inventory_sources.exclude(source=''):
- details = {'inventory_source': inventory_source.pk,
- 'can_update': inventory_source.can_update}
+ details = {'inventory_source': inventory_source.pk, 'can_update': inventory_source.can_update}
update_data.append(details)
return Response(update_data)
@@ -2138,8 +2103,7 @@ class InventoryInventorySourcesUpdate(RetrieveAPIView):
elif failures and not successes:
status_code = status.HTTP_400_BAD_REQUEST
elif not failures and not successes:
- return Response({'detail': _('No inventory sources to update.')},
- status=status.HTTP_400_BAD_REQUEST)
+ return Response({'detail': _('No inventory sources to update.')}, status=status.HTTP_400_BAD_REQUEST)
else:
status_code = status.HTTP_200_OK
return Response(update_data, status=status_code)
@@ -2187,9 +2151,10 @@ class InventorySourceNotificationTemplatesAnyList(SubListCreateAttachDetachAPIVi
def post(self, request, *args, **kwargs):
parent = self.get_parent_object()
if parent.source not in models.CLOUD_INVENTORY_SOURCES:
- return Response(dict(msg=_("Notification Templates can only be assigned when source is one of {}.")
- .format(models.CLOUD_INVENTORY_SOURCES, parent.source)),
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ dict(msg=_("Notification Templates can only be assigned when source is one of {}.").format(models.CLOUD_INVENTORY_SOURCES, parent.source)),
+ status=status.HTTP_400_BAD_REQUEST,
+ )
return super(InventorySourceNotificationTemplatesAnyList, self).post(request, *args, **kwargs)
@@ -2221,6 +2186,7 @@ class InventorySourceHostsList(HostRelatedSearchMixin, SubListDestroyAPIView):
with ignore_inventory_computed_fields():
if not settings.ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC:
from awx.main.signals import disable_activity_stream
+
with disable_activity_stream():
# job host summary deletion necessary to avoid deadlock
models.JobHostSummary.objects.filter(host__inventory_sources=inv_source).update(host=None)
@@ -2230,9 +2196,7 @@ class InventorySourceHostsList(HostRelatedSearchMixin, SubListDestroyAPIView):
# Advance delete of group-host memberships to prevent deadlock
# Activity stream doesn't record disassociation here anyway
# no signals-related reason to not bulk-delete
- models.Host.groups.through.objects.filter(
- host__inventory_sources=inv_source
- ).delete()
+ models.Host.groups.through.objects.filter(host__inventory_sources=inv_source).delete()
r = super(InventorySourceHostsList, self).perform_list_destroy(instance_list)
update_inventory_computed_fields.delay(inv_source.inventory_id)
return r
@@ -2251,15 +2215,14 @@ class InventorySourceGroupsList(SubListDestroyAPIView):
with ignore_inventory_computed_fields():
if not settings.ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC:
from awx.main.signals import disable_activity_stream
+
with disable_activity_stream():
models.Group.objects.filter(inventory_sources=inv_source).delete()
r = super(InventorySourceGroupsList, self).perform_list_destroy([])
else:
# Advance delete of group-host memberships to prevent deadlock
# Same arguments for bulk delete as with host list
- models.Group.hosts.through.objects.filter(
- group__inventory_sources=inv_source
- ).delete()
+ models.Group.hosts.through.objects.filter(group__inventory_sources=inv_source).delete()
r = super(InventorySourceGroupsList, self).perform_list_destroy(instance_list)
update_inventory_computed_fields.delay(inv_source.inventory_id)
return r
@@ -2355,7 +2318,7 @@ class InventoryUpdateNotificationsList(SubListAPIView):
serializer_class = serializers.NotificationSerializer
parent_model = models.InventoryUpdate
relationship = 'notifications'
- search_fields = ('subject', 'notification_type', 'body',)
+ search_fields = ('subject', 'notification_type', 'body')
class JobTemplateList(ListCreateAPIView):
@@ -2418,11 +2381,11 @@ class JobTemplateLaunch(RetrieveAPIView):
return data
def modernize_launch_payload(self, data, obj):
- '''
+ """
Steps to do simple translations of request data to support
old field structure to launch endpoint
TODO: delete this method with future API version changes
- '''
+ """
modern_data = data.copy()
id_fd = '{}_id'.format('inventory')
@@ -2435,14 +2398,11 @@ class JobTemplateLaunch(RetrieveAPIView):
return modern_data
-
def post(self, request, *args, **kwargs):
obj = self.get_object()
try:
- modern_data = self.modernize_launch_payload(
- data=request.data, obj=obj
- )
+ modern_data = self.modernize_launch_payload(data=request.data, obj=obj)
except ParseError as exc:
return Response(exc.detail, status=status.HTTP_400_BAD_REQUEST)
@@ -2474,12 +2434,11 @@ class JobTemplateLaunch(RetrieveAPIView):
headers = {'Location': new_job.get_absolute_url(request)}
return Response(data, status=status.HTTP_201_CREATED, headers=headers)
-
def sanitize_for_response(self, data):
- '''
+ """
Model objects cannot be serialized by DRF,
this replaces objects with their ids for inclusion in response
- '''
+ """
def display_value(val):
if hasattr(val, 'id'):
@@ -2516,15 +2475,7 @@ class JobTemplateSurveySpec(GenericAPIView):
obj_permission_type = 'admin'
serializer_class = serializers.EmptySerializer
- ALLOWED_TYPES = {
- 'text': str,
- 'textarea': str,
- 'password': str,
- 'multiplechoice': str,
- 'multiselect': str,
- 'integer': int,
- 'float': float
- }
+ ALLOWED_TYPES = {'text': str, 'textarea': str, 'password': str, 'multiplechoice': str, 'multiselect': str, 'integer': int, 'float': float}
def get(self, request, *args, **kwargs):
obj = self.get_object()
@@ -2545,15 +2496,11 @@ class JobTemplateSurveySpec(GenericAPIView):
@staticmethod
def _validate_spec_data(new_spec, old_spec):
schema_errors = {}
- for field, expect_type, type_label in [
- ('name', str, 'string'),
- ('description', str, 'string'),
- ('spec', list, 'list of items')]:
+ for field, expect_type, type_label in [('name', str, 'string'), ('description', str, 'string'), ('spec', list, 'list of items')]:
if field not in new_spec:
schema_errors['error'] = _("Field '{}' is missing from survey spec.").format(field)
elif not isinstance(new_spec[field], expect_type):
- schema_errors['error'] = _("Expected {} for field '{}', received {} type.").format(
- type_label, field, type(new_spec[field]).__name__)
+ schema_errors['error'] = _("Expected {} for field '{}', received {} type.").format(type_label, field, type(new_spec[field]).__name__)
if isinstance(new_spec.get('spec', None), list) and len(new_spec["spec"]) < 1:
schema_errors['error'] = _("'spec' doesn't contain any items.")
@@ -2564,18 +2511,16 @@ class JobTemplateSurveySpec(GenericAPIView):
variable_set = set()
old_spec_dict = models.JobTemplate.pivot_spec(old_spec)
for idx, survey_item in enumerate(new_spec["spec"]):
- context = dict(
- idx=str(idx),
- survey_item=survey_item
- )
+ context = dict(idx=str(idx), survey_item=survey_item)
# General element validation
if not isinstance(survey_item, dict):
return Response(dict(error=_("Survey question %s is not a json object.") % str(idx)), status=status.HTTP_400_BAD_REQUEST)
for field_name in ['type', 'question_name', 'variable', 'required']:
if field_name not in survey_item:
- return Response(dict(error=_("'{field_name}' missing from survey question {idx}").format(
- field_name=field_name, **context
- )), status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ dict(error=_("'{field_name}' missing from survey question {idx}").format(field_name=field_name, **context)),
+ status=status.HTTP_400_BAD_REQUEST,
+ )
val = survey_item[field_name]
allow_types = str
type_label = 'string'
@@ -2583,12 +2528,19 @@ class JobTemplateSurveySpec(GenericAPIView):
allow_types = bool
type_label = 'boolean'
if not isinstance(val, allow_types):
- return Response(dict(error=_("'{field_name}' in survey question {idx} expected to be {type_label}.").format(
- field_name=field_name, type_label=type_label, **context
- )), status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ dict(
+ error=_("'{field_name}' in survey question {idx} expected to be {type_label}.").format(
+ field_name=field_name, type_label=type_label, **context
+ )
+ ),
+ status=status.HTTP_400_BAD_REQUEST,
+ )
if survey_item['variable'] in variable_set:
- return Response(dict(error=_("'variable' '%(item)s' duplicated in survey question %(survey)s.") % {
- 'item': survey_item['variable'], 'survey': str(idx)}), status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ dict(error=_("'variable' '%(item)s' duplicated in survey question %(survey)s.") % {'item': survey_item['variable'], 'survey': str(idx)}),
+ status=status.HTTP_400_BAD_REQUEST,
+ )
else:
variable_set.add(survey_item['variable'])
@@ -2596,29 +2548,36 @@ class JobTemplateSurveySpec(GenericAPIView):
# validate question type <-> default type
qtype = survey_item["type"]
if qtype not in JobTemplateSurveySpec.ALLOWED_TYPES:
- return Response(dict(error=_(
- "'{survey_item[type]}' in survey question {idx} is not one of '{allowed_types}' allowed question types."
- ).format(
- allowed_types=', '.join(JobTemplateSurveySpec.ALLOWED_TYPES.keys()), **context
- )), status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ dict(
+ error=_("'{survey_item[type]}' in survey question {idx} is not one of '{allowed_types}' allowed question types.").format(
+ allowed_types=', '.join(JobTemplateSurveySpec.ALLOWED_TYPES.keys()), **context
+ )
+ ),
+ status=status.HTTP_400_BAD_REQUEST,
+ )
if 'default' in survey_item and survey_item['default'] != '':
if not isinstance(survey_item['default'], JobTemplateSurveySpec.ALLOWED_TYPES[qtype]):
type_label = 'string'
if qtype in ['integer', 'float']:
type_label = qtype
- return Response(dict(error=_(
- "Default value {survey_item[default]} in survey question {idx} expected to be {type_label}."
- ).format(
- type_label=type_label, **context
- )), status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ dict(
+ error=_("Default value {survey_item[default]} in survey question {idx} expected to be {type_label}.").format(
+ type_label=type_label, **context
+ )
+ ),
+ status=status.HTTP_400_BAD_REQUEST,
+ )
            # additional type-specific properties; the UI provides these even
            # if not applicable to the question. TODO: request that they not do this
for key in ['min', 'max']:
if key in survey_item:
if survey_item[key] is not None and (not isinstance(survey_item[key], int)):
- return Response(dict(error=_(
- "The {min_or_max} limit in survey question {idx} expected to be integer."
- ).format(min_or_max=key, **context)), status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ dict(error=_("The {min_or_max} limit in survey question {idx} expected to be integer.").format(min_or_max=key, **context)),
+ status=status.HTTP_400_BAD_REQUEST,
+ )
            # if it's a multiselect or multiple choice, it must have choices listed.
            # Choices and defaults must come in as strings separated by \n characters.
if qtype == 'multiselect' or qtype == 'multiplechoice':
@@ -2626,9 +2585,10 @@ class JobTemplateSurveySpec(GenericAPIView):
if isinstance(survey_item['choices'], str):
survey_item['choices'] = '\n'.join(choice for choice in survey_item['choices'].splitlines() if choice.strip() != '')
else:
- return Response(dict(error=_(
- "Survey question {idx} of type {survey_item[type]} must specify choices.".format(**context)
- )), status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ dict(error=_("Survey question {idx} of type {survey_item[type]} must specify choices.".format(**context))),
+ status=status.HTTP_400_BAD_REQUEST,
+ )
                # If there is a default string, split it out, removing extra \n characters.
                # Note: there can still be extra newline characters added in the API; these are sanitized out using .strip()
if 'default' in survey_item:
@@ -2640,23 +2600,27 @@ class JobTemplateSurveySpec(GenericAPIView):
if qtype == 'multiplechoice':
# Multiplechoice types should only have 1 default.
if len(list_of_defaults) > 1:
- return Response(dict(error=_(
- "Multiple Choice (Single Select) can only have one default value.".format(**context)
- )), status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ dict(error=_("Multiple Choice (Single Select) can only have one default value.".format(**context))),
+ status=status.HTTP_400_BAD_REQUEST,
+ )
if any(item not in survey_item['choices'] for item in list_of_defaults):
- return Response(dict(error=_(
- "Default choice must be answered from the choices listed.".format(**context)
- )), status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ dict(error=_("Default choice must be answered from the choices listed.".format(**context))), status=status.HTTP_400_BAD_REQUEST
+ )
# Process encryption substitution
- if ("default" in survey_item and isinstance(survey_item['default'], str) and
- survey_item['default'].startswith('$encrypted$')):
+ if "default" in survey_item and isinstance(survey_item['default'], str) and survey_item['default'].startswith('$encrypted$'):
                # Submission expects the existence of an encrypted DB value to replace the given default
if qtype != "password":
- return Response(dict(error=_(
- "$encrypted$ is a reserved keyword for password question defaults, "
- "survey question {idx} is type {survey_item[type]}."
- ).format(**context)), status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ dict(
+ error=_(
+ "$encrypted$ is a reserved keyword for password question defaults, " "survey question {idx} is type {survey_item[type]}."
+ ).format(**context)
+ ),
+ status=status.HTTP_400_BAD_REQUEST,
+ )
old_element = old_spec_dict.get(survey_item['variable'], {})
encryptedish_default_exists = False
if 'default' in old_element:
@@ -2667,9 +2631,10 @@ class JobTemplateSurveySpec(GenericAPIView):
                elif old_default == "":  # an unencrypted blank string is allowed as a DB value as a special case
encryptedish_default_exists = True
if not encryptedish_default_exists:
- return Response(dict(error=_(
- "$encrypted$ is a reserved keyword, may not be used for new default in position {idx}."
- ).format(**context)), status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ dict(error=_("$encrypted$ is a reserved keyword, may not be used for new default in position {idx}.").format(**context)),
+ status=status.HTTP_400_BAD_REQUEST,
+ )
survey_item['default'] = old_element['default']
elif qtype == "password" and 'default' in survey_item:
# Submission provides new encrypted default
@@ -2733,15 +2698,13 @@ class JobTemplateCredentialsList(SubListCreateAttachDetachAPIView):
self.check_parent_access(parent)
sublist_qs = getattrd(parent, self.relationship)
sublist_qs = sublist_qs.prefetch_related(
- 'created_by', 'modified_by',
- 'admin_role', 'use_role', 'read_role',
- 'admin_role__parents', 'admin_role__members')
+ 'created_by', 'modified_by', 'admin_role', 'use_role', 'read_role', 'admin_role__parents', 'admin_role__members'
+ )
return sublist_qs
def is_valid_relation(self, parent, sub, created=False):
if sub.unique_hash() in [cred.unique_hash() for cred in parent.credentials.all()]:
- return {"error": _("Cannot assign multiple {credential_type} credentials.").format(
- credential_type=sub.unique_hash(display=True))}
+ return {"error": _("Cannot assign multiple {credential_type} credentials.").format(credential_type=sub.unique_hash(display=True))}
kind = sub.credential_type.kind
if kind not in ('ssh', 'vault', 'cloud', 'net', 'kubernetes'):
return {'error': _('Cannot assign a Credential of kind `{}`.').format(kind)}
@@ -2767,8 +2730,9 @@ class JobTemplateLabelList(DeleteLastUnattachLabelMixin, SubListCreateAttachDeta
del request.data['name']
del request.data['organization']
if models.Label.objects.filter(unifiedjobtemplate_labels=self.kwargs['pk']).count() > 100:
- return Response(dict(msg=_('Maximum number of labels for {} reached.'.format(
- self.parent_model._meta.verbose_name_raw))), status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ dict(msg=_('Maximum number of labels for {} reached.'.format(self.parent_model._meta.verbose_name_raw))), status=status.HTTP_400_BAD_REQUEST
+ )
return super(JobTemplateLabelList, self).post(request, *args, **kwargs)
@@ -2785,10 +2749,10 @@ class JobTemplateCallback(GenericAPIView):
return super(JobTemplateCallback, self).dispatch(*args, **kwargs)
def find_matching_hosts(self):
- '''
+ """
Find the host(s) in the job template's inventory that match the remote
host for the current request.
- '''
+ """
# Find the list of remote host names/IPs to check.
remote_hosts = set()
for header in settings.REMOTE_HOST_HEADERS:
@@ -2845,13 +2809,9 @@ class JobTemplateCallback(GenericAPIView):
def get(self, request, *args, **kwargs):
job_template = self.get_object()
matching_hosts = self.find_matching_hosts()
- data = dict(
- host_config_key=job_template.host_config_key,
- matching_hosts=[x.name for x in matching_hosts],
- )
+ data = dict(host_config_key=job_template.host_config_key, matching_hosts=[x.name for x in matching_hosts])
if settings.DEBUG:
- d = dict([(k,v) for k,v in request.META.items()
- if k.startswith('HTTP_') or k.startswith('REMOTE_')])
+ d = dict([(k, v) for k, v in request.META.items() if k.startswith('HTTP_') or k.startswith('REMOTE_')])
data['request_meta'] = d
return Response(data)
@@ -2868,14 +2828,12 @@ class JobTemplateCallback(GenericAPIView):
# match again.
inventory_sources_already_updated = []
if len(matching_hosts) != 1:
- inventory_sources = job_template.inventory.inventory_sources.filter( update_on_launch=True)
+ inventory_sources = job_template.inventory.inventory_sources.filter(update_on_launch=True)
inventory_update_pks = set()
for inventory_source in inventory_sources:
if inventory_source.needs_update_on_launch:
# FIXME: Doesn't check for any existing updates.
- inventory_update = inventory_source.create_inventory_update(
- **{'_eager_fields': {'launch_type': 'callback'}}
- )
+ inventory_update = inventory_source.create_inventory_update(**{'_eager_fields': {'launch_type': 'callback'}})
inventory_update.signal_start()
inventory_update_pks.add(inventory_update.pk)
inventory_update_qs = models.InventoryUpdate.objects.filter(pk__in=inventory_update_pks, status__in=('pending', 'waiting', 'running'))
@@ -2903,9 +2861,7 @@ class JobTemplateCallback(GenericAPIView):
limit = host.name
        # NOTE: We limit this to one job waiting per host per callback to keep them from stacking crazily
- if models.Job.objects.filter(
- status__in=['pending', 'waiting', 'running'], job_template=job_template, limit=limit
- ).count() > 0:
+ if models.Job.objects.filter(status__in=['pending', 'waiting', 'running'], job_template=job_template, limit=limit).count() > 0:
data = dict(msg=_('Host callback job already pending.'))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
@@ -2959,7 +2915,7 @@ class JobTemplateInstanceGroupsList(SubListAttachDetachAPIView):
class JobTemplateAccessList(ResourceAccessList):
- model = models.User # needs to be User for AccessLists's
+    model = models.User  # needs to be User for AccessLists
parent_model = models.JobTemplate
@@ -2968,7 +2924,7 @@ class JobTemplateObjectRolesList(SubListAPIView):
model = models.Role
serializer_class = serializers.RoleSerializer
parent_model = models.JobTemplate
- search_fields = ('role_field', 'content_type__model',)
+ search_fields = ('role_field', 'content_type__model')
def get_queryset(self):
po = self.get_parent_object()
@@ -2986,7 +2942,7 @@ class WorkflowJobNodeList(ListAPIView):
model = models.WorkflowJobNode
serializer_class = serializers.WorkflowJobNodeListSerializer
- search_fields = ('unified_job_template__name', 'unified_job_template__description',)
+ search_fields = ('unified_job_template__name', 'unified_job_template__description')
class WorkflowJobNodeDetail(RetrieveAPIView):
@@ -3007,7 +2963,7 @@ class WorkflowJobTemplateNodeList(ListCreateAPIView):
model = models.WorkflowJobTemplateNode
serializer_class = serializers.WorkflowJobTemplateNodeSerializer
- search_fields = ('unified_job_template__name', 'unified_job_template__description',)
+ search_fields = ('unified_job_template__name', 'unified_job_template__description')
class WorkflowJobTemplateNodeDetail(RetrieveUpdateDestroyAPIView):
@@ -3029,12 +2985,13 @@ class WorkflowJobTemplateNodeChildrenBaseList(EnforceParentRelationshipMixin, Su
parent_model = models.WorkflowJobTemplateNode
relationship = ''
enforce_parent_relationship = 'workflow_job_template'
- search_fields = ('unified_job_template__name', 'unified_job_template__description',)
+ search_fields = ('unified_job_template__name', 'unified_job_template__description')
'''
    Limit the set of WorkflowJobTemplateNodes to the related nodes specified by
'relationship'
'''
+
def get_queryset(self):
parent = self.get_parent_object()
self.check_parent_access(parent)
@@ -3054,8 +3011,7 @@ class WorkflowJobTemplateNodeChildrenBaseList(EnforceParentRelationshipMixin, Su
'''
relationships = ['success_nodes', 'failure_nodes', 'always_nodes']
relationships.remove(self.relationship)
- qs = functools.reduce(lambda x, y: (x | y),
- (Q(**{'{}__in'.format(r): [sub.id]}) for r in relationships))
+ qs = functools.reduce(lambda x, y: (x | y), (Q(**{'{}__in'.format(r): [sub.id]}) for r in relationships))
if models.WorkflowJobTemplateNode.objects.filter(Q(pk=parent.id) & qs).exists():
return {"Error": _("Relationship not allowed.")}
@@ -3083,10 +3039,7 @@ class WorkflowJobTemplateNodeCreateApproval(RetrieveAPIView):
if not serializer.is_valid():
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
approval_template = obj.create_approval_template(**serializer.validated_data)
- data = serializers.WorkflowApprovalTemplateSerializer(
- approval_template,
- context=self.get_serializer_context()
- ).data
+ data = serializers.WorkflowApprovalTemplateSerializer(approval_template, context=self.get_serializer_context()).data
return Response(data, status=status.HTTP_201_CREATED)
def check_permissions(self, request):
@@ -3117,10 +3070,10 @@ class WorkflowJobNodeChildrenBaseList(SubListAPIView):
serializer_class = serializers.WorkflowJobNodeListSerializer
parent_model = models.WorkflowJobNode
relationship = ''
- search_fields = ('unified_job_template__name', 'unified_job_template__description',)
+ search_fields = ('unified_job_template__name', 'unified_job_template__description')
#
- #Limit the set of WorkflowJobeNodes to the related nodes of specified by
+    # Limit the set of WorkflowJobNodes to the related nodes specified by
#'relationship'
#
def get_queryset(self):
@@ -3165,20 +3118,22 @@ class WorkflowJobTemplateCopy(CopyAPIView):
if not request.user.can_access(obj.__class__, 'read', obj):
raise PermissionDenied()
can_copy, messages = request.user.can_access_with_errors(self.model, 'copy', obj)
- data = OrderedDict([
- ('can_copy', can_copy), ('can_copy_without_user_input', can_copy),
- ('templates_unable_to_copy', [] if can_copy else ['all']),
- ('credentials_unable_to_copy', [] if can_copy else ['all']),
- ('inventories_unable_to_copy', [] if can_copy else ['all'])
- ])
+ data = OrderedDict(
+ [
+ ('can_copy', can_copy),
+ ('can_copy_without_user_input', can_copy),
+ ('templates_unable_to_copy', [] if can_copy else ['all']),
+ ('credentials_unable_to_copy', [] if can_copy else ['all']),
+ ('inventories_unable_to_copy', [] if can_copy else ['all']),
+ ]
+ )
if messages and can_copy:
data['can_copy_without_user_input'] = False
data.update(messages)
return Response(data)
def _build_create_dict(self, obj):
- """Special processing of fields managed by char_prompts
- """
+ """Special processing of fields managed by char_prompts"""
r = super(WorkflowJobTemplateCopy, self)._build_create_dict(obj)
field_names = set(f.name for f in obj._meta.get_fields())
for field_name, ask_field_name in obj.get_ask_mapping().items():
@@ -3203,8 +3158,7 @@ class WorkflowJobTemplateCopy(CopyAPIView):
elif field_name in ['credentials']:
for cred in item.all():
if not user.can_access(cred.__class__, 'use', cred):
- logger.debug(
- 'Deep copy: removing {} from relationship due to permissions'.format(cred))
+ logger.debug('Deep copy: removing {} from relationship due to permissions'.format(cred))
item.remove(cred.pk)
obj.save()
@@ -3305,7 +3259,7 @@ class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView):
parent_model = models.WorkflowJobTemplate
relationship = 'workflow_job_template_nodes'
parent_key = 'workflow_job_template'
- search_fields = ('unified_job_template__name', 'unified_job_template__description',)
+ search_fields = ('unified_job_template__name', 'unified_job_template__description')
def get_queryset(self):
return super(WorkflowJobTemplateWorkflowNodesList, self).get_queryset().order_by('id')
@@ -3360,7 +3314,7 @@ class WorkflowJobTemplateNotificationTemplatesApprovalList(WorkflowJobTemplateNo
class WorkflowJobTemplateAccessList(ResourceAccessList):
- model = models.User # needs to be User for AccessLists's
+    model = models.User  # needs to be User for AccessLists
parent_model = models.WorkflowJobTemplate
@@ -3369,7 +3323,7 @@ class WorkflowJobTemplateObjectRolesList(SubListAPIView):
model = models.Role
serializer_class = serializers.RoleSerializer
parent_model = models.WorkflowJobTemplate
- search_fields = ('role_field', 'content_type__model',)
+ search_fields = ('role_field', 'content_type__model')
def get_queryset(self):
po = self.get_parent_object()
@@ -3389,8 +3343,7 @@ class WorkflowJobTemplateActivityStreamList(SubListAPIView):
parent = self.get_parent_object()
self.check_parent_access(parent)
qs = self.request.user.get_queryset(self.model)
- return qs.filter(Q(workflow_job_template=parent) |
- Q(workflow_job_template_node__workflow_job_template=parent)).distinct()
+ return qs.filter(Q(workflow_job_template=parent) | Q(workflow_job_template_node__workflow_job_template=parent)).distinct()
class WorkflowJobList(ListAPIView):
@@ -3413,7 +3366,7 @@ class WorkflowJobWorkflowNodesList(SubListAPIView):
parent_model = models.WorkflowJob
relationship = 'workflow_job_nodes'
parent_key = 'workflow_job'
- search_fields = ('unified_job_template__name', 'unified_job_template__description',)
+ search_fields = ('unified_job_template__name', 'unified_job_template__description')
def get_queryset(self):
return super(WorkflowJobWorkflowNodesList, self).get_queryset().order_by('id')
@@ -3441,12 +3394,13 @@ class WorkflowJobNotificationsList(SubListAPIView):
serializer_class = serializers.NotificationSerializer
parent_model = models.WorkflowJob
relationship = 'notifications'
- search_fields = ('subject', 'notification_type', 'body',)
+ search_fields = ('subject', 'notification_type', 'body')
def get_sublist_queryset(self, parent):
- return self.model.objects.filter(Q(unifiedjob_notifications=parent) |
- Q(unifiedjob_notifications__unified_job_node__workflow_job=parent,
- unifiedjob_notifications__workflowapproval__isnull=False)).distinct()
+ return self.model.objects.filter(
+ Q(unifiedjob_notifications=parent)
+ | Q(unifiedjob_notifications__unified_job_node__workflow_job=parent, unifiedjob_notifications__workflowapproval__isnull=False)
+ ).distinct()
class WorkflowJobActivityStreamList(SubListAPIView):
@@ -3655,19 +3609,22 @@ class JobRelaunch(RetrieveAPIView):
retry_hosts = serializer.validated_data.get('hosts', None)
if retry_hosts and retry_hosts != 'all':
if obj.status in ACTIVE_STATES:
- return Response({'hosts': _(
- 'Wait until job finishes before retrying on {status_value} hosts.'
- ).format(status_value=retry_hosts)}, status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ {'hosts': _('Wait until job finishes before retrying on {status_value} hosts.').format(status_value=retry_hosts)},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
host_qs = obj.retry_qs(retry_hosts)
if not obj.job_events.filter(event='playbook_on_stats').exists():
- return Response({'hosts': _(
- 'Cannot retry on {status_value} hosts, playbook stats not available.'
- ).format(status_value=retry_hosts)}, status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ {'hosts': _('Cannot retry on {status_value} hosts, playbook stats not available.').format(status_value=retry_hosts)},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
retry_host_list = host_qs.values_list('name', flat=True)
if len(retry_host_list) == 0:
- return Response({'hosts': _(
- 'Cannot relaunch because previous job had 0 {status_value} hosts.'
- ).format(status_value=retry_hosts)}, status=status.HTTP_400_BAD_REQUEST)
+ return Response(
+ {'hosts': _('Cannot relaunch because previous job had 0 {status_value} hosts.').format(status_value=retry_hosts)},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
copy_kwargs['limit'] = ','.join(retry_host_list)
new_job = obj.copy_unified_job(**copy_kwargs)
@@ -3695,15 +3652,12 @@ class JobCreateSchedule(RetrieveAPIView):
if not obj.can_schedule:
if getattr(obj, 'passwords_needed_to_start', None):
- return Response({"error": _('Cannot create schedule because job requires credential passwords.')},
- status=status.HTTP_400_BAD_REQUEST)
+ return Response({"error": _('Cannot create schedule because job requires credential passwords.')}, status=status.HTTP_400_BAD_REQUEST)
try:
obj.launch_config
except ObjectDoesNotExist:
- return Response({"error": _('Cannot create schedule because job was launched by legacy method.')},
- status=status.HTTP_400_BAD_REQUEST)
- return Response({"error": _('Cannot create schedule because a related resource is missing.')},
- status=status.HTTP_400_BAD_REQUEST)
+ return Response({"error": _('Cannot create schedule because job was launched by legacy method.')}, status=status.HTTP_400_BAD_REQUEST)
+ return Response({"error": _('Cannot create schedule because a related resource is missing.')}, status=status.HTTP_400_BAD_REQUEST)
config = obj.launch_config
@@ -3727,7 +3681,7 @@ class JobCreateSchedule(RetrieveAPIView):
survey_passwords=config.survey_passwords,
inventory=config.inventory,
char_prompts=config.char_prompts,
- credentials=set(config.credentials.all())
+ credentials=set(config.credentials.all()),
)
if not request.user.can_access(models.Schedule, 'add', schedule_data):
raise PermissionDenied()
@@ -3748,14 +3702,14 @@ class JobNotificationsList(SubListAPIView):
serializer_class = serializers.NotificationSerializer
parent_model = models.Job
relationship = 'notifications'
- search_fields = ('subject', 'notification_type', 'body',)
+ search_fields = ('subject', 'notification_type', 'body')
class BaseJobHostSummariesList(SubListAPIView):
model = models.JobHostSummary
serializer_class = serializers.JobHostSummarySerializer
- parent_model = None # Subclasses must define this attribute.
+ parent_model = None # Subclasses must define this attribute.
relationship = 'job_host_summaries'
name = _('Job Host Summaries List')
search_fields = ('host_name',)
@@ -3840,7 +3794,7 @@ class BaseJobEventsList(NoTruncateMixin, SubListAPIView):
model = models.JobEvent
serializer_class = serializers.JobEventSerializer
- parent_model = None # Subclasses must define this attribute.
+ parent_model = None # Subclasses must define this attribute.
relationship = 'job_events'
name = _('Job Events List')
search_fields = ('stdout',)
@@ -3995,17 +3949,14 @@ class AdHocCommandRelaunch(GenericAPIView):
# Re-validate ad hoc command against serializer to check if module is
# still allowed.
data = {}
- for field in ('job_type', 'inventory_id', 'limit', 'credential_id',
- 'module_name', 'module_args', 'forks', 'verbosity',
- 'extra_vars', 'become_enabled'):
+ for field in ('job_type', 'inventory_id', 'limit', 'credential_id', 'module_name', 'module_args', 'forks', 'verbosity', 'extra_vars', 'become_enabled'):
if field.endswith('_id'):
data[field[:-3]] = getattr(obj, field)
else:
data[field] = getattr(obj, field)
serializer = serializers.AdHocCommandSerializer(data=data, context=self.get_serializer_context())
if not serializer.is_valid():
- return Response(serializer.errors,
- status=status.HTTP_400_BAD_REQUEST)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# Check for passwords needed before copying ad hoc command.
needed = obj.passwords_needed_to_start
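The relaunch loop above strips an _id suffix so that, for example, inventory_id on the old command becomes the inventory key the serializer expects. The idiom in isolation (hypothetical stand-in object, not the AdHocCommand model):

    class OldCommand:
        inventory_id = 42
        limit = 'all'

    obj = OldCommand()
    data = {}
    for field in ('inventory_id', 'limit'):
        if field.endswith('_id'):
            data[field[:-3]] = getattr(obj, field)  # 'inventory_id' -> 'inventory'
        else:
            data[field] = getattr(obj, field)
    assert data == {'inventory': 42, 'limit': 'all'}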
@@ -4051,7 +4002,7 @@ class BaseAdHocCommandEventsList(NoTruncateMixin, SubListAPIView):
model = models.AdHocCommandEvent
serializer_class = serializers.AdHocCommandEventSerializer
- parent_model = None # Subclasses must define this attribute.
+ parent_model = None # Subclasses must define this attribute.
relationship = 'ad_hoc_command_events'
name = _('Ad Hoc Command Events List')
search_fields = ('stdout',)
@@ -4062,7 +4013,7 @@ class HostAdHocCommandEventsList(BaseAdHocCommandEventsList):
parent_model = models.Host
-#class GroupJobEventsList(BaseJobEventsList):
+# class GroupJobEventsList(BaseJobEventsList):
# parent_model = Group
@@ -4086,7 +4037,7 @@ class AdHocCommandNotificationsList(SubListAPIView):
serializer_class = serializers.NotificationSerializer
parent_model = models.AdHocCommand
relationship = 'notifications'
- search_fields = ('subject', 'notification_type', 'body',)
+ search_fields = ('subject', 'notification_type', 'body')
class SystemJobList(ListAPIView):
@@ -4127,21 +4078,21 @@ class SystemJobNotificationsList(SubListAPIView):
serializer_class = serializers.NotificationSerializer
parent_model = models.SystemJob
relationship = 'notifications'
- search_fields = ('subject', 'notification_type', 'body',)
+ search_fields = ('subject', 'notification_type', 'body')
class UnifiedJobTemplateList(ListAPIView):
model = models.UnifiedJobTemplate
serializer_class = serializers.UnifiedJobTemplateSerializer
- search_fields = ('description', 'name', 'jobtemplate__playbook',)
+ search_fields = ('description', 'name', 'jobtemplate__playbook')
class UnifiedJobList(ListAPIView):
model = models.UnifiedJob
serializer_class = serializers.UnifiedJobListSerializer
- search_fields = ('description', 'name', 'job__playbook',)
+ search_fields = ('description', 'name', 'job__playbook')
def redact_ansi(line):
@@ -4152,7 +4103,6 @@ def redact_ansi(line):
class StdoutFilter(object):
-
def __init__(self, fileobj):
self._functions = []
self.fileobj = fileobj
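StdoutFilter wraps a file object and runs registered transforms (such as redact_ansi above) over whatever is read through it. A minimal sketch of that pattern, assuming whole-buffer reads; the real class is more involved:

    import io

    class Filter:
        def __init__(self, fileobj):
            self._functions = []
            self.fileobj = fileobj

        def register(self, func):
            self._functions.append(func)

        def read(self):
            data = self.fileobj.read()
            for func in self._functions:
                data = func(data)  # apply each transform in registration order
            return data

    f = Filter(io.StringIO('ok \x1b[31mred\x1b[0m'))
    f.register(lambda s: s.replace('\x1b[31m', '').replace('\x1b[0m', ''))
    assert f.read() == 'ok red'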
@@ -4188,10 +4138,15 @@ class UnifiedJobStdout(RetrieveAPIView):
authentication_classes = api_settings.DEFAULT_AUTHENTICATION_CLASSES
serializer_class = serializers.UnifiedJobStdoutSerializer
- renderer_classes = [renderers.BrowsableAPIRenderer, StaticHTMLRenderer,
- renderers.PlainTextRenderer, renderers.AnsiTextRenderer,
- JSONRenderer, renderers.DownloadTextRenderer,
- renderers.AnsiDownloadRenderer]
+ renderer_classes = [
+ renderers.BrowsableAPIRenderer,
+ StaticHTMLRenderer,
+ renderers.PlainTextRenderer,
+ renderers.AnsiTextRenderer,
+ JSONRenderer,
+ renderers.DownloadTextRenderer,
+ renderers.AnsiDownloadRenderer,
+ ]
filter_backends = ()
def retrieve(self, request, *args, **kwargs):
@@ -4213,12 +4168,7 @@ class UnifiedJobStdout(RetrieveAPIView):
body = ansiconv.to_html(html.escape(content))
- context = {
- 'title': get_view_name(self.__class__),
- 'body': mark_safe(body),
- 'dark': dark_bg,
- 'content_only': content_only,
- }
+ context = {'title': get_view_name(self.__class__), 'body': mark_safe(body), 'dark': dark_bg, 'content_only': content_only}
data = render_to_string('api/stdout.html', context).strip()
if target_format == 'api':
@@ -4235,9 +4185,7 @@ class UnifiedJobStdout(RetrieveAPIView):
return Response(unified_job.result_stdout_raw)
elif target_format in {'txt_download', 'ansi_download'}:
filename = '{type}_{pk}{suffix}.txt'.format(
- type=camelcase_to_underscore(unified_job.__class__.__name__),
- pk=unified_job.id,
- suffix='.ansi' if target_format == 'ansi_download' else ''
+ type=camelcase_to_underscore(unified_job.__class__.__name__), pk=unified_job.id, suffix='.ansi' if target_format == 'ansi_download' else ''
)
content_fd = unified_job.result_stdout_raw_handle(enforce_max_bytes=False)
redactor = StdoutFilter(content_fd)
@@ -4252,10 +4200,8 @@ class UnifiedJobStdout(RetrieveAPIView):
return super(UnifiedJobStdout, self).retrieve(request, *args, **kwargs)
except models.StdoutMaxBytesExceeded as e:
response_message = _(
- "Standard Output too large to display ({text_size} bytes), "
- "only download supported for sizes over {supported_size} bytes.").format(
- text_size=e.total, supported_size=e.supported
- )
+ "Standard Output too large to display ({text_size} bytes), " "only download supported for sizes over {supported_size} bytes."
+ ).format(text_size=e.total, supported_size=e.supported)
if request.accepted_renderer.format == 'json':
return Response({'range': {'start': 0, 'end': 1, 'absolute_end': 1}, 'content': response_message})
else:
@@ -4300,8 +4246,7 @@ class NotificationTemplateDetail(RetrieveUpdateDestroyAPIView):
hours_old = now() - dateutil.relativedelta.relativedelta(hours=8)
if obj.notifications.filter(status='pending', created__gt=hours_old).exists():
- return Response({"error": _("Delete not allowed while there are pending notifications")},
- status=status.HTTP_405_METHOD_NOT_ALLOWED)
+ return Response({"error": _("Delete not allowed while there are pending notifications")}, status=status.HTTP_405_METHOD_NOT_ALLOWED)
return super(NotificationTemplateDetail, self).delete(request, *args, **kwargs)
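A few hunks back, the StdoutMaxBytesExceeded message ends up as two adjacent string literals on one line. That is harmless because Python concatenates adjacent literals at compile time, though the now-redundant quote pair could be merged by hand:

    msg = "Standard Output too large to display, " "only download supported."
    assert msg == "Standard Output too large to display, only download supported."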
@@ -4332,9 +4277,7 @@ class NotificationTemplateTest(GenericAPIView):
data['notification'] = notification.id
data.update(serializers.NotificationSerializer(notification, context=self.get_serializer_context()).to_representation(notification))
headers = {'Location': notification.get_absolute_url(request=request)}
- return Response(data,
- headers=headers,
- status=status.HTTP_202_ACCEPTED)
+ return Response(data, headers=headers, status=status.HTTP_202_ACCEPTED)
class NotificationTemplateNotificationList(SubListAPIView):
@@ -4344,7 +4287,7 @@ class NotificationTemplateNotificationList(SubListAPIView):
parent_model = models.NotificationTemplate
relationship = 'notifications'
parent_key = 'notification_template'
- search_fields = ('subject', 'notification_type', 'body',)
+ search_fields = ('subject', 'notification_type', 'body')
class NotificationTemplateCopy(CopyAPIView):
@@ -4357,7 +4300,7 @@ class NotificationList(ListAPIView):
model = models.Notification
serializer_class = serializers.NotificationSerializer
- search_fields = ('subject', 'notification_type', 'body',)
+ search_fields = ('subject', 'notification_type', 'body')
class NotificationDetail(RetrieveAPIView):
@@ -4396,7 +4339,7 @@ class RoleList(ListAPIView):
model = models.Role
serializer_class = serializers.RoleSerializer
permission_classes = (IsAuthenticated,)
- search_fields = ('role_field', 'content_type__model',)
+ search_fields = ('role_field', 'content_type__model')
class RoleDetail(RetrieveAPIView):
@@ -4480,9 +4423,7 @@ class RoleTeamsList(SubListAttachDetachAPIView):
data = dict(msg=_("You cannot grant system-level permissions to a team."))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
- if not request.user.can_access(self.parent_model, action, role, team,
- self.relationship, request.data,
- skip_sub_obj_read_check=False):
+ if not request.user.can_access(self.parent_model, action, role, team, self.relationship, request.data, skip_sub_obj_read_check=False):
raise PermissionDenied()
if request.data.get('disassociate', None):
team.member_role.children.remove(role)
@@ -4498,7 +4439,7 @@ class RoleParentsList(SubListAPIView):
parent_model = models.Role
relationship = 'parents'
permission_classes = (IsAuthenticated,)
- search_fields = ('role_field', 'content_type__model',)
+ search_fields = ('role_field', 'content_type__model')
def get_queryset(self):
role = models.Role.objects.get(pk=self.kwargs['pk'])
@@ -4512,7 +4453,7 @@ class RoleChildrenList(SubListAPIView):
parent_model = models.Role
relationship = 'children'
permission_classes = (IsAuthenticated,)
- search_fields = ('role_field', 'content_type__model',)
+ search_fields = ('role_field', 'content_type__model')
def get_queryset(self):
role = models.Role.objects.get(pk=self.kwargs['pk'])
diff --git a/awx/api/views/inventory.py b/awx/api/views/inventory.py
index 607a71c6d5..9a484285cc 100644
--- a/awx/api/views/inventory.py
+++ b/awx/api/views/inventory.py
@@ -28,14 +28,7 @@ from awx.main.models import (
InventorySource,
CustomInventoryScript,
)
-from awx.api.generics import (
- ListCreateAPIView,
- RetrieveUpdateDestroyAPIView,
- SubListAPIView,
- SubListAttachDetachAPIView,
- ResourceAccessList,
- CopyAPIView,
-)
+from awx.api.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView, SubListAPIView, SubListAttachDetachAPIView, ResourceAccessList, CopyAPIView
from awx.api.serializers import (
InventorySerializer,
@@ -46,10 +39,7 @@ from awx.api.serializers import (
CustomInventoryScriptSerializer,
JobTemplateSerializer,
)
-from awx.api.views.mixin import (
- RelatedJobsPreventDeleteMixin,
- ControlledByScmMixin,
-)
+from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, ControlledByScmMixin
logger = logging.getLogger('awx.api.views.organization')
@@ -101,7 +91,7 @@ class InventoryScriptObjectRolesList(SubListAPIView):
model = Role
serializer_class = RoleSerializer
parent_model = CustomInventoryScript
- search_fields = ('role_field', 'content_type__model',)
+ search_fields = ('role_field', 'content_type__model')
def get_queryset(self):
po = self.get_parent_object()
@@ -134,8 +124,7 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, ControlledByScmMixin, Retri
# Do not allow changes to an Inventory kind.
if kind is not None and obj.kind != kind:
- return Response(dict(error=_('You cannot turn a regular inventory into a "smart" inventory.')),
- status=status.HTTP_405_METHOD_NOT_ALLOWED)
+ return Response(dict(error=_('You cannot turn a regular inventory into a "smart" inventory.')), status=status.HTTP_405_METHOD_NOT_ALLOWED)
return super(InventoryDetail, self).update(request, *args, **kwargs)
def destroy(self, request, *args, **kwargs):
@@ -175,7 +164,7 @@ class InventoryInstanceGroupsList(SubListAttachDetachAPIView):
class InventoryAccessList(ResourceAccessList):
- model = User # needs to be User for AccessLists's
+ model = User # needs to be User for AccessLists's
parent_model = Inventory
@@ -184,7 +173,7 @@ class InventoryObjectRolesList(SubListAPIView):
model = Role
serializer_class = RoleSerializer
parent_model = Inventory
- search_fields = ('role_field', 'content_type__model',)
+ search_fields = ('role_field', 'content_type__model')
def get_queryset(self):
po = self.get_parent_object()
diff --git a/awx/api/views/metrics.py b/awx/api/views/metrics.py
index 39744e1bcd..dd40f11900 100644
--- a/awx/api/views/metrics.py
+++ b/awx/api/views/metrics.py
@@ -17,9 +17,7 @@ from rest_framework.exceptions import PermissionDenied
from awx.main.analytics.metrics import metrics
from awx.api import renderers
-from awx.api.generics import (
- APIView,
-)
+from awx.api.generics import APIView
logger = logging.getLogger('awx.analytics')
@@ -30,13 +28,10 @@ class MetricsView(APIView):
name = _('Metrics')
swagger_topic = 'Metrics'
- renderer_classes = [renderers.PlainTextRenderer,
- renderers.PrometheusJSONRenderer,
- renderers.BrowsableAPIRenderer,]
+ renderer_classes = [renderers.PlainTextRenderer, renderers.PrometheusJSONRenderer, renderers.BrowsableAPIRenderer]
def get(self, request):
''' Show Metrics Details '''
- if (request.user.is_superuser or request.user.is_system_auditor):
+ if request.user.is_superuser or request.user.is_system_auditor:
return Response(metrics().decode('UTF-8'))
raise PermissionDenied()
-
diff --git a/awx/api/views/mixin.py b/awx/api/views/mixin.py
index 9b57278e2e..ea2e8b38d4 100644
--- a/awx/api/views/mixin.py
+++ b/awx/api/views/mixin.py
@@ -16,14 +16,8 @@ from rest_framework.response import Response
from rest_framework import status
from awx.main.constants import ACTIVE_STATES
-from awx.main.utils import (
- get_object_or_400,
- parse_yaml_or_json,
-)
-from awx.main.models.ha import (
- Instance,
- InstanceGroup,
-)
+from awx.main.utils import get_object_or_400, parse_yaml_or_json
+from awx.main.models.ha import Instance, InstanceGroup
from awx.main.models.organization import Team
from awx.main.models.projects import Project
from awx.main.models.inventory import Inventory
@@ -34,9 +28,10 @@ logger = logging.getLogger('awx.api.views.mixin')
class UnifiedJobDeletionMixin(object):
- '''
+ """
Special handling when deleting a running unified job object.
- '''
+ """
+
def destroy(self, request, *args, **kwargs):
obj = self.get_object()
if not request.user.can_access(self.model, 'delete', obj):
@@ -53,22 +48,21 @@ class UnifiedJobDeletionMixin(object):
# Prohibit deletion if job events are still coming in
if obj.finished and now() < obj.finished + dateutil.relativedelta.relativedelta(minutes=1):
# less than 1 minute has passed since job finished and events are not in
- return Response({"error": _("Job has not finished processing events.")},
- status=status.HTTP_400_BAD_REQUEST)
+ return Response({"error": _("Job has not finished processing events.")}, status=status.HTTP_400_BAD_REQUEST)
else:
# if it has been > 1 minute, events are probably lost
- logger.warning('Allowing deletion of {} through the API without all events '
- 'processed.'.format(obj.log_format))
+ logger.warning('Allowing deletion of {} through the API without all events ' 'processed.'.format(obj.log_format))
obj.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class InstanceGroupMembershipMixin(object):
- '''
+ """
This mixin overloads attach/detach so that it calls InstanceGroup.save(),
triggering a background recalculation of policy-based instance group
membership.
- '''
+ """
+
def attach(self, request, *args, **kwargs):
response = super(InstanceGroupMembershipMixin, self).attach(request, *args, **kwargs)
sub_id, res = self.attach_validate(request)
@@ -84,9 +78,7 @@ class InstanceGroupMembershipMixin(object):
ig_obj = get_object_or_400(ig_qs, pk=sub_id)
else:
# similar to get_parent_object, but selected for update
- parent_filter = {
- self.lookup_field: self.kwargs.get(self.lookup_field, None),
- }
+ parent_filter = {self.lookup_field: self.kwargs.get(self.lookup_field, None)}
ig_obj = get_object_or_404(ig_qs, **parent_filter)
if inst_name not in ig_obj.policy_instance_list:
ig_obj.policy_instance_list.append(inst_name)
@@ -126,9 +118,7 @@ class InstanceGroupMembershipMixin(object):
ig_obj = get_object_or_400(ig_qs, pk=sub_id)
else:
# similar to get_parent_object, but selected for update
- parent_filter = {
- self.lookup_field: self.kwargs.get(self.lookup_field, None),
- }
+ parent_filter = {self.lookup_field: self.kwargs.get(self.lookup_field, None)}
ig_obj = get_object_or_404(ig_qs, **parent_filter)
if inst_name in ig_obj.policy_instance_list:
ig_obj.policy_instance_list.pop(ig_obj.policy_instance_list.index(inst_name))
@@ -146,16 +136,13 @@ class RelatedJobsPreventDeleteMixin(object):
if len(active_jobs) > 0:
raise ActiveJobConflict(active_jobs)
time_cutoff = now() - dateutil.relativedelta.relativedelta(minutes=1)
- recent_jobs = obj._get_related_jobs().filter(finished__gte = time_cutoff)
+ recent_jobs = obj._get_related_jobs().filter(finished__gte=time_cutoff)
for unified_job in recent_jobs.get_real_instances():
if not unified_job.event_processing_finished:
- raise PermissionDenied(_(
- 'Related job {} is still processing events.'
- ).format(unified_job.log_format))
+ raise PermissionDenied(_('Related job {} is still processing events.').format(unified_job.log_format))
class OrganizationCountsMixin(object):
-
def get_serializer_context(self, *args, **kwargs):
full_context = super(OrganizationCountsMixin, self).get_serializer_context(*args, **kwargs)
@@ -177,26 +164,23 @@ class OrganizationCountsMixin(object):
# Produce counts of Foreign Key relationships
db_results['inventories'] = inv_qs.values('organization').annotate(Count('organization')).order_by('organization')
- db_results['teams'] = Team.accessible_objects(
- self.request.user, 'read_role').values('organization').annotate(
- Count('organization')).order_by('organization')
+ db_results['teams'] = (
+ Team.accessible_objects(self.request.user, 'read_role').values('organization').annotate(Count('organization')).order_by('organization')
+ )
db_results['job_templates'] = jt_qs.values('organization').annotate(Count('organization')).order_by('organization')
db_results['projects'] = project_qs.values('organization').annotate(Count('organization')).order_by('organization')
# Other members and admins of organization are always viewable
- db_results['users'] = org_qs.annotate(
- users=Count('member_role__members', distinct=True),
- admins=Count('admin_role__members', distinct=True)
- ).values('id', 'users', 'admins')
+ db_results['users'] = org_qs.annotate(users=Count('member_role__members', distinct=True), admins=Count('admin_role__members', distinct=True)).values(
+ 'id', 'users', 'admins'
+ )
count_context = {}
for org in org_id_list:
org_id = org['id']
- count_context[org_id] = {
- 'inventories': 0, 'teams': 0, 'users': 0, 'job_templates': 0,
- 'admins': 0, 'projects': 0}
+ count_context[org_id] = {'inventories': 0, 'teams': 0, 'users': 0, 'job_templates': 0, 'admins': 0, 'projects': 0}
for res, count_qs in db_results.items():
if res == 'users':
@@ -218,21 +202,20 @@ class OrganizationCountsMixin(object):
class ControlledByScmMixin(object):
- '''
+ """
Special method to reset SCM inventory commit hash
if anything that it manages changes.
- '''
+ """
def _reset_inv_src_rev(self, obj):
if self.request.method in SAFE_METHODS or not obj:
return
- project_following_sources = obj.inventory_sources.filter(
- update_on_project_update=True, source='scm')
+ project_following_sources = obj.inventory_sources.filter(update_on_project_update=True, source='scm')
if project_following_sources:
# Allow inventory changes unrelated to variables
if self.model == Inventory and (
- not self.request or not self.request.data or
- parse_yaml_or_json(self.request.data.get('variables', '')) == parse_yaml_or_json(obj.variables)):
+ not self.request or not self.request.data or parse_yaml_or_json(self.request.data.get('variables', '')) == parse_yaml_or_json(obj.variables)
+ ):
return
project_following_sources.update(scm_last_revision='')
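Beyond line wrapping, this file shows two other normalizations black applies across the commit: docstrings become triple double quotes (PEP 257), and a blank line is inserted between a class docstring and the first statement of the body. In miniature:

    class ControlledByScm:
        """
        Reset SCM inventory state when managed objects change.
        """

        def reset(self):
            return None

    assert ControlledByScm().reset() is None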
diff --git a/awx/api/views/organization.py b/awx/api/views/organization.py
index b33259a8ad..4a12a7d599 100644
--- a/awx/api/views/organization.py
+++ b/awx/api/views/organization.py
@@ -24,7 +24,7 @@ from awx.main.models import (
User,
Team,
InstanceGroup,
- Credential
+ Credential,
)
from awx.api.generics import (
ListCreateAPIView,
@@ -47,13 +47,12 @@ from awx.api.serializers import (
NotificationTemplateSerializer,
InstanceGroupSerializer,
ExecutionEnvironmentSerializer,
- ProjectSerializer, JobTemplateSerializer, WorkflowJobTemplateSerializer,
- CredentialSerializer
-)
-from awx.api.views.mixin import (
- RelatedJobsPreventDeleteMixin,
- OrganizationCountsMixin,
+ ProjectSerializer,
+ JobTemplateSerializer,
+ WorkflowJobTemplateSerializer,
+ CredentialSerializer,
)
+from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
logger = logging.getLogger('awx.api.views.organization')
@@ -84,23 +83,20 @@ class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPI
org_counts = {}
access_kwargs = {'accessor': self.request.user, 'role_field': 'read_role'}
- direct_counts = Organization.objects.filter(id=org_id).annotate(
- users=Count('member_role__members', distinct=True),
- admins=Count('admin_role__members', distinct=True)
- ).values('users', 'admins')
+ direct_counts = (
+ Organization.objects.filter(id=org_id)
+ .annotate(users=Count('member_role__members', distinct=True), admins=Count('admin_role__members', distinct=True))
+ .values('users', 'admins')
+ )
if not direct_counts:
return full_context
org_counts = direct_counts[0]
- org_counts['inventories'] = Inventory.accessible_objects(**access_kwargs).filter(
- organization__id=org_id).count()
- org_counts['teams'] = Team.accessible_objects(**access_kwargs).filter(
- organization__id=org_id).count()
- org_counts['projects'] = Project.accessible_objects(**access_kwargs).filter(
- organization__id=org_id).count()
- org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).filter(
- organization__id=org_id).count()
+ org_counts['inventories'] = Inventory.accessible_objects(**access_kwargs).filter(organization__id=org_id).count()
+ org_counts['teams'] = Team.accessible_objects(**access_kwargs).filter(organization__id=org_id).count()
+ org_counts['projects'] = Project.accessible_objects(**access_kwargs).filter(organization__id=org_id).count()
+ org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).filter(organization__id=org_id).count()
org_counts['hosts'] = Host.objects.org_active_count(org_id)
full_context['related_field_counts'] = {}
@@ -240,14 +236,12 @@ class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
def is_valid_relation(self, parent, sub, created=False):
if sub.kind != 'galaxy_api_token':
- return {'msg': _(
- f"Credential must be a Galaxy credential, not {sub.credential_type.name}."
- )}
+ return {'msg': _(f"Credential must be a Galaxy credential, not {sub.credential_type.name}.")}
class OrganizationAccessList(ResourceAccessList):
- model = User # needs to be User for AccessLists's
+ model = User # needs to be User for AccessLists's
parent_model = Organization
@@ -256,7 +250,7 @@ class OrganizationObjectRolesList(SubListAPIView):
model = Role
serializer_class = RoleSerializer
parent_model = Organization
- search_fields = ('role_field', 'content_type__model',)
+ search_fields = ('role_field', 'content_type__model')
def get_queryset(self):
po = self.get_parent_object()
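The annotate(Count('organization')) and accessible_objects(...).filter(...).count() chains above group rows by organization and count them in SQL. As a plain-Python analogy of the same aggregation (collections.Counter standing in for the ORM, not equivalent code):

    from collections import Counter

    rows = [{'organization': 1}, {'organization': 1}, {'organization': 2}]
    counts = Counter(row['organization'] for row in rows)
    assert counts == {1: 2, 2: 1}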
diff --git a/awx/api/views/root.py b/awx/api/views/root.py
index d6fc20d105..012d0c7c96 100644
--- a/awx/api/views/root.py
+++ b/awx/api/views/root.py
@@ -24,22 +24,11 @@ from awx.api.generics import APIView
from awx.conf.registry import settings_registry
from awx.main.analytics import all_collectors
from awx.main.ha import is_ha_environment
-from awx.main.utils import (
- get_awx_version,
- get_ansible_version,
- get_custom_venv_choices,
- to_python_boolean,
-)
+from awx.main.utils import get_awx_version, get_ansible_version, get_custom_venv_choices, to_python_boolean
from awx.main.utils.licensing import validate_entitlement_manifest
from awx.api.versioning import reverse, drf_reverse
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
-from awx.main.models import (
- Project,
- Organization,
- Instance,
- InstanceGroup,
- JobTemplate,
-)
+from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
from awx.main.utils import set_environ
logger = logging.getLogger('awx.api.views.root')
@@ -60,7 +49,7 @@ class ApiRootView(APIView):
data = OrderedDict()
data['description'] = _('AWX REST API')
data['current_version'] = v2
- data['available_versions'] = dict(v2 = v2)
+ data['available_versions'] = dict(v2=v2)
data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
data['custom_logo'] = settings.CUSTOM_LOGO
data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
@@ -146,6 +135,7 @@ class ApiV2PingView(APIView):
"""A simple view that reports very basic information about this
instance, which is acceptable to be public information.
"""
+
permission_classes = (AllowAny,)
authentication_classes = ()
name = _('Ping')
@@ -157,23 +147,19 @@ class ApiV2PingView(APIView):
Everything returned here should be considered public / insecure, as
this requires no auth and is intended for use by the installer process.
"""
- response = {
- 'ha': is_ha_environment(),
- 'version': get_awx_version(),
- 'active_node': settings.CLUSTER_HOST_ID,
- 'install_uuid': settings.INSTALL_UUID,
- }
+ response = {'ha': is_ha_environment(), 'version': get_awx_version(), 'active_node': settings.CLUSTER_HOST_ID, 'install_uuid': settings.INSTALL_UUID}
response['instances'] = []
for instance in Instance.objects.all():
- response['instances'].append(dict(node=instance.hostname, uuid=instance.uuid, heartbeat=instance.modified,
- capacity=instance.capacity, version=instance.version))
+ response['instances'].append(
+ dict(node=instance.hostname, uuid=instance.uuid, heartbeat=instance.modified, capacity=instance.capacity, version=instance.version)
+ )
sorted(response['instances'], key=operator.itemgetter('node'))
response['instance_groups'] = []
for instance_group in InstanceGroup.objects.prefetch_related('instances'):
- response['instance_groups'].append(dict(name=instance_group.name,
- capacity=instance_group.capacity,
- instances=[x.hostname for x in instance_group.instances.all()]))
+ response['instance_groups'].append(
+ dict(name=instance_group.name, capacity=instance_group.capacity, instances=[x.hostname for x in instance_group.instances.all()])
+ )
return Response(response)
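One pre-existing wart is visible in the untouched context here: sorted(response['instances'], ...) returns a new list whose result is discarded, so the instances are never actually ordered. Black deliberately changes layout only, never semantics, so the line survives the reformat; an actual fix would need list.sort() or a reassignment:

    import operator

    instances = [{'node': 'b'}, {'node': 'a'}]
    sorted(instances, key=operator.itemgetter('node'))  # new list, result discarded
    assert instances[0]['node'] == 'b'                  # original order intact
    instances.sort(key=operator.itemgetter('node'))     # in-place sort does the job
    assert instances[0]['node'] == 'a'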
@@ -190,6 +176,7 @@ class ApiV2SubscriptionView(APIView):
def post(self, request):
from awx.main.utils.common import get_licenser
+
data = request.data.copy()
if data.get('subscriptions_password') == '$encrypted$':
data['subscriptions_password'] = settings.SUBSCRIPTIONS_PASSWORD
@@ -203,10 +190,7 @@ class ApiV2SubscriptionView(APIView):
settings.SUBSCRIPTIONS_PASSWORD = data['subscriptions_password']
except Exception as exc:
msg = _("Invalid Subscription")
- if (
- isinstance(exc, requests.exceptions.HTTPError) and
- getattr(getattr(exc, 'response', None), 'status_code', None) == 401
- ):
+ if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
msg = _("The provided credentials are invalid (HTTP 401).")
elif isinstance(exc, requests.exceptions.ProxyError):
msg = _("Unable to connect to proxy server.")
@@ -215,8 +199,7 @@ class ApiV2SubscriptionView(APIView):
elif isinstance(exc, (ValueError, OSError)) and exc.args:
msg = exc.args[0]
else:
- logger.exception(smart_text(u"Invalid subscription submitted."),
- extra=dict(actor=request.user.username))
+ logger.exception(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
return Response(validated)
@@ -242,16 +225,14 @@ class ApiV2AttachView(APIView):
pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
if pool_id and user and pw:
from awx.main.utils.common import get_licenser
+
data = request.data.copy()
try:
with set_environ(**settings.AWX_TASK_ENV):
validated = get_licenser().validate_rh(user, pw)
except Exception as exc:
msg = _("Invalid Subscription")
- if (
- isinstance(exc, requests.exceptions.HTTPError) and
- getattr(getattr(exc, 'response', None), 'status_code', None) == 401
- ):
+ if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
msg = _("The provided credentials are invalid (HTTP 401).")
elif isinstance(exc, requests.exceptions.ProxyError):
msg = _("Unable to connect to proxy server.")
@@ -260,8 +241,7 @@ class ApiV2AttachView(APIView):
elif isinstance(exc, (ValueError, OSError)) and exc.args:
msg = exc.args[0]
else:
- logger.exception(smart_text(u"Invalid subscription submitted."),
- extra=dict(actor=request.user.username))
+ logger.exception(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
for sub in validated:
if sub['pool_id'] == pool_id:
@@ -287,6 +267,7 @@ class ApiV2ConfigView(APIView):
'''Return various sitewide configuration settings'''
from awx.main.utils.common import get_licenser
+
license_data = get_licenser().validate()
if not license_data.get('valid_key', False):
@@ -314,22 +295,23 @@ class ApiV2ConfigView(APIView):
user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
data['user_ldap_fields'] = user_ldap_fields
- if request.user.is_superuser \
- or request.user.is_system_auditor \
- or Organization.accessible_objects(request.user, 'admin_role').exists() \
- or Organization.accessible_objects(request.user, 'auditor_role').exists() \
- or Organization.accessible_objects(request.user, 'project_admin_role').exists():
- data.update(dict(
- project_base_dir = settings.PROJECTS_ROOT,
- project_local_paths = Project.get_local_path_choices(),
- custom_virtualenvs = get_custom_venv_choices()
- ))
+ if (
+ request.user.is_superuser
+ or request.user.is_system_auditor
+ or Organization.accessible_objects(request.user, 'admin_role').exists()
+ or Organization.accessible_objects(request.user, 'auditor_role').exists()
+ or Organization.accessible_objects(request.user, 'project_admin_role').exists()
+ ):
+ data.update(
+ dict(
+ project_base_dir=settings.PROJECTS_ROOT, project_local_paths=Project.get_local_path_choices(), custom_virtualenvs=get_custom_venv_choices()
+ )
+ )
elif JobTemplate.accessible_objects(request.user, 'admin_role').exists():
data['custom_virtualenvs'] = get_custom_venv_choices()
return Response(data)
-
def post(self, request):
if not isinstance(request.data, dict):
return Response({"error": _("Invalid subscription data")}, status=status.HTTP_400_BAD_REQUEST)
@@ -346,11 +328,11 @@ class ApiV2ConfigView(APIView):
try:
data_actual = json.dumps(request.data)
except Exception:
- logger.info(smart_text(u"Invalid JSON submitted for license."),
- extra=dict(actor=request.user.username))
+ logger.info(smart_text(u"Invalid JSON submitted for license."), extra=dict(actor=request.user.username))
return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST)
from awx.main.utils.common import get_licenser
+
license_data = json.loads(data_actual)
if 'license_key' in license_data:
return Response({"error": _('Legacy license submitted. A subscription manifest is now required.')}, status=status.HTTP_400_BAD_REQUEST)
@@ -358,10 +340,7 @@ class ApiV2ConfigView(APIView):
try:
json_actual = json.loads(base64.b64decode(license_data['manifest']))
if 'license_key' in json_actual:
- return Response(
- {"error": _('Legacy license submitted. A subscription manifest is now required.')},
- status=status.HTTP_400_BAD_REQUEST
- )
+ return Response({"error": _('Legacy license submitted. A subscription manifest is now required.')}, status=status.HTTP_400_BAD_REQUEST)
except Exception:
pass
try:
@@ -375,8 +354,7 @@ class ApiV2ConfigView(APIView):
try:
license_data_validated = get_licenser().license_from_manifest(license_data)
except Exception:
- logger.warning(smart_text(u"Invalid subscription submitted."),
- extra=dict(actor=request.user.username))
+ logger.warning(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
else:
license_data_validated = get_licenser().validate()
@@ -387,8 +365,7 @@ class ApiV2ConfigView(APIView):
settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
return Response(license_data_validated)
- logger.warning(smart_text(u"Invalid subscription submitted."),
- extra=dict(actor=request.user.username))
+ logger.warning(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
return Response({"error": _("Invalid subscription")}, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request):
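The HTTPError branches above use a nested getattr so that a missing response attribute or status_code degrades to None instead of raising while the error is being classified:

    class FakeError(Exception):
        pass

    exc = FakeError()
    code = getattr(getattr(exc, 'response', None), 'status_code', None)
    assert code is None  # no AttributeError even though neither attribute exists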
diff --git a/awx/api/views/webhooks.py b/awx/api/views/webhooks.py
index 6be88a316b..921839a3f5 100644
--- a/awx/api/views/webhooks.py
+++ b/awx/api/views/webhooks.py
@@ -26,10 +26,7 @@ class WebhookKeyView(GenericAPIView):
permission_classes = (WebhookKeyPermission,)
def get_queryset(self):
- qs_models = {
- 'job_templates': JobTemplate,
- 'workflow_job_templates': WorkflowJobTemplate,
- }
+ qs_models = {'job_templates': JobTemplate, 'workflow_job_templates': WorkflowJobTemplate}
self.model = qs_models.get(self.kwargs['model_kwarg'])
return super().get_queryset()
@@ -57,10 +54,7 @@ class WebhookReceiverBase(APIView):
ref_keys = {}
def get_queryset(self):
- qs_models = {
- 'job_templates': JobTemplate,
- 'workflow_job_templates': WorkflowJobTemplate,
- }
+ qs_models = {'job_templates': JobTemplate, 'workflow_job_templates': WorkflowJobTemplate}
model = qs_models.get(self.kwargs['model_kwarg'])
if model is None:
raise PermissionDenied
@@ -120,10 +114,7 @@ class WebhookReceiverBase(APIView):
# Ensure that the full contents of the request are captured for multiple uses.
request.body
- logger.debug(
- "headers: {}\n"
- "data: {}\n".format(request.headers, request.data)
- )
+ logger.debug("headers: {}\n" "data: {}\n".format(request.headers, request.data))
obj = self.get_object()
self.check_signature(obj)
@@ -132,16 +123,11 @@ class WebhookReceiverBase(APIView):
event_ref = self.get_event_ref()
status_api = self.get_event_status_api()
- kwargs = {
- 'unified_job_template_id': obj.id,
- 'webhook_service': obj.webhook_service,
- 'webhook_guid': event_guid,
- }
+ kwargs = {'unified_job_template_id': obj.id, 'webhook_service': obj.webhook_service, 'webhook_guid': event_guid}
if WorkflowJob.objects.filter(**kwargs).exists() or Job.objects.filter(**kwargs).exists():
# Short circuit if this webhook has already been received and acted upon.
logger.debug("Webhook previously received, returning without action.")
- return Response({'message': _("Webhook previously received, aborting.")},
- status=status.HTTP_202_ACCEPTED)
+ return Response({'message': _("Webhook previously received, aborting.")}, status=status.HTTP_202_ACCEPTED)
kwargs = {
'_eager_fields': {
@@ -156,7 +142,7 @@ class WebhookReceiverBase(APIView):
'tower_webhook_event_ref': event_ref,
'tower_webhook_status_api': status_api,
'tower_webhook_payload': request.data,
- }
+ },
}
new_job = obj.create_unified_job(**kwargs)
@@ -205,11 +191,7 @@ class GithubWebhookReceiver(WebhookReceiverBase):
class GitlabWebhookReceiver(WebhookReceiverBase):
service = 'gitlab'
- ref_keys = {
- 'Push Hook': 'checkout_sha',
- 'Tag Push Hook': 'checkout_sha',
- 'Merge Request Hook': 'object_attributes.last_commit.id',
- }
+ ref_keys = {'Push Hook': 'checkout_sha', 'Tag Push Hook': 'checkout_sha', 'Merge Request Hook': 'object_attributes.last_commit.id'}
def get_event_type(self):
return self.request.META.get('HTTP_X_GITLAB_EVENT')
@@ -229,8 +211,7 @@ class GitlabWebhookReceiver(WebhookReceiverBase):
return
parsed = urllib.parse.urlparse(repo_url)
- return "{}://{}/api/v4/projects/{}/statuses/{}".format(
- parsed.scheme, parsed.netloc, project['id'], self.get_event_ref())
+ return "{}://{}/api/v4/projects/{}/statuses/{}".format(parsed.scheme, parsed.netloc, project['id'], self.get_event_ref())
def get_signature(self):
return force_bytes(self.request.META.get('HTTP_X_GITLAB_TOKEN') or '')
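The guid-based short circuit above makes webhook delivery idempotent: if a job with the same template, service, and delivery guid already exists, the receiver acknowledges with 202 and does nothing. The control flow in miniature (a set standing in for the WorkflowJob/Job existence query):

    seen = set()

    def receive(guid):
        if guid in seen:
            return 'Webhook previously received, aborting.'
        seen.add(guid)
        return 'launched'

    assert receive('abc123') == 'launched'
    assert receive('abc123') == 'Webhook previously received, aborting.'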
diff --git a/awx/asgi.py b/awx/asgi.py
index eb141aabdb..30ef2ffa3f 100644
--- a/awx/asgi.py
+++ b/awx/asgi.py
@@ -4,11 +4,12 @@ import os
import logging
import django
from awx import __version__ as tower_version
+
# Prepare the AWX environment.
from awx import prepare_env, MODE
from channels.routing import get_default_application # noqa
-prepare_env() # NOQA
+prepare_env() # NOQA
"""
diff --git a/awx/conf/access.py b/awx/conf/access.py
index 84f4ca348c..6d390a8f52 100644
--- a/awx/conf/access.py
+++ b/awx/conf/access.py
@@ -10,12 +10,12 @@ from awx.conf.models import Setting
class SettingAccess(BaseAccess):
- '''
+ """
- I can see settings when I am a super user or system auditor.
- I can edit settings when I am a super user.
- I can clear settings when I am a super user.
- I can always see/edit/clear my own user settings.
- '''
+ """
model = Setting
diff --git a/awx/conf/apps.py b/awx/conf/apps.py
index 4f9a36395c..a61e489858 100644
--- a/awx/conf/apps.py
+++ b/awx/conf/apps.py
@@ -1,5 +1,6 @@
# Django
from django.apps import AppConfig
+
# from django.core import checks
from django.utils.translation import ugettext_lazy as _
@@ -12,4 +13,5 @@ class ConfConfig(AppConfig):
def ready(self):
self.module.autodiscover()
from .settings import SettingsWrapper
+
SettingsWrapper.initialize()
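Many hunks in this commit, like the two here, only add a blank line after a function-local import: black separates an import block from the code that follows it. The deferred import itself is deliberate, a standard way to dodge circular imports during Django app loading:

    def ready():
        # Resolved at call time, not module import time
        # (json is a stand-in import for this sketch).
        from json import dumps

        return dumps({'ready': True})

    assert ready() == '{"ready": true}'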
diff --git a/awx/conf/fields.py b/awx/conf/fields.py
index e28a44aa32..eb5c962641 100644
--- a/awx/conf/fields.py
+++ b/awx/conf/fields.py
@@ -10,10 +10,7 @@ from django.core.validators import URLValidator, _lazy_re_compile
from django.utils.translation import ugettext_lazy as _
# Django REST Framework
-from rest_framework.fields import ( # noqa
- BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField,
- IntegerField, ListField, NullBooleanField
-)
+from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField, NullBooleanField # noqa
from rest_framework.serializers import PrimaryKeyRelatedField # noqa
logger = logging.getLogger('awx.conf.fields')
@@ -27,7 +24,6 @@ logger = logging.getLogger('awx.conf.fields')
class CharField(CharField):
-
def to_representation(self, value):
# django_rest_frameworks' default CharField implementation casts `None`
# to a string `"None"`:
@@ -39,7 +35,6 @@ class CharField(CharField):
class IntegerField(IntegerField):
-
def get_value(self, dictionary):
ret = super(IntegerField, self).get_value(dictionary)
# Handle UI corner case
@@ -60,9 +55,7 @@ class StringListField(ListField):
class StringListBooleanField(ListField):
- default_error_messages = {
- 'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.'),
- }
+ default_error_messages = {'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.')}
child = CharField()
def to_representation(self, value):
@@ -101,10 +94,7 @@ class StringListBooleanField(ListField):
class StringListPathField(StringListField):
- default_error_messages = {
- 'type_error': _('Expected list of strings but got {input_type} instead.'),
- 'path_error': _('{path} is not a valid path choice.'),
- }
+ default_error_messages = {'type_error': _('Expected list of strings but got {input_type} instead.'), 'path_error': _('{path} is not a valid path choice.')}
def to_internal_value(self, paths):
if isinstance(paths, (list, tuple)):
@@ -123,12 +113,12 @@ class URLField(CharField):
# these lines set up a custom regex that allow numbers in the
# top-level domain
tld_re = (
- r'\.' # dot
- r'(?!-)' # can't start with a dash
- r'(?:[a-z' + URLValidator.ul + r'0-9' + '-]{2,63}' # domain label, this line was changed from the original URLValidator
- r'|xn--[a-z0-9]{1,59})' # or punycode label
- r'(?<!-)' # can't end with a dash
- r'\.?' # may have a trailing dot
+ r'\.' # dot
+ r'(?!-)' # can't start with a dash
+ r'(?:[a-z' + URLValidator.ul + r'0-9' + '-]{2,63}' # domain label, this line was changed from the original URLValidator
+ r'|xn--[a-z0-9]{1,59})' # or punycode label
+ r'(?<!-)' # can't end with a dash
+ r'\.?' # may have a trailing dot
)
host_re = '(' + URLValidator.hostname_re + URLValidator.domain_re + tld_re + '|localhost)'
@@ -139,7 +129,9 @@ class URLField(CharField):
r'(?:' + URLValidator.ipv4_re + '|' + URLValidator.ipv6_re + '|' + host_re + ')'
r'(?::\d{2,5})?' # port
r'(?:[/?#][^\s]*)?' # resource path
- r'\Z', re.IGNORECASE)
+ r'\Z',
+ re.IGNORECASE,
+ )
def __init__(self, **kwargs):
schemes = kwargs.pop('schemes', None)
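The tld_re hunk above only re-aligns comments; the pattern itself is a chain of adjacent raw-string fragments that concatenate into one regex. A simplified, self-contained version of that fragment (the real one also admits unicode letters and punycode labels):

    import re

    tld_re = (
        r'\.'               # dot
        r'(?!-)'            # can't start with a dash
        r'[a-z0-9-]{2,63}'  # domain label
        r'(?<!-)'           # can't end with a dash
        r'\.?'              # may have a trailing dot
    )
    assert re.search(tld_re + r'\Z', '.example', re.IGNORECASE)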
@@ -184,9 +176,7 @@ class URLField(CharField):
class KeyValueField(DictField):
child = CharField()
- default_error_messages = {
- 'invalid_child': _('"{input}" is not a valid string.')
- }
+ default_error_messages = {'invalid_child': _('"{input}" is not a valid string.')}
def to_internal_value(self, data):
ret = super(KeyValueField, self).to_internal_value(data)
@@ -199,9 +189,7 @@ class KeyValueField(DictField):
class ListTuplesField(ListField):
- default_error_messages = {
- 'type_error': _('Expected a list of tuples of max length 2 but got {input_type} instead.'),
- }
+ default_error_messages = {'type_error': _('Expected a list of tuples of max length 2 but got {input_type} instead.')}
def to_representation(self, value):
if isinstance(value, (list, tuple)):
diff --git a/awx/conf/license.py b/awx/conf/license.py
index 3929c37921..4ccdd2155e 100644
--- a/awx/conf/license.py
+++ b/awx/conf/license.py
@@ -6,6 +6,7 @@ __all__ = ['get_license']
def _get_validated_license_data():
from awx.main.utils import get_licenser
+
return get_licenser().validate()
diff --git a/awx/conf/migrations/0001_initial.py b/awx/conf/migrations/0001_initial.py
index 22330e330c..8bb9b6bcec 100644
--- a/awx/conf/migrations/0001_initial.py
+++ b/awx/conf/migrations/0001_initial.py
@@ -8,9 +8,7 @@ from django.conf import settings
class Migration(migrations.Migration):
- dependencies = [
- migrations.swappable_dependency(settings.AUTH_USER_MODEL),
- ]
+ dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]
operations = [
migrations.CreateModel(
@@ -21,11 +19,11 @@ class Migration(migrations.Migration):
('modified', models.DateTimeField(default=None, editable=False)),
('key', models.CharField(max_length=255)),
('value', jsonfield.fields.JSONField(null=True)),
- ('user', models.ForeignKey(related_name='settings', default=None, editable=False,
- to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True)),
+ (
+ 'user',
+ models.ForeignKey(related_name='settings', default=None, editable=False, to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True),
+ ),
],
- options={
- 'abstract': False,
- },
- ),
+ options={'abstract': False},
+ )
]
diff --git a/awx/conf/migrations/0002_v310_copy_tower_settings.py b/awx/conf/migrations/0002_v310_copy_tower_settings.py
index fc947c39b7..be036b38a6 100644
--- a/awx/conf/migrations/0002_v310_copy_tower_settings.py
+++ b/awx/conf/migrations/0002_v310_copy_tower_settings.py
@@ -15,11 +15,7 @@ def copy_tower_settings(apps, schema_editor):
if tower_setting.key == 'LICENSE':
value = json.loads(value)
setting, created = Setting.objects.get_or_create(
- key=tower_setting.key,
- user=tower_setting.user,
- created=tower_setting.created,
- modified=tower_setting.modified,
- defaults=dict(value=value),
+ key=tower_setting.key, user=tower_setting.user, created=tower_setting.created, modified=tower_setting.modified, defaults=dict(value=value)
)
if not created and setting.value != value:
setting.value = value
@@ -36,18 +32,9 @@ def revert_tower_settings(apps, schema_editor):
# LICENSE is stored as a JSON object; convert it back to a string.
if setting.key == 'LICENSE':
value = json.dumps(value)
- defaults = dict(
- value=value,
- value_type='string',
- description='',
- category='',
- )
+ defaults = dict(value=value, value_type='string', description='', category='')
try:
- tower_setting, created = TowerSettings.objects.get_or_create(
- key=setting.key,
- user=setting.user,
- defaults=defaults,
- )
+ tower_setting, created = TowerSettings.objects.get_or_create(key=setting.key, user=setting.user, defaults=defaults)
if not created:
update_fields = []
for k, v in defaults.items():
@@ -62,15 +49,8 @@ def revert_tower_settings(apps, schema_editor):
class Migration(migrations.Migration):
- dependencies = [
- ('conf', '0001_initial'),
- ('main', '0004_squashed_v310_release'),
- ]
+ dependencies = [('conf', '0001_initial'), ('main', '0004_squashed_v310_release')]
- run_before = [
- ('main', '0005_squashed_v310_v313_updates'),
- ]
+ run_before = [('main', '0005_squashed_v310_v313_updates')]
- operations = [
- migrations.RunPython(copy_tower_settings, revert_tower_settings),
- ]
+ operations = [migrations.RunPython(copy_tower_settings, revert_tower_settings)]
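The collapsed get_or_create call keeps its meaning: defaults only apply when the row is created, which is why the migration still compares and rewrites setting.value afterwards. The semantics as a tiny stand-in (plain dict, not the ORM):

    def get_or_create(store, key, defaults):
        if key in store:
            return store[key], False   # existing row: defaults ignored
        store[key] = dict(defaults)
        return store[key], True

    db = {}
    _, created = get_or_create(db, 'LICENSE', {'value': 1})
    assert created
    _, created = get_or_create(db, 'LICENSE', {'value': 2})
    assert not created and db['LICENSE']['value'] == 1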
diff --git a/awx/conf/migrations/0003_v310_JSONField_changes.py b/awx/conf/migrations/0003_v310_JSONField_changes.py
index 78a4c02de7..2550d2fff0 100644
--- a/awx/conf/migrations/0003_v310_JSONField_changes.py
+++ b/awx/conf/migrations/0003_v310_JSONField_changes.py
@@ -7,14 +7,6 @@ import awx.main.fields
class Migration(migrations.Migration):
- dependencies = [
- ('conf', '0002_v310_copy_tower_settings'),
- ]
+ dependencies = [('conf', '0002_v310_copy_tower_settings')]
- operations = [
- migrations.AlterField(
- model_name='setting',
- name='value',
- field=awx.main.fields.JSONField(null=True),
- ),
- ]
+ operations = [migrations.AlterField(model_name='setting', name='value', field=awx.main.fields.JSONField(null=True))]
diff --git a/awx/conf/migrations/0004_v320_reencrypt.py b/awx/conf/migrations/0004_v320_reencrypt.py
index 99fcd7ffce..de74493283 100644
--- a/awx/conf/migrations/0004_v320_reencrypt.py
+++ b/awx/conf/migrations/0004_v320_reencrypt.py
@@ -6,9 +6,7 @@ from django.db import migrations
class Migration(migrations.Migration):
- dependencies = [
- ('conf', '0003_v310_JSONField_changes'),
- ]
+ dependencies = [('conf', '0003_v310_JSONField_changes')]
operations = [
# This list is intentionally empty.
diff --git a/awx/conf/migrations/0005_v330_rename_two_session_settings.py b/awx/conf/migrations/0005_v330_rename_two_session_settings.py
index 1f20343951..f1034ee50a 100644
--- a/awx/conf/migrations/0005_v330_rename_two_session_settings.py
+++ b/awx/conf/migrations/0005_v330_rename_two_session_settings.py
@@ -2,8 +2,8 @@
from __future__ import unicode_literals
from django.db import migrations
from awx.conf.migrations import _rename_setting
-
-
+
+
def copy_session_settings(apps, schema_editor):
_rename_setting.rename_setting(apps, schema_editor, old_key='AUTH_TOKEN_PER_USER', new_key='SESSIONS_PER_USER')
_rename_setting.rename_setting(apps, schema_editor, old_key='AUTH_TOKEN_EXPIRATION', new_key='SESSION_COOKIE_AGE')
@@ -16,11 +16,6 @@ def reverse_copy_session_settings(apps, schema_editor):
class Migration(migrations.Migration):
- dependencies = [
- ('conf', '0004_v320_reencrypt'),
- ]
-
- operations = [
- migrations.RunPython(copy_session_settings, reverse_copy_session_settings),
- ]
+ dependencies = [('conf', '0004_v320_reencrypt')]
+ operations = [migrations.RunPython(copy_session_settings, reverse_copy_session_settings)]
diff --git a/awx/conf/migrations/0006_v331_ldap_group_type.py b/awx/conf/migrations/0006_v331_ldap_group_type.py
index 8bfe3ef0e2..b53255f29e 100644
--- a/awx/conf/migrations/0006_v331_ldap_group_type.py
+++ b/awx/conf/migrations/0006_v331_ldap_group_type.py
@@ -9,10 +9,6 @@ from django.db import migrations
class Migration(migrations.Migration):
- dependencies = [
- ('conf', '0005_v330_rename_two_session_settings'),
- ]
+ dependencies = [('conf', '0005_v330_rename_two_session_settings')]
- operations = [
- migrations.RunPython(fill_ldap_group_type_params),
- ]
+ operations = [migrations.RunPython(fill_ldap_group_type_params)]
diff --git a/awx/conf/migrations/0007_v380_rename_more_settings.py b/awx/conf/migrations/0007_v380_rename_more_settings.py
index a57b7ec4bc..2a007d0f45 100644
--- a/awx/conf/migrations/0007_v380_rename_more_settings.py
+++ b/awx/conf/migrations/0007_v380_rename_more_settings.py
@@ -10,10 +10,6 @@ def copy_allowed_ips(apps, schema_editor):
class Migration(migrations.Migration):
- dependencies = [
- ('conf', '0006_v331_ldap_group_type'),
- ]
+ dependencies = [('conf', '0006_v331_ldap_group_type')]
- operations = [
- migrations.RunPython(copy_allowed_ips),
- ]
+ operations = [migrations.RunPython(copy_allowed_ips)]
diff --git a/awx/conf/migrations/0008_subscriptions.py b/awx/conf/migrations/0008_subscriptions.py
index dacd066b4d..991e366264 100644
--- a/awx/conf/migrations/0008_subscriptions.py
+++ b/awx/conf/migrations/0008_subscriptions.py
@@ -15,12 +15,6 @@ def _noop(apps, schema_editor):
class Migration(migrations.Migration):
- dependencies = [
- ('conf', '0007_v380_rename_more_settings'),
- ]
+ dependencies = [('conf', '0007_v380_rename_more_settings')]
-
- operations = [
- migrations.RunPython(clear_old_license, _noop),
- migrations.RunPython(prefill_rh_credentials, _noop)
- ]
+ operations = [migrations.RunPython(clear_old_license, _noop), migrations.RunPython(prefill_rh_credentials, _noop)]
diff --git a/awx/conf/migrations/_ldap_group_type.py b/awx/conf/migrations/_ldap_group_type.py
index c7f850d41c..e8de5ca4aa 100644
--- a/awx/conf/migrations/_ldap_group_type.py
+++ b/awx/conf/migrations/_ldap_group_type.py
@@ -1,4 +1,3 @@
-
import inspect
from django.conf import settings
@@ -16,10 +15,7 @@ def fill_ldap_group_type_params(apps, schema_editor):
entry = qs[0]
group_type_params = entry.value
else:
- entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS',
- value=group_type_params,
- created=now(),
- modified=now())
+ entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS', value=group_type_params, created=now(), modified=now())
init_attrs = set(inspect.getargspec(group_type.__init__).args[1:])
for k in list(group_type_params.keys()):
diff --git a/awx/conf/migrations/_reencrypt.py b/awx/conf/migrations/_reencrypt.py
index d8f06c0305..b7931464ad 100644
--- a/awx/conf/migrations/_reencrypt.py
+++ b/awx/conf/migrations/_reencrypt.py
@@ -11,15 +11,16 @@ __all__ = ['get_encryption_key', 'decrypt_field']
def get_encryption_key(field_name, pk=None):
- '''
+ """
Generate key for encrypted password based on field name,
``settings.SECRET_KEY``, and instance pk (if available).
:param pk: (optional) the primary key of the ``awx.conf.model.Setting``;
can be omitted in situations where you're encrypting a setting
that is not database-persistent (like a read-only setting)
- '''
+ """
from django.conf import settings
+
h = hashlib.sha1()
h.update(settings.SECRET_KEY)
if pk is not None:
@@ -29,11 +30,11 @@ def get_encryption_key(field_name, pk=None):
def decrypt_value(encryption_key, value):
- raw_data = value[len('$encrypted$'):]
+ raw_data = value[len('$encrypted$') :]
# If the encrypted string contains a UTF8 marker, discard it
utf8 = raw_data.startswith('UTF8$')
if utf8:
- raw_data = raw_data[len('UTF8$'):]
+ raw_data = raw_data[len('UTF8$') :]
algo, b64data = raw_data.split('$', 1)
if algo != 'AES':
raise ValueError('unsupported algorithm: %s' % algo)
@@ -48,9 +49,9 @@ def decrypt_value(encryption_key, value):
def decrypt_field(instance, field_name, subfield=None):
- '''
+ """
Return content of the given instance and field name decrypted.
- '''
+ """
value = getattr(instance, field_name)
if isinstance(value, dict) and subfield is not None:
value = value[subfield]
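The odd-looking space in value[len('$encrypted$') :] is black's doing: when a slice bound is an expression rather than a simple name, black spaces the colon like a binary operator, which is also why projects adopting black usually tell flake8 to ignore E203:

    prefix = '$encrypted$'
    value = '$encrypted$UTF8$AES$deadbeef'
    raw_data = value[len(prefix) :]  # same slice, just black's preferred spacing
    assert raw_data.startswith('UTF8$')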
diff --git a/awx/conf/migrations/_rename_setting.py b/awx/conf/migrations/_rename_setting.py
index dbbc347edf..b739b877d9 100644
--- a/awx/conf/migrations/_rename_setting.py
+++ b/awx/conf/migrations/_rename_setting.py
@@ -6,11 +6,11 @@ from django.conf import settings
logger = logging.getLogger('awx.conf.settings')
-__all__ = ['rename_setting']
-
-
+__all__ = ['rename_setting']
+
+
def rename_setting(apps, schema_editor, old_key, new_key):
-
+
old_setting = None
Setting = apps.get_model('conf', 'Setting')
if Setting.objects.filter(key=new_key).exists() or hasattr(settings, new_key):
@@ -24,9 +24,4 @@ def rename_setting(apps, schema_editor, old_key, new_key):
if hasattr(settings, old_key):
old_setting = getattr(settings, old_key)
if old_setting is not None:
- Setting.objects.create(key=new_key,
- value=old_setting,
- created=now(),
- modified=now()
- )
-
+ Setting.objects.create(key=new_key, value=old_setting, created=now(), modified=now())
diff --git a/awx/conf/migrations/_subscriptions.py b/awx/conf/migrations/_subscriptions.py
index 2b979fb68e..de8320011e 100644
--- a/awx/conf/migrations/_subscriptions.py
+++ b/awx/conf/migrations/_subscriptions.py
@@ -6,7 +6,7 @@ from awx.main.utils.encryption import decrypt_field, encrypt_field
logger = logging.getLogger('awx.conf.settings')
__all__ = ['clear_old_license', 'prefill_rh_credentials']
-
+
def clear_old_license(apps, schema_editor):
Setting = apps.get_model('conf', 'Setting')
@@ -17,10 +17,7 @@ def _migrate_setting(apps, old_key, new_key, encrypted=False):
Setting = apps.get_model('conf', 'Setting')
if not Setting.objects.filter(key=old_key).exists():
return
- new_setting = Setting.objects.create(key=new_key,
- created=now(),
- modified=now()
- )
+ new_setting = Setting.objects.create(key=new_key, created=now(), modified=now())
if encrypted:
new_setting.value = decrypt_field(Setting.objects.filter(key=old_key).first(), 'value')
new_setting.value = encrypt_field(new_setting, 'value')
diff --git a/awx/conf/models.py b/awx/conf/models.py
index fe28fd89a8..90ee8f06c0 100644
--- a/awx/conf/models.py
+++ b/awx/conf/models.py
@@ -18,20 +18,9 @@ __all__ = ['Setting']
class Setting(CreatedModifiedModel):
- key = models.CharField(
- max_length=255,
- )
- value = JSONField(
- null=True,
- )
- user = prevent_search(models.ForeignKey(
- 'auth.User',
- related_name='settings',
- default=None,
- null=True,
- editable=False,
- on_delete=models.CASCADE,
- ))
+ key = models.CharField(max_length=255)
+ value = JSONField(null=True)
+ user = prevent_search(models.ForeignKey('auth.User', related_name='settings', default=None, null=True, editable=False, on_delete=models.CASCADE))
def __str__(self):
try:
@@ -66,6 +55,7 @@ class Setting(CreatedModifiedModel):
# field and save again.
if encrypted and new_instance:
from awx.main.signals import disable_activity_stream
+
with disable_activity_stream():
self.value = self._saved_value
self.save(update_fields=['value'])
@@ -82,6 +72,7 @@ class Setting(CreatedModifiedModel):
import awx.conf.signals # noqa
from awx.main.registrar import activity_stream_registrar # noqa
+
activity_stream_registrar.connect(Setting)
import awx.conf.access # noqa
diff --git a/awx/conf/registry.py b/awx/conf/registry.py
index e8e52fe695..627099a57a 100644
--- a/awx/conf/registry.py
+++ b/awx/conf/registry.py
@@ -69,10 +69,7 @@ class SettingsRegistry(object):
return self._dependent_settings.get(setting, set())
def get_registered_categories(self):
- categories = {
- 'all': _('All'),
- 'changed': _('Changed'),
- }
+ categories = {'all': _('All'), 'changed': _('Changed')}
for setting, kwargs in self._registry.items():
category_slug = kwargs.get('category_slug', None)
if category_slug is None or category_slug in categories:
@@ -95,8 +92,11 @@ class SettingsRegistry(object):
continue
if kwargs.get('category_slug', None) in slugs_to_ignore:
continue
- if (read_only in {True, False} and kwargs.get('read_only', False) != read_only and
- setting not in ('INSTALL_UUID', 'AWX_ISOLATED_PRIVATE_KEY', 'AWX_ISOLATED_PUBLIC_KEY')):
+ if (
+ read_only in {True, False}
+ and kwargs.get('read_only', False) != read_only
+ and setting not in ('INSTALL_UUID', 'AWX_ISOLATED_PRIVATE_KEY', 'AWX_ISOLATED_PUBLIC_KEY')
+ ):
# Note: Doesn't catch fields that set read_only via __init__;
# read-only field kwargs should always include read_only=True.
continue
@@ -117,6 +117,7 @@ class SettingsRegistry(object):
def get_setting_field(self, setting, mixin_class=None, for_user=False, **kwargs):
from rest_framework.fields import empty
+
field_kwargs = {}
field_kwargs.update(self._registry[setting])
field_kwargs.update(kwargs)
@@ -141,11 +142,7 @@ class SettingsRegistry(object):
field_instance.placeholder = placeholder
field_instance.defined_in_file = defined_in_file
if field_instance.defined_in_file:
- field_instance.help_text = (
- str(_('This value has been set manually in a settings file.')) +
- '\n\n' +
- str(field_instance.help_text)
- )
+ field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text)
field_instance.encrypted = encrypted
original_field_instance = field_instance
if field_class != original_field_class:
diff --git a/awx/conf/serializers.py b/awx/conf/serializers.py
index e297fe1e69..838a636aaa 100644
--- a/awx/conf/serializers.py
+++ b/awx/conf/serializers.py
@@ -30,15 +30,9 @@ class SettingSerializer(BaseSerializer):
class SettingCategorySerializer(serializers.Serializer):
"""Serialize setting category """
- url = serializers.CharField(
- read_only=True,
- )
- slug = serializers.CharField(
- read_only=True,
- )
- name = serializers.CharField(
- read_only=True,
- )
+ url = serializers.CharField(read_only=True)
+ slug = serializers.CharField(read_only=True)
+ name = serializers.CharField(read_only=True)
class SettingFieldMixin(object):
diff --git a/awx/conf/settings.py b/awx/conf/settings.py
index 500b4408f5..57d3265d72 100644
--- a/awx/conf/settings.py
+++ b/awx/conf/settings.py
@@ -62,12 +62,12 @@ __all__ = ['SettingsWrapper', 'get_settings_to_cache', 'SETTING_CACHE_NOTSET']
@contextlib.contextmanager
def _ctit_db_wrapper(trans_safe=False):
- '''
+ """
Wrapper to avoid undesired actions by Django ORM when managing settings
if only getting a setting, can use trans_safe=True, which will avoid
throwing errors if the prior context was a broken transaction.
Any database errors will be logged, but exception will be suppressed.
- '''
+ """
rollback_set = None
is_atomic = None
try:
@@ -115,7 +115,6 @@ class TransientSetting(object):
class EncryptedCacheProxy(object):
-
def __init__(self, cache, registry, encrypter=None, decrypter=None):
"""
This proxy wraps a Django cache backend and overwrites the
@@ -145,19 +144,11 @@ class EncryptedCacheProxy(object):
def set(self, key, value, log=True, **kwargs):
if log is True:
- logger.debug('cache set(%r, %r, %r)', key, filter_sensitive(self.registry, key, value),
- SETTING_CACHE_TIMEOUT)
- self.cache.set(
- key,
- self._handle_encryption(self.encrypter, key, value),
- **kwargs
- )
+ logger.debug('cache set(%r, %r, %r)', key, filter_sensitive(self.registry, key, value), SETTING_CACHE_TIMEOUT)
+ self.cache.set(key, self._handle_encryption(self.encrypter, key, value), **kwargs)
def set_many(self, data, **kwargs):
- filtered_data = dict(
- (key, filter_sensitive(self.registry, key, value))
- for key, value in data.items()
- )
+ filtered_data = dict((key, filter_sensitive(self.registry, key, value)) for key, value in data.items())
logger.debug('cache set_many(%r, %r)', filtered_data, SETTING_CACHE_TIMEOUT)
for key, value in data.items():
self.set(key, value, log=False, **kwargs)
@@ -168,18 +159,11 @@ class EncryptedCacheProxy(object):
# as part of the AES key when encrypting/decrypting
obj_id = self.cache.get(Setting.get_cache_id_key(key), default=empty)
if obj_id is empty:
- logger.info('Efficiency notice: Corresponding id not stored in cache %s',
- Setting.get_cache_id_key(key))
+ logger.info('Efficiency notice: Corresponding id not stored in cache %s', Setting.get_cache_id_key(key))
obj_id = getattr(self._get_setting_from_db(key), 'pk', None)
elif obj_id == SETTING_CACHE_NONE:
obj_id = None
- return method(
- TransientSetting(
- pk=obj_id,
- value=value
- ),
- 'value'
- )
+ return method(TransientSetting(pk=obj_id, value=value), 'value')
# If the field in question isn't an "encrypted" field, this function is
# a no-op; it just returns the provided value
@@ -206,9 +190,9 @@ def get_settings_to_cache(registry):
def get_cache_value(value):
- '''Returns the proper special cache setting for a value
+ """Returns the proper special cache setting for a value
based on instance type.
- '''
+ """
if value is None:
value = SETTING_CACHE_NONE
elif isinstance(value, (list, tuple)) and len(value) == 0:
@@ -219,7 +203,6 @@ def get_cache_value(value):
class SettingsWrapper(UserSettingsHolder):
-
@classmethod
def initialize(cls, cache=None, registry=None):
"""
@@ -231,11 +214,7 @@ class SettingsWrapper(UserSettingsHolder):
``awx.conf.settings_registry`` is used by default.
"""
if not getattr(settings, '_awx_conf_settings', False):
- settings_wrapper = cls(
- settings._wrapped,
- cache=cache or django_cache,
- registry=registry or settings_registry
- )
+ settings_wrapper = cls(settings._wrapped, cache=cache or django_cache, registry=registry or settings_registry)
settings._wrapped = settings_wrapper
def __init__(self, default_settings, cache, registry):
@@ -322,7 +301,7 @@ class SettingsWrapper(UserSettingsHolder):
try:
value = decrypt_field(setting, 'value')
except ValueError as e:
- #TODO: Remove in Tower 3.3
+ # TODO: Remove in Tower 3.3
logger.debug('encountered error decrypting field: %s - attempting fallback to old', e)
value = old_decrypt_field(setting, 'value')
@@ -345,8 +324,7 @@ class SettingsWrapper(UserSettingsHolder):
# Generate a cache key for each setting and store them all at once.
settings_to_cache = dict([(Setting.get_cache_key(k), v) for k, v in settings_to_cache.items()])
for k, id_val in setting_ids.items():
- logger.debug('Saving id in cache for encrypted setting %s, %s',
- Setting.get_cache_id_key(k), id_val)
+ logger.debug('Saving id in cache for encrypted setting %s, %s', Setting.get_cache_id_key(k), id_val)
self.cache.cache.set(Setting.get_cache_id_key(k), id_val)
settings_to_cache['_awx_conf_preload_expires'] = self._awx_conf_preload_expires
self.cache.set_many(settings_to_cache, timeout=SETTING_CACHE_TIMEOUT)
@@ -420,9 +398,7 @@ class SettingsWrapper(UserSettingsHolder):
else:
return value
except Exception:
- logger.warning(
- 'The current value "%r" for setting "%s" is invalid.',
- value, name, exc_info=True)
+ logger.warning('The current value "%r" for setting "%s" is invalid.', value, name, exc_info=True)
return empty
def _get_default(self, name):
@@ -453,8 +429,7 @@ class SettingsWrapper(UserSettingsHolder):
setting_value = field.run_validation(data)
db_value = field.to_representation(setting_value)
except Exception as e:
- logger.exception('Unable to assign value "%r" to setting "%s".',
- value, name, exc_info=True)
+ logger.exception('Unable to assign value "%r" to setting "%s".', value, name, exc_info=True)
raise e
setting = Setting.objects.filter(key=name, user__isnull=True).order_by('pk').first()
@@ -492,8 +467,7 @@ class SettingsWrapper(UserSettingsHolder):
def __dir__(self):
keys = []
with _ctit_db_wrapper(trans_safe=True):
- for setting in Setting.objects.filter(
- key__in=self.all_supported_settings, user__isnull=True):
+ for setting in Setting.objects.filter(key__in=self.all_supported_settings, user__isnull=True):
# Skip returning settings that have been overridden but are
# considered to be "not set".
if setting.value is None and SETTING_CACHE_NOTSET == SETTING_CACHE_NONE:
@@ -511,7 +485,7 @@ class SettingsWrapper(UserSettingsHolder):
with _ctit_db_wrapper(trans_safe=True):
set_locally = Setting.objects.filter(key=setting, user__isnull=True).exists()
set_on_default = getattr(self.default_settings, 'is_overridden', lambda s: False)(setting)
- return (set_locally or set_on_default)
+ return set_locally or set_on_default
def __getattr_without_cache__(self, name):
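
The EncryptedCacheProxy hunks above compress its encrypt-on-write, decrypt-on-read plumbing onto single lines without changing behavior. A hedged, self-contained miniature of that behavior, using the same rot13 stand-in cipher the unit tests further down use (this is an illustration, not the AWX implementation):

    import codecs

    class TransientSetting:
        # As in the diff: just a pk and a value for the cipher callables
        # to operate against.
        def __init__(self, pk, value):
            self.pk = pk
            self.value = value

    class MiniEncryptedCacheProxy:
        def __init__(self, cache, encrypted_keys, encrypter, decrypter):
            self.cache = cache
            self.encrypted_keys = encrypted_keys
            self.encrypter = encrypter
            self.decrypter = decrypter

        def _handle(self, method, key, value):
            # For non-encrypted fields this is a no-op, as the diff comment says.
            if key in self.encrypted_keys:
                return method(TransientSetting(pk=None, value=value), 'value')
            return value

        def set(self, key, value):
            self.cache[key] = self._handle(self.encrypter, key, value)

        def get(self, key):
            return self._handle(self.decrypter, key, self.cache[key])

    def rot13(obj, attribute):
        # Toy cipher standing in for encrypt_field/decrypt_field.
        return codecs.encode(getattr(obj, attribute), 'rot_13')

    cache = MiniEncryptedCacheProxy({}, {'AWX_ENCRYPTED'}, rot13, rot13)
    cache.set('AWX_ENCRYPTED', 'SECRET!')
    assert cache.cache['AWX_ENCRYPTED'] == 'FRPERG!'  # stored encrypted
    assert cache.get('AWX_ENCRYPTED') == 'SECRET!'    # decrypted on read
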
diff --git a/awx/conf/signals.py b/awx/conf/signals.py
index 698b758b98..ad388501f4 100644
--- a/awx/conf/signals.py
+++ b/awx/conf/signals.py
@@ -30,12 +30,7 @@ def handle_setting_change(key, for_delete=False):
# Send setting_changed signal with new value for each setting.
for setting_key in setting_keys:
- setting_changed.send(
- sender=Setting,
- setting=setting_key,
- value=getattr(settings, setting_key, None),
- enter=not bool(for_delete),
- )
+ setting_changed.send(sender=Setting, setting=setting_key, value=getattr(settings, setting_key, None), enter=not bool(for_delete))
@receiver(post_save, sender=Setting)
diff --git a/awx/conf/tests/functional/conftest.py b/awx/conf/tests/functional/conftest.py
index 707d75e6fa..a3d2145a99 100644
--- a/awx/conf/tests/functional/conftest.py
+++ b/awx/conf/tests/functional/conftest.py
@@ -5,10 +5,7 @@ import pytest
from django.urls import resolve
from django.contrib.auth.models import User
-from rest_framework.test import (
- APIRequestFactory,
- force_authenticate,
-)
+from rest_framework.test import APIRequestFactory, force_authenticate
@pytest.fixture
@@ -41,4 +38,5 @@ def api_request(admin):
response = view(request, *view_args, **view_kwargs)
response.render()
return response
+
return rf
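
The conftest hunk above ends with another blank-line rule: black separates the inner function definition from the fixture's final `return rf`. The fixture-as-factory pattern it formats looks like this in miniature (a generic sketch, not the AWX fixture):

    import pytest

    @pytest.fixture
    def api_request():
        # Build a request callable inside the fixture, then hand it back;
        # tests call the returned function, not the fixture itself.
        def rf(method, url):
            return {'method': method, 'url': url}

        return rf

    def test_api_request(api_request):
        assert api_request('get', '/api/v2/settings/')['method'] == 'get'
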
diff --git a/awx/conf/tests/functional/test_api.py b/awx/conf/tests/functional/test_api.py
index 869627878a..2d09423386 100644
--- a/awx/conf/tests/functional/test_api.py
+++ b/awx/conf/tests/functional/test_api.py
@@ -45,44 +45,19 @@ def dummy_validate():
@pytest.mark.django_db
def test_non_admin_user_does_not_see_categories(api_request, dummy_setting, normal_user):
- with dummy_setting(
- 'FOO_BAR',
- field_class=fields.IntegerField,
- category='FooBar',
- category_slug='foobar'
- ):
- response = api_request(
- 'get',
- reverse('api:setting_category_list',
- kwargs={'version': 'v2'})
- )
+ with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'):
+ response = api_request('get', reverse('api:setting_category_list', kwargs={'version': 'v2'}))
assert response.data['results']
- response = api_request(
- 'get',
- reverse('api:setting_category_list',
- kwargs={'version': 'v2'}),
- user=normal_user
- )
+ response = api_request('get', reverse('api:setting_category_list', kwargs={'version': 'v2'}), user=normal_user)
assert not response.data['results']
@pytest.mark.django_db
def test_setting_singleton_detail_retrieve(api_request, dummy_setting):
- with dummy_setting(
- 'FOO_BAR_1',
- field_class=fields.IntegerField,
- category='FooBar',
- category_slug='foobar'
- ), dummy_setting(
- 'FOO_BAR_2',
- field_class=fields.IntegerField,
- category='FooBar',
- category_slug='foobar'
+ with dummy_setting('FOO_BAR_1', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), dummy_setting(
+ 'FOO_BAR_2', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'
):
- response = api_request(
- 'get',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
- )
+ response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.status_code == 200
assert 'FOO_BAR_1' in response.data and response.data['FOO_BAR_1'] is None
assert 'FOO_BAR_2' in response.data and response.data['FOO_BAR_2'] is None
@@ -90,97 +65,43 @@ def test_setting_singleton_detail_retrieve(api_request, dummy_setting):
@pytest.mark.django_db
def test_setting_singleton_detail_invalid_retrieve(api_request, dummy_setting, normal_user):
- with dummy_setting(
- 'FOO_BAR_1',
- field_class=fields.IntegerField,
- category='FooBar',
- category_slug='foobar'
- ), dummy_setting(
- 'FOO_BAR_2',
- field_class=fields.IntegerField,
- category='FooBar',
- category_slug='foobar'
+ with dummy_setting('FOO_BAR_1', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), dummy_setting(
+ 'FOO_BAR_2', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'
):
- response = api_request(
- 'get',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'barfoo'})
- )
+ response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'barfoo'}))
assert response.status_code == 404
- response = api_request(
- 'get',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
- user = normal_user
- )
+ response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), user=normal_user)
assert response.status_code == 403
@pytest.mark.django_db
def test_setting_signleton_retrieve_hierachy(api_request, dummy_setting):
- with dummy_setting(
- 'FOO_BAR',
- field_class=fields.IntegerField,
- default=0,
- category='FooBar',
- category_slug='foobar'
- ):
- response = api_request(
- 'get',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
- )
+ with dummy_setting('FOO_BAR', field_class=fields.IntegerField, default=0, category='FooBar', category_slug='foobar'):
+ response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == 0
s = Setting(key='FOO_BAR', value=1)
s.save()
- response = api_request(
- 'get',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
- )
+ response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == 1
@pytest.mark.django_db
def test_setting_singleton_retrieve_readonly(api_request, dummy_setting):
- with dummy_setting(
- 'FOO_BAR',
- field_class=fields.IntegerField,
- read_only=True,
- default=2,
- category='FooBar',
- category_slug='foobar'
- ):
- response = api_request(
- 'get',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
- )
+ with dummy_setting('FOO_BAR', field_class=fields.IntegerField, read_only=True, default=2, category='FooBar', category_slug='foobar'):
+ response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == 2
@pytest.mark.django_db
def test_setting_singleton_update(api_request, dummy_setting):
- with dummy_setting(
- 'FOO_BAR',
- field_class=fields.IntegerField,
- category='FooBar',
- category_slug='foobar'
- ), mock.patch('awx.conf.views.handle_setting_changes'):
- api_request(
- 'patch',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
- data={'FOO_BAR': 3}
- )
- response = api_request(
- 'get',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
- )
+ with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), mock.patch(
+ 'awx.conf.views.handle_setting_changes'
+ ):
+ api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 3})
+ response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == 3
- api_request(
- 'patch',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
- data={'FOO_BAR': 4}
- )
- response = api_request(
- 'get',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
- )
+ api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 4})
+ response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == 4
@@ -190,138 +111,70 @@ def test_setting_singleton_update_hybriddictfield_with_forbidden(api_request, du
# indicating that only the defined fields can be filled in. Make
# sure that the _Forbidden validator doesn't get used for the
# fields. See also https://github.com/ansible/awx/issues/4099.
- with dummy_setting(
- 'FOO_BAR',
- field_class=sso_fields.SAMLOrgAttrField,
- category='FooBar',
- category_slug='foobar',
- ), mock.patch('awx.conf.views.handle_setting_changes'):
+ with dummy_setting('FOO_BAR', field_class=sso_fields.SAMLOrgAttrField, category='FooBar', category_slug='foobar'), mock.patch(
+ 'awx.conf.views.handle_setting_changes'
+ ):
api_request(
'patch',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
- data={'FOO_BAR': {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'}}
- )
- response = api_request(
- 'get',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
+ data={'FOO_BAR': {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'}},
)
+ response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'}
@pytest.mark.django_db
def test_setting_singleton_update_dont_change_readonly_fields(api_request, dummy_setting):
- with dummy_setting(
- 'FOO_BAR',
- field_class=fields.IntegerField,
- read_only=True,
- default=4,
- category='FooBar',
- category_slug='foobar'
- ), mock.patch('awx.conf.views.handle_setting_changes'):
- api_request(
- 'patch',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
- data={'FOO_BAR': 5}
- )
- response = api_request(
- 'get',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
- )
+ with dummy_setting('FOO_BAR', field_class=fields.IntegerField, read_only=True, default=4, category='FooBar', category_slug='foobar'), mock.patch(
+ 'awx.conf.views.handle_setting_changes'
+ ):
+ api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 5})
+ response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == 4
@pytest.mark.django_db
def test_setting_singleton_update_dont_change_encrypted_mark(api_request, dummy_setting):
- with dummy_setting(
- 'FOO_BAR',
- field_class=fields.CharField,
- encrypted=True,
- category='FooBar',
- category_slug='foobar'
- ), mock.patch('awx.conf.views.handle_setting_changes'):
- api_request(
- 'patch',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
- data={'FOO_BAR': 'password'}
- )
+ with dummy_setting('FOO_BAR', field_class=fields.CharField, encrypted=True, category='FooBar', category_slug='foobar'), mock.patch(
+ 'awx.conf.views.handle_setting_changes'
+ ):
+ api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 'password'})
assert Setting.objects.get(key='FOO_BAR').value.startswith('$encrypted$')
- response = api_request(
- 'get',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
- )
+ response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == '$encrypted$'
- api_request(
- 'patch',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
- data={'FOO_BAR': '$encrypted$'}
- )
+ api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': '$encrypted$'})
assert decrypt_field(Setting.objects.get(key='FOO_BAR'), 'value') == 'password'
- api_request(
- 'patch',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
- data={'FOO_BAR': 'new_pw'}
- )
+ api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 'new_pw'})
assert decrypt_field(Setting.objects.get(key='FOO_BAR'), 'value') == 'new_pw'
@pytest.mark.django_db
def test_setting_singleton_update_runs_custom_validate(api_request, dummy_setting, dummy_validate):
-
def func_raising_exception(serializer, attrs):
raise serializers.ValidationError('Error')
- with dummy_setting(
- 'FOO_BAR',
- field_class=fields.IntegerField,
- category='FooBar',
- category_slug='foobar'
- ), dummy_validate(
+ with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), dummy_validate(
'foobar', func_raising_exception
), mock.patch('awx.conf.views.handle_setting_changes'):
- response = api_request(
- 'patch',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
- data={'FOO_BAR': 23}
- )
+ response = api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 23})
assert response.status_code == 400
@pytest.mark.django_db
def test_setting_singleton_delete(api_request, dummy_setting):
- with dummy_setting(
- 'FOO_BAR',
- field_class=fields.IntegerField,
- category='FooBar',
- category_slug='foobar'
- ), mock.patch('awx.conf.views.handle_setting_changes'):
- api_request(
- 'delete',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
- )
- response = api_request(
- 'get',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
- )
+ with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), mock.patch(
+ 'awx.conf.views.handle_setting_changes'
+ ):
+ api_request('delete', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
+ response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert not response.data['FOO_BAR']
@pytest.mark.django_db
def test_setting_singleton_delete_no_read_only_fields(api_request, dummy_setting):
- with dummy_setting(
- 'FOO_BAR',
- field_class=fields.IntegerField,
- read_only=True,
- default=23,
- category='FooBar',
- category_slug='foobar'
- ), mock.patch('awx.conf.views.handle_setting_changes'):
- api_request(
- 'delete',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
- )
- response = api_request(
- 'get',
- reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
- )
+ with dummy_setting('FOO_BAR', field_class=fields.IntegerField, read_only=True, default=23, category='FooBar', category_slug='foobar'), mock.patch(
+ 'awx.conf.views.handle_setting_changes'
+ ):
+ api_request('delete', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
+ response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == 23
-
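
One wrapping choice recurs throughout the test hunks above: for a long `with` statement holding two context managers, black cannot parenthesize the managers themselves (that syntax arrived in later Python versions), so it breaks inside the second call's argument list instead. A small, hypothetical reproduction of the shape:

    import contextlib
    from unittest import mock

    @contextlib.contextmanager
    def dummy_setting(name, **kwargs):
        # Stand-in for the dummy_setting fixture used above: "register" a
        # fake setting for the duration of the block.
        yield {name: kwargs}

    with dummy_setting('FOO_BAR', field_class=int, category='FooBar', category_slug='foobar'), mock.patch(
        'json.loads'
    ):
        pass
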
diff --git a/awx/conf/tests/test_env.py b/awx/conf/tests/test_env.py
index 135c90d99b..b63da8ed8a 100644
--- a/awx/conf/tests/test_env.py
+++ b/awx/conf/tests/test_env.py
@@ -1,5 +1,3 @@
-
-
# Ensure that our autouse overwrites are working
def test_cache(settings):
assert settings.CACHES['default']['BACKEND'] == 'django.core.cache.backends.locmem.LocMemCache'
diff --git a/awx/conf/tests/unit/test_fields.py b/awx/conf/tests/unit/test_fields.py
index bac32a4846..7eb4b450fd 100644
--- a/awx/conf/tests/unit/test_fields.py
+++ b/awx/conf/tests/unit/test_fields.py
@@ -4,7 +4,7 @@ from rest_framework.fields import ValidationError
from awx.conf.fields import StringListBooleanField, StringListPathField, ListTuplesField, URLField
-class TestStringListBooleanField():
+class TestStringListBooleanField:
FIELD_VALUES = [
("hello", "hello"),
@@ -23,10 +23,7 @@ class TestStringListBooleanField():
("NULL", None),
]
- FIELD_VALUES_INVALID = [
- 1.245,
- {"a": "b"},
- ]
+ FIELD_VALUES_INVALID = [1.245, {"a": "b"}]
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
def test_to_internal_value_valid(self, value_in, value_known):
@@ -39,8 +36,7 @@ class TestStringListBooleanField():
field = StringListBooleanField()
with pytest.raises(ValidationError) as e:
field.to_internal_value(value)
- assert e.value.detail[0] == "Expected None, True, False, a string or list " \
- "of strings but got {} instead.".format(type(value))
+ assert e.value.detail[0] == "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
def test_to_representation_valid(self, value_in, value_known):
@@ -53,22 +49,14 @@ class TestStringListBooleanField():
field = StringListBooleanField()
with pytest.raises(ValidationError) as e:
field.to_representation(value)
- assert e.value.detail[0] == "Expected None, True, False, a string or list " \
- "of strings but got {} instead.".format(type(value))
+ assert e.value.detail[0] == "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))
-class TestListTuplesField():
+class TestListTuplesField:
- FIELD_VALUES = [
- ([('a', 'b'), ('abc', '123')], [("a", "b"), ("abc", "123")]),
- ]
+ FIELD_VALUES = [([('a', 'b'), ('abc', '123')], [("a", "b"), ("abc", "123")])]
- FIELD_VALUES_INVALID = [
- ("abc", type("abc")),
- ([('a', 'b', 'c'), ('abc', '123', '456')], type(('a',))),
- (['a', 'b'], type('a')),
- (123, type(123)),
- ]
+ FIELD_VALUES_INVALID = [("abc", type("abc")), ([('a', 'b', 'c'), ('abc', '123', '456')], type(('a',))), (['a', 'b'], type('a')), (123, type(123))]
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
def test_to_internal_value_valid(self, value_in, value_known):
@@ -81,11 +69,10 @@ class TestListTuplesField():
field = ListTuplesField()
with pytest.raises(ValidationError) as e:
field.to_internal_value(value)
- assert e.value.detail[0] == "Expected a list of tuples of max length 2 " \
- "but got {} instead.".format(t)
+ assert e.value.detail[0] == "Expected a list of tuples of max length 2 " "but got {} instead.".format(t)
-class TestStringListPathField():
+class TestStringListPathField:
FIELD_VALUES = [
((".", "..", "/"), [".", "..", "/"]),
@@ -93,22 +80,12 @@ class TestStringListPathField():
(("///home///",), ["/home"]),
(("/home/././././",), ["/home"]),
(("/home", "/home", "/home/"), ["/home"]),
- (["/home/", "/home/", "/opt/", "/opt/", "/var/"], ["/home", "/opt", "/var"])
+ (["/home/", "/home/", "/opt/", "/opt/", "/var/"], ["/home", "/opt", "/var"]),
]
- FIELD_VALUES_INVALID_TYPE = [
- 1.245,
- {"a": "b"},
- ("/home"),
- ]
+ FIELD_VALUES_INVALID_TYPE = [1.245, {"a": "b"}, ("/home")]
- FIELD_VALUES_INVALID_PATH = [
- "",
- "~/",
- "home",
- "/invalid_path",
- "/home/invalid_path",
- ]
+ FIELD_VALUES_INVALID_PATH = ["", "~/", "home", "/invalid_path", "/home/invalid_path"]
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
def test_to_internal_value_valid(self, value_in, value_known):
@@ -131,16 +108,19 @@ class TestStringListPathField():
assert e.value.detail[0] == "{} is not a valid path choice.".format(value)
-class TestURLField():
+class TestURLField:
regex = "^https://www.example.org$"
- @pytest.mark.parametrize("url,schemes,regex, allow_numbers_in_top_level_domain, expect_no_error",[
- ("ldap://www.example.org42", "ldap", None, True, True),
- ("https://www.example.org42", "https", None, False, False),
- ("https://www.example.org", None, regex, None, True),
- ("https://www.example3.org", None, regex, None, False),
- ("ftp://www.example.org", "https", None, None, False)
- ])
+ @pytest.mark.parametrize(
+ "url,schemes,regex, allow_numbers_in_top_level_domain, expect_no_error",
+ [
+ ("ldap://www.example.org42", "ldap", None, True, True),
+ ("https://www.example.org42", "https", None, False, False),
+ ("https://www.example.org", None, regex, None, True),
+ ("https://www.example3.org", None, regex, None, False),
+ ("ftp://www.example.org", "https", None, None, False),
+ ],
+ )
def test_urls(self, url, schemes, regex, allow_numbers_in_top_level_domain, expect_no_error):
kwargs = {}
kwargs.setdefault("allow_numbers_in_top_level_domain", allow_numbers_in_top_level_domain)
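
Two black behaviors show up in this file's hunks. First, removing a backslash continuation leaves two adjacent string literals, which Python joins by implicit concatenation, so the assertion messages are byte-for-byte unchanged:

    value = 1.245
    msg = "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))
    assert msg == "Expected None, True, False, a string or list of strings but got <class 'float'> instead."

Second, the trailing comma black leaves after the last parametrize case is deliberate: its "magic trailing comma" keeps the case list exploded one-per-line on future runs instead of collapsing it back onto a single line.
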
diff --git a/awx/conf/tests/unit/test_registry.py b/awx/conf/tests/unit/test_registry.py
index ea5c66375f..6a817985e2 100644
--- a/awx/conf/tests/unit/test_registry.py
+++ b/awx/conf/tests/unit/test_registry.py
@@ -33,30 +33,18 @@ def reg(request):
if marker.name == 'defined_in_file':
settings.configure(**marker.kwargs)
- settings._wrapped = SettingsWrapper(settings._wrapped,
- cache,
- registry)
+ settings._wrapped = SettingsWrapper(settings._wrapped, cache, registry)
return registry
def test_simple_setting_registration(reg):
assert reg.get_registered_settings() == []
- reg.register(
- 'AWX_SOME_SETTING_ENABLED',
- field_class=fields.BooleanField,
- category=_('System'),
- category_slug='system',
- )
+ reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
assert reg.get_registered_settings() == ['AWX_SOME_SETTING_ENABLED']
def test_simple_setting_unregistration(reg):
- reg.register(
- 'AWX_SOME_SETTING_ENABLED',
- field_class=fields.BooleanField,
- category=_('System'),
- category_slug='system',
- )
+ reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
assert reg.get_registered_settings() == ['AWX_SOME_SETTING_ENABLED']
reg.unregister('AWX_SOME_SETTING_ENABLED')
@@ -67,12 +55,7 @@ def test_duplicate_setting_registration(reg):
"ensure that settings cannot be registered twice."
with pytest.raises(ImproperlyConfigured):
for i in range(2):
- reg.register(
- 'AWX_SOME_SETTING_ENABLED',
- field_class=fields.BooleanField,
- category=_('System'),
- category_slug='system',
- )
+ reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
def test_field_class_required_for_registration(reg):
@@ -82,110 +65,42 @@ def test_field_class_required_for_registration(reg):
def test_get_registered_settings_by_slug(reg):
- reg.register(
- 'AWX_SOME_SETTING_ENABLED',
- field_class=fields.BooleanField,
- category=_('System'),
- category_slug='system',
- )
- assert reg.get_registered_settings(category_slug='system') == [
- 'AWX_SOME_SETTING_ENABLED'
- ]
+ reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
+ assert reg.get_registered_settings(category_slug='system') == ['AWX_SOME_SETTING_ENABLED']
assert reg.get_registered_settings(category_slug='other') == []
def test_get_registered_read_only_settings(reg):
- reg.register(
- 'AWX_SOME_SETTING_ENABLED',
- field_class=fields.BooleanField,
- category=_('System'),
- category_slug='system'
- )
- reg.register(
- 'AWX_SOME_READ_ONLY',
- field_class=fields.BooleanField,
- category=_('System'),
- category_slug='system',
- read_only=True
- )
- assert reg.get_registered_settings(read_only=True) ==[
- 'AWX_SOME_READ_ONLY'
- ]
- assert reg.get_registered_settings(read_only=False) == [
- 'AWX_SOME_SETTING_ENABLED'
- ]
- assert reg.get_registered_settings() == [
- 'AWX_SOME_SETTING_ENABLED',
- 'AWX_SOME_READ_ONLY'
- ]
+ reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
+ reg.register('AWX_SOME_READ_ONLY', field_class=fields.BooleanField, category=_('System'), category_slug='system', read_only=True)
+ assert reg.get_registered_settings(read_only=True) == ['AWX_SOME_READ_ONLY']
+ assert reg.get_registered_settings(read_only=False) == ['AWX_SOME_SETTING_ENABLED']
+ assert reg.get_registered_settings() == ['AWX_SOME_SETTING_ENABLED', 'AWX_SOME_READ_ONLY']
def test_get_dependent_settings(reg):
+ reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
reg.register(
- 'AWX_SOME_SETTING_ENABLED',
- field_class=fields.BooleanField,
- category=_('System'),
- category_slug='system'
- )
- reg.register(
- 'AWX_SOME_DEPENDENT_SETTING',
- field_class=fields.BooleanField,
- category=_('System'),
- category_slug='system',
- depends_on=['AWX_SOME_SETTING_ENABLED']
+ 'AWX_SOME_DEPENDENT_SETTING', field_class=fields.BooleanField, category=_('System'), category_slug='system', depends_on=['AWX_SOME_SETTING_ENABLED']
)
- assert reg.get_dependent_settings('AWX_SOME_SETTING_ENABLED') == set([
- 'AWX_SOME_DEPENDENT_SETTING'
- ])
+ assert reg.get_dependent_settings('AWX_SOME_SETTING_ENABLED') == set(['AWX_SOME_DEPENDENT_SETTING'])
def test_get_registered_categories(reg):
- reg.register(
- 'AWX_SOME_SETTING_ENABLED',
- field_class=fields.BooleanField,
- category=_('System'),
- category_slug='system'
- )
- reg.register(
- 'AWX_SOME_OTHER_SETTING_ENABLED',
- field_class=fields.BooleanField,
- category=_('OtherSystem'),
- category_slug='other-system'
- )
- assert reg.get_registered_categories() == {
- 'all': _('All'),
- 'changed': _('Changed'),
- 'system': _('System'),
- 'other-system': _('OtherSystem'),
- }
+ reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
+ reg.register('AWX_SOME_OTHER_SETTING_ENABLED', field_class=fields.BooleanField, category=_('OtherSystem'), category_slug='other-system')
+ assert reg.get_registered_categories() == {'all': _('All'), 'changed': _('Changed'), 'system': _('System'), 'other-system': _('OtherSystem')}
def test_is_setting_encrypted(reg):
- reg.register(
- 'AWX_SOME_SETTING_ENABLED',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system'
- )
- reg.register(
- 'AWX_SOME_ENCRYPTED_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system',
- encrypted=True
- )
+ reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.CharField, category=_('System'), category_slug='system')
+ reg.register('AWX_SOME_ENCRYPTED_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', encrypted=True)
assert reg.is_setting_encrypted('AWX_SOME_SETTING_ENABLED') is False
assert reg.is_setting_encrypted('AWX_SOME_ENCRYPTED_SETTING') is True
def test_simple_field(reg):
- reg.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system',
- placeholder='Example Value',
- )
+ reg.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', placeholder='Example Value')
field = reg.get_setting_field('AWX_SOME_SETTING')
assert isinstance(field, fields.CharField)
@@ -196,31 +111,20 @@ def test_simple_field(reg):
def test_field_with_custom_attribute(reg):
- reg.register(
- 'AWX_SOME_SETTING_ENABLED',
- field_class=fields.BooleanField,
- category_slug='system',
- )
+ reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category_slug='system')
- field = reg.get_setting_field('AWX_SOME_SETTING_ENABLED',
- category_slug='other-system')
+ field = reg.get_setting_field('AWX_SOME_SETTING_ENABLED', category_slug='other-system')
assert field.category_slug == 'other-system'
def test_field_with_custom_mixin(reg):
class GreatMixin(object):
-
def is_great(self):
return True
- reg.register(
- 'AWX_SOME_SETTING_ENABLED',
- field_class=fields.BooleanField,
- category_slug='system',
- )
+ reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category_slug='system')
- field = reg.get_setting_field('AWX_SOME_SETTING_ENABLED',
- mixin_class=GreatMixin)
+ field = reg.get_setting_field('AWX_SOME_SETTING_ENABLED', mixin_class=GreatMixin)
assert isinstance(field, fields.BooleanField)
assert isinstance(field, GreatMixin)
assert field.is_great() is True
@@ -228,12 +132,7 @@ def test_field_with_custom_mixin(reg):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_default_value_from_settings(reg):
- reg.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system',
- )
+ reg.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
field = reg.get_setting_field('AWX_SOME_SETTING')
assert field.default == 'DEFAULT'
@@ -242,16 +141,10 @@ def test_default_value_from_settings(reg):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_default_value_from_settings_with_custom_representation(reg):
class LowercaseCharField(fields.CharField):
-
def to_representation(self, value):
return value.lower()
- reg.register(
- 'AWX_SOME_SETTING',
- field_class=LowercaseCharField,
- category=_('System'),
- category_slug='system',
- )
+ reg.register('AWX_SOME_SETTING', field_class=LowercaseCharField, category=_('System'), category_slug='system')
field = reg.get_setting_field('AWX_SOME_SETTING')
assert field.default == 'default'
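
test_get_dependent_settings above is one of the few hunks black leaves partly multi-line, because the depends_on registration still exceeds the configured line length. The dependency bookkeeping it exercises reduces to a reverse index; a hypothetical miniature (not the AWX registry):

    class DependencyRegistry:
        def __init__(self):
            self._dependent_settings = {}

        def register(self, setting, depends_on=(), **kwargs):
            # Reverse index: each dependency maps to the settings that
            # declared it via depends_on.
            for dep in depends_on:
                self._dependent_settings.setdefault(dep, set()).add(setting)

        def get_dependent_settings(self, setting):
            return self._dependent_settings.get(setting, set())

    reg = DependencyRegistry()
    reg.register('AWX_SOME_SETTING_ENABLED')
    reg.register('AWX_SOME_DEPENDENT_SETTING', depends_on=['AWX_SOME_SETTING_ENABLED'])
    assert reg.get_dependent_settings('AWX_SOME_SETTING_ENABLED') == {'AWX_SOME_DEPENDENT_SETTING'}
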
diff --git a/awx/conf/tests/unit/test_settings.py b/awx/conf/tests/unit/test_settings.py
index 7e3058e344..da97d41c6f 100644
--- a/awx/conf/tests/unit/test_settings.py
+++ b/awx/conf/tests/unit/test_settings.py
@@ -53,9 +53,7 @@ def settings(request):
defaults['DEFAULTS_SNAPSHOT'] = {}
settings.configure(**defaults)
- settings._wrapped = SettingsWrapper(settings._wrapped,
- cache,
- registry)
+ settings._wrapped = SettingsWrapper(settings._wrapped, cache, registry)
return settings
@@ -67,14 +65,7 @@ def test_unregistered_setting(settings):
def test_read_only_setting(settings):
- settings.registry.register(
- 'AWX_READ_ONLY',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system',
- default='NO-EDITS',
- read_only=True
- )
+ settings.registry.register('AWX_READ_ONLY', field_class=fields.CharField, category=_('System'), category_slug='system', default='NO-EDITS', read_only=True)
assert settings.AWX_READ_ONLY == 'NO-EDITS'
assert len(settings.registry.get_registered_settings(read_only=False)) == 0
settings = settings.registry.get_registered_settings(read_only=True)
@@ -85,13 +76,7 @@ def test_read_only_setting(settings):
@pytest.mark.parametrize('read_only', [True, False])
def test_setting_defined_in_file(settings, read_only):
kwargs = {'read_only': True} if read_only else {}
- settings.registry.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system',
- **kwargs
- )
+ settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', **kwargs)
assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert len(settings.registry.get_registered_settings(read_only=False)) == 0
settings = settings.registry.get_registered_settings(read_only=True)
@@ -100,13 +85,7 @@ def test_setting_defined_in_file(settings, read_only):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_setting_defined_in_file_with_empty_default(settings):
- settings.registry.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system',
- default='',
- )
+ settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='')
assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert len(settings.registry.get_registered_settings(read_only=False)) == 0
settings = settings.registry.get_registered_settings(read_only=True)
@@ -115,13 +94,7 @@ def test_setting_defined_in_file_with_empty_default(settings):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_setting_defined_in_file_with_specific_default(settings):
- settings.registry.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system',
- default=123
- )
+ settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default=123)
assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert len(settings.registry.get_registered_settings(read_only=False)) == 0
settings = settings.registry.get_registered_settings(read_only=True)
@@ -131,12 +104,7 @@ def test_setting_defined_in_file_with_specific_default(settings):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_read_only_defaults_are_cached(settings):
"read-only settings are stored in the cache"
- settings.registry.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system'
- )
+ settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
@@ -144,12 +112,7 @@ def test_read_only_defaults_are_cached(settings):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_cache_respects_timeout(settings):
"only preload the cache every SETTING_CACHE_TIMEOUT settings"
- settings.registry.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system'
- )
+ settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
assert settings.AWX_SOME_SETTING == 'DEFAULT'
cache_expiration = settings.cache.get('_awx_conf_preload_expires')
@@ -161,13 +124,7 @@ def test_cache_respects_timeout(settings):
def test_default_setting(settings, mocker):
"settings that specify a default are inserted into the cache"
- settings.registry.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system',
- default='DEFAULT'
- )
+ settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
@@ -177,24 +134,13 @@ def test_default_setting(settings, mocker):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_setting_is_from_setting_file(settings, mocker):
- settings.registry.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system'
- )
+ settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is True
def test_setting_is_not_from_setting_file(settings, mocker):
- settings.registry.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system',
- default='DEFAULT'
- )
+ settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
@@ -204,19 +150,9 @@ def test_setting_is_not_from_setting_file(settings, mocker):
def test_empty_setting(settings, mocker):
"settings with no default and no defined value are not valid"
- settings.registry.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system'
- )
-
- mocks = mocker.Mock(**{
- 'order_by.return_value': mocker.Mock(**{
- '__iter__': lambda self: iter([]),
- 'first.return_value': None
- }),
- })
+ settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
+
+ mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
with pytest.raises(AttributeError):
settings.AWX_SOME_SETTING
@@ -225,21 +161,10 @@ def test_empty_setting(settings, mocker):
def test_setting_from_db(settings, mocker):
"settings can be loaded from the database"
- settings.registry.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system',
- default='DEFAULT'
- )
+ settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
- mocks = mocker.Mock(**{
- 'order_by.return_value': mocker.Mock(**{
- '__iter__': lambda self: iter([setting_from_db]),
- 'first.return_value': setting_from_db
- }),
- })
+ mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
assert settings.AWX_SOME_SETTING == 'FROM_DB'
assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
@@ -248,12 +173,7 @@ def test_setting_from_db(settings, mocker):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_read_only_setting_assignment(settings):
"read-only settings cannot be overwritten"
- settings.registry.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system'
- )
+ settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
assert settings.AWX_SOME_SETTING == 'DEFAULT'
with pytest.raises(ImproperlyConfigured):
settings.AWX_SOME_SETTING = 'CHANGED'
@@ -262,41 +182,26 @@ def test_read_only_setting_assignment(settings):
def test_db_setting_create(settings, mocker):
"settings are stored in the database when set for the first time"
- settings.registry.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system'
- )
+ settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': None})
- with apply_patches([
- mocker.patch('awx.conf.models.Setting.objects.filter',
- return_value=setting_list),
- mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock())
- ]):
+ with apply_patches(
+ [
+ mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list),
+ mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock()),
+ ]
+ ):
settings.AWX_SOME_SETTING = 'NEW-VALUE'
- models.Setting.objects.create.assert_called_with(
- key='AWX_SOME_SETTING',
- user=None,
- value='NEW-VALUE'
- )
+ models.Setting.objects.create.assert_called_with(key='AWX_SOME_SETTING', user=None, value='NEW-VALUE')
def test_db_setting_update(settings, mocker):
"settings are updated in the database when their value changes"
- settings.registry.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system'
- )
+ settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
- setting_list = mocker.Mock(**{
- 'order_by.return_value.first.return_value': existing_setting
- })
+ setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list):
settings.AWX_SOME_SETTING = 'NEW-VALUE'
@@ -306,12 +211,7 @@ def test_db_setting_update(settings, mocker):
def test_db_setting_deletion(settings, mocker):
"settings are auto-deleted from the database"
- settings.registry.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system'
- )
+ settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting]):
@@ -323,12 +223,7 @@ def test_db_setting_deletion(settings, mocker):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_read_only_setting_deletion(settings):
"read-only settings cannot be deleted"
- settings.registry.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system'
- )
+ settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
assert settings.AWX_SOME_SETTING == 'DEFAULT'
with pytest.raises(ImproperlyConfigured):
del settings.AWX_SOME_SETTING
@@ -337,36 +232,22 @@ def test_read_only_setting_deletion(settings):
def test_charfield_properly_sets_none(settings, mocker):
"see: https://github.com/ansible/ansible-tower/issues/5322"
- settings.registry.register(
- 'AWX_SOME_SETTING',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system',
- allow_null=True
- )
+ settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', allow_null=True)
setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': None})
- with apply_patches([
- mocker.patch('awx.conf.models.Setting.objects.filter',
- return_value=setting_list),
- mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock())
- ]):
+ with apply_patches(
+ [
+ mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list),
+ mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock()),
+ ]
+ ):
settings.AWX_SOME_SETTING = None
- models.Setting.objects.create.assert_called_with(
- key='AWX_SOME_SETTING',
- user=None,
- value=None
- )
+ models.Setting.objects.create.assert_called_with(key='AWX_SOME_SETTING', user=None, value=None)
def test_settings_use_cache(settings, mocker):
- settings.registry.register(
- 'AWX_VAR',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system'
- )
+ settings.registry.register('AWX_VAR', field_class=fields.CharField, category=_('System'), category_slug='system')
settings.cache.set('AWX_VAR', 'foobar')
settings.cache.set('_awx_conf_preload_expires', 100)
# Will fail test if database is used
@@ -374,13 +255,7 @@ def test_settings_use_cache(settings, mocker):
def test_settings_use_an_encrypted_cache(settings, mocker):
- settings.registry.register(
- 'AWX_ENCRYPTED',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system',
- encrypted=True
- )
+ settings.registry.register('AWX_ENCRYPTED', field_class=fields.CharField, category=_('System'), category_slug='system', encrypted=True)
assert isinstance(settings.cache, EncryptedCacheProxy)
assert settings.cache.__dict__['encrypter'] == encrypt_field
assert settings.cache.__dict__['decrypter'] == decrypt_field
@@ -393,34 +268,18 @@ def test_settings_use_an_encrypted_cache(settings, mocker):
def test_sensitive_cache_data_is_encrypted(settings, mocker):
"fields marked as `encrypted` are stored in the cache with encryption"
- settings.registry.register(
- 'AWX_ENCRYPTED',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system',
- encrypted=True
- )
+ settings.registry.register('AWX_ENCRYPTED', field_class=fields.CharField, category=_('System'), category_slug='system', encrypted=True)
def rot13(obj, attribute):
assert obj.pk == 123
return codecs.encode(getattr(obj, attribute), 'rot_13')
native_cache = LocMemCache(str(uuid4()), {})
- cache = EncryptedCacheProxy(
- native_cache,
- settings.registry,
- encrypter=rot13,
- decrypter=rot13
- )
+ cache = EncryptedCacheProxy(native_cache, settings.registry, encrypter=rot13, decrypter=rot13)
# Insert the setting value into the database; the encryption process will
# use its primary key as part of the encryption key
setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
- mocks = mocker.Mock(**{
- 'order_by.return_value': mocker.Mock(**{
- '__iter__': lambda self: iter([setting_from_db]),
- 'first.return_value': setting_from_db
- }),
- })
+ mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
cache.set('AWX_ENCRYPTED', 'SECRET!')
assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
@@ -429,26 +288,14 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):
def test_readonly_sensitive_cache_data_is_encrypted(settings):
"readonly fields marked as `encrypted` are stored in the cache with encryption"
- settings.registry.register(
- 'AWX_ENCRYPTED',
- field_class=fields.CharField,
- category=_('System'),
- category_slug='system',
- read_only=True,
- encrypted=True
- )
+ settings.registry.register('AWX_ENCRYPTED', field_class=fields.CharField, category=_('System'), category_slug='system', read_only=True, encrypted=True)
def rot13(obj, attribute):
assert obj.pk is None
return codecs.encode(getattr(obj, attribute), 'rot_13')
native_cache = LocMemCache(str(uuid4()), {})
- cache = EncryptedCacheProxy(
- native_cache,
- settings.registry,
- encrypter=rot13,
- decrypter=rot13
- )
+ cache = EncryptedCacheProxy(native_cache, settings.registry, encrypter=rot13, decrypter=rot13)
cache.set('AWX_ENCRYPTED', 'SECRET!')
assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'
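
The queryset stubs that black condenses in this file rely on a real unittest.mock feature: keyword names containing dots configure nested attributes and return values in one expression. A self-contained check:

    from unittest import mock

    existing_setting = mock.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
    # One kwarg wires up the whole .order_by(...).first() chain.
    setting_list = mock.Mock(**{'order_by.return_value.first.return_value': existing_setting})

    assert setting_list.order_by('pk').first() is existing_setting
    assert setting_list.order_by('pk').first().value == 'FROM_DB'
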
diff --git a/awx/conf/urls.py b/awx/conf/urls.py
index d42956a356..61134d20b8 100644
--- a/awx/conf/urls.py
+++ b/awx/conf/urls.py
@@ -3,14 +3,10 @@
from django.conf.urls import url
-from awx.conf.views import (
- SettingCategoryList,
- SettingSingletonDetail,
- SettingLoggingTest,
-)
+from awx.conf.views import SettingCategoryList, SettingSingletonDetail, SettingLoggingTest
-urlpatterns = [
+urlpatterns = [
url(r'^$', SettingCategoryList.as_view(), name='setting_category_list'),
url(r'^(?P<category_slug>[a-z0-9-]+)/$', SettingSingletonDetail.as_view(), name='setting_singleton_detail'),
url(r'^logging/test/$', SettingLoggingTest.as_view(), name='setting_logging_test'),
diff --git a/awx/conf/utils.py b/awx/conf/utils.py
index 0b2ce9b738..9d27290799 100755
--- a/awx/conf/utils.py
+++ b/awx/conf/utils.py
@@ -7,7 +7,4 @@ __all__ = ['conf_to_dict']
def conf_to_dict(obj):
- return {
- 'category': settings_registry.get_setting_category(obj.key),
- 'name': obj.key,
- }
+ return {'category': settings_registry.get_setting_category(obj.key), 'name': obj.key}
diff --git a/awx/conf/views.py b/awx/conf/views.py
index 18f8a6d2d5..c5a6a16f0f 100644
--- a/awx/conf/views.py
+++ b/awx/conf/views.py
@@ -22,12 +22,7 @@ from rest_framework import serializers
from rest_framework import status
# Tower
-from awx.api.generics import (
- APIView,
- GenericAPIView,
- ListAPIView,
- RetrieveUpdateDestroyAPIView,
-)
+from awx.api.generics import APIView, GenericAPIView, ListAPIView, RetrieveUpdateDestroyAPIView
from awx.api.permissions import IsSuperUser
from awx.api.versioning import reverse
from awx.main.utils import camelcase_to_underscore
@@ -81,9 +76,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
if self.category_slug not in category_slugs:
raise PermissionDenied()
- registered_settings = settings_registry.get_registered_settings(
- category_slug=self.category_slug, read_only=False,
- )
+ registered_settings = settings_registry.get_registered_settings(category_slug=self.category_slug, read_only=False)
if self.category_slug == 'user':
return Setting.objects.filter(key__in=registered_settings, user=self.request.user)
else:
@@ -91,9 +84,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
def get_object(self):
settings_qs = self.get_queryset()
- registered_settings = settings_registry.get_registered_settings(
- category_slug=self.category_slug,
- )
+ registered_settings = settings_registry.get_registered_settings(category_slug=self.category_slug)
all_settings = {}
for setting in settings_qs:
all_settings[setting.key] = setting.value
@@ -117,9 +108,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
for key, value in serializer.validated_data.items():
if key == 'LICENSE' or settings_registry.is_setting_read_only(key):
continue
- if settings_registry.is_setting_encrypted(key) and \
- isinstance(value, str) and \
- value.startswith('$encrypted$'):
+ if settings_registry.is_setting_encrypted(key) and isinstance(value, str) and value.startswith('$encrypted$'):
continue
setattr(serializer.instance, key, value)
setting = settings_qs.filter(key=key).order_by('pk').first()
@@ -133,7 +122,6 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
if settings_change_list:
connection.on_commit(lambda: handle_setting_changes.delay(settings_change_list))
-
def destroy(self, request, *args, **kwargs):
instance = self.get_object()
self.perform_destroy(instance)
@@ -170,7 +158,7 @@ class SettingLoggingTest(GenericAPIView):
enabled = getattr(settings, 'LOG_AGGREGATOR_ENABLED', False)
if not enabled:
return Response({'error': 'Logging not enabled'}, status=status.HTTP_409_CONFLICT)
-
+
# Send test message to configured logger based on db settings
try:
default_logger = settings.LOG_AGGREGATOR_LOGGERS[0]
@@ -179,18 +167,15 @@ class SettingLoggingTest(GenericAPIView):
except IndexError:
default_logger = 'awx'
logging.getLogger(default_logger).error('AWX Connection Test Message')
-
+
hostname = getattr(settings, 'LOG_AGGREGATOR_HOST', None)
protocol = getattr(settings, 'LOG_AGGREGATOR_PROTOCOL', None)
try:
- subprocess.check_output(
- ['rsyslogd', '-N1', '-f', '/var/lib/awx/rsyslog/rsyslog.conf'],
- stderr=subprocess.STDOUT
- )
+ subprocess.check_output(['rsyslogd', '-N1', '-f', '/var/lib/awx/rsyslog/rsyslog.conf'], stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as exc:
return Response({'error': exc.output}, status=status.HTTP_400_BAD_REQUEST)
-
+
# Check to ensure port is open at host
if protocol in ['udp', 'tcp']:
port = getattr(settings, 'LOG_AGGREGATOR_PORT', None)
@@ -206,7 +191,7 @@ class SettingLoggingTest(GenericAPIView):
else:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
- s.settimeout(.5)
+ s.settimeout(0.5)
s.connect((hostname, int(port)))
s.shutdown(SHUT_RDWR)
s.close()
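
Beyond reformatting, the SettingLoggingTest hunks above normalize `.5` to `0.5`, which is black's numeric-literal style. The TCP branch of the port probe it touches reduces to the following hedged sketch (the UDP branch and the view's HTTP error handling are omitted; host and port are placeholders):

    import socket

    def port_is_open(hostname, port, timeout=0.5):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            s.settimeout(timeout)
            s.connect((hostname, int(port)))
            s.shutdown(socket.SHUT_RDWR)
            return True
        except OSError:
            return False
        finally:
            s.close()

    print(port_is_open('127.0.0.1', 514))  # placeholder log-aggregator host/port
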
diff --git a/awx/main/access.py b/awx/main/access.py
index cdb0c0eb53..a856a52436 100644
--- a/awx/main/access.py
+++ b/awx/main/access.py
@@ -28,21 +28,60 @@ from awx.main.utils import (
get_licenser,
)
from awx.main.models import (
- ActivityStream, AdHocCommand, AdHocCommandEvent, Credential, CredentialType,
- CredentialInputSource, CustomInventoryScript, ExecutionEnvironment, Group, Host, Instance,
- InstanceGroup, Inventory, InventorySource, InventoryUpdate, InventoryUpdateEvent, Job,
- JobEvent, JobHostSummary, JobLaunchConfig, JobTemplate, Label, Notification,
- NotificationTemplate, Organization, Project, ProjectUpdate,
- ProjectUpdateEvent, Role, Schedule, SystemJob, SystemJobEvent,
- SystemJobTemplate, Team, UnifiedJob, UnifiedJobTemplate, WorkflowJob,
- WorkflowJobNode, WorkflowJobTemplate, WorkflowJobTemplateNode,
- WorkflowApproval, WorkflowApprovalTemplate,
- ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR
+ ActivityStream,
+ AdHocCommand,
+ AdHocCommandEvent,
+ Credential,
+ CredentialType,
+ CredentialInputSource,
+ CustomInventoryScript,
+ ExecutionEnvironment,
+ Group,
+ Host,
+ Instance,
+ InstanceGroup,
+ Inventory,
+ InventorySource,
+ InventoryUpdate,
+ InventoryUpdateEvent,
+ Job,
+ JobEvent,
+ JobHostSummary,
+ JobLaunchConfig,
+ JobTemplate,
+ Label,
+ Notification,
+ NotificationTemplate,
+ Organization,
+ Project,
+ ProjectUpdate,
+ ProjectUpdateEvent,
+ Role,
+ Schedule,
+ SystemJob,
+ SystemJobEvent,
+ SystemJobTemplate,
+ Team,
+ UnifiedJob,
+ UnifiedJobTemplate,
+ WorkflowJob,
+ WorkflowJobNode,
+ WorkflowJobTemplate,
+ WorkflowJobTemplateNode,
+ WorkflowApproval,
+ WorkflowApprovalTemplate,
+ ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
+ ROLE_SINGLETON_SYSTEM_AUDITOR,
)
from awx.main.models.mixins import ResourceMixin
-__all__ = ['get_user_queryset', 'check_user_access', 'check_user_access_with_errors',
- 'user_accessible_objects', 'consumer_access',]
+__all__ = [
+ 'get_user_queryset',
+ 'check_user_access',
+ 'check_user_access_with_errors',
+ 'user_accessible_objects',
+ 'consumer_access',
+]
logger = logging.getLogger('awx.main.access')
@@ -83,9 +122,9 @@ def get_object_from_data(field, Model, data, obj=None):
def vars_are_encrypted(vars):
- '''Returns True if any of the values in the dictionary vars contains
+ """Returns True if any of the values in the dictionary vars contains
content which is encrypted by the AWX encryption algorithm
- '''
+ """
for value in vars.values():
if isinstance(value, str):
if value.startswith('$encrypted$'):
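
For context, the `$encrypted$` prefix checked here is the sentinel AWX puts on values encrypted by its own algorithm. A self-contained sketch of the helper as it reads after this hunk; the trailing `return False` falls outside the hunk and is assumed:

    def vars_are_encrypted(vars):
        # Parameter name mirrors the source and shadows the builtin vars().
        """Return True if any value carries the '$encrypted$' sentinel."""
        for value in vars.values():
            if isinstance(value, str) and value.startswith('$encrypted$'):
                return True
        return False

    assert vars_are_encrypted({'token': '$encrypted$abc123'})
    assert not vars_are_encrypted({'limit': 'all'})
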
@@ -102,85 +141,83 @@ def user_accessible_objects(user, role_name):
def get_user_queryset(user, model_class):
- '''
+ """
Return a queryset for the given model_class containing only the instances
that should be visible to the given user.
- '''
+ """
access_class = access_registry[model_class]
access_instance = access_class(user)
return access_instance.get_queryset()
def check_user_access(user, model_class, action, *args, **kwargs):
- '''
+ """
Return True if user can perform action against model_class with the
provided parameters.
- '''
+ """
access_class = access_registry[model_class]
access_instance = access_class(user)
access_method = getattr(access_instance, 'can_%s' % action)
result = access_method(*args, **kwargs)
- logger.debug('%s.%s %r returned %r', access_instance.__class__.__name__,
- getattr(access_method, '__name__', 'unknown'), args, result)
+ logger.debug('%s.%s %r returned %r', access_instance.__class__.__name__, getattr(access_method, '__name__', 'unknown'), args, result)
return result
def check_user_access_with_errors(user, model_class, action, *args, **kwargs):
- '''
+ """
Return T/F permission and summary of problems with the action.
- '''
+ """
access_class = access_registry[model_class]
access_instance = access_class(user, save_messages=True)
access_method = getattr(access_instance, 'can_%s' % action, None)
result = access_method(*args, **kwargs)
- logger.debug('%s.%s %r returned %r', access_instance.__class__.__name__,
- access_method.__name__, args, result)
+ logger.debug('%s.%s %r returned %r', access_instance.__class__.__name__, access_method.__name__, args, result)
return (result, access_instance.messages)
def get_user_capabilities(user, instance, **kwargs):
- '''
+ """
Returns a dictionary of capabilities the user has on the particular
instance. *NOTE* This is not a direct mapping of can_* methods into this
dictionary; it is intended to munge some queries in a way that is

convenient for the user interface to consume and hide or show various
actions in the interface.
- '''
+ """
access_class = access_registry[instance.__class__]
return access_class(user).get_user_capabilities(instance, **kwargs)
def check_superuser(func):
- '''
+ """
check_superuser is a decorator that provides a simple short circuit
for access checks. If the User object is a superuser, return True, otherwise
execute the logic of the can_access method.
- '''
+ """
+
def wrapper(self, *args, **kwargs):
if self.user.is_superuser:
return True
return func(self, *args, **kwargs)
+
return wrapper
def consumer_access(group_name):
- '''
+ """
consumer_access returns the proper Access class based on group_name
for a channels consumer.
- '''
- class_map = {'job_events': JobAccess,
- 'workflow_events': WorkflowJobAccess,
- 'ad_hoc_command_events': AdHocCommandAccess}
+ """
+ class_map = {'job_events': JobAccess, 'workflow_events': WorkflowJobAccess, 'ad_hoc_command_events': AdHocCommandAccess}
return class_map.get(group_name)
class BaseAccess(object):
- '''
+ """
Base class for checking user access to a given model. Subclasses should
define the model attribute, override the get_queryset method to return only
the instances the user should be able to view, and override/define can_*
methods to verify a user's permission to perform a particular action.
- '''
+ """
model = None
select_related = ()
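
The `check_superuser` decorator reformatted above is a small short-circuit pattern. A runnable sketch, assuming only that access objects expose `self.user.is_superuser`; the `functools.wraps` call is an addition for hygiene and is not in the AWX code:

    import functools

    def check_superuser(func):
        # Superusers always pass; everyone else falls through
        # to the real can_* logic.
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            if self.user.is_superuser:
                return True
            return func(self, *args, **kwargs)
        return wrapper

    class _User:
        def __init__(self, is_superuser):
            self.is_superuser = is_superuser

    class DemoAccess:
        def __init__(self, user):
            self.user = user

        @check_superuser
        def can_change(self, obj, data):
            return False  # demo: non-superusers denied

    assert DemoAccess(_User(True)).can_change(None, None) is True
    assert DemoAccess(_User(False)).can_change(None, None) is False
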
@@ -236,28 +273,25 @@ class BaseAccess(object):
return self.can_add({'reference_obj': obj})
def can_copy_related(self, obj):
- '''
+ """
can_copy_related() should only be used to check if the user has access to related
many-to-many credentials when copying the object. It does not check if the user
has permission for any other related objects. Therefore, when checking if the user
can copy an object, it should always be used in conjunction with can_add()
- '''
+ """
return True
- def can_attach(self, obj, sub_obj, relationship, data,
- skip_sub_obj_read_check=False):
+ def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
if skip_sub_obj_read_check:
return self.can_change(obj, None)
else:
- return bool(self.can_change(obj, None) and
- self.user.can_access(type(sub_obj), 'read', sub_obj))
+ return bool(self.can_change(obj, None) and self.user.can_access(type(sub_obj), 'read', sub_obj))
def can_unattach(self, obj, sub_obj, relationship, data=None):
return self.can_change(obj, data)
- def check_related(self, field, Model, data, role_field='admin_role',
- obj=None, mandatory=False):
- '''
+ def check_related(self, field, Model, data, role_field='admin_role', obj=None, mandatory=False):
+ """
Check permission for related field, in scenarios:
- creating a new resource, user must have permission if
resource is specified in `data`
@@ -266,7 +300,7 @@ class BaseAccess(object):
If `mandatory` is set, new resources require the field and
the existing field will always be checked
- '''
+ """
new = None
changed = True
if data and 'reference_obj' in data:
@@ -320,10 +354,8 @@ class BaseAccess(object):
report_violation = lambda message: None
else:
report_violation = lambda message: logger.warning(message)
- if (
- validation_info.get('trial', False) is True or
- validation_info['instance_count'] == 10 # basic 10 license
- ):
+ if validation_info.get('trial', False) is True or validation_info['instance_count'] == 10: # basic 10 license
+
def report_violation(message):
raise PermissionDenied(message)
@@ -351,7 +383,7 @@ class BaseAccess(object):
inventory = get_object_from_data('inventory', Inventory, data)
if inventory is None: # In this case a missing inventory error is launched
- return # further down the line, so just ignore it.
+ return # further down the line, so just ignore it.
org = inventory.organization
if org is None or org.max_hosts == 0:
@@ -360,18 +392,22 @@ class BaseAccess(object):
active_count = Host.objects.org_active_count(org.id)
if active_count > org.max_hosts:
raise PermissionDenied(
- _("You have already reached the maximum number of %s hosts"
- " allowed for your organization. Contact your System Administrator"
- " for assistance." % org.max_hosts)
+ _(
+ "You have already reached the maximum number of %s hosts"
+ " allowed for your organization. Contact your System Administrator"
+ " for assistance." % org.max_hosts
+ )
)
if add_host_name:
host_exists = Host.objects.filter(inventory__organization=org.id, name=add_host_name).exists()
if not host_exists and active_count == org.max_hosts:
raise PermissionDenied(
- _("You have already reached the maximum number of %s hosts"
- " allowed for your organization. Contact your System Administrator"
- " for assistance." % org.max_hosts)
+ _(
+ "You have already reached the maximum number of %s hosts"
+ " allowed for your organization. Contact your System Administrator"
+ " for assistance." % org.max_hosts
+ )
)
def get_user_capabilities(self, obj, method_list=[], parent_obj=None, capabilities_cache={}):
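
One thing black cannot fix: in both PermissionDenied messages above, the `%` interpolation happens inside `_()`, so the msgid handed to the translation catalog already contains the host count and will never match a catalog entry. A sketch of the distinction, using the stdlib gettext as a stand-in for Django's:

    import gettext

    _ = gettext.gettext  # identity fallback when no catalog is installed
    max_hosts = 10       # placeholder value

    # Interpolating inside _() defeats catalog lookup:
    wrong = _("You have reached the maximum of %s hosts." % max_hosts)

    # Usual idiom: translate the fixed msgid, then interpolate:
    right = _("You have reached the maximum of %s hosts.") % max_hosts

    # Identical here only because no catalog is loaded:
    assert wrong == right
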
@@ -443,14 +479,14 @@ class BaseAccess(object):
def get_method_capability(self, method, obj, parent_obj):
try:
- if method in ['change']: # 3 args
+ if method in ['change']: # 3 args
return self.can_change(obj, {})
elif method in ['delete', 'run_ad_hoc_commands', 'copy']:
access_method = getattr(self, "can_%s" % method)
return access_method(obj)
elif method in ['start']:
return self.can_start(obj, validate_license=False)
- elif method in ['attach', 'unattach']: # parent/sub-object call
+ elif method in ['attach', 'unattach']: # parent/sub-object call
access_method = getattr(self, "can_%s" % method)
if type(parent_obj) == Team:
relationship = 'parents'
@@ -464,14 +500,15 @@ class BaseAccess(object):
class NotificationAttachMixin(BaseAccess):
- '''For models that can have notifications attached
+ """For models that can have notifications attached
I can attach a notification template when
- I have notification_admin_role to organization of the NT
- I can read the object I am attaching it to
I can unattach when those same critiera are met
- '''
+ """
+
notification_attach_roles = None
def _can_attach(self, notification_template, resource_obj):
@@ -486,17 +523,14 @@ class NotificationAttachMixin(BaseAccess):
if isinstance(sub_obj, NotificationTemplate):
# reverse obj and sub_obj
return self._can_attach(notification_template=sub_obj, resource_obj=obj)
- return super(NotificationAttachMixin, self).can_attach(
- obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
+ return super(NotificationAttachMixin, self).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
@check_superuser
def can_unattach(self, obj, sub_obj, relationship, data=None):
if isinstance(sub_obj, NotificationTemplate):
# due to this special case, we use symmetrical logic with attach permission
return self._can_attach(notification_template=sub_obj, resource_obj=obj)
- return super(NotificationAttachMixin, self).can_unattach(
- obj, sub_obj, relationship, data=data
- )
+ return super(NotificationAttachMixin, self).can_unattach(obj, sub_obj, relationship, data=data)
class InstanceAccess(BaseAccess):
@@ -505,24 +539,17 @@ class InstanceAccess(BaseAccess):
prefetch_related = ('rampart_groups',)
def filtered_queryset(self):
- return Instance.objects.filter(
- rampart_groups__in=self.user.get_queryset(InstanceGroup)).distinct()
-
+ return Instance.objects.filter(rampart_groups__in=self.user.get_queryset(InstanceGroup)).distinct()
- def can_attach(self, obj, sub_obj, relationship, data,
- skip_sub_obj_read_check=False):
+ def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
if relationship == 'rampart_groups' and isinstance(sub_obj, InstanceGroup):
return self.user.is_superuser
- return super(InstanceAccess, self).can_attach(
- obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check
- )
+ return super(InstanceAccess, self).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
def can_unattach(self, obj, sub_obj, relationship, data=None):
if relationship == 'rampart_groups' and isinstance(sub_obj, InstanceGroup):
return self.user.is_superuser
- return super(InstanceAccess, self).can_unattach(
- obj, sub_obj, relationship, relationship, data=data
- )
+ return super(InstanceAccess, self).can_unattach(obj, sub_obj, relationship, relationship, data=data)
def can_add(self, data):
return False
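
Worth flagging, though unchanged in substance by this commit: the `can_unattach` super() call above passes `relationship` twice, once positionally (landing in the `data` slot of a signature like BaseAccess's) and once as `data=data`. A minimal reproduction with illustrative class names:

    class Base:
        def can_unattach(self, obj, sub_obj, relationship, data=None):
            return True

    class Child(Base):
        def can_unattach(self, obj, sub_obj, relationship, data=None):
            # duplicated positional argument, as in the hunk above
            return super().can_unattach(obj, sub_obj, relationship, relationship, data=data)

    try:
        Child().can_unattach(None, None, 'rampart_groups', data={})
    except TypeError as exc:
        print(exc)  # got multiple values for argument 'data'
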
@@ -540,8 +567,7 @@ class InstanceGroupAccess(BaseAccess):
prefetch_related = ('instances',)
def filtered_queryset(self):
- return InstanceGroup.objects.filter(
- organization__in=Organization.accessible_pk_qs(self.user, 'admin_role')).distinct()
+ return InstanceGroup.objects.filter(organization__in=Organization.accessible_pk_qs(self.user, 'admin_role')).distinct()
def can_add(self, data):
return self.user.is_superuser
@@ -551,7 +577,7 @@ class InstanceGroupAccess(BaseAccess):
class UserAccess(BaseAccess):
- '''
+ """
I can see user records when:
- I'm a superuser
- I'm in a role with them (such as in an organization or team)
@@ -561,34 +587,30 @@ class UserAccess(BaseAccess):
I can change all fields for a user (admin access) or delete when:
- I'm a superuser.
- I'm their org admin.
- '''
+ """
model = User
prefetch_related = ('profile',)
def filtered_queryset(self):
- if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and \
- (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
+ if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
qs = User.objects.all()
else:
qs = (
- User.objects.filter(
- pk__in=Organization.accessible_objects(self.user, 'read_role').values('member_role__members')
- ) |
- User.objects.filter(
- pk=self.user.id
- ) |
- User.objects.filter(
- pk__in=Role.objects.filter(singleton_name__in = [ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR]).values('members')
+ User.objects.filter(pk__in=Organization.accessible_objects(self.user, 'read_role').values('member_role__members'))
+ | User.objects.filter(pk=self.user.id)
+ | User.objects.filter(
+ pk__in=Role.objects.filter(singleton_name__in=[ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR]).values('members')
)
).distinct()
return qs
-
def can_add(self, data):
if data is not None and ('is_superuser' in data or 'is_system_auditor' in data):
- if (to_python_boolean(data.get('is_superuser', 'false'), allow_none=True) or
- to_python_boolean(data.get('is_system_auditor', 'false'), allow_none=True)) and not self.user.is_superuser:
+ if (
+ to_python_boolean(data.get('is_superuser', 'false'), allow_none=True)
+ or to_python_boolean(data.get('is_system_auditor', 'false'), allow_none=True)
+ ) and not self.user.is_superuser:
return False
if self.user.is_superuser:
return True
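
The rewritten queryset above leans on Django's `|` operator, which unions querysets lazily and defers SQL until evaluation; black simply moves the operator to the head of each wrapped line. The same composition style applies to the `Q` objects used elsewhere in this file. A tiny sketch, assuming only that Django is importable; the field names are placeholders:

    from django.db.models import Q

    # Q nodes compose with | (OR) and & (AND) before any query runs.
    org_filter = (
        Q(admin_role__members=1)       # placeholder user pk
        | Q(auditor_role__members=1)
    )
    print(org_filter)  # (OR: ('admin_role__members', 1), ('auditor_role__members', 1))
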
@@ -598,8 +620,7 @@ class UserAccess(BaseAccess):
def can_change(self, obj, data):
if data is not None and ('is_superuser' in data or 'is_system_auditor' in data):
- if to_python_boolean(data.get('is_superuser', 'false'), allow_none=True) and \
- not self.user.is_superuser:
+ if to_python_boolean(data.get('is_superuser', 'false'), allow_none=True) and not self.user.is_superuser:
return False
if to_python_boolean(data.get('is_system_auditor', 'false'), allow_none=True) and not (self.user.is_superuser or self.user == obj):
return False
@@ -612,19 +633,17 @@ class UserAccess(BaseAccess):
@staticmethod
def user_organizations(u):
- '''
+ """
Returns all organizations that count `u` as a member
- '''
+ """
return Organization.accessible_objects(u, 'member_role')
def is_all_org_admin(self, u):
- '''
+ """
returns True if every organization that `u` is a member of is
also an organization that `self.user` admins
- '''
- return not self.user_organizations(u).exclude(
- pk__in=Organization.accessible_pk_qs(self.user, 'admin_role')
- ).exists()
+ """
+ return not self.user_organizations(u).exclude(pk__in=Organization.accessible_pk_qs(self.user, 'admin_role')).exists()
def user_is_orphaned(self, u):
return not self.user_organizations(u).exists()
@@ -640,9 +659,7 @@ class UserAccess(BaseAccess):
if not allow_orphans:
# in these cases only superusers can modify orphan users
return False
- return not obj.roles.all().exclude(
- ancestors__in=self.user.roles.all()
- ).exists()
+ return not obj.roles.all().exclude(ancestors__in=self.user.roles.all()).exists()
else:
return self.is_all_org_admin(obj)
@@ -678,7 +695,7 @@ class UserAccess(BaseAccess):
class OAuth2ApplicationAccess(BaseAccess):
- '''
+ """
I can read, change or delete OAuth 2 applications when:
- I am a superuser.
- I am the admin of the organization of the user of the application.
@@ -686,7 +703,7 @@ class OAuth2ApplicationAccess(BaseAccess):
I can create OAuth 2 applications when:
- I am a superuser.
- I am the admin of the organization of the application.
- '''
+ """
model = OAuth2Application
select_related = ('user',)
@@ -697,8 +714,7 @@ class OAuth2ApplicationAccess(BaseAccess):
return self.model.objects.filter(organization__in=org_access_qs)
def can_change(self, obj, data):
- return self.user.is_superuser or self.check_related('organization', Organization, data, obj=obj,
- role_field='admin_role', mandatory=True)
+ return self.user.is_superuser or self.check_related('organization', Organization, data, obj=obj, role_field='admin_role', mandatory=True)
def can_delete(self, obj):
return self.user.is_superuser or obj.organization in self.user.admin_of_organizations
@@ -712,7 +728,7 @@ class OAuth2ApplicationAccess(BaseAccess):
class OAuth2TokenAccess(BaseAccess):
- '''
+ """
I can read, change or delete an app token when:
- I am a superuser.
- I am the admin of the organization of the application of the token.
@@ -724,7 +740,7 @@ class OAuth2TokenAccess(BaseAccess):
- I am the superuser
I can create an OAuth2 Personal Access Token when:
- I am a user. But I can only create a PAT for myself.
- '''
+ """
model = OAuth2AccessToken
@@ -732,9 +748,8 @@ class OAuth2TokenAccess(BaseAccess):
prefetch_related = ('refresh_token',)
def filtered_queryset(self):
- org_access_qs = Organization.objects.filter(
- Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
- return self.model.objects.filter(application__organization__in=org_access_qs) | self.model.objects.filter(user__id=self.user.pk)
+ org_access_qs = Organization.objects.filter(Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
+ return self.model.objects.filter(application__organization__in=org_access_qs) | self.model.objects.filter(user__id=self.user.pk)
def can_delete(self, obj):
if (self.user.is_superuser) | (obj.user == self.user):
@@ -756,7 +771,7 @@ class OAuth2TokenAccess(BaseAccess):
class OrganizationAccess(NotificationAttachMixin, BaseAccess):
- '''
+ """
I can see organizations when:
- I am a superuser.
- I am an admin or user in that organization.
@@ -765,10 +780,13 @@ class OrganizationAccess(NotificationAttachMixin, BaseAccess):
- I'm an admin of that organization.
I can associate/disassociate instance groups when:
- I am a superuser.
- '''
+ """
model = Organization
- prefetch_related = ('created_by', 'modified_by',)
+ prefetch_related = (
+ 'created_by',
+ 'modified_by',
+ )
# organization admin_role is not a parent of organization auditor_role
notification_attach_roles = ['admin_role', 'auditor_role']
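
The exploded `prefetch_related` tuple above shows black's magic trailing comma at work (as documented for the releases around this commit): a collection that already ends in a comma is kept one element per line, while one without a trailing comma is collapsed whenever it fits the line length. A before/after sketch:

    # Trailing comma present: black keeps the exploded form.
    prefetch_related = (
        'created_by',
        'modified_by',
    )

    # No trailing comma: the same tuple collapses onto one line.
    select_related = ('created_by', 'modified_by')
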
@@ -814,7 +832,7 @@ class OrganizationAccess(NotificationAttachMixin, BaseAccess):
class InventoryAccess(BaseAccess):
- '''
+ """
I can see inventory when:
- I'm a superuser.
- I'm an org admin of the inventory's org.
@@ -832,7 +850,7 @@ class InventoryAccess(BaseAccess):
- I'm a superuser.
- I'm an org admin of the inventory's org.
- I have read/write/admin permission on an inventory with the run_ad_hoc_commands flag set.
- '''
+ """
model = Inventory
prefetch_related = ('created_by', 'modified_by', 'organization')
@@ -849,13 +867,13 @@ class InventoryAccess(BaseAccess):
# If no data is specified, just checking for generic add permission?
if not data:
return Organization.accessible_objects(self.user, 'inventory_admin_role').exists()
- return (self.check_related('organization', Organization, data, role_field='inventory_admin_role') and
- self.check_related('insights_credential', Credential, data, role_field='use_role'))
+ return self.check_related('organization', Organization, data, role_field='inventory_admin_role') and self.check_related(
+ 'insights_credential', Credential, data, role_field='use_role'
+ )
@check_superuser
def can_change(self, obj, data):
- return (self.can_admin(obj, data) and
- self.check_related('insights_credential', Credential, data, obj=obj, role_field='use_role'))
+ return self.can_admin(obj, data) and self.check_related('insights_credential', Credential, data, obj=obj, role_field='use_role')
@check_superuser
def can_admin(self, obj, data):
@@ -867,9 +885,8 @@ class InventoryAccess(BaseAccess):
# Verify that the user has access to the new organization if moving an
# inventory to a new organization. Otherwise, just check for admin permission.
return (
- self.check_related('organization', Organization, data, obj=obj, role_field='inventory_admin_role',
- mandatory=org_admin_mandatory) and
- self.user in obj.admin_role
+ self.check_related('organization', Organization, data, obj=obj, role_field='inventory_admin_role', mandatory=org_admin_mandatory)
+ and self.user in obj.admin_role
)
@check_superuser
@@ -896,14 +913,19 @@ class InventoryAccess(BaseAccess):
class HostAccess(BaseAccess):
- '''
+ """
I can see hosts whenever I can see their inventory.
I can change or delete hosts whenever I can change their inventory.
- '''
+ """
model = Host
- select_related = ('created_by', 'modified_by', 'inventory',
- 'last_job__job_template', 'last_job_host_summary__job',)
+ select_related = (
+ 'created_by',
+ 'modified_by',
+ 'inventory',
+ 'last_job__job_template',
+ 'last_job_host_summary__job',
+ )
prefetch_related = ('groups', 'inventory_sources')
def filtered_queryset(self):
@@ -936,17 +958,14 @@ class HostAccess(BaseAccess):
self.check_license(add_host_name=data['name'])
# Check the per-org limit
- self.check_org_host_limit({'inventory': obj.inventory},
- add_host_name=data['name'])
+ self.check_org_host_limit({'inventory': obj.inventory}, add_host_name=data['name'])
# Checks for admin or change permission on inventory, controls whether
# the user can edit variable data.
return obj and self.user in obj.inventory.admin_role
- def can_attach(self, obj, sub_obj, relationship, data,
- skip_sub_obj_read_check=False):
- if not super(HostAccess, self).can_attach(obj, sub_obj, relationship,
- data, skip_sub_obj_read_check):
+ def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
+ if not super(HostAccess, self).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check):
return False
# Prevent assignments between different inventories.
if obj.inventory != sub_obj.inventory:
@@ -958,14 +977,21 @@ class HostAccess(BaseAccess):
class GroupAccess(BaseAccess):
- '''
+ """
I can see groups whenever I can see their inventory.
I can change or delete groups whenever I can change their inventory.
- '''
+ """
model = Group
- select_related = ('created_by', 'modified_by', 'inventory',)
- prefetch_related = ('parents', 'children',)
+ select_related = (
+ 'created_by',
+ 'modified_by',
+ 'inventory',
+ )
+ prefetch_related = (
+ 'parents',
+ 'children',
+ )
def filtered_queryset(self):
return Group.objects.filter(inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role'))
@@ -985,10 +1011,8 @@ class GroupAccess(BaseAccess):
# the user can attach subgroups or edit variable data.
return obj and self.user in obj.inventory.admin_role
- def can_attach(self, obj, sub_obj, relationship, data,
- skip_sub_obj_read_check=False):
- if not super(GroupAccess, self).can_attach(obj, sub_obj, relationship,
- data, skip_sub_obj_read_check):
+ def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
+ if not super(GroupAccess, self).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check):
return False
# Prevent assignments between different inventories.
if obj.inventory != sub_obj.inventory:
@@ -1000,15 +1024,14 @@ class GroupAccess(BaseAccess):
class InventorySourceAccess(NotificationAttachMixin, BaseAccess):
- '''
+ """
I can see inventory sources whenever I can see their inventory.
I can change inventory sources whenever I can change their inventory.
- '''
+ """
model = InventorySource
select_related = ('created_by', 'modified_by', 'inventory')
- prefetch_related = ('credentials__credential_type', 'last_job',
- 'source_script', 'source_project')
+ prefetch_related = ('credentials__credential_type', 'last_job', 'source_script', 'source_project')
def filtered_queryset(self):
return self.model.objects.filter(inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role'))
@@ -1023,8 +1046,7 @@ class InventorySourceAccess(NotificationAttachMixin, BaseAccess):
return self.check_related('inventory', Inventory, data)
def can_delete(self, obj):
- if not self.user.is_superuser and \
- not (obj and obj.inventory and self.user.can_access(Inventory, 'admin', obj.inventory, None)):
+ if not self.user.is_superuser and not (obj and obj.inventory and self.user.can_access(Inventory, 'admin', obj.inventory, None)):
return False
return True
@@ -1032,9 +1054,8 @@ class InventorySourceAccess(NotificationAttachMixin, BaseAccess):
def can_change(self, obj, data):
# Checks for admin change permission on inventory.
if obj and obj.inventory:
- return (
- self.user.can_access(Inventory, 'change', obj.inventory, None) and
- self.check_related('source_project', Project, data, obj=obj, role_field='use_role')
+ return self.user.can_access(Inventory, 'change', obj.inventory, None) and self.check_related(
+ 'source_project', Project, data, obj=obj, role_field='use_role'
)
# Can't change inventory sources attached to only the inventory, since
# these are created automatically from the management command.
@@ -1049,11 +1070,8 @@ class InventorySourceAccess(NotificationAttachMixin, BaseAccess):
@check_superuser
def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
if relationship == 'credentials' and isinstance(sub_obj, Credential):
- return (
- obj and obj.inventory and self.user in obj.inventory.admin_role and
- self.user in sub_obj.use_role)
- return super(InventorySourceAccess, self).can_attach(
- obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
+ return obj and obj.inventory and self.user in obj.inventory.admin_role and self.user in sub_obj.use_role
+ return super(InventorySourceAccess, self).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
@check_superuser
def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):
@@ -1063,14 +1081,18 @@ class InventorySourceAccess(NotificationAttachMixin, BaseAccess):
class InventoryUpdateAccess(BaseAccess):
- '''
+ """
I can see inventory updates when I can see the inventory source.
I can change inventory updates whenever I can change their source.
I can delete when I can change/delete the inventory source.
- '''
+ """
model = InventoryUpdate
- select_related = ('created_by', 'modified_by', 'inventory_source',)
+ select_related = (
+ 'created_by',
+ 'modified_by',
+ 'inventory_source',
+ )
prefetch_related = ('unified_job_template', 'instance_group', 'credentials__credential_type', 'inventory', 'source_script')
def filtered_queryset(self):
@@ -1093,17 +1115,20 @@ class InventoryUpdateAccess(BaseAccess):
class CredentialTypeAccess(BaseAccess):
- '''
+ """
I can see credential types when:
- I'm authenticated
I can create when:
- I'm a superuser.
I can change when:
- I'm a superuser and the type is not "managed by Tower"
- '''
+ """
model = CredentialType
- prefetch_related = ('created_by', 'modified_by',)
+ prefetch_related = (
+ 'created_by',
+ 'modified_by',
+ )
def can_use(self, obj):
return True
@@ -1113,7 +1138,7 @@ class CredentialTypeAccess(BaseAccess):
class CredentialAccess(BaseAccess):
- '''
+ """
I can see credentials when:
- I'm a superuser.
- It's a user credential and it's my credential.
@@ -1130,13 +1155,14 @@ class CredentialAccess(BaseAccess):
- It's my user credential.
- It's a user credential for a user in an org I admin.
- It's a team credential for a team in an org I admin.
- '''
+ """
model = Credential
- select_related = ('created_by', 'modified_by',)
- prefetch_related = ('admin_role', 'use_role', 'read_role',
- 'admin_role__parents', 'admin_role__members',
- 'credential_type', 'organization')
+ select_related = (
+ 'created_by',
+ 'modified_by',
+ )
+ prefetch_related = ('admin_role', 'use_role', 'read_role', 'admin_role__parents', 'admin_role__members', 'credential_type', 'organization')
def filtered_queryset(self):
return self.model.accessible_objects(self.user, 'read_role')
@@ -1155,8 +1181,7 @@ class CredentialAccess(BaseAccess):
return False
if data and data.get('organization', None):
organization_obj = get_object_from_data('organization', Organization, data)
- if not any([check_user_access(self.user, Organization, 'change', organization_obj, None),
- self.user in organization_obj.credential_admin_role]):
+ if not any([check_user_access(self.user, Organization, 'change', organization_obj, None), self.user in organization_obj.credential_admin_role]):
return False
if not any(data.get(key, None) for key in ('user', 'team', 'organization')):
return False # you have to provide 1 owner field
@@ -1175,7 +1200,7 @@ class CredentialAccess(BaseAccess):
def can_delete(self, obj):
# Unassociated credentials may be marked deleted by anyone, though we
# shouldn't ever end up with those.
- #if obj.user is None and obj.team is None:
+ # if obj.user is None and obj.team is None:
# return True
return self.can_change(obj, None)
@@ -1188,7 +1213,7 @@ class CredentialAccess(BaseAccess):
class CredentialInputSourceAccess(BaseAccess):
- '''
+ """
I can see a CredentialInputSource when:
- I can see the associated target_credential
I can create/change a CredentialInputSource when:
@@ -1196,20 +1221,18 @@ class CredentialInputSourceAccess(BaseAccess):
- I have use access to the associated source credential
I can delete a CredentialInputSource when:
- I'm an admin of the associated target_credential
- '''
+ """
model = CredentialInputSource
select_related = ('target_credential', 'source_credential')
def filtered_queryset(self):
- return CredentialInputSource.objects.filter(
- target_credential__in=Credential.accessible_pk_qs(self.user, 'read_role'))
+ return CredentialInputSource.objects.filter(target_credential__in=Credential.accessible_pk_qs(self.user, 'read_role'))
@check_superuser
def can_add(self, data):
- return (
- self.check_related('target_credential', Credential, data, role_field='admin_role') and
- self.check_related('source_credential', Credential, data, role_field='use_role')
+ return self.check_related('target_credential', Credential, data, role_field='admin_role') and self.check_related(
+ 'source_credential', Credential, data, role_field='use_role'
)
@check_superuser
@@ -1217,10 +1240,7 @@ class CredentialInputSourceAccess(BaseAccess):
if self.can_add(data) is False:
return False
- return (
- self.user in obj.target_credential.admin_role and
- self.user in obj.source_credential.use_role
- )
+ return self.user in obj.target_credential.admin_role and self.user in obj.source_credential.use_role
@check_superuser
def can_delete(self, obj):
@@ -1228,7 +1248,7 @@ class CredentialInputSourceAccess(BaseAccess):
class TeamAccess(BaseAccess):
- '''
+ """
I can see a team when:
- I'm a superuser.
- I'm an admin of the team
@@ -1237,18 +1257,20 @@ class TeamAccess(BaseAccess):
I can create/change a team when:
- I'm a superuser.
- I'm an admin for the team
- '''
+ """
model = Team
- select_related = ('created_by', 'modified_by', 'organization',)
+ select_related = (
+ 'created_by',
+ 'modified_by',
+ 'organization',
+ )
def filtered_queryset(self):
- if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and \
- (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
+ if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
return self.model.objects.all()
return self.model.objects.filter(
- Q(organization__in=Organization.accessible_pk_qs(self.user, 'member_role')) |
- Q(pk__in=self.model.accessible_pk_qs(self.user, 'read_role'))
+ Q(organization__in=Organization.accessible_pk_qs(self.user, 'member_role')) | Q(pk__in=self.model.accessible_pk_qs(self.user, 'read_role'))
)
@check_superuser
@@ -1283,8 +1305,7 @@ class TeamAccess(BaseAccess):
if isinstance(sub_obj.content_object, ResourceMixin):
role_access = RoleAccess(self.user)
- return role_access.can_attach(sub_obj, obj, 'member_role.parents',
- *args, **kwargs)
+ return role_access.can_attach(sub_obj, obj, 'member_role.parents', *args, **kwargs)
if self.user.is_superuser:
return True
@@ -1293,24 +1314,21 @@ class TeamAccess(BaseAccess):
rel_role = getattr(obj, relationship.split('.')[0])
return RoleAccess(self.user).can_attach(rel_role, sub_obj, 'members', *args, **kwargs)
- return super(TeamAccess, self).can_attach(obj, sub_obj, relationship,
- *args, **kwargs)
+ return super(TeamAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)
def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):
# MANAGE_ORGANIZATION_AUTH setting checked in RoleAccess
if isinstance(sub_obj, Role):
if isinstance(sub_obj.content_object, ResourceMixin):
role_access = RoleAccess(self.user)
- return role_access.can_unattach(sub_obj, obj, 'member_role.parents',
- *args, **kwargs)
+ return role_access.can_unattach(sub_obj, obj, 'member_role.parents', *args, **kwargs)
# If the request is updating the membership, check the membership role permissions instead
if relationship in ('member_role.members', 'admin_role.members'):
rel_role = getattr(obj, relationship.split('.')[0])
return RoleAccess(self.user).can_unattach(rel_role, sub_obj, 'members', *args, **kwargs)
- return super(TeamAccess, self).can_unattach(obj, sub_obj, relationship,
- *args, **kwargs)
+ return super(TeamAccess, self).can_unattach(obj, sub_obj, relationship, *args, **kwargs)
class ExecutionEnvironmentAccess(BaseAccess):
@@ -1330,16 +1348,14 @@ class ExecutionEnvironmentAccess(BaseAccess):
def filtered_queryset(self):
return ExecutionEnvironment.objects.filter(
- Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role')) |
- Q(organization__isnull=True)
+ Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role')) | Q(organization__isnull=True)
).distinct()
@check_superuser
def can_add(self, data):
if not data: # So the browseable API will work
return Organization.accessible_objects(self.user, 'execution_environment_admin_role').exists()
- return self.check_related('organization', Organization, data, mandatory=True,
- role_field='execution_environment_admin_role')
+ return self.check_related('organization', Organization, data, mandatory=True, role_field='execution_environment_admin_role')
def can_change(self, obj, data):
if obj.managed_by_tower:
@@ -1354,15 +1370,14 @@ class ExecutionEnvironmentAccess(BaseAccess):
new_org = get_object_from_data('organization', Organization, data, obj=obj)
if not new_org or self.user not in new_org.execution_environment_admin_role:
return False
- return self.check_related('organization', Organization, data, obj=obj, mandatory=True,
- role_field='execution_environment_admin_role')
+ return self.check_related('organization', Organization, data, obj=obj, mandatory=True, role_field='execution_environment_admin_role')
def can_delete(self, obj):
return self.can_change(obj, None)
class ProjectAccess(NotificationAttachMixin, BaseAccess):
- '''
+ """
I can see projects when:
- I am a superuser.
- I am an admin in an organization associated with the project.
@@ -1376,7 +1391,7 @@ class ProjectAccess(NotificationAttachMixin, BaseAccess):
- I am a superuser.
- I am an admin in an organization associated with the project.
- I created the project but it isn't associated with an organization
- '''
+ """
model = Project
select_related = ('credential',)
@@ -1396,9 +1411,8 @@ class ProjectAccess(NotificationAttachMixin, BaseAccess):
if not self.user.can_access(ExecutionEnvironment, 'read', ee):
return False
- return (
- self.check_related('organization', Organization, data, role_field='project_admin_role', mandatory=True) and
- self.check_related('credential', Credential, data, role_field='use_role')
+ return self.check_related('organization', Organization, data, role_field='project_admin_role', mandatory=True) and self.check_related(
+ 'credential', Credential, data, role_field='use_role'
)
@check_superuser
@@ -1409,9 +1423,9 @@ class ProjectAccess(NotificationAttachMixin, BaseAccess):
return False
return (
- self.check_related('organization', Organization, data, obj=obj, role_field='project_admin_role') and
- self.user in obj.admin_role and
- self.check_related('credential', Credential, data, obj=obj, role_field='use_role')
+ self.check_related('organization', Organization, data, obj=obj, role_field='project_admin_role')
+ and self.user in obj.admin_role
+ and self.check_related('credential', Credential, data, obj=obj, role_field='use_role')
)
@check_superuser
@@ -1423,20 +1437,25 @@ class ProjectAccess(NotificationAttachMixin, BaseAccess):
class ProjectUpdateAccess(BaseAccess):
- '''
+ """
I can see project updates when I can see the project.
I can change when I can change the project.
I can delete when I can change/delete the project.
- '''
+ """
model = ProjectUpdate
- select_related = ('created_by', 'modified_by', 'project',)
- prefetch_related = ('unified_job_template', 'instance_group',)
+ select_related = (
+ 'created_by',
+ 'modified_by',
+ 'project',
+ )
+ prefetch_related = (
+ 'unified_job_template',
+ 'instance_group',
+ )
def filtered_queryset(self):
- return self.model.objects.filter(
- project__in=Project.accessible_pk_qs(self.user, 'read_role')
- )
+ return self.model.objects.filter(project__in=Project.accessible_pk_qs(self.user, 'read_role'))
@check_superuser
def can_cancel(self, obj):
@@ -1460,14 +1479,20 @@ class ProjectUpdateAccess(BaseAccess):
class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
- '''
+ """
I can see job templates when:
- I have read role for the job template.
- '''
+ """
model = JobTemplate
- select_related = ('created_by', 'modified_by', 'inventory', 'project', 'organization',
- 'next_schedule',)
+ select_related = (
+ 'created_by',
+ 'modified_by',
+ 'inventory',
+ 'project',
+ 'organization',
+ 'next_schedule',
+ )
prefetch_related = (
'instance_groups',
'credentials__credential_type',
@@ -1479,7 +1504,7 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
return self.model.accessible_objects(self.user, 'read_role')
def can_add(self, data):
- '''
+ """
a user can create a job template if
- they are a superuser
- an org admin of any org that the project is a member of
@@ -1488,7 +1513,7 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
based permissions tying the project to the inventory source for the
given action as well as the 'create' deploy permission.
Users who are able to create deploy jobs can also run normal and check (dry run) jobs.
- '''
+ """
if not data: # So the browseable API will work
return Project.accessible_objects(self.user, 'use_role').exists()
@@ -1531,10 +1556,10 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
@check_superuser
def can_copy_related(self, obj):
- '''
+ """
Check if we have access to all the credentials related to Job Templates.
Does not verify the user's permission for any other related fields (projects, inventories, etc).
- '''
+ """
# obj.credentials.all() is accessible ONLY when object is saved (has valid id)
credential_manager = getattr(obj, 'credentials', None) if getattr(obj, 'id', False) else Credential.objects.none()
@@ -1582,28 +1607,47 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
return True
def changes_are_non_sensitive(self, obj, data):
- '''
+ """
Return true if the changes being made are considered nonsensitive, and
thus can be made by a job template administrator who may not have access
to any inventory, project, or credentials associated with the template.
- '''
+ """
allowed_fields = [
- 'name', 'description', 'forks', 'limit', 'verbosity', 'extra_vars',
- 'job_tags', 'force_handlers', 'skip_tags', 'ask_variables_on_launch',
- 'ask_tags_on_launch', 'ask_job_type_on_launch', 'ask_skip_tags_on_launch',
- 'ask_inventory_on_launch', 'ask_credential_on_launch', 'survey_enabled',
- 'custom_virtualenv', 'diff_mode', 'timeout', 'job_slice_count',
-
+ 'name',
+ 'description',
+ 'forks',
+ 'limit',
+ 'verbosity',
+ 'extra_vars',
+ 'job_tags',
+ 'force_handlers',
+ 'skip_tags',
+ 'ask_variables_on_launch',
+ 'ask_tags_on_launch',
+ 'ask_job_type_on_launch',
+ 'ask_skip_tags_on_launch',
+ 'ask_inventory_on_launch',
+ 'ask_credential_on_launch',
+ 'survey_enabled',
+ 'custom_virtualenv',
+ 'diff_mode',
+ 'timeout',
+ 'job_slice_count',
# These fields are ignored, but it is convenient for QA to allow clients to post them
- 'last_job_run', 'created', 'modified',
+ 'last_job_run',
+ 'created',
+ 'modified',
]
for k, v in data.items():
if k not in [x.name for x in obj._meta.concrete_fields]:
continue
if hasattr(obj, k) and getattr(obj, k) != v:
- if k not in allowed_fields and v != getattr(obj, '%s_id' % k, None) \
- and not (hasattr(obj, '%s_id' % k) and getattr(obj, '%s_id' % k) is None and v == ''): # Equate '' to None in the case of foreign keys
+ if (
+ k not in allowed_fields
+ and v != getattr(obj, '%s_id' % k, None)
+ and not (hasattr(obj, '%s_id' % k) and getattr(obj, '%s_id' % k) is None and v == '')
+ ): # Equate '' to None in the case of foreign keys
return False
return True
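
The reflowed condition above implements an allow-list check: a change counts as non-sensitive only if every modified field is on the list, with an extra carve-out equating '' to None for foreign keys. A simplified, runnable sketch using dicts in place of model instances and omitting the foreign-key carve-out:

    def changes_are_non_sensitive(current, data, allowed_fields):
        for key, new_value in data.items():
            if key in current and current[key] != new_value:
                if key not in allowed_fields:
                    return False
        return True

    obj = {'name': 'demo', 'inventory': 42}
    assert changes_are_non_sensitive(obj, {'name': 'renamed'}, {'name'})
    assert not changes_are_non_sensitive(obj, {'inventory': 7}, {'name'})
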
@@ -1618,8 +1662,7 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
return self.user.can_access(type(sub_obj), "read", sub_obj) and self.user in obj.organization.admin_role
if relationship == 'credentials' and isinstance(sub_obj, Credential):
return self.user in obj.admin_role and self.user in sub_obj.use_role
- return super(JobTemplateAccess, self).can_attach(
- obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
+ return super(JobTemplateAccess, self).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
@check_superuser
def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):
@@ -1631,7 +1674,7 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
class JobAccess(BaseAccess):
- '''
+ """
I can see jobs when:
- I am a superuser.
- I can see its job template
@@ -1640,11 +1683,17 @@ class JobAccess(BaseAccess):
I can delete jobs when:
- I am an admin of the organization which contains its inventory
- I am an admin of the organization which contains its project
- '''
+ """
model = Job
- select_related = ('created_by', 'modified_by', 'job_template', 'inventory',
- 'project', 'project_update',)
+ select_related = (
+ 'created_by',
+ 'modified_by',
+ 'job_template',
+ 'inventory',
+ 'project',
+ 'project_update',
+ )
prefetch_related = (
'organization',
'unified_job_template',
@@ -1656,18 +1705,13 @@ class JobAccess(BaseAccess):
def filtered_queryset(self):
qs = self.model.objects
- qs_jt = qs.filter(
- job_template__in=JobTemplate.accessible_objects(self.user, 'read_role')
- )
+ qs_jt = qs.filter(job_template__in=JobTemplate.accessible_objects(self.user, 'read_role'))
- org_access_qs = Organization.objects.filter(
- Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
+ org_access_qs = Organization.objects.filter(Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
if not org_access_qs.exists():
return qs_jt
- return qs.filter(
- Q(job_template__in=JobTemplate.accessible_objects(self.user, 'read_role')) |
- Q(organization__in=org_access_qs)).distinct()
+ return qs.filter(Q(job_template__in=JobTemplate.accessible_objects(self.user, 'read_role')) | Q(organization__in=org_access_qs)).distinct()
def can_add(self, data, validate_license=True):
raise NotImplementedError('Direct job creation not possible in v2 API')
@@ -1750,9 +1794,9 @@ class JobAccess(BaseAccess):
class SystemJobTemplateAccess(BaseAccess):
- '''
+ """
I can only see/manage System Job Templates if I'm a super user
- '''
+ """
model = SystemJobTemplate
@@ -1763,17 +1807,18 @@ class SystemJobTemplateAccess(BaseAccess):
class SystemJobAccess(BaseAccess):
- '''
+ """
I can only see/manage System Jobs if I'm a super user
- '''
+ """
+
model = SystemJob
def can_start(self, obj, validate_license=True):
- return False # no relaunching of system jobs
+ return False # no relaunching of system jobs
class JobLaunchConfigAccess(BaseAccess):
- '''
+ """
Launch configs must have permissions checked for
- relaunching
- rescheduling
@@ -1781,15 +1826,14 @@ class JobLaunchConfigAccess(BaseAccess):
In order to create a new object with a copy of this launch config, I need:
- use access to related inventory (if present)
- use role to many-related credentials (if any present)
- '''
+ """
+
model = JobLaunchConfig
- select_related = ('job')
+ select_related = 'job'
prefetch_related = ('credentials', 'inventory')
def _unusable_creds_exist(self, qs):
- return qs.exclude(
- pk__in=Credential._accessible_pk_qs(Credential, self.user, 'use_role')
- ).exists()
+ return qs.exclude(pk__in=Credential._accessible_pk_qs(Credential, self.user, 'use_role')).exists()
def has_credentials_access(self, obj):
# user has access if no related credentials exist that the user lacks use role for
@@ -1816,10 +1860,7 @@ class JobLaunchConfigAccess(BaseAccess):
@check_superuser
def can_use(self, obj):
- return (
- self.check_related('inventory', Inventory, {}, obj=obj, role_field='use_role', mandatory=True) and
- self.has_credentials_access(obj)
- )
+ return self.check_related('inventory', Inventory, {}, obj=obj, role_field='use_role', mandatory=True) and self.has_credentials_access(obj)
def can_change(self, obj, data):
return self.check_related('inventory', Inventory, data, obj=obj, role_field='use_role')
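
The `select_related = ('job')` change above is black removing redundant parentheses: without a trailing comma that expression was never a tuple, only a parenthesized string, so the two spellings are equivalent at the Python level. A two-line demonstration:

    a = ('job')   # just the str 'job' in parentheses
    b = ('job',)  # the trailing comma makes a one-element tuple
    assert isinstance(a, str)
    assert isinstance(b, tuple) and len(b) == 1
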
@@ -1841,7 +1882,7 @@ class JobLaunchConfigAccess(BaseAccess):
class WorkflowJobTemplateNodeAccess(BaseAccess):
- '''
+ """
I can see/use a WorkflowJobTemplateNode if I have read permission
to associated Workflow Job Template
@@ -1860,24 +1901,23 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
In order to manage connections (edges) between nodes I do not need anything
beyond the standard admin access to its WFJT
- '''
+ """
+
model = WorkflowJobTemplateNode
- prefetch_related = ('success_nodes', 'failure_nodes', 'always_nodes',
- 'unified_job_template', 'credentials', 'workflow_job_template')
+ prefetch_related = ('success_nodes', 'failure_nodes', 'always_nodes', 'unified_job_template', 'credentials', 'workflow_job_template')
def filtered_queryset(self):
- return self.model.objects.filter(
- workflow_job_template__in=WorkflowJobTemplate.accessible_objects(
- self.user, 'read_role'))
+ return self.model.objects.filter(workflow_job_template__in=WorkflowJobTemplate.accessible_objects(self.user, 'read_role'))
@check_superuser
def can_add(self, data):
if not data: # So the browseable API will work
return True
return (
- self.check_related('workflow_job_template', WorkflowJobTemplate, data, mandatory=True) and
- self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role') and
- JobLaunchConfigAccess(self.user).can_add(data))
+ self.check_related('workflow_job_template', WorkflowJobTemplate, data, mandatory=True)
+ and self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role')
+ and JobLaunchConfigAccess(self.user).can_add(data)
+ )
def wfjt_admin(self, obj):
if not obj.workflow_job_template:
@@ -1888,19 +1928,14 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
def ujt_execute(self, obj):
if not obj.unified_job_template:
return True
- return self.check_related('unified_job_template', UnifiedJobTemplate, {}, obj=obj,
- role_field='execute_role', mandatory=True)
+ return self.check_related('unified_job_template', UnifiedJobTemplate, {}, obj=obj, role_field='execute_role', mandatory=True)
def can_change(self, obj, data):
if not data:
return True
# should not be able to edit the prompts if lacking access to UJT or WFJT
- return (
- self.ujt_execute(obj) and
- self.wfjt_admin(obj) and
- JobLaunchConfigAccess(self.user).can_change(obj, data)
- )
+ return self.ujt_execute(obj) and self.wfjt_admin(obj) and JobLaunchConfigAccess(self.user).can_change(obj, data)
def can_delete(self, obj):
return self.wfjt_admin(obj)
@@ -1919,10 +1954,7 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
# Need permission to related template to attach a credential
if not self.ujt_execute(obj):
return False
- return JobLaunchConfigAccess(self.user).can_attach(
- obj, sub_obj, relationship, data,
- skip_sub_obj_read_check=skip_sub_obj_read_check
- )
+ return JobLaunchConfigAccess(self.user).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
elif relationship in ('success_nodes', 'failure_nodes', 'always_nodes'):
return self.check_same_WFJT(obj, sub_obj)
else:
@@ -1934,10 +1966,7 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
if relationship == 'credentials':
if not self.ujt_execute(obj):
return False
- return JobLaunchConfigAccess(self.user).can_unattach(
- obj, sub_obj, relationship, data,
- skip_sub_obj_read_check=skip_sub_obj_read_check
- )
+ return JobLaunchConfigAccess(self.user).can_unattach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
elif relationship in ('success_nodes', 'failure_nodes', 'always_nodes'):
return self.check_same_WFJT(obj, sub_obj)
else:
@@ -1945,7 +1974,7 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
class WorkflowJobNodeAccess(BaseAccess):
- '''
+ """
I can see a WorkflowJobNode if I have permission to...
the workflow job template associated with...
the workflow job associated with the node.
@@ -1953,23 +1982,29 @@ class WorkflowJobNodeAccess(BaseAccess):
Any deletion or editing of individual nodes would undermine the integrity
of the graph structure.
Deletion must happen as a cascade delete from the workflow job.
- '''
+ """
+
model = WorkflowJobNode
- prefetch_related = ('unified_job_template', 'job', 'workflow_job', 'credentials',
- 'success_nodes', 'failure_nodes', 'always_nodes',)
+ prefetch_related = (
+ 'unified_job_template',
+ 'job',
+ 'workflow_job',
+ 'credentials',
+ 'success_nodes',
+ 'failure_nodes',
+ 'always_nodes',
+ )
def filtered_queryset(self):
- return self.model.objects.filter(
- workflow_job__unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(
- self.user, 'read_role'))
+ return self.model.objects.filter(workflow_job__unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
@check_superuser
def can_add(self, data):
if data is None: # Hide direct creation in API browser
return False
- return (
- self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role') and
- JobLaunchConfigAccess(self.user).can_add(data))
+ return self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role') and JobLaunchConfigAccess(self.user).can_add(
+ data
+ )
def can_change(self, obj, data):
return False
@@ -1980,26 +2015,33 @@ class WorkflowJobNodeAccess(BaseAccess):
# TODO: notification attachments?
class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
- '''
+ """
I can see/manage Workflow Job Templates based on object roles
- '''
+ """
model = WorkflowJobTemplate
- select_related = ('created_by', 'modified_by', 'organization', 'next_schedule',
- 'admin_role', 'execute_role', 'read_role',)
+ select_related = (
+ 'created_by',
+ 'modified_by',
+ 'organization',
+ 'next_schedule',
+ 'admin_role',
+ 'execute_role',
+ 'read_role',
+ )
def filtered_queryset(self):
return self.model.accessible_objects(self.user, 'read_role')
@check_superuser
def can_add(self, data):
- '''
+ """
a user can create a job template if they are a superuser, an org admin
of any org that the project is a member of, or if they have user or team
based permissions tying the project to the inventory source for the
given action as well as the 'create' deploy permission.
Users who are able to create deploy jobs can also run normal and check (dry run) jobs.
- '''
+ """
if not data: # So the browseable API will work
return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()
@@ -2008,9 +2050,8 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
if not self.user.can_access(ExecutionEnvironment, 'read', ee):
return False
- return (
- self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True) and
- self.check_related('inventory', Inventory, data, role_field='use_role')
+ return self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True) and self.check_related(
+ 'inventory', Inventory, data, role_field='use_role'
)
def can_copy(self, obj):
@@ -2036,8 +2077,7 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
if missing_inventories:
self.messages['inventories_unable_to_copy'] = missing_inventories
- return self.check_related('organization', Organization, {'reference_obj': obj}, role_field='workflow_admin_role',
- mandatory=True)
+ return self.check_related('organization', Organization, {'reference_obj': obj}, role_field='workflow_admin_role', mandatory=True)
def can_start(self, obj, validate_license=True):
if validate_license:
@@ -2063,9 +2103,9 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
return False
return (
- self.check_related('organization', Organization, data, role_field='workflow_admin_role', obj=obj) and
- self.check_related('inventory', Inventory, data, role_field='use_role', obj=obj) and
- self.user in obj.admin_role
+ self.check_related('organization', Organization, data, role_field='workflow_admin_role', obj=obj)
+ and self.check_related('inventory', Inventory, data, role_field='use_role', obj=obj)
+ and self.user in obj.admin_role
)
def can_delete(self, obj):
@@ -2073,20 +2113,23 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
class WorkflowJobAccess(BaseAccess):
- '''
+ """
I can only see Workflow Jobs if I can see the associated
workflow job template that they were created from.
I can delete them if I am admin of their workflow job template
I can cancel one if I can delete it
I can also cancel it if I started it
- '''
+ """
+
model = WorkflowJob
- select_related = ('created_by', 'modified_by', 'organization',)
+ select_related = (
+ 'created_by',
+ 'modified_by',
+ 'organization',
+ )
def filtered_queryset(self):
- return WorkflowJob.objects.filter(
- unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(
- self.user, 'read_role'))
+ return WorkflowJob.objects.filter(unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
def can_add(self, data):
# Old add-start system for launching jobs is being deprecated, and
@@ -2098,9 +2141,7 @@ class WorkflowJobAccess(BaseAccess):
@check_superuser
def can_delete(self, obj):
- return (obj.workflow_job_template and
- obj.workflow_job_template.organization and
- self.user in obj.workflow_job_template.organization.workflow_admin_role)
+ return obj.workflow_job_template and obj.workflow_job_template.organization and self.user in obj.workflow_job_template.organization.workflow_admin_role
def get_method_capability(self, method, obj, parent_obj):
if method == 'start':
@@ -2159,8 +2200,7 @@ class WorkflowJobAccess(BaseAccess):
if not node_access.can_add({'reference_obj': node}):
wj_add_perm = False
if not wj_add_perm and self.save_messages:
- self.messages['workflow_job_template'] = _('You do not have permission to the workflow job '
- 'resources required for relaunch.')
+ self.messages['workflow_job_template'] = _('You do not have permission to the workflow job ' 'resources required for relaunch.')
return wj_add_perm
def can_cancel(self, obj):
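
The relaunch message above ends up with two adjacent string literals on one line ('...workflow job ' 'resources...'). Python concatenates adjacent literals at compile time, so the runtime string is unaffected, and black at this vintage keeps the split literals rather than merging them. A quick check:

    message = 'You do not have permission to the workflow job ' 'resources required for relaunch.'
    assert message == ('You do not have permission to the workflow job '
                       'resources required for relaunch.')
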
@@ -2172,13 +2212,19 @@ class WorkflowJobAccess(BaseAccess):
class AdHocCommandAccess(BaseAccess):
- '''
+ """
I can only see/run ad hoc commands when:
- I am a superuser.
- I have read access to the inventory
- '''
+ """
+
model = AdHocCommand
- select_related = ('created_by', 'modified_by', 'inventory', 'credential',)
+ select_related = (
+ 'created_by',
+ 'modified_by',
+ 'inventory',
+ 'credential',
+ )
def filtered_queryset(self):
return self.model.objects.filter(inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role'))
@@ -2212,10 +2258,13 @@ class AdHocCommandAccess(BaseAccess):
return obj.inventory is not None and self.user in obj.inventory.organization.admin_role
def can_start(self, obj, validate_license=True):
- return self.can_add({
- 'credential': obj.credential_id,
- 'inventory': obj.inventory_id,
- }, validate_license=validate_license)
+ return self.can_add(
+ {
+ 'credential': obj.credential_id,
+ 'inventory': obj.inventory_id,
+ },
+ validate_license=validate_license,
+ )
def can_cancel(self, obj):
if not obj.can_cancel:
@@ -2226,10 +2275,10 @@ class AdHocCommandAccess(BaseAccess):
class AdHocCommandEventAccess(BaseAccess):
- '''
+ """
I can see ad hoc command event records whenever I can read both ad hoc
command and host.
- '''
+ """
model = AdHocCommandEvent
@@ -2241,8 +2290,7 @@ class AdHocCommandEventAccess(BaseAccess):
return qs.all()
ad_hoc_command_qs = self.user.get_queryset(AdHocCommand)
host_qs = self.user.get_queryset(Host)
- return qs.filter(Q(host__isnull=True) | Q(host__in=host_qs),
- ad_hoc_command__in=ad_hoc_command_qs)
+ return qs.filter(Q(host__isnull=True) | Q(host__in=host_qs), ad_hoc_command__in=ad_hoc_command_qs)
def can_add(self, data):
return False
@@ -2255,12 +2303,16 @@ class AdHocCommandEventAccess(BaseAccess):
class JobHostSummaryAccess(BaseAccess):
- '''
+ """
I can see job/host summary records whenever I can read both job and host.
- '''
+ """
model = JobHostSummary
- select_related = ('job', 'job__job_template', 'host',)
+ select_related = (
+ 'job',
+ 'job__job_template',
+ 'host',
+ )
def filtered_queryset(self):
job_qs = self.user.get_queryset(Job)
@@ -2278,17 +2330,21 @@ class JobHostSummaryAccess(BaseAccess):
class JobEventAccess(BaseAccess):
- '''
+ """
I can see job event records whenever I can read both job and host.
- '''
+ """
model = JobEvent
- prefetch_related = ('job__job_template', 'host',)
+ prefetch_related = (
+ 'job__job_template',
+ 'host',
+ )
def filtered_queryset(self):
return self.model.objects.filter(
- Q(host__inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role')) |
- Q(job__job_template__in=JobTemplate.accessible_pk_qs(self.user, 'read_role')))
+ Q(host__inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role'))
+ | Q(job__job_template__in=JobTemplate.accessible_pk_qs(self.user, 'read_role'))
+ )
def can_add(self, data):
return False
@@ -2301,15 +2357,14 @@ class JobEventAccess(BaseAccess):
class ProjectUpdateEventAccess(BaseAccess):
- '''
+ """
I can see project update event records whenever I can access the project update
- '''
+ """
model = ProjectUpdateEvent
def filtered_queryset(self):
- return self.model.objects.filter(
- Q(project_update__project__in=Project.accessible_pk_qs(self.user, 'read_role')))
+ return self.model.objects.filter(Q(project_update__project__in=Project.accessible_pk_qs(self.user, 'read_role')))
def can_add(self, data):
return False
@@ -2322,15 +2377,14 @@ class ProjectUpdateEventAccess(BaseAccess):
class InventoryUpdateEventAccess(BaseAccess):
- '''
+ """
I can see inventory update event records whenever I can access the inventory update
- '''
+ """
model = InventoryUpdateEvent
def filtered_queryset(self):
- return self.model.objects.filter(
- Q(inventory_update__inventory_source__inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role')))
+ return self.model.objects.filter(Q(inventory_update__inventory_source__inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role')))
def can_add(self, data):
return False
@@ -2343,9 +2397,10 @@ class InventoryUpdateEventAccess(BaseAccess):
class SystemJobEventAccess(BaseAccess):
- '''
+ """
I can only see/manage System Job events if I'm a super user
- '''
+ """
+
model = SystemJobEvent
def can_add(self, data):
@@ -2359,11 +2414,11 @@ class SystemJobEventAccess(BaseAccess):
class UnifiedJobTemplateAccess(BaseAccess):
- '''
+ """
I can see a unified job template whenever I can see the same project,
inventory source, WFJT, or job template. Unified job templates do not include
inventory sources without a cloud source.
- '''
+ """
model = UnifiedJobTemplate
select_related = (
@@ -2383,16 +2438,15 @@ class UnifiedJobTemplateAccess(BaseAccess):
# WISH - sure would be nice if the following worked, but it does not.
# In the future, as django and polymorphic libs are upgraded, try again.
- #qs = qs.prefetch_related(
+ # qs = qs.prefetch_related(
# 'project',
# 'inventory',
- #)
+ # )
def filtered_queryset(self):
return self.model.objects.filter(
- Q(pk__in=self.model.accessible_pk_qs(self.user, 'read_role')) |
- Q(inventorysource__inventory__id__in=Inventory._accessible_pk_qs(
- Inventory, self.user, 'read_role'))
+ Q(pk__in=self.model.accessible_pk_qs(self.user, 'read_role'))
+ | Q(inventorysource__inventory__id__in=Inventory._accessible_pk_qs(Inventory, self.user, 'read_role'))
)
def can_start(self, obj, validate_license=True):
@@ -2401,15 +2455,14 @@ class UnifiedJobTemplateAccess(BaseAccess):
return access_instance.can_start(obj, validate_license=validate_license)
def get_queryset(self):
- return super(UnifiedJobTemplateAccess, self).get_queryset().filter(
- workflowapprovaltemplate__isnull=True)
+ return super(UnifiedJobTemplateAccess, self).get_queryset().filter(workflowapprovaltemplate__isnull=True)
class UnifiedJobAccess(BaseAccess):
- '''
+ """
I can see a unified job whenever I can see the same project update,
inventory update or job.
- '''
+ """
model = UnifiedJob
prefetch_related = (
@@ -2426,7 +2479,7 @@ class UnifiedJobAccess(BaseAccess):
# WISH - sure would be nice if the following worked, but it does not.
# In the future, as django and polymorphic libs are upgraded, try again.
- #qs = qs.prefetch_related(
+ # qs = qs.prefetch_related(
# 'project',
# 'inventory',
# 'job_template',
@@ -2436,38 +2489,40 @@ class UnifiedJobAccess(BaseAccess):
# 'inventory_source___inventory',
# 'job_template__inventory',
# 'job_template__project',
- #)
+ # )
def filtered_queryset(self):
inv_pk_qs = Inventory._accessible_pk_qs(Inventory, self.user, 'read_role')
- org_auditor_qs = Organization.objects.filter(
- Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
+ org_auditor_qs = Organization.objects.filter(Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
qs = self.model.objects.filter(
- Q(unified_job_template_id__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role')) |
- Q(inventoryupdate__inventory_source__inventory__id__in=inv_pk_qs) |
- Q(adhoccommand__inventory__id__in=inv_pk_qs) |
- Q(organization__in=org_auditor_qs)
+ Q(unified_job_template_id__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
+ | Q(inventoryupdate__inventory_source__inventory__id__in=inv_pk_qs)
+ | Q(adhoccommand__inventory__id__in=inv_pk_qs)
+ | Q(organization__in=org_auditor_qs)
)
return qs
def get_queryset(self):
- return super(UnifiedJobAccess, self).get_queryset().filter(
- workflowapproval__isnull=True)
+ return super(UnifiedJobAccess, self).get_queryset().filter(workflowapproval__isnull=True)
class ScheduleAccess(BaseAccess):
- '''
+ """
I can see a schedule if I can see its related unified job; I can create or update them if I have write access
- '''
+ """
model = Schedule
- select_related = ('created_by', 'modified_by',)
- prefetch_related = ('unified_job_template', 'credentials',)
+ select_related = (
+ 'created_by',
+ 'modified_by',
+ )
+ prefetch_related = (
+ 'unified_job_template',
+ 'credentials',
+ )
def filtered_queryset(self):
- return self.model.objects.filter(
- unified_job_template__in=UnifiedJobTemplateAccess(self.user).filtered_queryset()
- )
+ return self.model.objects.filter(unified_job_template__in=UnifiedJobTemplateAccess(self.user).filtered_queryset())
@check_superuser
def can_add(self, data):
@@ -2485,37 +2540,31 @@ class ScheduleAccess(BaseAccess):
if self.check_related('unified_job_template', UnifiedJobTemplate, data, obj=obj, mandatory=True):
return True
# Users with execute role can modify the schedules they created
- return (
- obj.created_by == self.user and
- self.check_related('unified_job_template', UnifiedJobTemplate, data, obj=obj, role_field='execute_role', mandatory=True))
+ return obj.created_by == self.user and self.check_related(
+ 'unified_job_template', UnifiedJobTemplate, data, obj=obj, role_field='execute_role', mandatory=True
+ )
def can_delete(self, obj):
return self.can_change(obj, {})
def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
- return JobLaunchConfigAccess(self.user).can_attach(
- obj, sub_obj, relationship, data,
- skip_sub_obj_read_check=skip_sub_obj_read_check
- )
+ return JobLaunchConfigAccess(self.user).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
def can_unattach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
- return JobLaunchConfigAccess(self.user).can_unattach(
- obj, sub_obj, relationship, data,
- skip_sub_obj_read_check=skip_sub_obj_read_check
- )
+ return JobLaunchConfigAccess(self.user).can_unattach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
class NotificationTemplateAccess(BaseAccess):
- '''
+ """
I can see/use a notification_template if I have permission to do so
- '''
+ """
+
model = NotificationTemplate
prefetch_related = ('created_by', 'modified_by', 'organization')
def filtered_queryset(self):
return self.model.objects.filter(
- Q(organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) |
- Q(organization__in=self.user.auditor_of_organizations)
+ Q(organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) | Q(organization__in=self.user.auditor_of_organizations)
).distinct()
@check_superuser
@@ -2545,16 +2594,17 @@ class NotificationTemplateAccess(BaseAccess):
class NotificationAccess(BaseAccess):
- '''
+ """
I can see/use a notification if I have permission to do so
- '''
+ """
+
model = Notification
prefetch_related = ('notification_template',)
def filtered_queryset(self):
return self.model.objects.filter(
- Q(notification_template__organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) |
- Q(notification_template__organization__in=self.user.auditor_of_organizations)
+ Q(notification_template__organization__in=Organization.accessible_objects(self.user, 'notification_admin_role'))
+ | Q(notification_template__organization__in=self.user.auditor_of_organizations)
).distinct()
def can_delete(self, obj):
@@ -2562,16 +2612,21 @@ class NotificationAccess(BaseAccess):
class LabelAccess(BaseAccess):
- '''
+ """
I can see/use a Label if I have permission to the associated organization, or to a JT that the label is on
- '''
+ """
+
model = Label
- prefetch_related = ('modified_by', 'created_by', 'organization',)
+ prefetch_related = (
+ 'modified_by',
+ 'created_by',
+ 'organization',
+ )
def filtered_queryset(self):
return self.model.objects.filter(
- Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role')) |
- Q(unifiedjobtemplate_labels__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
+ Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role'))
+ | Q(unifiedjobtemplate_labels__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
)
@check_superuser
@@ -2592,20 +2647,37 @@ class LabelAccess(BaseAccess):
class ActivityStreamAccess(BaseAccess):
- '''
+ """
I can see activity stream events only when I have permission on all objects included in the event
- '''
+ """
model = ActivityStream
- prefetch_related = ('organization', 'user', 'inventory', 'host', 'group',
- 'inventory_update', 'credential', 'credential_type', 'team',
- 'ad_hoc_command', 'o_auth2_application', 'o_auth2_access_token',
- 'notification_template', 'notification', 'label', 'role', 'actor',
- 'schedule', 'custom_inventory_script', 'unified_job_template',
- 'workflow_job_template_node',)
+ prefetch_related = (
+ 'organization',
+ 'user',
+ 'inventory',
+ 'host',
+ 'group',
+ 'inventory_update',
+ 'credential',
+ 'credential_type',
+ 'team',
+ 'ad_hoc_command',
+ 'o_auth2_application',
+ 'o_auth2_access_token',
+ 'notification_template',
+ 'notification',
+ 'label',
+ 'role',
+ 'actor',
+ 'schedule',
+ 'custom_inventory_script',
+ 'unified_job_template',
+ 'workflow_job_template_node',
+ )
def filtered_queryset(self):
- '''
+ """
The full set is returned if the user is:
- System Administrator
- System Auditor
@@ -2624,7 +2696,7 @@ class ActivityStreamAccess(BaseAccess):
- unified jobs
- schedules
- custom inventory scripts
- '''
+ """
qs = self.model.objects.all()
# FIXME: the following fields will be attached to the wrong object
# if they are included in prefetch_related because of
@@ -2635,9 +2707,10 @@ class ActivityStreamAccess(BaseAccess):
inventory_set = Inventory.accessible_objects(self.user, 'read_role')
credential_set = Credential.accessible_objects(self.user, 'read_role')
auditing_orgs = (
- Organization.accessible_objects(self.user, 'admin_role') |
- Organization.accessible_objects(self.user, 'auditor_role')
- ).distinct().values_list('id', flat=True)
+ (Organization.accessible_objects(self.user, 'admin_role') | Organization.accessible_objects(self.user, 'auditor_role'))
+ .distinct()
+ .values_list('id', flat=True)
+ )
project_set = Project.accessible_objects(self.user, 'read_role')
jt_set = JobTemplate.accessible_objects(self.user, 'read_role')
team_set = Team.accessible_objects(self.user, 'read_role')
@@ -2646,30 +2719,30 @@ class ActivityStreamAccess(BaseAccess):
token_set = OAuth2TokenAccess(self.user).filtered_queryset()
return qs.filter(
- Q(ad_hoc_command__inventory__in=inventory_set) |
- Q(o_auth2_application__in=app_set) |
- Q(o_auth2_access_token__in=token_set) |
- Q(user__in=auditing_orgs.values('member_role__members')) |
- Q(user=self.user) |
- Q(organization__in=auditing_orgs) |
- Q(inventory__in=inventory_set) |
- Q(host__inventory__in=inventory_set) |
- Q(group__inventory__in=inventory_set) |
- Q(inventory_source__inventory__in=inventory_set) |
- Q(inventory_update__inventory_source__inventory__in=inventory_set) |
- Q(credential__in=credential_set) |
- Q(team__in=team_set) |
- Q(project__in=project_set) |
- Q(project_update__project__in=project_set) |
- Q(job_template__in=jt_set) |
- Q(job__job_template__in=jt_set) |
- Q(workflow_job_template__in=wfjt_set) |
- Q(workflow_job_template_node__workflow_job_template__in=wfjt_set) |
- Q(workflow_job__workflow_job_template__in=wfjt_set) |
- Q(notification_template__organization__in=auditing_orgs) |
- Q(notification__notification_template__organization__in=auditing_orgs) |
- Q(label__organization__in=auditing_orgs) |
- Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
+ Q(ad_hoc_command__inventory__in=inventory_set)
+ | Q(o_auth2_application__in=app_set)
+ | Q(o_auth2_access_token__in=token_set)
+ | Q(user__in=auditing_orgs.values('member_role__members'))
+ | Q(user=self.user)
+ | Q(organization__in=auditing_orgs)
+ | Q(inventory__in=inventory_set)
+ | Q(host__inventory__in=inventory_set)
+ | Q(group__inventory__in=inventory_set)
+ | Q(inventory_source__inventory__in=inventory_set)
+ | Q(inventory_update__inventory_source__inventory__in=inventory_set)
+ | Q(credential__in=credential_set)
+ | Q(team__in=team_set)
+ | Q(project__in=project_set)
+ | Q(project_update__project__in=project_set)
+ | Q(job_template__in=jt_set)
+ | Q(job__job_template__in=jt_set)
+ | Q(workflow_job_template__in=wfjt_set)
+ | Q(workflow_job_template_node__workflow_job_template__in=wfjt_set)
+ | Q(workflow_job__workflow_job_template__in=wfjt_set)
+ | Q(notification_template__organization__in=auditing_orgs)
+ | Q(notification__notification_template__organization__in=auditing_orgs)
+ | Q(label__organization__in=auditing_orgs)
+ | Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
).distinct()
def can_add(self, data):
@@ -2710,13 +2783,13 @@ class CustomInventoryScriptAccess(BaseAccess):
class RoleAccess(BaseAccess):
- '''
+ """
- I can see roles when
- I am a super user
- I am a member of that role
- The role is a descendant role of a role I am a member of
- The role is an implicit role of an object that I can see a role of.
- '''
+ """
model = Role
prefetch_related = ('content_type',)
@@ -2776,7 +2849,7 @@ class RoleAccess(BaseAccess):
class WorkflowApprovalAccess(BaseAccess):
- '''
+ """
A user can create a workflow approval if they are a superuser, an org admin
of the org connected to the workflow, or if they are assigned as admins to
the workflow.
@@ -2788,10 +2861,13 @@ class WorkflowApprovalAccess(BaseAccess):
- any user who has explicitly been assigned the "approver" role
A user can see approvals if they have read access to the associated WorkflowJobTemplate.
- '''
+ """
model = WorkflowApproval
- prefetch_related = ('created_by', 'modified_by',)
+ prefetch_related = (
+ 'created_by',
+ 'modified_by',
+ )
def can_use(self, obj):
return True
@@ -2800,20 +2876,15 @@ class WorkflowApprovalAccess(BaseAccess):
return True
def filtered_queryset(self):
- return self.model.objects.filter(
- unified_job_node__workflow_job__unified_job_template__in=WorkflowJobTemplate.accessible_pk_qs(
- self.user, 'read_role'))
+ return self.model.objects.filter(unified_job_node__workflow_job__unified_job_template__in=WorkflowJobTemplate.accessible_pk_qs(self.user, 'read_role'))
def can_approve_or_deny(self, obj):
- if (
- (obj.workflow_job_template and self.user in obj.workflow_job_template.approval_role) or
- self.user.is_superuser
- ):
+ if (obj.workflow_job_template and self.user in obj.workflow_job_template.approval_role) or self.user.is_superuser:
return True
class WorkflowApprovalTemplateAccess(BaseAccess):
- '''
+ """
A user can create a workflow approval if they are a superuser, an org admin
of the org connected to the workflow, or if they are assigned as admins to
the workflow.
@@ -2825,17 +2896,20 @@ class WorkflowApprovalTemplateAccess(BaseAccess):
- any user who has explicitly been assigned the "approver" role at the workflow or organization level
A user can see approval templates if they have read access to the associated WorkflowJobTemplate.
- '''
+ """
model = WorkflowApprovalTemplate
- prefetch_related = ('created_by', 'modified_by',)
+ prefetch_related = (
+ 'created_by',
+ 'modified_by',
+ )
@check_superuser
def can_add(self, data):
if data is None: # Hide direct creation in API browser
return False
else:
- return (self.check_related('workflow_approval_template', UnifiedJobTemplate, role_field='admin_role'))
+ return self.check_related('workflow_approval_template', UnifiedJobTemplate, role_field='admin_role')
def can_change(self, obj, data):
return self.user.can_access(WorkflowJobTemplate, 'change', obj.workflow_job_template, data={})
@@ -2848,9 +2922,7 @@ class WorkflowApprovalTemplateAccess(BaseAccess):
return self.user in obj.workflow_job_template.execute_role
def filtered_queryset(self):
- return self.model.objects.filter(
- workflowjobtemplatenodes__workflow_job_template__in=WorkflowJobTemplate.accessible_pk_qs(
- self.user, 'read_role'))
+ return self.model.objects.filter(workflowjobtemplatenodes__workflow_job_template__in=WorkflowJobTemplate.accessible_pk_qs(self.user, 'read_role'))
for cls in BaseAccess.__subclasses__():
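# A minimal hedged sketch (not part of this commit) of two rewrites black
# applies throughout access.py above: docstrings are normalized from
# '''...''' to """...""", and a boolean chain that overflows the configured
# line length is wrapped in parentheses with the binary operator leading
# each continuation line. The names below are hypothetical stand-ins for
# the role checks in this file; this short chain is shown wrapped purely
# for illustration.


class Obj:
    """Toy object with the attributes the check below assumes."""

    def __init__(self, organization, admins):
        self.organization = organization
        self.admins = admins


def can_admin(user, obj):
    return (
        obj.organization is not None
        and user in obj.admins
    )


assert can_admin('alice', Obj('org1', {'alice'}))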
diff --git a/awx/main/analytics/broadcast_websocket.py b/awx/main/analytics/broadcast_websocket.py
index d8abcb4745..ff4bcb4fa1 100644
--- a/awx/main/analytics/broadcast_websocket.py
+++ b/awx/main/analytics/broadcast_websocket.py
@@ -24,7 +24,7 @@ logger = logging.getLogger('awx.analytics.broadcast_websocket')
def dt_to_seconds(dt):
- return int((dt - datetime.datetime(1970,1,1)).total_seconds())
+ return int((dt - datetime.datetime(1970, 1, 1)).total_seconds())
def now_seconds():
@@ -37,7 +37,7 @@ def safe_name(s):
# Second granularity; Per-minute
-class FixedSlidingWindow():
+class FixedSlidingWindow:
def __init__(self, start_time=None):
self.buckets = dict()
self.start_time = start_time or now_seconds()
@@ -65,7 +65,7 @@ class FixedSlidingWindow():
return sum(self.buckets.values()) or 0
-class BroadcastWebsocketStatsManager():
+class BroadcastWebsocketStatsManager:
def __init__(self, event_loop, local_hostname):
self._local_hostname = local_hostname
@@ -74,8 +74,7 @@ class BroadcastWebsocketStatsManager():
self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME
def new_remote_host_stats(self, remote_hostname):
- self._stats[remote_hostname] = BroadcastWebsocketStats(self._local_hostname,
- remote_hostname)
+ self._stats[remote_hostname] = BroadcastWebsocketStats(self._local_hostname, remote_hostname)
return self._stats[remote_hostname]
def delete_remote_host_stats(self, remote_hostname):
@@ -100,15 +99,15 @@ class BroadcastWebsocketStatsManager():
@classmethod
def get_stats_sync(cls):
- '''
+ """
Stringified version of all the stats
- '''
+ """
redis_conn = redis.Redis.from_url(settings.BROKER_URL)
stats_str = redis_conn.get(BROADCAST_WEBSOCKET_REDIS_KEY_NAME) or b''
return parser.text_string_to_metric_families(stats_str.decode('UTF-8'))
-class BroadcastWebsocketStats():
+class BroadcastWebsocketStats:
def __init__(self, local_hostname, remote_hostname):
self._local_hostname = local_hostname
self._remote_hostname = remote_hostname
@@ -118,24 +117,25 @@ class BroadcastWebsocketStats():
self.name = safe_name(self._local_hostname)
self.remote_name = safe_name(self._remote_hostname)
- self._messages_received_total = Counter(f'awx_{self.remote_name}_messages_received_total',
- 'Number of messages received, to be forwarded, by the broadcast websocket system',
- registry=self._registry)
- self._messages_received = Gauge(f'awx_{self.remote_name}_messages_received',
- 'Number of forwarded messages received by the broadcast websocket system, for the duration of the current connection',
- registry=self._registry)
- self._connection = Enum(f'awx_{self.remote_name}_connection',
- 'Websocket broadcast connection',
- states=['disconnected', 'connected'],
- registry=self._registry)
+ self._messages_received_total = Counter(
+ f'awx_{self.remote_name}_messages_received_total',
+ 'Number of messages received, to be forwarded, by the broadcast websocket system',
+ registry=self._registry,
+ )
+ self._messages_received = Gauge(
+ f'awx_{self.remote_name}_messages_received',
+ 'Number of forwarded messages received by the broadcast websocket system, for the duration of the current connection',
+ registry=self._registry,
+ )
+ self._connection = Enum(
+ f'awx_{self.remote_name}_connection', 'Websocket broadcast connection', states=['disconnected', 'connected'], registry=self._registry
+ )
self._connection.state('disconnected')
- self._connection_start = Gauge(f'awx_{self.remote_name}_connection_start',
- 'Time the connection was established',
- registry=self._registry)
+ self._connection_start = Gauge(f'awx_{self.remote_name}_connection_start', 'Time the connection was established', registry=self._registry)
- self._messages_received_per_minute = Gauge(f'awx_{self.remote_name}_messages_received_per_minute',
- 'Messages received per minute',
- registry=self._registry)
+ self._messages_received_per_minute = Gauge(
+ f'awx_{self.remote_name}_messages_received_per_minute', 'Messages received per minute', registry=self._registry
+ )
self._internal_messages_received_per_minute = FixedSlidingWindow()
def unregister(self):
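# A hedged sketch of black's "magic trailing comma", which explains the
# exploded select_related/prefetch_related tuples above: a collection whose
# last element is followed by a comma stays one element per line, while the
# same tuple without the trailing comma is collapsed when it fits.

exploded = (
    'created_by',
    'modified_by',
    'organization',
)  # trailing comma present, so black keeps it multi-line

collapsed = ('created_by', 'modified_by', 'organization')  # no trailing comma

assert exploded == collapsed


class Stats:  # black also drops the empty parens from `class Stats():`
    pass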
diff --git a/awx/main/analytics/collectors.py b/awx/main/analytics/collectors.py
index d280ffe753..abc10682a9 100644
--- a/awx/main/analytics/collectors.py
+++ b/awx/main/analytics/collectors.py
@@ -10,8 +10,7 @@ from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from awx.conf.license import get_license
-from awx.main.utils import (get_awx_version, get_ansible_version,
- get_custom_venv_choices, camelcase_to_underscore)
+from awx.main.utils import get_awx_version, get_ansible_version, get_custom_venv_choices, camelcase_to_underscore
from awx.main import models
from django.contrib.sessions.models import Session
from awx.main.analytics import register
@@ -68,96 +67,99 @@ def config(since, **kwargs):
@register('counts', '1.0', description=_('Counts of objects such as organizations, inventories, and projects'))
def counts(since, **kwargs):
counts = {}
- for cls in (models.Organization, models.Team, models.User,
- models.Inventory, models.Credential, models.Project,
- models.JobTemplate, models.WorkflowJobTemplate,
- models.Host, models.Schedule, models.CustomInventoryScript,
- models.NotificationTemplate):
+ for cls in (
+ models.Organization,
+ models.Team,
+ models.User,
+ models.Inventory,
+ models.Credential,
+ models.Project,
+ models.JobTemplate,
+ models.WorkflowJobTemplate,
+ models.Host,
+ models.Schedule,
+ models.CustomInventoryScript,
+ models.NotificationTemplate,
+ ):
counts[camelcase_to_underscore(cls.__name__)] = cls.objects.count()
venvs = get_custom_venv_choices()
- counts['custom_virtualenvs'] = len([
- v for v in venvs
- if os.path.basename(v.rstrip('/')) != 'ansible'
- ])
+ counts['custom_virtualenvs'] = len([v for v in venvs if os.path.basename(v.rstrip('/')) != 'ansible'])
inv_counts = dict(models.Inventory.objects.order_by().values_list('kind').annotate(Count('kind')))
inv_counts['normal'] = inv_counts.get('', 0)
inv_counts.pop('', None)
inv_counts['smart'] = inv_counts.get('smart', 0)
counts['inventories'] = inv_counts
-
- counts['unified_job'] = models.UnifiedJob.objects.exclude(launch_type='sync').count() # excludes implicit project_updates
- counts['active_host_count'] = models.Host.objects.active_count()
+
+ counts['unified_job'] = models.UnifiedJob.objects.exclude(launch_type='sync').count() # excludes implicit project_updates
+ counts['active_host_count'] = models.Host.objects.active_count()
active_sessions = Session.objects.filter(expire_date__gte=now()).count()
active_user_sessions = models.UserSessionMembership.objects.select_related('session').filter(session__expire_date__gte=now()).count()
active_anonymous_sessions = active_sessions - active_user_sessions
counts['active_sessions'] = active_sessions
counts['active_user_sessions'] = active_user_sessions
counts['active_anonymous_sessions'] = active_anonymous_sessions
- counts['running_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').filter(status__in=('running', 'waiting',)).count()
+ counts['running_jobs'] = (
+ models.UnifiedJob.objects.exclude(launch_type='sync')
+ .filter(
+ status__in=(
+ 'running',
+ 'waiting',
+ )
+ )
+ .count()
+ )
counts['pending_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').filter(status__in=('pending',)).count()
return counts
-
+
@register('org_counts', '1.0', description=_('Counts of users and teams by organization'))
def org_counts(since, **kwargs):
counts = {}
- for org in models.Organization.objects.annotate(num_users=Count('member_role__members', distinct=True),
- num_teams=Count('teams', distinct=True)).values('name', 'id', 'num_users', 'num_teams'):
- counts[org['id']] = {'name': org['name'],
- 'users': org['num_users'],
- 'teams': org['num_teams']
- }
+ for org in models.Organization.objects.annotate(num_users=Count('member_role__members', distinct=True), num_teams=Count('teams', distinct=True)).values(
+ 'name', 'id', 'num_users', 'num_teams'
+ ):
+ counts[org['id']] = {'name': org['name'], 'users': org['num_users'], 'teams': org['num_teams']}
return counts
-
-
+
+
@register('cred_type_counts', '1.0', description=_('Counts of credentials by credential type'))
def cred_type_counts(since, **kwargs):
counts = {}
- for cred_type in models.CredentialType.objects.annotate(num_credentials=Count(
- 'credentials', distinct=True)).values('name', 'id', 'managed_by_tower', 'num_credentials'):
- counts[cred_type['id']] = {'name': cred_type['name'],
- 'credential_count': cred_type['num_credentials'],
- 'managed_by_tower': cred_type['managed_by_tower']
- }
+ for cred_type in models.CredentialType.objects.annotate(num_credentials=Count('credentials', distinct=True)).values(
+ 'name', 'id', 'managed_by_tower', 'num_credentials'
+ ):
+ counts[cred_type['id']] = {
+ 'name': cred_type['name'],
+ 'credential_count': cred_type['num_credentials'],
+ 'managed_by_tower': cred_type['managed_by_tower'],
+ }
return counts
-
-
+
+
@register('inventory_counts', '1.2', description=_('Inventories, their inventory sources, and host counts'))
def inventory_counts(since, **kwargs):
counts = {}
- for inv in models.Inventory.objects.filter(kind='').annotate(num_sources=Count('inventory_sources', distinct=True),
- num_hosts=Count('hosts', distinct=True)).only('id', 'name', 'kind'):
+ for inv in (
+ models.Inventory.objects.filter(kind='')
+ .annotate(num_sources=Count('inventory_sources', distinct=True), num_hosts=Count('hosts', distinct=True))
+ .only('id', 'name', 'kind')
+ ):
source_list = []
- for source in inv.inventory_sources.filter().annotate(num_hosts=Count('hosts', distinct=True)).values('name','source', 'num_hosts'):
+ for source in inv.inventory_sources.filter().annotate(num_hosts=Count('hosts', distinct=True)).values('name', 'source', 'num_hosts'):
source_list.append(source)
- counts[inv.id] = {'name': inv.name,
- 'kind': inv.kind,
- 'hosts': inv.num_hosts,
- 'sources': inv.num_sources,
- 'source_list': source_list
- }
+ counts[inv.id] = {'name': inv.name, 'kind': inv.kind, 'hosts': inv.num_hosts, 'sources': inv.num_sources, 'source_list': source_list}
for smart_inv in models.Inventory.objects.filter(kind='smart'):
- counts[smart_inv.id] = {'name': smart_inv.name,
- 'kind': smart_inv.kind,
- 'hosts': smart_inv.hosts.count(),
- 'sources': 0,
- 'source_list': []
- }
+ counts[smart_inv.id] = {'name': smart_inv.name, 'kind': smart_inv.kind, 'hosts': smart_inv.hosts.count(), 'sources': 0, 'source_list': []}
return counts
@register('projects_by_scm_type', '1.0', description=_('Counts of projects by source control type'))
def projects_by_scm_type(since, **kwargs):
- counts = dict(
- (t[0] or 'manual', 0)
- for t in models.Project.SCM_TYPE_CHOICES
- )
- for result in models.Project.objects.values('scm_type').annotate(
- count=Count('scm_type')
- ).order_by('scm_type'):
+ counts = dict((t[0] or 'manual', 0) for t in models.Project.SCM_TYPE_CHOICES)
+ for result in models.Project.objects.values('scm_type').annotate(count=Count('scm_type')).order_by('scm_type'):
counts[result['scm_type'] or 'manual'] = result['count']
return counts
@@ -172,10 +174,10 @@ def _get_isolated_datetime(last_check):
def instance_info(since, include_hostnames=False, **kwargs):
info = {}
instances = models.Instance.objects.values_list('hostname').values(
- 'uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'hostname', 'last_isolated_check', 'enabled')
+ 'uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'hostname', 'last_isolated_check', 'enabled'
+ )
for instance in instances:
- consumed_capacity = sum(x.task_impact for x in models.UnifiedJob.objects.filter(execution_node=instance['hostname'],
- status__in=('running', 'waiting')))
+ consumed_capacity = sum(x.task_impact for x in models.UnifiedJob.objects.filter(execution_node=instance['hostname'], status__in=('running', 'waiting')))
instance_info = {
'uuid': instance['uuid'],
'version': instance['version'],
@@ -186,7 +188,7 @@ def instance_info(since, include_hostnames=False, **kwargs):
'last_isolated_check': _get_isolated_datetime(instance['last_isolated_check']),
'enabled': instance['enabled'],
'consumed_capacity': consumed_capacity,
- 'remaining_capacity': instance['capacity'] - consumed_capacity
+ 'remaining_capacity': instance['capacity'] - consumed_capacity,
}
if include_hostnames is True:
instance_info['hostname'] = instance['hostname']
@@ -198,20 +200,22 @@ def job_counts(since, **kwargs):
counts = {}
counts['total_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').count()
counts['status'] = dict(models.UnifiedJob.objects.exclude(launch_type='sync').values_list('status').annotate(Count('status')).order_by())
- counts['launch_type'] = dict(models.UnifiedJob.objects.exclude(launch_type='sync').values_list(
- 'launch_type').annotate(Count('launch_type')).order_by())
+ counts['launch_type'] = dict(models.UnifiedJob.objects.exclude(launch_type='sync').values_list('launch_type').annotate(Count('launch_type')).order_by())
return counts
-
-
+
+
def job_instance_counts(since, **kwargs):
counts = {}
- job_types = models.UnifiedJob.objects.exclude(launch_type='sync').values_list(
- 'execution_node', 'launch_type').annotate(job_launch_type=Count('launch_type')).order_by()
+ job_types = (
+ models.UnifiedJob.objects.exclude(launch_type='sync')
+ .values_list('execution_node', 'launch_type')
+ .annotate(job_launch_type=Count('launch_type'))
+ .order_by()
+ )
for job in job_types:
counts.setdefault(job[0], {}).setdefault('launch_type', {})[job[1]] = job[2]
-
- job_statuses = models.UnifiedJob.objects.exclude(launch_type='sync').values_list(
- 'execution_node', 'status').annotate(job_status=Count('status')).order_by()
+
+ job_statuses = models.UnifiedJob.objects.exclude(launch_type='sync').values_list('execution_node', 'status').annotate(job_status=Count('status')).order_by()
for job in job_statuses:
counts.setdefault(job[0], {}).setdefault('status', {})[job[1]] = job[2]
return counts
@@ -261,12 +265,12 @@ class FileSplitter(io.StringIO):
self.files = self.files[:-1]
# If we only have one file, remove the suffix
if len(self.files) == 1:
- os.rename(self.files[0],self.files[0].replace('_split0',''))
+ os.rename(self.files[0], self.files[0].replace('_split0', ''))
return self.files
def write(self, s):
if not self.header:
- self.header = s[0:s.index('\n')]
+ self.header = s[0 : s.index('\n')]
self.counter += self.currentfile.write(s)
if self.counter >= MAX_TABLE_SIZE:
self.cycle_file()
@@ -307,7 +311,9 @@ def events_table(since, full_path, until, **kwargs):
FROM main_jobevent
WHERE (main_jobevent.created > '{}' AND main_jobevent.created <= '{}')
ORDER BY main_jobevent.id ASC) TO STDOUT WITH CSV HEADER
- '''.format(since.isoformat(),until.isoformat())
+ '''.format(
+ since.isoformat(), until.isoformat()
+ )
return _copy_table(table='events', query=events_query, path=full_path)
@@ -346,7 +352,9 @@ def unified_jobs_table(since, full_path, until, **kwargs):
OR (main_unifiedjob.finished > '{0}' AND main_unifiedjob.finished <= '{1}'))
AND main_unifiedjob.launch_type != 'sync'
ORDER BY main_unifiedjob.id ASC) TO STDOUT WITH CSV HEADER
- '''.format(since.isoformat(),until.isoformat())
+ '''.format(
+ since.isoformat(), until.isoformat()
+ )
return _copy_table(table='unified_jobs', query=unified_job_query, path=full_path)
@@ -369,7 +377,7 @@ def unified_job_template_table(since, full_path, **kwargs):
main_unifiedjobtemplate.status
FROM main_unifiedjobtemplate, django_content_type
WHERE main_unifiedjobtemplate.polymorphic_ctype_id = django_content_type.id
- ORDER BY main_unifiedjobtemplate.id ASC) TO STDOUT WITH CSV HEADER'''
+ ORDER BY main_unifiedjobtemplate.id ASC) TO STDOUT WITH CSV HEADER'''
return _copy_table(table='unified_job_template', query=unified_job_template_query, path=full_path)
@@ -405,7 +413,9 @@ def workflow_job_node_table(since, full_path, until, **kwargs):
) always_nodes ON main_workflowjobnode.id = always_nodes.from_workflowjobnode_id
WHERE (main_workflowjobnode.modified > '{}' AND main_workflowjobnode.modified <= '{}')
ORDER BY main_workflowjobnode.id ASC) TO STDOUT WITH CSV HEADER
- '''.format(since.isoformat(),until.isoformat())
+ '''.format(
+ since.isoformat(), until.isoformat()
+ )
return _copy_table(table='workflow_job_node', query=workflow_job_node_query, path=full_path)
@@ -437,5 +447,5 @@ def workflow_job_template_node_table(since, full_path, **kwargs):
FROM main_workflowjobtemplatenode_always_nodes
GROUP BY from_workflowjobtemplatenode_id
) always_nodes ON main_workflowjobtemplatenode.id = always_nodes.from_workflowjobtemplatenode_id
- ORDER BY main_workflowjobtemplatenode.id ASC) TO STDOUT WITH CSV HEADER'''
+ ORDER BY main_workflowjobtemplatenode.id ASC) TO STDOUT WITH CSV HEADER'''
return _copy_table(table='workflow_job_template_node', query=workflow_job_template_node_query, path=full_path)
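# A hedged sketch, on a toy string, of two mechanical rewrites visible in
# collectors.py above:

s = 'header\nbody'

# 1. The slice colon is treated as a binary operator, so a bound that is a
#    call expression gets a space on each side:
#    s[0:s.index('\n')]  ->  s[0 : s.index('\n')]
header = s[0 : s.index('\n')]

# 2. Missing spaces after commas are added, e.g.
#    .format(since.isoformat(),until.isoformat())
#    ->  .format(since.isoformat(), until.isoformat())
query = 'from {} to {}'.format('2021-01-01', '2021-03-19')  # illustrative values

assert header == 'header'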
diff --git a/awx/main/analytics/core.py b/awx/main/analytics/core.py
index e9f7f99bc0..69b992a1c2 100644
--- a/awx/main/analytics/core.py
+++ b/awx/main/analytics/core.py
@@ -43,7 +43,7 @@ def all_collectors():
key = func.__awx_analytics_key__
desc = func.__awx_analytics_description__ or ''
version = func.__awx_analytics_version__
- collector_dict[key] = { 'name': key, 'version': version, 'description': desc}
+ collector_dict[key] = {'name': key, 'version': version, 'description': desc}
return collector_dict
@@ -82,7 +82,7 @@ def register(key, version, description=None, format='json', expensive=False):
return decorate
-def gather(dest=None, module=None, subset = None, since = None, until = now(), collection_type='scheduled'):
+def gather(dest=None, module=None, subset=None, since=None, until=now(), collection_type='scheduled'):
"""
Gather all defined metrics and write them as JSON files in a .tgz
@@ -90,6 +90,7 @@ def gather(dest=None, module=None, subset = None, since = None, until = now(), c
:param module: the module to search for registered analytic collector
functions; defaults to awx.main.analytics.collectors
"""
+
def _write_manifest(destdir, manifest):
path = os.path.join(destdir, 'manifest.json')
with open(path, 'w', encoding='utf-8') as f:
@@ -116,13 +117,10 @@ def gather(dest=None, module=None, subset = None, since = None, until = now(), c
collector_module = module
else:
from awx.main.analytics import collectors
+
collector_module = collectors
for name, func in inspect.getmembers(collector_module):
- if (
- inspect.isfunction(func) and
- hasattr(func, '__awx_analytics_key__') and
- (not subset or name in subset)
- ):
+ if inspect.isfunction(func) and hasattr(func, '__awx_analytics_key__') and (not subset or name in subset):
collector_list.append((name, func))
manifest = dict()
@@ -162,6 +160,7 @@ def gather(dest=None, module=None, subset = None, since = None, until = now(), c
# Always include config.json if we're using our collectors
if 'config.json' not in manifest.keys() and not module:
from awx.main.analytics import collectors
+
config = collectors.config
path = '{}.json'.format(os.path.join(gather_dir, config.__awx_analytics_key__))
with open(path, 'w', encoding='utf-8') as f:
@@ -204,22 +203,14 @@ def gather(dest=None, module=None, subset = None, since = None, until = now(), c
for i in range(0, len(stage_dirs)):
stage_dir = stage_dirs[i]
# can't use isoformat() since it has colons, which GNU tar doesn't like
- tarname = '_'.join([
- settings.SYSTEM_UUID,
- until.strftime('%Y-%m-%d-%H%M%S%z'),
- str(i)
- ])
- tgz = shutil.make_archive(
- os.path.join(os.path.dirname(dest), tarname),
- 'gztar',
- stage_dir
- )
+ tarname = '_'.join([settings.SYSTEM_UUID, until.strftime('%Y-%m-%d-%H%M%S%z'), str(i)])
+ tgz = shutil.make_archive(os.path.join(os.path.dirname(dest), tarname), 'gztar', stage_dir)
tarfiles.append(tgz)
except Exception:
- shutil.rmtree(stage_dir, ignore_errors = True)
+ shutil.rmtree(stage_dir, ignore_errors=True)
logger.exception("Failed to write analytics archive file")
finally:
- shutil.rmtree(dest, ignore_errors = True)
+ shutil.rmtree(dest, ignore_errors=True)
return tarfiles
@@ -253,16 +244,17 @@ def ship(path):
s.headers = get_awx_http_client_headers()
s.headers.pop('Content-Type')
with set_environ(**settings.AWX_TASK_ENV):
- response = s.post(url,
- files=files,
- verify="/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",
- auth=(rh_user, rh_password),
- headers=s.headers,
- timeout=(31, 31))
+ response = s.post(
+ url,
+ files=files,
+ verify="/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",
+ auth=(rh_user, rh_password),
+ headers=s.headers,
+ timeout=(31, 31),
+ )
# Accept 2XX status_codes
if response.status_code >= 300:
- return logger.exception('Upload failed with status {}, {}'.format(response.status_code,
- response.text))
+ return logger.exception('Upload failed with status {}, {}'.format(response.status_code, response.text))
finally:
# cleanup tar.gz
if os.path.exists(path):
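# A hedged sketch of PEP 8 E251 as enforced in core.py above: `=` marking a
# keyword argument (or a default value) gets no surrounding spaces, so
# `shutil.rmtree(dest, ignore_errors = True)` becomes
# `shutil.rmtree(dest, ignore_errors=True)`.

import shutil
import tempfile

tmp = tempfile.mkdtemp()
shutil.rmtree(tmp, ignore_errors=True)  # spaces around '=' removed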
diff --git a/awx/main/analytics/metrics.py b/awx/main/analytics/metrics.py
index 20bf8ae830..e889719ded 100644
--- a/awx/main/analytics/metrics.py
+++ b/awx/main/analytics/metrics.py
@@ -1,16 +1,8 @@
from django.conf import settings
-from prometheus_client import (
- REGISTRY,
- PROCESS_COLLECTOR,
- PLATFORM_COLLECTOR,
- GC_COLLECTOR,
- Gauge,
- Info,
- generate_latest
-)
+from prometheus_client import REGISTRY, PROCESS_COLLECTOR, PLATFORM_COLLECTOR, GC_COLLECTOR, Gauge, Info, generate_latest
from awx.conf.license import get_license
-from awx.main.utils import (get_awx_version, get_ansible_version)
+from awx.main.utils import get_awx_version, get_ansible_version
from awx.main.analytics.collectors import (
counts,
instance_info,
@@ -31,23 +23,97 @@ INV_COUNT = Gauge('awx_inventories_total', 'Number of inventories')
PROJ_COUNT = Gauge('awx_projects_total', 'Number of projects')
JT_COUNT = Gauge('awx_job_templates_total', 'Number of job templates')
WFJT_COUNT = Gauge('awx_workflow_job_templates_total', 'Number of workflow job templates')
-HOST_COUNT = Gauge('awx_hosts_total', 'Number of hosts', ['type',])
+HOST_COUNT = Gauge(
+ 'awx_hosts_total',
+ 'Number of hosts',
+ [
+ 'type',
+ ],
+)
SCHEDULE_COUNT = Gauge('awx_schedules_total', 'Number of schedules')
INV_SCRIPT_COUNT = Gauge('awx_inventory_scripts_total', 'Number of inventory scripts')
-USER_SESSIONS = Gauge('awx_sessions_total', 'Number of sessions', ['type',])
+USER_SESSIONS = Gauge(
+ 'awx_sessions_total',
+ 'Number of sessions',
+ [
+ 'type',
+ ],
+)
CUSTOM_VENVS = Gauge('awx_custom_virtualenvs_total', 'Number of virtualenvs')
RUNNING_JOBS = Gauge('awx_running_jobs_total', 'Number of running jobs on the Tower system')
PENDING_JOBS = Gauge('awx_pending_jobs_total', 'Number of pending jobs on the Tower system')
-STATUS = Gauge('awx_status_total', 'Status of Job launched', ['status',])
+STATUS = Gauge(
+ 'awx_status_total',
+ 'Status of Job launched',
+ [
+ 'status',
+ ],
+)
-INSTANCE_CAPACITY = Gauge('awx_instance_capacity', 'Capacity of each node in a Tower system', ['hostname', 'instance_uuid',])
-INSTANCE_CPU = Gauge('awx_instance_cpu', 'CPU cores on each node in a Tower system', ['hostname', 'instance_uuid',])
-INSTANCE_MEMORY = Gauge('awx_instance_memory', 'RAM (Kb) on each node in a Tower system', ['hostname', 'instance_uuid',])
-INSTANCE_INFO = Info('awx_instance', 'Info about each node in a Tower system', ['hostname', 'instance_uuid',])
-INSTANCE_LAUNCH_TYPE = Gauge('awx_instance_launch_type_total', 'Type of Job launched', ['node', 'launch_type',])
-INSTANCE_STATUS = Gauge('awx_instance_status_total', 'Status of Job launched', ['node', 'status',])
-INSTANCE_CONSUMED_CAPACITY = Gauge('awx_instance_consumed_capacity', 'Consumed capacity of each node in a Tower system', ['hostname', 'instance_uuid',])
-INSTANCE_REMAINING_CAPACITY = Gauge('awx_instance_remaining_capacity', 'Remaining capacity of each node in a Tower system', ['hostname', 'instance_uuid',])
+INSTANCE_CAPACITY = Gauge(
+ 'awx_instance_capacity',
+ 'Capacity of each node in a Tower system',
+ [
+ 'hostname',
+ 'instance_uuid',
+ ],
+)
+INSTANCE_CPU = Gauge(
+ 'awx_instance_cpu',
+ 'CPU cores on each node in a Tower system',
+ [
+ 'hostname',
+ 'instance_uuid',
+ ],
+)
+INSTANCE_MEMORY = Gauge(
+ 'awx_instance_memory',
+ 'RAM (Kb) on each node in a Tower system',
+ [
+ 'hostname',
+ 'instance_uuid',
+ ],
+)
+INSTANCE_INFO = Info(
+ 'awx_instance',
+ 'Info about each node in a Tower system',
+ [
+ 'hostname',
+ 'instance_uuid',
+ ],
+)
+INSTANCE_LAUNCH_TYPE = Gauge(
+ 'awx_instance_launch_type_total',
+ 'Type of Job launched',
+ [
+ 'node',
+ 'launch_type',
+ ],
+)
+INSTANCE_STATUS = Gauge(
+ 'awx_instance_status_total',
+ 'Status of Job launched',
+ [
+ 'node',
+ 'status',
+ ],
+)
+INSTANCE_CONSUMED_CAPACITY = Gauge(
+ 'awx_instance_consumed_capacity',
+ 'Consumed capacity of each node in a Tower system',
+ [
+ 'hostname',
+ 'instance_uuid',
+ ],
+)
+INSTANCE_REMAINING_CAPACITY = Gauge(
+ 'awx_instance_remaining_capacity',
+ 'Remaining capacity of each node in a Tower system',
+ [
+ 'hostname',
+ 'instance_uuid',
+ ],
+)
LICENSE_INSTANCE_TOTAL = Gauge('awx_license_instance_total', 'Total number of managed hosts provided by your license')
LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining managed hosts provided by your license')
@@ -55,18 +121,20 @@ LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining
def metrics():
license_info = get_license()
- SYSTEM_INFO.info({
- 'install_uuid': settings.INSTALL_UUID,
- 'insights_analytics': str(settings.INSIGHTS_TRACKING_STATE),
- 'tower_url_base': settings.TOWER_URL_BASE,
- 'tower_version': get_awx_version(),
- 'ansible_version': get_ansible_version(),
- 'license_type': license_info.get('license_type', 'UNLICENSED'),
- 'license_expiry': str(license_info.get('time_remaining', 0)),
- 'pendo_tracking': settings.PENDO_TRACKING_STATE,
- 'external_logger_enabled': str(settings.LOG_AGGREGATOR_ENABLED),
- 'external_logger_type': getattr(settings, 'LOG_AGGREGATOR_TYPE', 'None')
- })
+ SYSTEM_INFO.info(
+ {
+ 'install_uuid': settings.INSTALL_UUID,
+ 'insights_analytics': str(settings.INSIGHTS_TRACKING_STATE),
+ 'tower_url_base': settings.TOWER_URL_BASE,
+ 'tower_version': get_awx_version(),
+ 'ansible_version': get_ansible_version(),
+ 'license_type': license_info.get('license_type', 'UNLICENSED'),
+ 'license_expiry': str(license_info.get('time_remaining', 0)),
+ 'pendo_tracking': settings.PENDO_TRACKING_STATE,
+ 'external_logger_enabled': str(settings.LOG_AGGREGATOR_ENABLED),
+ 'external_logger_type': getattr(settings, 'LOG_AGGREGATOR_TYPE', 'None'),
+ }
+ )
LICENSE_INSTANCE_TOTAL.set(str(license_info.get('instance_count', 0)))
LICENSE_INSTANCE_FREE.set(str(license_info.get('free_instances', 0)))
@@ -108,16 +176,18 @@ def metrics():
INSTANCE_MEMORY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['memory'])
INSTANCE_CONSUMED_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['consumed_capacity'])
INSTANCE_REMAINING_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['remaining_capacity'])
- INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid).info({
- 'enabled': str(instance_data[uuid]['enabled']),
- 'last_isolated_check': getattr(instance_data[uuid], 'last_isolated_check', 'None'),
- 'managed_by_policy': str(instance_data[uuid]['managed_by_policy']),
- 'version': instance_data[uuid]['version']
- })
+ INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid).info(
+ {
+ 'enabled': str(instance_data[uuid]['enabled']),
+ 'last_isolated_check': getattr(instance_data[uuid], 'last_isolated_check', 'None'),
+ 'managed_by_policy': str(instance_data[uuid]['managed_by_policy']),
+ 'version': instance_data[uuid]['version'],
+ }
+ )
instance_data = job_instance_counts(None)
for node in instance_data:
- # skipping internal execution node (for system jobs)
+ # skipping internal execution node (for system jobs)
if node == '':
continue
types = instance_data[node].get('launch_type', {})
@@ -127,7 +197,6 @@ def metrics():
for status, value in statuses.items():
INSTANCE_STATUS.labels(node=node, status=status).set(value)
-
return generate_latest()
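# A hedged sketch of how black lays out a call whose single dict argument
# overflows the line, matching the SYSTEM_INFO.info(...) and
# INSTANCE_INFO.labels(...).info(...) rewrites above; record() is a
# hypothetical stand-in for Info.info(), and the values are illustrative.


def record(payload):
    return payload


record(
    {
        'install_uuid': 'example-uuid',
        'tower_version': 'x.y.z',
    }
)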
diff --git a/awx/main/conf.py b/awx/main/conf.py
index f46371e22b..2cfe06a25f 100644
--- a/awx/main/conf.py
+++ b/awx/main/conf.py
@@ -37,8 +37,7 @@ register(
'ORG_ADMINS_CAN_SEE_ALL_USERS',
field_class=fields.BooleanField,
label=_('All Users Visible to Organization Admins'),
- help_text=_('Controls whether any Organization Admin can view all users and teams, '
- 'even those not associated with their Organization.'),
+ help_text=_('Controls whether any Organization Admin can view all users and teams, ' 'even those not associated with their Organization.'),
category=_('System'),
category_slug='system',
)
@@ -47,8 +46,10 @@ register(
'MANAGE_ORGANIZATION_AUTH',
field_class=fields.BooleanField,
label=_('Organization Admins Can Manage Users and Teams'),
- help_text=_('Controls whether any Organization Admin has the privileges to create and manage users and teams. '
- 'You may want to disable this ability if you are using an LDAP or SAML integration.'),
+ help_text=_(
+ 'Controls whether any Organization Admin has the privileges to create and manage users and teams. '
+ 'You may want to disable this ability if you are using an LDAP or SAML integration.'
+ ),
category=_('System'),
category_slug='system',
)
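# A hedged sketch of how black treats the implicit string concatenations in
# the help_text=_() calls throughout conf.py: adjacent literals are never
# merged into one literal, but they sit on a single line when the whole call
# fits the configured length, and one literal per line when it does not.
short = 'fits on ' 'one line'
long_text = (
    'Controls whether any Organization Admin has the privileges to create and manage users and teams. '
    'You may want to disable this ability if you are using an LDAP or SAML integration.'
)
assert short == 'fits on one line'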
@@ -59,8 +60,7 @@ register(
schemes=('http', 'https'),
allow_plain_hostname=True, # Allow hostname only without TLD.
label=_('Base URL of the Tower host'),
- help_text=_('This setting is used by services like notifications to render '
- 'a valid url to the Tower host.'),
+ help_text=_('This setting is used by services like notifications to render ' 'a valid url to the Tower host.'),
category=_('System'),
category_slug='system',
)
@@ -69,11 +69,13 @@ register(
'REMOTE_HOST_HEADERS',
field_class=fields.StringListField,
label=_('Remote Host Headers'),
- help_text=_('HTTP headers and meta keys to search to determine remote host '
- 'name or IP. Add additional items to this list, such as '
- '"HTTP_X_FORWARDED_FOR", if behind a reverse proxy. '
- 'See the "Proxy Support" section of the Administrator guide for '
- 'more details.'),
+ help_text=_(
+ 'HTTP headers and meta keys to search to determine remote host '
+ 'name or IP. Add additional items to this list, such as '
+ '"HTTP_X_FORWARDED_FOR", if behind a reverse proxy. '
+ 'See the "Proxy Support" section of the Administrator guide for '
+ 'more details.'
+ ),
category=_('System'),
category_slug='system',
)
@@ -82,11 +84,13 @@ register(
'PROXY_IP_ALLOWED_LIST',
field_class=fields.StringListField,
label=_('Proxy IP Allowed List'),
- help_text=_("If Tower is behind a reverse proxy/load balancer, use this setting "
- "to configure the proxy IP addresses from which Tower should trust "
- "custom REMOTE_HOST_HEADERS header values. "
- "If this setting is an empty list (the default), the headers specified by "
- "REMOTE_HOST_HEADERS will be trusted unconditionally')"),
+ help_text=_(
+ "If Tower is behind a reverse proxy/load balancer, use this setting "
+ "to configure the proxy IP addresses from which Tower should trust "
+ "custom REMOTE_HOST_HEADERS header values. "
+ "If this setting is an empty list (the default), the headers specified by "
+ "REMOTE_HOST_HEADERS will be trusted unconditionally')"
+ ),
category=_('System'),
category_slug='system',
)
@@ -97,9 +101,7 @@ register(
field_class=fields.DictField,
default=lambda: {},
label=_('License'),
- help_text=_('The license controls which features and functionality are '
- 'enabled. Use /api/v2/config/ to update or change '
- 'the license.'),
+ help_text=_('The license controls which features and functionality are ' 'enabled. Use /api/v2/config/ to update or change ' 'the license.'),
category=_('System'),
category_slug='system',
)
@@ -193,8 +195,7 @@ register(
'CUSTOM_VENV_PATHS',
field_class=fields.StringListPathField,
label=_('Custom virtual environment paths'),
- help_text=_('Paths where Tower will look for custom virtual environments '
- '(in addition to /var/lib/awx/venv/). Enter one path per line.'),
+ help_text=_('Paths where Tower will look for custom virtual environments ' '(in addition to /var/lib/awx/venv/). Enter one path per line.'),
category=_('System'),
category_slug='system',
default=[],
@@ -244,9 +245,11 @@ register(
'AWX_PROOT_BASE_PATH',
field_class=fields.CharField,
label=_('Job execution path'),
- help_text=_('The directory in which Tower will create new temporary '
- 'directories for job execution and isolation '
- '(such as credential files and custom inventory scripts).'),
+ help_text=_(
+ 'The directory in which Tower will create new temporary '
+ 'directories for job execution and isolation '
+ '(such as credential files and custom inventory scripts).'
+ ),
category=_('Jobs'),
category_slug='jobs',
)
@@ -287,8 +290,10 @@ register(
field_class=fields.IntegerField,
min_value=0,
label=_('Isolated launch timeout'),
- help_text=_('The timeout (in seconds) for launching jobs on isolated instances. '
- 'This includes the time needed to copy source control files (playbooks) to the isolated instance.'),
+ help_text=_(
+ 'The timeout (in seconds) for launching jobs on isolated instances. '
+ 'This includes the time needed to copy source control files (playbooks) to the isolated instance.'
+ ),
category=_('Jobs'),
category_slug='jobs',
unit=_('seconds'),
@@ -300,8 +305,10 @@ register(
min_value=0,
default=10,
label=_('Isolated connection timeout'),
- help_text=_('Ansible SSH connection timeout (in seconds) to use when communicating with isolated instances. '
- 'Value should be substantially greater than expected network latency.'),
+ help_text=_(
+ 'Ansible SSH connection timeout (in seconds) to use when communicating with isolated instances. '
+ 'Value should be substantially greater than expected network latency.'
+ ),
category=_('Jobs'),
category_slug='jobs',
unit=_('seconds'),
@@ -314,7 +321,7 @@ register(
help_text=_('When set to True, AWX will enforce strict host key checking for communication with isolated nodes.'),
category=_('Jobs'),
category_slug='jobs',
- default=False
+ default=False,
)
register(
@@ -322,9 +329,11 @@ register(
field_class=fields.BooleanField,
default=True,
label=_('Generate RSA keys for isolated instances'),
- help_text=_('If set, a random RSA key will be generated and distributed to '
- 'isolated instances. To disable this behavior and manage authentication '
- 'for isolated instances outside of Tower, disable this setting.'), # noqa
+ help_text=_(
+ 'If set, a random RSA key will be generated and distributed to '
+ 'isolated instances. To disable this behavior and manage authentication '
+ 'for isolated instances outside of Tower, disable this setting.'
+ ), # noqa
category=_('Jobs'),
category_slug='jobs',
)
@@ -359,8 +368,7 @@ register(
field_class=fields.BooleanField,
default=False,
label=_('Enable detailed resource profiling on all playbook runs'),
- help_text=_('If set, detailed resource profiling data will be collected on all jobs. '
- 'This data can be gathered with `sosreport`.'), # noqa
+ help_text=_('If set, detailed resource profiling data will be collected on all jobs. ' 'This data can be gathered with `sosreport`.'), # noqa
category=_('Jobs'),
category_slug='jobs',
)
@@ -370,8 +378,7 @@ register(
field_class=FloatField,
default='0.25',
label=_('Interval (in seconds) between polls for cpu usage.'),
- help_text=_('Interval (in seconds) between polls for cpu usage. '
- 'Setting this lower than the default will affect playbook performance.'),
+ help_text=_('Interval (in seconds) between polls for cpu usage. ' 'Setting this lower than the default will affect playbook performance.'),
category=_('Jobs'),
category_slug='jobs',
required=False,
@@ -382,8 +389,7 @@ register(
field_class=FloatField,
default='0.25',
label=_('Interval (in seconds) between polls for memory usage.'),
- help_text=_('Interval (in seconds) between polls for memory usage. '
- 'Setting this lower than the default will affect playbook performance.'),
+ help_text=_('Interval (in seconds) between polls for memory usage. ' 'Setting this lower than the default will affect playbook performance.'),
category=_('Jobs'),
category_slug='jobs',
required=False,
@@ -394,8 +400,7 @@ register(
field_class=FloatField,
default='0.25',
label=_('Interval (in seconds) between polls for PID count.'),
- help_text=_('Interval (in seconds) between polls for PID count. '
- 'Setting this lower than the default will affect playbook performance.'),
+ help_text=_('Interval (in seconds) between polls for PID count. ' 'Setting this lower than the default will affect playbook performance.'),
category=_('Jobs'),
category_slug='jobs',
required=False,
@@ -469,10 +474,9 @@ register(
field_class=fields.BooleanField,
default=False,
label=_('Ignore Ansible Galaxy SSL Certificate Verification'),
- help_text=_('If set to true, certificate validation will not be done when '
- 'installing content from any Galaxy server.'),
+ help_text=_('If set to true, certificate validation will not be done when ' 'installing content from any Galaxy server.'),
category=_('Jobs'),
- category_slug='jobs'
+ category_slug='jobs',
)
register(
@@ -491,7 +495,8 @@ register(
min_value=0,
label=_('Job Event Standard Output Maximum Display Size'),
help_text=_(
- u'Maximum Size of Standard Output in bytes to display for a single job or ad hoc command event. `stdout` will end with `\u2026` when truncated.'),
+ u'Maximum Size of Standard Output in bytes to display for a single job or ad hoc command event. `stdout` will end with `\u2026` when truncated.'
+ ),
category=_('Jobs'),
category_slug='jobs',
)
@@ -522,8 +527,10 @@ register(
min_value=0,
default=0,
label=_('Default Job Timeout'),
- help_text=_('Maximum time in seconds to allow jobs to run. Use value of 0 to indicate that no '
- 'timeout should be imposed. A timeout set on an individual job template will override this.'),
+ help_text=_(
+ 'Maximum time in seconds to allow jobs to run. Use value of 0 to indicate that no '
+ 'timeout should be imposed. A timeout set on an individual job template will override this.'
+ ),
category=_('Jobs'),
category_slug='jobs',
unit=_('seconds'),
@@ -535,8 +542,10 @@ register(
min_value=0,
default=0,
label=_('Default Inventory Update Timeout'),
- help_text=_('Maximum time in seconds to allow inventory updates to run. Use value of 0 to indicate that no '
- 'timeout should be imposed. A timeout set on an individual inventory source will override this.'),
+ help_text=_(
+ 'Maximum time in seconds to allow inventory updates to run. Use value of 0 to indicate that no '
+ 'timeout should be imposed. A timeout set on an individual inventory source will override this.'
+ ),
category=_('Jobs'),
category_slug='jobs',
unit=_('seconds'),
@@ -548,8 +557,10 @@ register(
min_value=0,
default=0,
label=_('Default Project Update Timeout'),
- help_text=_('Maximum time in seconds to allow project updates to run. Use value of 0 to indicate that no '
- 'timeout should be imposed. A timeout set on an individual project will override this.'),
+ help_text=_(
+ 'Maximum time in seconds to allow project updates to run. Use value of 0 to indicate that no '
+ 'timeout should be imposed. A timeout set on an individual project will override this.'
+ ),
category=_('Jobs'),
category_slug='jobs',
unit=_('seconds'),
@@ -561,10 +572,12 @@ register(
min_value=0,
default=0,
label=_('Per-Host Ansible Fact Cache Timeout'),
- help_text=_('Maximum time, in seconds, that stored Ansible facts are considered valid since '
- 'the last time they were modified. Only valid, non-stale, facts will be accessible by '
- 'a playbook. Note, this does not influence the deletion of ansible_facts from the database. '
- 'Use a value of 0 to indicate that no timeout should be imposed.'),
+ help_text=_(
+ 'Maximum time, in seconds, that stored Ansible facts are considered valid since '
+ 'the last time they were modified. Only valid, non-stale, facts will be accessible by '
+ 'a playbook. Note, this does not influence the deletion of ansible_facts from the database. '
+ 'Use a value of 0 to indicate that no timeout should be imposed.'
+ ),
category=_('Jobs'),
category_slug='jobs',
unit=_('seconds'),
@@ -576,8 +589,7 @@ register(
allow_null=False,
default=200,
label=_('Maximum number of forks per job'),
- help_text=_('Saving a Job Template with more than this number of forks will result in an error. '
- 'When set to 0, no limit is applied.'),
+ help_text=_('Saving a Job Template with more than this number of forks will result in an error. ' 'When set to 0, no limit is applied.'),
category=_('Jobs'),
category_slug='jobs',
)
@@ -598,11 +610,10 @@ register(
allow_null=True,
default=None,
label=_('Logging Aggregator Port'),
- help_text=_('Port on Logging Aggregator to send logs to (if required and not'
- ' provided in Logging Aggregator).'),
+ help_text=_('Port on Logging Aggregator to send logs to (if required and not' ' provided in Logging Aggregator).'),
category=_('Logging'),
category_slug='logging',
- required=False
+ required=False,
)
register(
'LOG_AGGREGATOR_TYPE',
@@ -643,12 +654,14 @@ register(
field_class=fields.StringListField,
default=['awx', 'activity_stream', 'job_events', 'system_tracking'],
label=_('Loggers Sending Data to Log Aggregator Form'),
- help_text=_('List of loggers that will send HTTP logs to the collector, these can '
- 'include any or all of: \n'
- 'awx - service logs\n'
- 'activity_stream - activity stream records\n'
- 'job_events - callback data from Ansible job events\n'
- 'system_tracking - facts gathered from scan jobs.'),
+ help_text=_(
+ 'List of loggers that will send HTTP logs to the collector, these can '
+ 'include any or all of: \n'
+ 'awx - service logs\n'
+ 'activity_stream - activity stream records\n'
+ 'job_events - callback data from Ansible job events\n'
+ 'system_tracking - facts gathered from scan jobs.'
+ ),
category=_('Logging'),
category_slug='logging',
)
@@ -657,10 +670,12 @@ register(
field_class=fields.BooleanField,
default=False,
label=_('Log System Tracking Facts Individually'),
- help_text=_('If set, system tracking facts will be sent for each package, service, or '
- 'other item found in a scan, allowing for greater search query granularity. '
- 'If unset, facts will be sent as a single dictionary, allowing for greater '
- 'efficiency in fact processing.'),
+ help_text=_(
+ 'If set, system tracking facts will be sent for each package, service, or '
+ 'other item found in a scan, allowing for greater search query granularity. '
+ 'If unset, facts will be sent as a single dictionary, allowing for greater '
+ 'efficiency in fact processing.'
+ ),
category=_('Logging'),
category_slug='logging',
)
@@ -689,9 +704,11 @@ register(
choices=[('https', 'HTTPS/HTTP'), ('tcp', 'TCP'), ('udp', 'UDP')],
default='https',
label=_('Logging Aggregator Protocol'),
- help_text=_('Protocol used to communicate with log aggregator. '
- 'HTTPS/HTTP assumes HTTPS unless http:// is explicitly used in '
- 'the Logging Aggregator hostname.'),
+ help_text=_(
+ 'Protocol used to communicate with log aggregator. '
+ 'HTTPS/HTTP assumes HTTPS unless http:// is explicitly used in '
+ 'the Logging Aggregator hostname.'
+ ),
category=_('Logging'),
category_slug='logging',
)
@@ -700,9 +717,7 @@ register(
field_class=fields.IntegerField,
default=5,
label=_('TCP Connection Timeout'),
- help_text=_('Number of seconds for a TCP connection to external log '
- 'aggregator to timeout. Applies to HTTPS and TCP log '
- 'aggregator protocols.'),
+ help_text=_('Number of seconds for a TCP connection to external log ' 'aggregator to timeout. Applies to HTTPS and TCP log ' 'aggregator protocols.'),
category=_('Logging'),
category_slug='logging',
unit=_('seconds'),
@@ -712,10 +727,12 @@ register(
field_class=fields.BooleanField,
default=True,
label=_('Enable/disable HTTPS certificate verification'),
- help_text=_('Flag to control enable/disable of certificate verification'
- ' when LOG_AGGREGATOR_PROTOCOL is "https". If enabled, Tower\'s'
- ' log handler will verify certificate sent by external log aggregator'
- ' before establishing connection.'),
+ help_text=_(
+ 'Flag to control enable/disable of certificate verification'
+ ' when LOG_AGGREGATOR_PROTOCOL is "https". If enabled, Tower\'s'
+ ' log handler will verify certificate sent by external log aggregator'
+ ' before establishing connection.'
+ ),
category=_('Logging'),
category_slug='logging',
)
@@ -725,10 +742,12 @@ register(
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
default='WARNING',
label=_('Logging Aggregator Level Threshold'),
- help_text=_('Level threshold used by log handler. Severities from lowest to highest'
- ' are DEBUG, INFO, WARNING, ERROR, CRITICAL. Messages less severe '
- 'than the threshold will be ignored by log handler. (messages under category '
- 'awx.anlytics ignore this setting)'),
+ help_text=_(
+ 'Level threshold used by log handler. Severities from lowest to highest'
+ ' are DEBUG, INFO, WARNING, ERROR, CRITICAL. Messages less severe '
+ 'than the threshold will be ignored by log handler. (messages under category '
+ 'awx.analytics ignore this setting)'
+ ),
category=_('Logging'),
category_slug='logging',
)
@@ -738,9 +757,11 @@ register(
default=1,
min_value=1,
label=_('Maximum disk persistence for external log aggregation (in GB)'),
- help_text=_('Amount of data to store (in gigabytes) during an outage of '
- 'the external log aggregator (defaults to 1). '
- 'Equivalent to the rsyslogd queue.maxdiskspace setting.'),
+ help_text=_(
+ 'Amount of data to store (in gigabytes) during an outage of '
+ 'the external log aggregator (defaults to 1). '
+ 'Equivalent to the rsyslogd queue.maxdiskspace setting.'
+ ),
category=_('Logging'),
category_slug='logging',
)
@@ -749,9 +770,11 @@ register(
field_class=fields.CharField,
default='/var/lib/awx',
label=_('File system location for rsyslogd disk persistence'),
- help_text=_('Location to persist logs that should be retried after an outage '
- 'of the external log aggregator (defaults to /var/lib/awx). '
- 'Equivalent to the rsyslogd queue.spoolDirectory setting.'),
+ help_text=_(
+ 'Location to persist logs that should be retried after an outage '
+ 'of the external log aggregator (defaults to /var/lib/awx). '
+ 'Equivalent to the rsyslogd queue.spoolDirectory setting.'
+ ),
category=_('Logging'),
category_slug='logging',
)
@@ -760,21 +783,19 @@ register(
field_class=fields.BooleanField,
default=False,
label=_('Enable rsyslogd debugging'),
- help_text=_('Enabled high verbosity debugging for rsyslogd. '
- 'Useful for debugging connection issues for external log aggregation.'),
+ help_text=_('Enables high verbosity debugging for rsyslogd. ' 'Useful for debugging connection issues for external log aggregation.'),
category=_('Logging'),
category_slug='logging',
)
-
register(
'AUTOMATION_ANALYTICS_LAST_GATHER',
field_class=fields.DateTimeField,
label=_('Last gather date for Automation Analytics.'),
allow_null=True,
category=_('System'),
- category_slug='system'
+ category_slug='system',
)
@@ -783,8 +804,8 @@ register(
field_class=fields.IntegerField,
label=_('Automation Analytics Gather Interval'),
help_text=_('Interval (in seconds) between data gathering.'),
- default=14400, # every 4 hours
- min_value=1800, # every 30 minutes
+ default=14400, # every 4 hours
+ min_value=1800, # every 30 minutes
category=_('System'),
category_slug='system',
unit=_('seconds'),
@@ -792,17 +813,23 @@ register(
def logging_validate(serializer, attrs):
- if not serializer.instance or \
- not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or \
- not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
+ if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
return attrs
errors = []
if attrs.get('LOG_AGGREGATOR_ENABLED', False):
- if not serializer.instance.LOG_AGGREGATOR_HOST and not attrs.get('LOG_AGGREGATOR_HOST', None) or\
- serializer.instance.LOG_AGGREGATOR_HOST and not attrs.get('LOG_AGGREGATOR_HOST', True):
+ if (
+ not serializer.instance.LOG_AGGREGATOR_HOST
+ and not attrs.get('LOG_AGGREGATOR_HOST', None)
+ or serializer.instance.LOG_AGGREGATOR_HOST
+ and not attrs.get('LOG_AGGREGATOR_HOST', True)
+ ):
errors.append('Cannot enable log aggregator without providing host.')
- if not serializer.instance.LOG_AGGREGATOR_TYPE and not attrs.get('LOG_AGGREGATOR_TYPE', None) or\
- serializer.instance.LOG_AGGREGATOR_TYPE and not attrs.get('LOG_AGGREGATOR_TYPE', True):
+ if (
+ not serializer.instance.LOG_AGGREGATOR_TYPE
+ and not attrs.get('LOG_AGGREGATOR_TYPE', None)
+ or serializer.instance.LOG_AGGREGATOR_TYPE
+ and not attrs.get('LOG_AGGREGATOR_TYPE', True)
+ ):
errors.append('Cannot enable log aggregator without providing type.')
if errors:
raise serializers.ValidationError(_('\n'.join(errors)))
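For readers verifying the logging_validate rewrite above: black only re-wraps the old backslash-continued conditions, and because `and` binds tighter than `or`, the wrapped form still parses as (A and B) or (C and D). A quick exhaustive check with stand-in booleans for the HOST/TYPE tests (illustrative, not part of the commit):

    import itertools

    # Confirm the flat spelling and the explicitly grouped spelling agree
    # for every combination of truth values.
    for a, b, c, d in itertools.product([False, True], repeat=4):
        flat = not a and not b or c and not d
        grouped = (not a and not b) or (c and not d)
        assert flat == grouped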
diff --git a/awx/main/constants.py b/awx/main/constants.py
index 323f61f311..db2e9c44d7 100644
--- a/awx/main/constants.py
+++ b/awx/main/constants.py
@@ -6,17 +6,33 @@ import re
from django.utils.translation import ugettext_lazy as _
__all__ = [
- 'CLOUD_PROVIDERS', 'SCHEDULEABLE_PROVIDERS', 'PRIVILEGE_ESCALATION_METHODS',
- 'ANSI_SGR_PATTERN', 'CAN_CANCEL', 'ACTIVE_STATES', 'STANDARD_INVENTORY_UPDATE_ENV'
+ 'CLOUD_PROVIDERS',
+ 'SCHEDULEABLE_PROVIDERS',
+ 'PRIVILEGE_ESCALATION_METHODS',
+ 'ANSI_SGR_PATTERN',
+ 'CAN_CANCEL',
+ 'ACTIVE_STATES',
+ 'STANDARD_INVENTORY_UPDATE_ENV',
]
CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'tower')
-SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + ('custom', 'scm',)
+SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + (
+ 'custom',
+ 'scm',
+)
PRIVILEGE_ESCALATION_METHODS = [
- ('sudo', _('Sudo')), ('su', _('Su')), ('pbrun', _('Pbrun')), ('pfexec', _('Pfexec')),
- ('dzdo', _('DZDO')), ('pmrun', _('Pmrun')), ('runas', _('Runas')),
- ('enable', _('Enable')), ('doas', _('Doas')), ('ksu', _('Ksu')),
- ('machinectl', _('Machinectl')), ('sesu', _('Sesu')),
+ ('sudo', _('Sudo')),
+ ('su', _('Su')),
+ ('pbrun', _('Pbrun')),
+ ('pfexec', _('Pfexec')),
+ ('dzdo', _('DZDO')),
+ ('pmrun', _('Pmrun')),
+ ('runas', _('Runas')),
+ ('enable', _('Enable')),
+ ('doas', _('Doas')),
+ ('ksu', _('Ksu')),
+ ('machinectl', _('Machinectl')),
+ ('sesu', _('Sesu')),
]
CHOICES_PRIVILEGE_ESCALATION_METHODS = [('', _('None'))] + PRIVILEGE_ESCALATION_METHODS
ANSI_SGR_PATTERN = re.compile(r'\x1b\[[0-9;]*m')
@@ -26,19 +42,35 @@ STANDARD_INVENTORY_UPDATE_ENV = {
# Always use the --export option for ansible-inventory
'ANSIBLE_INVENTORY_EXPORT': 'True',
# Redirecting output to stderr allows JSON parsing to still work with -vvv
- 'ANSIBLE_VERBOSE_TO_STDERR': 'True'
+ 'ANSIBLE_VERBOSE_TO_STDERR': 'True',
}
CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
ACTIVE_STATES = CAN_CANCEL
CENSOR_VALUE = '************'
-ENV_BLOCKLIST = frozenset((
- 'VIRTUAL_ENV', 'PATH', 'PYTHONPATH', 'PROOT_TMP_DIR', 'JOB_ID',
- 'INVENTORY_ID', 'INVENTORY_SOURCE_ID', 'INVENTORY_UPDATE_ID',
- 'AD_HOC_COMMAND_ID', 'REST_API_URL', 'REST_API_TOKEN', 'MAX_EVENT_RES',
- 'CALLBACK_QUEUE', 'CALLBACK_CONNECTION', 'CACHE',
- 'JOB_CALLBACK_DEBUG', 'INVENTORY_HOSTVARS',
- 'AWX_HOST', 'PROJECT_REVISION', 'SUPERVISOR_WEB_CONFIG_PATH'
-))
+ENV_BLOCKLIST = frozenset(
+ (
+ 'VIRTUAL_ENV',
+ 'PATH',
+ 'PYTHONPATH',
+ 'PROOT_TMP_DIR',
+ 'JOB_ID',
+ 'INVENTORY_ID',
+ 'INVENTORY_SOURCE_ID',
+ 'INVENTORY_UPDATE_ID',
+ 'AD_HOC_COMMAND_ID',
+ 'REST_API_URL',
+ 'REST_API_TOKEN',
+ 'MAX_EVENT_RES',
+ 'CALLBACK_QUEUE',
+ 'CALLBACK_CONNECTION',
+ 'CACHE',
+ 'JOB_CALLBACK_DEBUG',
+ 'INVENTORY_HOSTVARS',
+ 'AWX_HOST',
+ 'PROJECT_REVISION',
+ 'SUPERVISOR_WEB_CONFIG_PATH',
+ )
+)
# loggers that may be called in process of emitting a log
LOGGER_BLOCKLIST = (
@@ -48,5 +80,5 @@ LOGGER_BLOCKLIST = (
'awx.main.utils.encryption',
'awx.main.utils.log',
# loggers that may be called getting logging settings
- 'awx.conf'
+ 'awx.conf',
)
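The exploded tuples and lists in this file reflect black's "magic trailing comma": when a collection literal already ends with a trailing comma, black keeps one element per line even if the literal would fit within the line limit. A reduced, runnable illustration (abbreviated values, not the real constants):

    # With the trailing comma after 'scm', black preserves the exploded
    # layout; without it, the tuple could be collapsed onto one line.
    CLOUD_PROVIDERS = ('azure_rm', 'ec2')  # abbreviated for the example
    SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + (
        'custom',
        'scm',
    )
    assert SCHEDULEABLE_PROVIDERS[-2:] == ('custom', 'scm')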
diff --git a/awx/main/consumers.py b/awx/main/consumers.py
index 4fc1196dbe..a2425ec337 100644
--- a/awx/main/consumers.py
+++ b/awx/main/consumers.py
@@ -22,7 +22,7 @@ class WebsocketSecretAuthHelper:
"""
Middlewareish for websockets to verify node websocket broadcast interconnect.
- Note: The "ish" is due to the channels routing interface. Routing occurs
+ Note: The "ish" is due to the channels routing interface. Routing occurs
_after_ authentication; making it hard to apply this auth to _only_ a subset of
websocket endpoints.
"""
@@ -30,19 +30,13 @@ class WebsocketSecretAuthHelper:
@classmethod
def construct_secret(cls):
nonce_serialized = f"{int(time.time())}"
- payload_dict = {
- 'secret': settings.BROADCAST_WEBSOCKET_SECRET,
- 'nonce': nonce_serialized
- }
+ payload_dict = {'secret': settings.BROADCAST_WEBSOCKET_SECRET, 'nonce': nonce_serialized}
payload_serialized = json.dumps(payload_dict)
- secret_serialized = hmac.new(force_bytes(settings.BROADCAST_WEBSOCKET_SECRET),
- msg=force_bytes(payload_serialized),
- digestmod='sha256').hexdigest()
+ secret_serialized = hmac.new(force_bytes(settings.BROADCAST_WEBSOCKET_SECRET), msg=force_bytes(payload_serialized), digestmod='sha256').hexdigest()
return 'HMAC-SHA256 {}:{}'.format(nonce_serialized, secret_serialized)
-
@classmethod
def verify_secret(cls, s, nonce_tolerance=300):
try:
@@ -62,9 +56,7 @@ class WebsocketSecretAuthHelper:
except Exception:
raise ValueError("Failed to create hash to compare to secret.")
- secret_serialized = hmac.new(force_bytes(settings.BROADCAST_WEBSOCKET_SECRET),
- msg=force_bytes(payload_serialized),
- digestmod='sha256').hexdigest()
+ secret_serialized = hmac.new(force_bytes(settings.BROADCAST_WEBSOCKET_SECRET), msg=force_bytes(payload_serialized), digestmod='sha256').hexdigest()
if secret_serialized != secret_parsed:
raise ValueError("Invalid secret")
@@ -90,7 +82,6 @@ class WebsocketSecretAuthHelper:
class BroadcastConsumer(AsyncJsonWebsocketConsumer):
-
async def connect(self):
try:
WebsocketSecretAuthHelper.is_authorized(self.scope)
@@ -151,13 +142,10 @@ class EventConsumer(AsyncJsonWebsocketConsumer):
async def receive_json(self, data):
from awx.main.access import consumer_access
+
user = self.scope['user']
xrftoken = data.get('xrftoken')
- if (
- not xrftoken or
- XRF_KEY not in self.scope["session"] or
- xrftoken != self.scope["session"][XRF_KEY]
- ):
+ if not xrftoken or XRF_KEY not in self.scope["session"] or xrftoken != self.scope["session"][XRF_KEY]:
logger.error(f"access denied to channel, XRF mismatch for {user.username}")
await self.send_json({"error": "access denied to channel"})
return
@@ -166,7 +154,7 @@ class EventConsumer(AsyncJsonWebsocketConsumer):
groups = data['groups']
new_groups = set()
current_groups = set(self.scope['session'].pop('groups') if 'groups' in self.scope['session'] else [])
- for group_name,v in groups.items():
+ for group_name, v in groups.items():
if type(v) is list:
for oid in v:
name = '{}-{}'.format(group_name, oid)
@@ -191,16 +179,9 @@ class EventConsumer(AsyncJsonWebsocketConsumer):
new_groups_exclusive = new_groups - current_groups
for group_name in new_groups_exclusive:
- await self.channel_layer.group_add(
- group_name,
- self.channel_name
- )
+ await self.channel_layer.group_add(group_name, self.channel_name)
self.scope['session']['groups'] = new_groups
- await self.send_json({
- "groups_current": list(new_groups),
- "groups_left": list(old_groups),
- "groups_joined": list(new_groups_exclusive)
- })
+ await self.send_json({"groups_current": list(new_groups), "groups_left": list(old_groups), "groups_joined": list(new_groups_exclusive)})
async def internal_message(self, event):
await self.send(event['text'])
@@ -221,7 +202,7 @@ def _dump_payload(payload):
def emit_channel_notification(group, payload):
- from awx.main.wsbroadcast import wrap_broadcast_msg # noqa
+ from awx.main.wsbroadcast import wrap_broadcast_msg # noqa
payload_dumped = _dump_payload(payload)
if payload_dumped is None:
@@ -229,18 +210,19 @@ def emit_channel_notification(group, payload):
channel_layer = get_channel_layer()
- run_sync(channel_layer.group_send(
- group,
- {
- "type": "internal.message",
- "text": payload_dumped
- },
- ))
-
- run_sync(channel_layer.group_send(
- settings.BROADCAST_WEBSOCKET_GROUP_NAME,
- {
- "type": "internal.message",
- "text": wrap_broadcast_msg(group, payload_dumped),
- },
- ))
+ run_sync(
+ channel_layer.group_send(
+ group,
+ {"type": "internal.message", "text": payload_dumped},
+ )
+ )
+
+ run_sync(
+ channel_layer.group_send(
+ settings.BROADCAST_WEBSOCKET_GROUP_NAME,
+ {
+ "type": "internal.message",
+ "text": wrap_broadcast_msg(group, payload_dumped),
+ },
+ )
+ )
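The construct_secret/verify_secret pair above computes an HMAC-SHA256 over a JSON payload and transports it as 'HMAC-SHA256 <nonce>:<hexdigest>'. A self-contained sketch of the same header construction, with a placeholder secret standing in for settings.BROADCAST_WEBSOCKET_SECRET:

    import hmac
    import json
    import time

    SECRET = 'example-shared-secret'  # placeholder for the Django setting

    def construct_secret():
        # The timestamp nonce lets verify_secret reject replays older than
        # its tolerance window.
        nonce = f"{int(time.time())}"
        payload = json.dumps({'secret': SECRET, 'nonce': nonce})
        digest = hmac.new(SECRET.encode(), msg=payload.encode(), digestmod='sha256').hexdigest()
        return 'HMAC-SHA256 {}:{}'.format(nonce, digest)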
diff --git a/awx/main/credential_plugins/aim.py b/awx/main/credential_plugins/aim.py
index 7c99665bf0..235511f959 100644
--- a/awx/main/credential_plugins/aim.py
+++ b/awx/main/credential_plugins/aim.py
@@ -6,51 +6,55 @@ from django.utils.translation import ugettext_lazy as _
import requests
aim_inputs = {
- 'fields': [{
- 'id': 'url',
- 'label': _('CyberArk AIM URL'),
- 'type': 'string',
- 'format': 'url',
- }, {
- 'id': 'app_id',
- 'label': _('Application ID'),
- 'type': 'string',
- 'secret': True,
- }, {
- 'id': 'client_key',
- 'label': _('Client Key'),
- 'type': 'string',
- 'secret': True,
- 'multiline': True,
- }, {
- 'id': 'client_cert',
- 'label': _('Client Certificate'),
- 'type': 'string',
- 'secret': True,
- 'multiline': True,
- }, {
- 'id': 'verify',
- 'label': _('Verify SSL Certificates'),
- 'type': 'boolean',
- 'default': True,
- }],
- 'metadata': [{
- 'id': 'object_query',
- 'label': _('Object Query'),
- 'type': 'string',
- 'help_text': _('Lookup query for the object. Ex: Safe=TestSafe;Object=testAccountName123'),
- }, {
- 'id': 'object_query_format',
- 'label': _('Object Query Format'),
- 'type': 'string',
- 'default': 'Exact',
- 'choices': ['Exact', 'Regexp']
- }, {
- 'id': 'reason',
- 'label': _('Reason'),
- 'type': 'string',
- 'help_text': _('Object request reason. This is only needed if it is required by the object\'s policy.')
- }],
+ 'fields': [
+ {
+ 'id': 'url',
+ 'label': _('CyberArk AIM URL'),
+ 'type': 'string',
+ 'format': 'url',
+ },
+ {
+ 'id': 'app_id',
+ 'label': _('Application ID'),
+ 'type': 'string',
+ 'secret': True,
+ },
+ {
+ 'id': 'client_key',
+ 'label': _('Client Key'),
+ 'type': 'string',
+ 'secret': True,
+ 'multiline': True,
+ },
+ {
+ 'id': 'client_cert',
+ 'label': _('Client Certificate'),
+ 'type': 'string',
+ 'secret': True,
+ 'multiline': True,
+ },
+ {
+ 'id': 'verify',
+ 'label': _('Verify SSL Certificates'),
+ 'type': 'boolean',
+ 'default': True,
+ },
+ ],
+ 'metadata': [
+ {
+ 'id': 'object_query',
+ 'label': _('Object Query'),
+ 'type': 'string',
+ 'help_text': _('Lookup query for the object. Ex: Safe=TestSafe;Object=testAccountName123'),
+ },
+ {'id': 'object_query_format', 'label': _('Object Query Format'), 'type': 'string', 'default': 'Exact', 'choices': ['Exact', 'Regexp']},
+ {
+ 'id': 'reason',
+ 'label': _('Reason'),
+ 'type': 'string',
+ 'help_text': _('Object request reason. This is only needed if it is required by the object\'s policy.'),
+ },
+ ],
'required': ['url', 'app_id', 'object_query'],
}
@@ -88,8 +92,4 @@ def aim_backend(**kwargs):
return res.json()['Content']
-aim_plugin = CredentialPlugin(
- 'CyberArk AIM Central Credential Provider Lookup',
- inputs=aim_inputs,
- backend=aim_backend
-)
+aim_plugin = CredentialPlugin('CyberArk AIM Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)
diff --git a/awx/main/credential_plugins/azure_kv.py b/awx/main/credential_plugins/azure_kv.py
index 645e6f6b1a..58580edf9a 100644
--- a/awx/main/credential_plugins/azure_kv.py
+++ b/awx/main/credential_plugins/azure_kv.py
@@ -7,51 +7,48 @@ from msrestazure import azure_cloud
# https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py
-clouds = [
- vars(azure_cloud)[n]
- for n in dir(azure_cloud)
- if n.startswith("AZURE_") and n.endswith("_CLOUD")
-]
+clouds = [vars(azure_cloud)[n] for n in dir(azure_cloud) if n.startswith("AZURE_") and n.endswith("_CLOUD")]
default_cloud = vars(azure_cloud)["AZURE_PUBLIC_CLOUD"]
azure_keyvault_inputs = {
- 'fields': [{
- 'id': 'url',
- 'label': _('Vault URL (DNS Name)'),
- 'type': 'string',
- 'format': 'url',
- }, {
- 'id': 'client',
- 'label': _('Client ID'),
- 'type': 'string'
- }, {
- 'id': 'secret',
- 'label': _('Client Secret'),
- 'type': 'string',
- 'secret': True,
- }, {
- 'id': 'tenant',
- 'label': _('Tenant ID'),
- 'type': 'string'
- }, {
- 'id': 'cloud_name',
- 'label': _('Cloud Environment'),
- 'help_text': _('Specify which azure cloud environment to use.'),
- 'choices': list(set([default_cloud.name] + [c.name for c in clouds])),
- 'default': default_cloud.name
- }],
- 'metadata': [{
- 'id': 'secret_field',
- 'label': _('Secret Name'),
- 'type': 'string',
- 'help_text': _('The name of the secret to look up.'),
- }, {
- 'id': 'secret_version',
- 'label': _('Secret Version'),
- 'type': 'string',
- 'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
- }],
+ 'fields': [
+ {
+ 'id': 'url',
+ 'label': _('Vault URL (DNS Name)'),
+ 'type': 'string',
+ 'format': 'url',
+ },
+ {'id': 'client', 'label': _('Client ID'), 'type': 'string'},
+ {
+ 'id': 'secret',
+ 'label': _('Client Secret'),
+ 'type': 'string',
+ 'secret': True,
+ },
+ {'id': 'tenant', 'label': _('Tenant ID'), 'type': 'string'},
+ {
+ 'id': 'cloud_name',
+ 'label': _('Cloud Environment'),
+ 'help_text': _('Specify which Azure cloud environment to use.'),
+ 'choices': list(set([default_cloud.name] + [c.name for c in clouds])),
+ 'default': default_cloud.name,
+ },
+ ],
+ 'metadata': [
+ {
+ 'id': 'secret_field',
+ 'label': _('Secret Name'),
+ 'type': 'string',
+ 'help_text': _('The name of the secret to look up.'),
+ },
+ {
+ 'id': 'secret_version',
+ 'label': _('Secret Version'),
+ 'type': 'string',
+ 'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
+ },
+ ],
'required': ['url', 'client', 'secret', 'tenant', 'secret_field'],
}
@@ -62,11 +59,11 @@ def azure_keyvault_backend(**kwargs):
def auth_callback(server, resource, scope):
credentials = ServicePrincipalCredentials(
- url = url,
- client_id = kwargs['client'],
- secret = kwargs['secret'],
- tenant = kwargs['tenant'],
- resource = f"https://{cloud.suffixes.keyvault_dns.split('.', 1).pop()}",
+ url=url,
+ client_id=kwargs['client'],
+ secret=kwargs['secret'],
+ tenant=kwargs['tenant'],
+ resource=f"https://{cloud.suffixes.keyvault_dns.split('.', 1).pop()}",
)
token = credentials.token
return token['token_type'], token['access_token']
@@ -75,8 +72,4 @@ def azure_keyvault_backend(**kwargs):
return kv.get_secret(url, kwargs['secret_field'], kwargs.get('secret_version', '')).value
-azure_keyvault_plugin = CredentialPlugin(
- 'Microsoft Azure Key Vault',
- inputs=azure_keyvault_inputs,
- backend=azure_keyvault_backend
-)
+azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend)
diff --git a/awx/main/credential_plugins/centrify_vault.py b/awx/main/credential_plugins/centrify_vault.py
index dc4db1fe22..a0be2250f4 100644
--- a/awx/main/credential_plugins/centrify_vault.py
+++ b/awx/main/credential_plugins/centrify_vault.py
@@ -2,68 +2,68 @@ from .plugin import CredentialPlugin, raise_for_status
from django.utils.translation import ugettext_lazy as _
from urllib.parse import urljoin
import requests
-pas_inputs = {
- 'fields': [{
- 'id': 'url',
- 'label': _('Centrify Tenant URL'),
- 'type': 'string',
- 'help_text': _('Centrify Tenant URL'),
- 'format': 'url',
- }, {
- 'id':'client_id',
- 'label':_('Centrify API User'),
- 'type':'string',
- 'help_text': _('Centrify API User, having necessary permissions as mentioned in support doc'),
- }, {
- 'id':'client_password',
- 'label':_('Centrify API Password'),
- 'type':'string',
- 'help_text': _('Password of Centrify API User with necessary permissions'),
- 'secret':True,
- },{
- 'id':'oauth_application_id',
- 'label':_('OAuth2 Application ID'),
- 'type':'string',
- 'help_text': _('Application ID of the configured OAuth2 Client (defaults to \'awx\')'),
- 'default': 'awx',
- },{
- 'id':'oauth_scope',
- 'label':_('OAuth2 Scope'),
- 'type':'string',
- 'help_text': _('Scope of the configured OAuth2 Client (defaults to \'awx\')'),
- 'default': 'awx',
- }],
- 'metadata': [{
- 'id': 'account-name',
- 'label': _('Account Name'),
- 'type': 'string',
- 'help_text': _('Local system account or Domain account name enrolled in Centrify Vault. eg. (root or DOMAIN/Administrator)'),
- },{
- 'id': 'system-name',
- 'label': _('System Name'),
- 'type': 'string',
- 'help_text': _('Machine Name enrolled with in Centrify Portal'),
- }],
- 'required': ['url', 'account-name', 'system-name','client_id','client_password'],
+pas_inputs = {
+ 'fields': [
+ {
+ 'id': 'url',
+ 'label': _('Centrify Tenant URL'),
+ 'type': 'string',
+ 'help_text': _('Centrify Tenant URL'),
+ 'format': 'url',
+ },
+ {
+ 'id': 'client_id',
+ 'label': _('Centrify API User'),
+ 'type': 'string',
+ 'help_text': _('Centrify API User having the necessary permissions, as mentioned in the support doc'),
+ },
+ {
+ 'id': 'client_password',
+ 'label': _('Centrify API Password'),
+ 'type': 'string',
+ 'help_text': _('Password of Centrify API User with necessary permissions'),
+ 'secret': True,
+ },
+ {
+ 'id': 'oauth_application_id',
+ 'label': _('OAuth2 Application ID'),
+ 'type': 'string',
+ 'help_text': _('Application ID of the configured OAuth2 Client (defaults to \'awx\')'),
+ 'default': 'awx',
+ },
+ {
+ 'id': 'oauth_scope',
+ 'label': _('OAuth2 Scope'),
+ 'type': 'string',
+ 'help_text': _('Scope of the configured OAuth2 Client (defaults to \'awx\')'),
+ 'default': 'awx',
+ },
+ ],
+ 'metadata': [
+ {
+ 'id': 'account-name',
+ 'label': _('Account Name'),
+ 'type': 'string',
+ 'help_text': _('Local system account or Domain account name enrolled in Centrify Vault. e.g. (root or DOMAIN/Administrator)'),
+ },
+ {
+ 'id': 'system-name',
+ 'label': _('System Name'),
+ 'type': 'string',
+ 'help_text': _('Machine Name enrolled in Centrify Portal'),
+ },
+ ],
+ 'required': ['url', 'account-name', 'system-name', 'client_id', 'client_password'],
}
# generate bearer token to authenticate with PAS portal, Input : Client ID, Client Secret
def handle_auth(**kwargs):
- post_data = {
- "grant_type": "client_credentials",
- "scope": kwargs['oauth_scope']
- }
- response = requests.post(
- kwargs['endpoint'],
- data = post_data,
- auth = (kwargs['client_id'],kwargs['client_password']),
- verify = True,
- timeout = (5, 30)
- )
+ post_data = {"grant_type": "client_credentials", "scope": kwargs['oauth_scope']}
+ response = requests.post(kwargs['endpoint'], data=post_data, auth=(kwargs['client_id'], kwargs['client_password']), verify=True, timeout=(5, 30))
raise_for_status(response)
- try:
+ try:
return response.json()['access_token']
except KeyError:
raise RuntimeError('OAuth request to tenant was unsuccessful')
@@ -71,20 +71,11 @@ def handle_auth(**kwargs):
# fetch the ID of system with RedRock query, Input : System Name, Account Name
def get_ID(**kwargs):
- endpoint = urljoin(kwargs['url'],'/Redrock/query')
- name=" Name='{0}' and User='{1}'".format(kwargs['system_name'],kwargs['acc_name'])
- query = 'Select ID from VaultAccount where {0}'.format(name)
- post_headers = {
- "Authorization": "Bearer " + kwargs['access_token'],
- "X-CENTRIFY-NATIVE-CLIENT":"true"
- }
- response = requests.post(
- endpoint,
- json = {'Script': query},
- headers = post_headers,
- verify = True,
- timeout = (5, 30)
- )
+ endpoint = urljoin(kwargs['url'], '/Redrock/query')
+ name = " Name='{0}' and User='{1}'".format(kwargs['system_name'], kwargs['acc_name'])
+ query = 'Select ID from VaultAccount where {0}'.format(name)
+ post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"}
+ response = requests.post(endpoint, json={'Script': query}, headers=post_headers, verify=True, timeout=(5, 30))
raise_for_status(response)
try:
result_str = response.json()["Result"]["Results"]
@@ -95,23 +86,14 @@ def get_ID(**kwargs):
# CheckOut Password from Centrify Vault, Input : ID
def get_passwd(**kwargs):
- endpoint = urljoin(kwargs['url'],'/ServerManage/CheckoutPassword')
- post_headers = {
- "Authorization": "Bearer " + kwargs['access_token'],
- "X-CENTRIFY-NATIVE-CLIENT":"true"
- }
- response = requests.post(
- endpoint,
- json = {'ID': kwargs['acc_id']},
- headers = post_headers,
- verify = True,
- timeout = (5, 30)
- )
+ endpoint = urljoin(kwargs['url'], '/ServerManage/CheckoutPassword')
+ post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"}
+ response = requests.post(endpoint, json={'ID': kwargs['acc_id']}, headers=post_headers, verify=True, timeout=(5, 30))
raise_for_status(response)
try:
return response.json()["Result"]["Password"]
except KeyError:
- raise RuntimeError("Password Not Found")
+ raise RuntimeError("Password Not Found")
def centrify_backend(**kwargs):
@@ -122,21 +104,12 @@ def centrify_backend(**kwargs):
client_password = kwargs.get('client_password')
app_id = kwargs.get('oauth_application_id', 'awx')
endpoint = urljoin(url, f'/oauth2/token/{app_id}')
- endpoint = {
- 'endpoint': endpoint,
- 'client_id': client_id,
- 'client_password': client_password,
- 'oauth_scope': kwargs.get('oauth_scope', 'awx')
- }
+ endpoint = {'endpoint': endpoint, 'client_id': client_id, 'client_password': client_password, 'oauth_scope': kwargs.get('oauth_scope', 'awx')}
token = handle_auth(**endpoint)
- get_id_args = {'system_name':system_name,'acc_name':acc_name,'url':url,'access_token':token}
+ get_id_args = {'system_name': system_name, 'acc_name': acc_name, 'url': url, 'access_token': token}
acc_id = get_ID(**get_id_args)
- get_pwd_args = {'url':url,'acc_id':acc_id,'access_token':token}
+ get_pwd_args = {'url': url, 'acc_id': acc_id, 'access_token': token}
return get_passwd(**get_pwd_args)
-centrify_plugin = CredentialPlugin(
- 'Centrify Vault Credential Provider Lookup',
- inputs=pas_inputs,
- backend=centrify_backend
-)
+centrify_plugin = CredentialPlugin('Centrify Vault Credential Provider Lookup', inputs=pas_inputs, backend=centrify_backend)
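handle_auth above is a plain OAuth2 client-credentials exchange; a minimal stand-alone version of the same call shape, with placeholder parameters (only the structure is taken from the plugin):

    import requests

    def fetch_token(token_url, client_id, client_password, scope='awx'):
        # Client-credentials grant over HTTP Basic auth, with the same
        # (connect, read) timeout tuple the plugin uses.
        resp = requests.post(
            token_url,
            data={'grant_type': 'client_credentials', 'scope': scope},
            auth=(client_id, client_password),
            verify=True,
            timeout=(5, 30),
        )
        resp.raise_for_status()
        return resp.json()['access_token']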
diff --git a/awx/main/credential_plugins/conjur.py b/awx/main/credential_plugins/conjur.py
index 5cd87007fc..b9606d48bc 100644
--- a/awx/main/credential_plugins/conjur.py
+++ b/awx/main/credential_plugins/conjur.py
@@ -8,41 +8,45 @@ import requests
conjur_inputs = {
- 'fields': [{
- 'id': 'url',
- 'label': _('Conjur URL'),
- 'type': 'string',
- 'format': 'url',
- }, {
- 'id': 'api_key',
- 'label': _('API Key'),
- 'type': 'string',
- 'secret': True,
- }, {
- 'id': 'account',
- 'label': _('Account'),
- 'type': 'string',
- }, {
- 'id': 'username',
- 'label': _('Username'),
- 'type': 'string',
- }, {
- 'id': 'cacert',
- 'label': _('Public Key Certificate'),
- 'type': 'string',
- 'multiline': True
- }],
- 'metadata': [{
- 'id': 'secret_path',
- 'label': _('Secret Identifier'),
- 'type': 'string',
- 'help_text': _('The identifier for the secret e.g., /some/identifier'),
- }, {
- 'id': 'secret_version',
- 'label': _('Secret Version'),
- 'type': 'string',
- 'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
- }],
+ 'fields': [
+ {
+ 'id': 'url',
+ 'label': _('Conjur URL'),
+ 'type': 'string',
+ 'format': 'url',
+ },
+ {
+ 'id': 'api_key',
+ 'label': _('API Key'),
+ 'type': 'string',
+ 'secret': True,
+ },
+ {
+ 'id': 'account',
+ 'label': _('Account'),
+ 'type': 'string',
+ },
+ {
+ 'id': 'username',
+ 'label': _('Username'),
+ 'type': 'string',
+ },
+ {'id': 'cacert', 'label': _('Public Key Certificate'), 'type': 'string', 'multiline': True},
+ ],
+ 'metadata': [
+ {
+ 'id': 'secret_path',
+ 'label': _('Secret Identifier'),
+ 'type': 'string',
+ 'help_text': _('The identifier for the secret e.g., /some/identifier'),
+ },
+ {
+ 'id': 'secret_version',
+ 'label': _('Secret Version'),
+ 'type': 'string',
+ 'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
+ },
+ ],
'required': ['url', 'api_key', 'account', 'username'],
}
@@ -50,7 +54,7 @@ conjur_inputs = {
def conjur_backend(**kwargs):
url = kwargs['url']
api_key = kwargs['api_key']
- account = quote(kwargs['account'], safe='')
+ account = quote(kwargs['account'], safe='')
username = quote(kwargs['username'], safe='')
secret_path = quote(kwargs['secret_path'], safe='')
version = kwargs.get('secret_version')
@@ -65,10 +69,7 @@ def conjur_backend(**kwargs):
with CertFiles(cacert) as cert:
# https://www.conjur.org/api.html#authentication-authenticate-post
auth_kwargs['verify'] = cert
- resp = requests.post(
- urljoin(url, '/'.join(['authn', account, username, 'authenticate'])),
- **auth_kwargs
- )
+ resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
raise_for_status(resp)
token = base64.b64encode(resp.content).decode('utf-8')
@@ -78,12 +79,7 @@ def conjur_backend(**kwargs):
}
# https://www.conjur.org/api.html#secrets-retrieve-a-secret-get
- path = urljoin(url, '/'.join([
- 'secrets',
- account,
- 'variable',
- secret_path
- ]))
+ path = urljoin(url, '/'.join(['secrets', account, 'variable', secret_path]))
if version:
path = '?'.join([path, version])
@@ -94,8 +90,4 @@ def conjur_backend(**kwargs):
return resp.text
-conjur_plugin = CredentialPlugin(
- 'CyberArk Conjur Secret Lookup',
- inputs=conjur_inputs,
- backend=conjur_backend
-)
+conjur_plugin = CredentialPlugin('CyberArk Conjur Secret Lookup', inputs=conjur_inputs, backend=conjur_backend)
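The Conjur backend percent-encodes account, username, and secret_path with quote(..., safe='') before assembling the URL; with safe='' even '/' is escaped, so an identifier travels as a single path segment. A one-line demonstration:

    from urllib.parse import quote

    # 'prod/db/password' must be one segment of the /secrets/ URL, not three.
    assert quote('prod/db/password', safe='') == 'prod%2Fdb%2Fpassword'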
diff --git a/awx/main/credential_plugins/hashivault.py b/awx/main/credential_plugins/hashivault.py
index 8c7a74c8e8..adc5be4342 100644
--- a/awx/main/credential_plugins/hashivault.py
+++ b/awx/main/credential_plugins/hashivault.py
@@ -9,110 +9,131 @@ import requests
from django.utils.translation import ugettext_lazy as _
base_inputs = {
- 'fields': [{
- 'id': 'url',
- 'label': _('Server URL'),
- 'type': 'string',
- 'format': 'url',
- 'help_text': _('The URL to the HashiCorp Vault'),
- }, {
- 'id': 'token',
- 'label': _('Token'),
- 'type': 'string',
- 'secret': True,
- 'help_text': _('The access token used to authenticate to the Vault server'),
- }, {
- 'id': 'cacert',
- 'label': _('CA Certificate'),
- 'type': 'string',
- 'multiline': True,
- 'help_text': _('The CA certificate used to verify the SSL certificate of the Vault server')
- }, {
- 'id': 'role_id',
- 'label': _('AppRole role_id'),
- 'type': 'string',
- 'multiline': False,
- 'help_text': _('The Role ID for AppRole Authentication')
- }, {
- 'id': 'secret_id',
- 'label': _('AppRole secret_id'),
- 'type': 'string',
- 'multiline': False,
- 'secret': True,
- 'help_text': _('The Secret ID for AppRole Authentication')
- }, {
- 'id': 'namespace',
- 'label': _('Namespace name (Vault Enterprise only)'),
- 'type': 'string',
- 'multiline': False,
- 'help_text': _('Name of the namespace to use when authenticate and retrieve secrets')
- }, {
- 'id': 'default_auth_path',
- 'label': _('Path to Approle Auth'),
- 'type': 'string',
- 'multiline': False,
- 'default': 'approle',
- 'help_text': _('The AppRole Authentication path to use if one isn\'t provided in the metadata when linking to an input field. Defaults to \'approle\'')
- }
+ 'fields': [
+ {
+ 'id': 'url',
+ 'label': _('Server URL'),
+ 'type': 'string',
+ 'format': 'url',
+ 'help_text': _('The URL to the HashiCorp Vault'),
+ },
+ {
+ 'id': 'token',
+ 'label': _('Token'),
+ 'type': 'string',
+ 'secret': True,
+ 'help_text': _('The access token used to authenticate to the Vault server'),
+ },
+ {
+ 'id': 'cacert',
+ 'label': _('CA Certificate'),
+ 'type': 'string',
+ 'multiline': True,
+ 'help_text': _('The CA certificate used to verify the SSL certificate of the Vault server'),
+ },
+ {'id': 'role_id', 'label': _('AppRole role_id'), 'type': 'string', 'multiline': False, 'help_text': _('The Role ID for AppRole Authentication')},
+ {
+ 'id': 'secret_id',
+ 'label': _('AppRole secret_id'),
+ 'type': 'string',
+ 'multiline': False,
+ 'secret': True,
+ 'help_text': _('The Secret ID for AppRole Authentication'),
+ },
+ {
+ 'id': 'namespace',
+ 'label': _('Namespace name (Vault Enterprise only)'),
+ 'type': 'string',
+ 'multiline': False,
+ 'help_text': _('Name of the namespace to use when authenticating and retrieving secrets'),
+ },
+ {
+ 'id': 'default_auth_path',
+ 'label': _('Path to Approle Auth'),
+ 'type': 'string',
+ 'multiline': False,
+ 'default': 'approle',
+ 'help_text': _(
+ 'The AppRole Authentication path to use if one isn\'t provided in the metadata when linking to an input field. Defaults to \'approle\''
+ ),
+ },
+ ],
+ 'metadata': [
+ {
+ 'id': 'secret_path',
+ 'label': _('Path to Secret'),
+ 'type': 'string',
+ 'help_text': _('The path to the secret stored in the secret backend e.g., /some/secret/'),
+ },
+ {
+ 'id': 'auth_path',
+ 'label': _('Path to Auth'),
+ 'type': 'string',
+ 'multiline': False,
+ 'help_text': _('The path where the Authentication method is mounted e.g., approle'),
+ },
],
- 'metadata': [{
- 'id': 'secret_path',
- 'label': _('Path to Secret'),
- 'type': 'string',
- 'help_text': _('The path to the secret stored in the secret backend e.g, /some/secret/')
- }, {
- 'id': 'auth_path',
- 'label': _('Path to Auth'),
- 'type': 'string',
- 'multiline': False,
- 'help_text': _('The path where the Authentication method is mounted e.g, approle')
- }],
'required': ['url', 'secret_path'],
}
hashi_kv_inputs = copy.deepcopy(base_inputs)
-hashi_kv_inputs['fields'].append({
- 'id': 'api_version',
- 'label': _('API Version'),
- 'choices': ['v1', 'v2'],
- 'help_text': _('API v1 is for static key/value lookups. API v2 is for versioned key/value lookups.'),
- 'default': 'v1',
-})
-hashi_kv_inputs['metadata'] = [{
- 'id': 'secret_backend',
- 'label': _('Name of Secret Backend'),
- 'type': 'string',
- 'help_text': _('The name of the kv secret backend (if left empty, the first segment of the secret path will be used).')
-}] + hashi_kv_inputs['metadata'] + [{
- 'id': 'secret_key',
- 'label': _('Key Name'),
- 'type': 'string',
- 'help_text': _('The name of the key to look up in the secret.'),
-}, {
- 'id': 'secret_version',
- 'label': _('Secret Version (v2 only)'),
- 'type': 'string',
- 'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
-}]
+hashi_kv_inputs['fields'].append(
+ {
+ 'id': 'api_version',
+ 'label': _('API Version'),
+ 'choices': ['v1', 'v2'],
+ 'help_text': _('API v1 is for static key/value lookups. API v2 is for versioned key/value lookups.'),
+ 'default': 'v1',
+ }
+)
+hashi_kv_inputs['metadata'] = (
+ [
+ {
+ 'id': 'secret_backend',
+ 'label': _('Name of Secret Backend'),
+ 'type': 'string',
+ 'help_text': _('The name of the kv secret backend (if left empty, the first segment of the secret path will be used).'),
+ }
+ ]
+ + hashi_kv_inputs['metadata']
+ + [
+ {
+ 'id': 'secret_key',
+ 'label': _('Key Name'),
+ 'type': 'string',
+ 'help_text': _('The name of the key to look up in the secret.'),
+ },
+ {
+ 'id': 'secret_version',
+ 'label': _('Secret Version (v2 only)'),
+ 'type': 'string',
+ 'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
+ },
+ ]
+)
hashi_kv_inputs['required'].extend(['api_version', 'secret_key'])
hashi_ssh_inputs = copy.deepcopy(base_inputs)
-hashi_ssh_inputs['metadata'] = [{
- 'id': 'public_key',
- 'label': _('Unsigned Public Key'),
- 'type': 'string',
- 'multiline': True,
-}] + hashi_ssh_inputs['metadata'] + [{
- 'id': 'role',
- 'label': _('Role Name'),
- 'type': 'string',
- 'help_text': _('The name of the role used to sign.')
-}, {
- 'id': 'valid_principals',
- 'label': _('Valid Principals'),
- 'type': 'string',
- 'help_text': _('Valid principals (either usernames or hostnames) that the certificate should be signed for.'),
-}]
+hashi_ssh_inputs['metadata'] = (
+ [
+ {
+ 'id': 'public_key',
+ 'label': _('Unsigned Public Key'),
+ 'type': 'string',
+ 'multiline': True,
+ }
+ ]
+ + hashi_ssh_inputs['metadata']
+ + [
+ {'id': 'role', 'label': _('Role Name'), 'type': 'string', 'help_text': _('The name of the role used to sign.')},
+ {
+ 'id': 'valid_principals',
+ 'label': _('Valid Principals'),
+ 'type': 'string',
+ 'help_text': _('Valid principals (either usernames or hostnames) that the certificate should be signed for.'),
+ },
+ ]
+)
hashi_ssh_inputs['required'].extend(['public_key', 'role'])
@@ -209,9 +230,7 @@ def kv_backend(**kwargs):
try:
return json['data'][secret_key]
except KeyError:
- raise RuntimeError(
- '{} is not present at {}'.format(secret_key, secret_path)
- )
+ raise RuntimeError('{} is not present at {}'.format(secret_key, secret_path))
return json['data']
@@ -248,14 +267,6 @@ def ssh_backend(**kwargs):
return resp.json()['data']['signed_key']
-hashivault_kv_plugin = CredentialPlugin(
- 'HashiCorp Vault Secret Lookup',
- inputs=hashi_kv_inputs,
- backend=kv_backend
-)
+hashivault_kv_plugin = CredentialPlugin('HashiCorp Vault Secret Lookup', inputs=hashi_kv_inputs, backend=kv_backend)
-hashivault_ssh_plugin = CredentialPlugin(
- 'HashiCorp Vault Signed SSH',
- inputs=hashi_ssh_inputs,
- backend=ssh_backend
-)
+hashivault_ssh_plugin = CredentialPlugin('HashiCorp Vault Signed SSH', inputs=hashi_ssh_inputs, backend=ssh_backend)
diff --git a/awx/main/credential_plugins/plugin.py b/awx/main/credential_plugins/plugin.py
index fa5c770fd1..7219231efc 100644
--- a/awx/main/credential_plugins/plugin.py
+++ b/awx/main/credential_plugins/plugin.py
@@ -16,7 +16,7 @@ def raise_for_status(resp):
raise exc
-class CertFiles():
+class CertFiles:
"""
A context manager used for writing a certificate and (optional) key
to $TMPDIR, and cleaning up afterwards.
diff --git a/awx/main/db/profiled_pg/base.py b/awx/main/db/profiled_pg/base.py
index 2a449437ce..5df1341428 100644
--- a/awx/main/db/profiled_pg/base.py
+++ b/awx/main/db/profiled_pg/base.py
@@ -16,7 +16,6 @@ __all__ = ['DatabaseWrapper']
class RecordedQueryLog(object):
-
def __init__(self, log, db, dest='/var/log/tower/profile'):
self.log = log
self.db = db
@@ -70,10 +69,7 @@ class RecordedQueryLog(object):
break
else:
progname = os.path.basename(sys.argv[0])
- filepath = os.path.join(
- self.dest,
- '{}.sqlite'.format(progname)
- )
+ filepath = os.path.join(self.dest, '{}.sqlite'.format(progname))
version = pkg_resources.get_distribution('awx').version
log = sqlite3.connect(filepath, timeout=3)
log.execute(
@@ -91,9 +87,8 @@ class RecordedQueryLog(object):
)
log.commit()
log.execute(
- 'INSERT INTO queries (pid, version, argv, time, sql, explain, bt) '
- 'VALUES (?, ?, ?, ?, ?, ?, ?);',
- (os.getpid(), version, ' ' .join(sys.argv), seconds, sql, explain, bt)
+ 'INSERT INTO queries (pid, version, argv, time, sql, explain, bt) ' 'VALUES (?, ?, ?, ?, ?, ?, ?);',
+ (os.getpid(), version, ' '.join(sys.argv), seconds, sql, explain, bt),
)
log.commit()
diff --git a/awx/main/dispatch/__init__.py b/awx/main/dispatch/__init__.py
index 587a8219aa..c240f6fee9 100644
--- a/awx/main/dispatch/__init__.py
+++ b/awx/main/dispatch/__init__.py
@@ -47,16 +47,9 @@ class PubSub(object):
@contextmanager
def pg_bus_conn():
conf = settings.DATABASES['default']
- conn = psycopg2.connect(dbname=conf['NAME'],
- host=conf['HOST'],
- user=conf['USER'],
- password=conf['PASSWORD'],
- port=conf['PORT'],
- **conf.get("OPTIONS", {}))
+ conn = psycopg2.connect(dbname=conf['NAME'], host=conf['HOST'], user=conf['USER'], password=conf['PASSWORD'], port=conf['PORT'], **conf.get("OPTIONS", {}))
# Django connection.cursor().connection doesn't have autocommit=True on
conn.set_session(autocommit=True)
pubsub = PubSub(conn)
yield pubsub
conn.close()
-
-
diff --git a/awx/main/dispatch/control.py b/awx/main/dispatch/control.py
index 47cc60b40d..e5952f02bf 100644
--- a/awx/main/dispatch/control.py
+++ b/awx/main/dispatch/control.py
@@ -48,8 +48,7 @@ class Control(object):
with pg_bus_conn() as conn:
conn.listen(reply_queue)
- conn.notify(self.queuename,
- json.dumps({'control': command, 'reply_to': reply_queue}))
+ conn.notify(self.queuename, json.dumps({'control': command, 'reply_to': reply_queue}))
for reply in conn.events(select_timeout=timeout, yield_timeouts=True):
if reply is None:
diff --git a/awx/main/dispatch/periodic.py b/awx/main/dispatch/periodic.py
index b3a1b769c0..9ff6dd2570 100644
--- a/awx/main/dispatch/periodic.py
+++ b/awx/main/dispatch/periodic.py
@@ -14,12 +14,8 @@ logger = logging.getLogger('awx.main.dispatch.periodic')
class Scheduler(Scheduler):
-
def run_continuously(self):
- idle_seconds = max(
- 1,
- min(self.jobs).period.total_seconds() / 2
- )
+ idle_seconds = max(1, min(self.jobs).period.total_seconds() / 2)
def run():
ppid = os.getppid()
@@ -39,9 +35,7 @@ class Scheduler(Scheduler):
GuidMiddleware.set_guid(GuidMiddleware._generate_guid())
self.run_pending()
except Exception:
- logger.exception(
- 'encountered an error while scheduling periodic tasks'
- )
+ logger.exception('encountered an error while scheduling periodic tasks')
time.sleep(idle_seconds)
process = Process(target=run)
diff --git a/awx/main/dispatch/pool.py b/awx/main/dispatch/pool.py
index 5dbe034547..f0be3b9917 100644
--- a/awx/main/dispatch/pool.py
+++ b/awx/main/dispatch/pool.py
@@ -30,13 +30,12 @@ else:
class NoOpResultQueue(object):
-
def put(self, item):
pass
class PoolWorker(object):
- '''
+ """
Used to track a worker child process and its pending and finished messages.
This class makes use of two distinct multiprocessing.Queues to track state:
@@ -62,7 +61,7 @@ class PoolWorker(object):
A worker is "busy" when it has at least one message in self.managed_tasks.
It is "idle" when self.managed_tasks is empty.
- '''
+ """
track_managed_tasks = False
@@ -91,10 +90,10 @@ class PoolWorker(object):
self.calculate_managed_tasks()
def quit(self):
- '''
+ """
Send a special control message to the worker that tells it to exit
gracefully.
- '''
+ """
self.queue.put('QUIT')
@property
@@ -112,9 +111,7 @@ class PoolWorker(object):
@property
def mb(self):
if self.alive:
- return '{:0.3f}'.format(
- psutil.Process(self.pid).memory_info().rss / 1024.0 / 1024.0
- )
+ return '{:0.3f}'.format(psutil.Process(self.pid).memory_info().rss / 1024.0 / 1024.0)
return '0'
@property
@@ -179,11 +176,7 @@ class PoolWorker(object):
except QueueEmpty:
break # qsize is not always _totally_ up to date
if len(orphaned):
- logger.error(
- 'requeuing {} messages from gone worker pid:{}'.format(
- len(orphaned), self.pid
- )
- )
+ logger.error('requeuing {} messages from gone worker pid:{}'.format(len(orphaned), self.pid))
return orphaned
@property
@@ -202,7 +195,7 @@ class StatefulPoolWorker(PoolWorker):
class WorkerPool(object):
- '''
+ """
Creates a pool of forked PoolWorkers.
As WorkerPool.write(...) is called (generally, by a kombu consumer
@@ -220,7 +213,7 @@ class WorkerPool(object):
0, # preferred worker 0
'Hello, World!'
)
- '''
+ """
pool_cls = PoolWorker
debug_meta = ''
@@ -284,13 +277,10 @@ class WorkerPool(object):
'{% endfor %}'
)
now = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S UTC')
- return tmpl.render(
- pool=self, workers=self.workers, meta=self.debug_meta,
- dt=now
- )
+ return tmpl.render(pool=self, workers=self.workers, meta=self.debug_meta, dt=now)
def write(self, preferred_queue, body):
- queue_order = sorted(range(len(self.workers)), key=lambda x: -1 if x==preferred_queue else x)
+ queue_order = sorted(range(len(self.workers)), key=lambda x: -1 if x == preferred_queue else x)
write_attempt_order = []
for queue_actual in queue_order:
try:
@@ -315,10 +305,10 @@ class WorkerPool(object):
class AutoscalePool(WorkerPool):
- '''
+ """
An extended pool implementation that automatically scales workers up and
down based on demand
- '''
+ """
pool_cls = StatefulPoolWorker
@@ -333,7 +323,7 @@ class AutoscalePool(WorkerPool):
else:
total_memory_gb = (psutil.virtual_memory().total >> 30) + 1 # noqa: round up
# 5 workers per GB of total memory
- self.max_workers = (total_memory_gb * 5)
+ self.max_workers = total_memory_gb * 5
# max workers can't be less than min_workers
self.max_workers = max(self.min_workers, self.max_workers)
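The sizing rule above caps the autoscale pool at five workers per GiB of system memory, rounding the GiB count up and flooring the result at min_workers. The same arithmetic as a stand-alone sketch (the default floor of 4 is an assumption, not taken from this diff):

    import psutil

    def compute_max_workers(min_workers=4):
        # Round total memory up to whole GiB, allow 5 workers per GiB,
        # and never drop below the configured minimum.
        total_memory_gb = (psutil.virtual_memory().total >> 30) + 1
        return max(min_workers, total_memory_gb * 5)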
@@ -410,15 +400,11 @@ class AutoscalePool(WorkerPool):
if current_task and isinstance(current_task, dict):
if current_task.get('task', '').endswith('tasks.run_task_manager'):
if 'started' not in current_task:
- w.managed_tasks[
- current_task['uuid']
- ]['started'] = time.time()
+ w.managed_tasks[current_task['uuid']]['started'] = time.time()
age = time.time() - current_task['started']
w.managed_tasks[current_task['uuid']]['age'] = age
if age > (60 * 5):
- logger.error(
- f'run_task_manager has held the advisory lock for >5m, sending SIGTERM to {w.pid}'
- ) # noqa
+ logger.error(f'run_task_manager has held the advisory lock for >5m, sending SIGTERM to {w.pid}') # noqa
os.kill(w.pid, signal.SIGTERM)
for m in orphaned:
diff --git a/awx/main/dispatch/publish.py b/awx/main/dispatch/publish.py
index 4d75654b5d..63b2890e1e 100644
--- a/awx/main/dispatch/publish.py
+++ b/awx/main/dispatch/publish.py
@@ -70,20 +70,12 @@ class task:
task_id = uuid or str(uuid4())
args = args or []
kwargs = kwargs or {}
- queue = (
- queue or
- getattr(cls.queue, 'im_func', cls.queue)
- )
+ queue = queue or getattr(cls.queue, 'im_func', cls.queue)
if not queue:
msg = f'{cls.name}: Queue value required and may not be None'
logger.error(msg)
raise ValueError(msg)
- obj = {
- 'uuid': task_id,
- 'args': args,
- 'kwargs': kwargs,
- 'task': cls.name
- }
+ obj = {'uuid': task_id, 'args': args, 'kwargs': kwargs, 'task': cls.name}
guid = GuidMiddleware.get_guid()
if guid:
obj['guid'] = guid
@@ -105,11 +97,7 @@ class task:
if inspect.isclass(fn):
bases = list(fn.__bases__)
ns.update(fn.__dict__)
- cls = type(
- fn.__name__,
- tuple(bases + [PublisherMixin]),
- ns
- )
+ cls = type(fn.__name__, tuple(bases + [PublisherMixin]), ns)
if inspect.isclass(fn):
return cls
diff --git a/awx/main/dispatch/reaper.py b/awx/main/dispatch/reaper.py
index 2c45507c2c..773c75f975 100644
--- a/awx/main/dispatch/reaper.py
+++ b/awx/main/dispatch/reaper.py
@@ -16,23 +16,23 @@ def reap_job(j, status):
return
j.status = status
j.start_args = '' # blank field to remove encrypted passwords
- j.job_explanation += ' '.join((
- 'Task was marked as running in Tower but was not present in',
- 'the job queue, so it has been marked as failed.',
- ))
+ j.job_explanation += ' '.join(
+ (
+ 'Task was marked as running in Tower but was not present in',
+ 'the job queue, so it has been marked as failed.',
+ )
+ )
j.save(update_fields=['status', 'start_args', 'job_explanation'])
if hasattr(j, 'send_notification_templates'):
j.send_notification_templates('failed')
j.websocket_emit_status(status)
- logger.error(
- '{} is no longer running; reaping'.format(j.log_format)
- )
+ logger.error('{} is no longer running; reaping'.format(j.log_format))
def reap(instance=None, status='failed', excluded_uuids=[]):
- '''
+ """
Reap all jobs in waiting|running for this instance.
- '''
+ """
me = instance
if me is None:
(changed, me) = Instance.objects.get_or_register()
@@ -41,13 +41,9 @@ def reap(instance=None, status='failed', excluded_uuids=[]):
now = tz_now()
workflow_ctype_id = ContentType.objects.get_for_model(WorkflowJob).id
jobs = UnifiedJob.objects.filter(
- (
- Q(status='running') |
- Q(status='waiting', modified__lte=now - timedelta(seconds=60))
- ) & (
- Q(execution_node=me.hostname) |
- Q(controller_node=me.hostname)
- ) & ~Q(polymorphic_ctype_id=workflow_ctype_id)
+ (Q(status='running') | Q(status='waiting', modified__lte=now - timedelta(seconds=60)))
+ & (Q(execution_node=me.hostname) | Q(controller_node=me.hostname))
+ & ~Q(polymorphic_ctype_id=workflow_ctype_id)
).exclude(celery_task_id__in=excluded_uuids)
for j in jobs:
reap_job(j, status)
diff --git a/awx/main/dispatch/worker/base.py b/awx/main/dispatch/worker/base.py
index 8b44c71e43..edb22b8797 100644
--- a/awx/main/dispatch/worker/base.py
+++ b/awx/main/dispatch/worker/base.py
@@ -25,14 +25,10 @@ else:
def signame(sig):
- return dict(
- (k, v) for v, k in signal.__dict__.items()
- if v.startswith('SIG') and not v.startswith('SIG_')
- )[sig]
+ return dict((k, v) for v, k in signal.__dict__.items() if v.startswith('SIG') and not v.startswith('SIG_'))[sig]
class WorkerSignalHandler:
-
def __init__(self):
self.kill_now = False
signal.signal(signal.SIGTERM, signal.SIG_DFL)
@@ -162,7 +158,6 @@ class AWXConsumerPG(AWXConsumerBase):
class BaseWorker(object):
-
def read(self, queue):
return queue.get(block=True, timeout=1)
diff --git a/awx/main/dispatch/worker/callback.py b/awx/main/dispatch/worker/callback.py
index 342280868c..68b8d5fd4f 100644
--- a/awx/main/dispatch/worker/callback.py
+++ b/awx/main/dispatch/worker/callback.py
@@ -16,9 +16,7 @@ import psutil
import redis
from awx.main.consumers import emit_channel_notification
-from awx.main.models import (JobEvent, AdHocCommandEvent, ProjectUpdateEvent,
- InventoryUpdateEvent, SystemJobEvent, UnifiedJob,
- Job)
+from awx.main.models import JobEvent, AdHocCommandEvent, ProjectUpdateEvent, InventoryUpdateEvent, SystemJobEvent, UnifiedJob, Job
from awx.main.tasks import handle_success_and_failure_notifications
from awx.main.models.events import emit_event_detail
from awx.main.utils.profiling import AWXProfiler
@@ -29,13 +27,13 @@ logger = logging.getLogger('awx.main.commands.run_callback_receiver')
class CallbackBrokerWorker(BaseWorker):
- '''
+ """
A worker implementation that deserializes callback event data and persists
it into the database.
The code that *generates* these types of messages is found in the
ansible-runner display callback plugin.
- '''
+ """
MAX_RETRIES = 2
last_stats = time.time()
@@ -83,9 +81,7 @@ class CallbackBrokerWorker(BaseWorker):
@property
def mb(self):
- return '{:0.3f}'.format(
- psutil.Process(self.pid).memory_info().rss / 1024.0 / 1024.0
- )
+ return '{:0.3f}'.format(psutil.Process(self.pid).memory_info().rss / 1024.0 / 1024.0)
def toggle_profiling(self, *args):
if not self.prof.is_started():
@@ -102,11 +98,7 @@ class CallbackBrokerWorker(BaseWorker):
def flush(self, force=False):
now = tz_now()
- if (
- force or
- (time.time() - self.last_flush) > settings.JOB_EVENT_BUFFER_SECONDS or
- any([len(events) >= 1000 for events in self.buff.values()])
- ):
+ if force or (time.time() - self.last_flush) > settings.JOB_EVENT_BUFFER_SECONDS or any([len(events) >= 1000 for events in self.buff.values()]):
for cls, events in self.buff.items():
logger.debug(f'{cls.__name__}.objects.bulk_create({len(events)})')
for e in events:
@@ -161,10 +153,7 @@ class CallbackBrokerWorker(BaseWorker):
# closed. don't actually persist them to the database; we
# just use them to report `summary` websocket events as an
# approximation for when a job is "done"
- emit_channel_notification(
- 'jobs-summary',
- dict(group_name='jobs', unified_job_id=job_identifier, final_counter=final_counter)
- )
+ emit_channel_notification('jobs-summary', dict(group_name='jobs', unified_job_id=job_identifier, final_counter=final_counter))
# Additionally, when we've processed all events, we should
# have all the data we need to send out success/failure
# notification templates
@@ -196,10 +185,7 @@ class CallbackBrokerWorker(BaseWorker):
logger.exception('Worker could not re-establish database connectivity, giving up on one or more events.')
return
delay = 60 * retries
- logger.exception('Database Error Saving Job Event, retry #{i} in {delay} seconds:'.format(
- i=retries + 1,
- delay=delay
- ))
+ logger.exception('Database Error Saving Job Event, retry #{i} in {delay} seconds:'.format(i=retries + 1, delay=delay))
django_connection.close()
time.sleep(delay)
retries += 1
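
The joined flush() condition above gates a buffered bulk insert: flush when forced, when the buffer has gone stale, or when any event class has 1000+ pending rows. A minimal sketch of that gate with stand-in constants (the real values come from Django settings):

import time

JOB_EVENT_BUFFER_SECONDS = 1.0  # stand-in for settings.JOB_EVENT_BUFFER_SECONDS
MAX_BUFFERED = 1000

class EventBuffer:
    def __init__(self):
        self.buff = {}  # event class -> list of pending events
        self.last_flush = time.time()

    def should_flush(self, force=False):
        # Mirrors the single-line condition black produced above.
        return (
            force
            or (time.time() - self.last_flush) > JOB_EVENT_BUFFER_SECONDS
            or any(len(events) >= MAX_BUFFERED for events in self.buff.values())
        )

buf = EventBuffer()
assert buf.should_flush(force=True)
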
diff --git a/awx/main/dispatch/worker/task.py b/awx/main/dispatch/worker/task.py
index d71d62686f..e55cfbdde2 100644
--- a/awx/main/dispatch/worker/task.py
+++ b/awx/main/dispatch/worker/task.py
@@ -17,22 +17,22 @@ logger = logging.getLogger('awx.main.dispatch')
class TaskWorker(BaseWorker):
- '''
+ """
A worker implementation that deserializes task messages and runs native
Python code.
The code that *builds* these types of messages is found in
`awx.main.dispatch.publish`.
- '''
+ """
@classmethod
def resolve_callable(cls, task):
- '''
+ """
Transform a dotted notation task into an imported, callable function, e.g.,
awx.main.tasks.delete_inventory
awx.main.tasks.RunProjectUpdate
- '''
+ """
if not task.startswith('awx.'):
raise ValueError('{} is not a valid awx task'.format(task))
module, target = task.rsplit('.', 1)
@@ -40,17 +40,15 @@ class TaskWorker(BaseWorker):
_call = None
if hasattr(module, target):
_call = getattr(module, target, None)
- if not (
- hasattr(_call, 'apply_async') and hasattr(_call, 'delay')
- ):
+ if not (hasattr(_call, 'apply_async') and hasattr(_call, 'delay')):
raise ValueError('{} is not decorated with @task()'.format(task))
return _call
def run_callable(self, body):
- '''
+ """
Given some AMQP message, import the correct Python code and run it.
- '''
+ """
task = body['task']
uuid = body.get('uuid', '<unknown>')
args = body.get('args', [])
@@ -67,7 +65,7 @@ class TaskWorker(BaseWorker):
return _call(*args, **kwargs)
def perform_work(self, body):
- '''
+ """
Import and run code for a task e.g.,
body = {
@@ -85,7 +83,7 @@ class TaskWorker(BaseWorker):
'kwargs': {},
'task': u'awx.main.tasks.RunProjectUpdate'
}
- '''
+ """
settings.__clean_on_fork__()
result = None
try:
@@ -101,9 +99,7 @@ class TaskWorker(BaseWorker):
task = body['task']
args = body.get('args', [])
kwargs = body.get('kwargs', {})
- logger.exception('Worker failed to run task {}(*{}, **{}'.format(
- task, args, kwargs
- ))
+ logger.exception('Worker failed to run task {}(*{}, **{})'.format(task, args, kwargs))
except Exception:
# It's fairly critical that this code _not_ raise exceptions on logging
# If you configure external logging in a way that _it_ fails, there's
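
resolve_callable above turns a dotted task path into an imported callable. A runnable sketch of the core import step; the real method additionally verifies that the target carries the apply_async/delay attributes added by the @task() decorator:

import importlib

def resolve_callable(task):
    # Split 'awx.main.tasks.delete_inventory' into module and attribute,
    # import the module, and fetch the attribute from it.
    if not task.startswith('awx.'):
        raise ValueError('{} is not a valid awx task'.format(task))
    module_name, target = task.rsplit('.', 1)
    module = importlib.import_module(module_name)
    # Returns None if the attribute is missing, like the original's getattr.
    return getattr(module, target, None)
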
diff --git a/awx/main/exceptions.py b/awx/main/exceptions.py
index 64cbc94783..6a9bb7ece4 100644
--- a/awx/main/exceptions.py
+++ b/awx/main/exceptions.py
@@ -2,8 +2,7 @@
# All Rights Reserved.
-
-class _AwxTaskError():
+class _AwxTaskError:
def build_exception(self, task, message=None):
if message is None:
message = "Execution error running {}".format(task.log_format)
@@ -14,7 +13,7 @@ class _AwxTaskError():
def TaskCancel(self, task, rc):
"""Canceled flag caused run_pexpect to kill the job run"""
- message="{} was canceled (rc={})".format(task.log_format, rc)
+ message = "{} was canceled (rc={})".format(task.log_format, rc)
e = self.build_exception(task, message)
e.rc = rc
e.awx_task_error_type = "TaskCancel"
diff --git a/awx/main/fields.py b/awx/main/fields.py
index 0122b0ab80..05fea8ca6a 100644
--- a/awx/main/fields.py
+++ b/awx/main/fields.py
@@ -26,7 +26,7 @@ from django.db.models.fields.related_descriptors import (
ForwardManyToOneDescriptor,
ManyToManyDescriptor,
ReverseManyToOneDescriptor,
- create_forward_many_to_many_manager
+ create_forward_many_to_many_manager,
)
from django.utils.encoding import smart_text
from django.utils.functional import cached_property
@@ -46,34 +46,32 @@ from rest_framework import serializers
from awx.main.utils.filters import SmartFilter
from awx.main.utils.encryption import encrypt_value, decrypt_value, get_encryption_key
from awx.main.validators import validate_ssh_private_key
-from awx.main.models.rbac import (
- batch_role_ancestor_rebuilding, Role,
- ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR
-)
+from awx.main.models.rbac import batch_role_ancestor_rebuilding, Role, ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR
from awx.main.constants import ENV_BLOCKLIST
from awx.main import utils
-__all__ = ['AutoOneToOneField', 'ImplicitRoleField', 'JSONField',
- 'SmartFilterField', 'OrderedManyToManyField',
- 'update_role_parentage_for_instance',
- 'is_implicit_parent']
+__all__ = [
+ 'AutoOneToOneField',
+ 'ImplicitRoleField',
+ 'JSONField',
+ 'SmartFilterField',
+ 'OrderedManyToManyField',
+ 'update_role_parentage_for_instance',
+ 'is_implicit_parent',
+]
# Provide a (better) custom error message for enum jsonschema validation
def __enum_validate__(validator, enums, instance, schema):
if instance not in enums:
- yield jsonschema.exceptions.ValidationError(
- _("'{value}' is not one of ['{allowed_values}']").format(
- value=instance, allowed_values="', '".join(enums))
- )
+ yield jsonschema.exceptions.ValidationError(_("'{value}' is not one of ['{allowed_values}']").format(value=instance, allowed_values="', '".join(enums)))
Draft4Validator.VALIDATORS['enum'] = __enum_validate__
class JSONField(upstream_JSONField):
-
def db_type(self, connection):
return 'text'
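
The enum override in this hunk replaces jsonschema's stock message with one that lists the allowed values. A standalone sketch of the same hook, with the translation wrapper _() omitted:

import jsonschema
from jsonschema import Draft4Validator

def enum_with_better_error(validator, enums, instance, schema):
    # Yield a friendlier error than jsonschema's default when the value is
    # not one of the allowed enum members.
    if instance not in enums:
        yield jsonschema.exceptions.ValidationError(
            "'{value}' is not one of ['{allowed_values}']".format(value=instance, allowed_values="', '".join(enums))
        )

Draft4Validator.VALIDATORS['enum'] = enum_with_better_error
errors = list(Draft4Validator({'enum': ['a', 'b']}).iter_errors('c'))
assert errors[0].message == "'c' is not one of ['a', 'b']"
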
@@ -93,9 +91,7 @@ class JSONBField(upstream_JSONBField):
if connection.vendor == 'sqlite':
# sqlite (which we use for tests) does not support jsonb;
return json.dumps(value, cls=DjangoJSONEncoder)
- return super(JSONBField, self).get_db_prep_value(
- value, connection, prepared
- )
+ return super(JSONBField, self).get_db_prep_value(value, connection, prepared)
def from_db_value(self, value, expression, connection):
# Work around a bug in django-jsonfield
@@ -104,6 +100,7 @@ class JSONBField(upstream_JSONBField):
return json.loads(value)
return value
+
# Based on AutoOneToOneField from django-annoying:
# https://bitbucket.org/offline/django-annoying/src/a0de8b294db3/annoying/fields.py
@@ -124,8 +121,7 @@ class AutoOneToOneField(models.OneToOneField):
"""OneToOneField that creates related object if it doesn't exist."""
def contribute_to_related_class(self, cls, related):
- setattr(cls, related.get_accessor_name(),
- AutoSingleRelatedObjectDescriptor(related))
+ setattr(cls, related.get_accessor_name(), AutoSingleRelatedObjectDescriptor(related))
def resolve_role_field(obj, field):
@@ -157,22 +153,17 @@ def resolve_role_field(obj, field):
def is_implicit_parent(parent_role, child_role):
- '''
+ """
Determine if the parent_role is an implicit parent as defined by
the model definition. This does not include any role parents that
might have been set by the user.
- '''
+ """
if child_role.content_object is None:
# The only singleton implicit parent is the system admin being
# a parent of the system auditor role
- return bool(
- child_role.singleton_name == ROLE_SINGLETON_SYSTEM_AUDITOR and
- parent_role.singleton_name == ROLE_SINGLETON_SYSTEM_ADMINISTRATOR
- )
+ return bool(child_role.singleton_name == ROLE_SINGLETON_SYSTEM_AUDITOR and parent_role.singleton_name == ROLE_SINGLETON_SYSTEM_ADMINISTRATOR)
# Get the list of implicit parents that were defined at the class level.
- implicit_parents = getattr(
- child_role.content_object.__class__, child_role.role_field
- ).field.parent_role
+ implicit_parents = getattr(child_role.content_object.__class__, child_role.role_field).field.parent_role
if type(implicit_parents) != list:
implicit_parents = [implicit_parents]
@@ -195,10 +186,10 @@ def is_implicit_parent(parent_role, child_role):
def update_role_parentage_for_instance(instance):
- '''update_role_parentage_for_instance
+ """update_role_parentage_for_instance
updates the parents listing for all the roles
of a given instance if they have changed
- '''
+ """
parents_removed = set()
parents_added = set()
for implicit_role_field in getattr(instance.__class__, '__implicit_role_fields'):
@@ -278,8 +269,7 @@ class ImplicitRoleField(models.ForeignKey):
# consistency is assured by unit test awx.main.tests.functional
field = getattr(cls, field_name, None)
- if field and type(field) is ReverseManyToOneDescriptor or \
- type(field) is ManyToManyDescriptor:
+ if field and type(field) is ReverseManyToOneDescriptor or type(field) is ManyToManyDescriptor:
if '.' in field_attr:
raise Exception('Referencing deep roles through ManyToMany fields is unsupported.')
@@ -313,8 +303,8 @@ class ImplicitRoleField(models.ForeignKey):
getattr(instance, self.name).parents.add(getattr(obj, field_attr))
if action == 'pre_remove':
getattr(instance, self.name).parents.remove(getattr(obj, field_attr))
- return _m2m_update
+ return _m2m_update
def _post_save(self, instance, created, *args, **kwargs):
Role_ = utils.get_current_apps().get_model('main', 'Role')
@@ -330,13 +320,7 @@ class ImplicitRoleField(models.ForeignKey):
for implicit_role_field in getattr(latest_instance.__class__, '__implicit_role_fields'):
cur_role = getattr(latest_instance, implicit_role_field.name, None)
if cur_role is None:
- missing_roles.append(
- Role_(
- role_field=implicit_role_field.name,
- content_type_id=ct_id,
- object_id=latest_instance.id
- )
- )
+ missing_roles.append(Role_(role_field=implicit_role_field.name, content_type_id=ct_id, object_id=latest_instance.id))
if len(missing_roles) > 0:
Role_.objects.bulk_create(missing_roles)
@@ -352,7 +336,6 @@ class ImplicitRoleField(models.ForeignKey):
update_role_parentage_for_instance(latest_instance)
instance.refresh_from_db()
-
def _resolve_parent_roles(self, instance):
if not self.parent_role:
return set()
@@ -424,10 +407,7 @@ class JSONSchemaField(JSONBField):
def validate(self, value, model_instance):
super(JSONSchemaField, self).validate(value, model_instance)
errors = []
- for error in Draft4Validator(
- self.schema(model_instance),
- format_checker=self.format_checker
- ).iter_errors(value):
+ for error in Draft4Validator(self.schema(model_instance), format_checker=self.format_checker).iter_errors(value):
if error.validator == 'pattern' and 'error' in error.schema:
error.message = error.schema['error'].format(instance=error.instance)
elif error.validator == 'type':
@@ -435,19 +415,15 @@ class JSONSchemaField(JSONBField):
if expected_type == 'object':
expected_type = 'dict'
if error.path:
- error.message = _(
- '{type} provided in relative path {path}, expected {expected_type}'
- ).format(path=list(error.path), type=type(error.instance).__name__,
- expected_type=expected_type)
+ error.message = _('{type} provided in relative path {path}, expected {expected_type}').format(
+ path=list(error.path), type=type(error.instance).__name__, expected_type=expected_type
+ )
else:
- error.message = _(
- '{type} provided, expected {expected_type}'
- ).format(path=list(error.path), type=type(error.instance).__name__,
- expected_type=expected_type)
+ error.message = _('{type} provided, expected {expected_type}').format(
+ path=list(error.path), type=type(error.instance).__name__, expected_type=expected_type
+ )
elif error.validator == 'additionalProperties' and hasattr(error, 'path'):
- error.message = _(
- 'Schema validation error in relative path {path} ({error})'
- ).format(path=list(error.path), error=error.message)
+ error.message = _('Schema validation error in relative path {path} ({error})').format(path=list(error.path), error=error.message)
errors.append(error)
if errors:
@@ -492,13 +468,9 @@ def format_url(value):
except Exception as e:
raise jsonschema.exceptions.FormatError(str(e))
if parsed.scheme == '':
- raise jsonschema.exceptions.FormatError(
- 'Invalid URL: Missing url scheme (http, https, etc.)'
- )
+ raise jsonschema.exceptions.FormatError('Invalid URL: Missing url scheme (http, https, etc.)')
if parsed.netloc == '':
- raise jsonschema.exceptions.FormatError(
- 'Invalid URL: {}'.format(value)
- )
+ raise jsonschema.exceptions.FormatError('Invalid URL: {}'.format(value))
return True
@@ -536,10 +508,7 @@ class DynamicCredentialInputField(JSONSchemaField):
super(JSONSchemaField, self).validate(value, model_instance)
credential_type = model_instance.source_credential.credential_type
errors = {}
- for error in Draft4Validator(
- self.schema(credential_type),
- format_checker=self.format_checker
- ).iter_errors(value):
+ for error in Draft4Validator(self.schema(credential_type), format_checker=self.format_checker).iter_errors(value):
if error.validator == 'pattern' and 'error' in error.schema:
error.message = error.schema['error'].format(instance=error.instance)
if 'id' not in error.schema:
@@ -555,14 +524,10 @@ class DynamicCredentialInputField(JSONSchemaField):
defined_metadata = [field.get('id') for field in credential_type.inputs.get('metadata', [])]
for field in credential_type.inputs.get('required', []):
if field in defined_metadata and not value.get(field, None):
- errors[field] = [_('required for %s') % (
- credential_type.name
- )]
+ errors[field] = [_('required for %s') % (credential_type.name)]
if errors:
- raise serializers.ValidationError({
- 'metadata': errors
- })
+ raise serializers.ValidationError({'metadata': errors})
class CredentialInputField(JSONSchemaField):
@@ -610,8 +575,7 @@ class CredentialInputField(JSONSchemaField):
# ssh_key_data format)
if not isinstance(value, dict):
- return super(CredentialInputField, self).validate(value,
- model_instance)
+ return super(CredentialInputField, self).validate(value, model_instance)
# Backwards compatability: in prior versions, if you submit `null` for
# a credential field value, it just considers the value an empty string
@@ -622,11 +586,7 @@ class CredentialInputField(JSONSchemaField):
decrypted_values = {}
for k, v in value.items():
- if all([
- k in model_instance.credential_type.secret_fields,
- v != '$encrypted$',
- model_instance.pk
- ]):
+ if all([k in model_instance.credential_type.secret_fields, v != '$encrypted$', model_instance.pk]):
if not isinstance(model_instance.inputs.get(k), str):
raise django_exceptions.ValidationError(
_('secret values must be of type string, not {}').format(type(v).__name__),
@@ -641,16 +601,11 @@ class CredentialInputField(JSONSchemaField):
if not model_instance.pk:
for field in model_instance.credential_type.secret_fields:
if value.get(field) == '$encrypted$':
- raise serializers.ValidationError({
- self.name: [f'$encrypted$ is a reserved keyword, and cannot be used for {field}.']
- })
+ raise serializers.ValidationError({self.name: [f'$encrypted$ is a reserved keyword, and cannot be used for {field}.']})
super(JSONSchemaField, self).validate(decrypted_values, model_instance)
errors = {}
- for error in Draft4Validator(
- self.schema(model_instance),
- format_checker=self.format_checker
- ).iter_errors(decrypted_values):
+ for error in Draft4Validator(self.schema(model_instance), format_checker=self.format_checker).iter_errors(decrypted_values):
if error.validator == 'pattern' and 'error' in error.schema:
error.message = error.schema['error'].format(instance=error.instance)
if error.validator == 'dependencies':
@@ -661,8 +616,8 @@ class CredentialInputField(JSONSchemaField):
# string)
match = re.search(
# 'foo' is a dependency of 'bar'
- r"'" # apostrophe
- r"([^']+)" # one or more non-apostrophes (first group)
+ r"'" # apostrophe
+ r"([^']+)" # one or more non-apostrophes (first group)
r"'[\w ]+'" # one or more words/spaces
r"([^']+)", # second group
error.message,
@@ -671,9 +626,7 @@ class CredentialInputField(JSONSchemaField):
label, extraneous = match.groups()
if error.schema['properties'].get(label):
label = error.schema['properties'][label]['label']
- errors[extraneous] = [
- _('cannot be set unless "%s" is set') % label
- ]
+ errors[extraneous] = [_('cannot be set unless "%s" is set') % label]
continue
if 'id' not in error.schema:
# If the error is not for a specific field, it's specific to
@@ -689,10 +642,7 @@ class CredentialInputField(JSONSchemaField):
# `ssh_key_unlock` requirements are very specific and can't be
# represented without complicated JSON schema
- if (
- model_instance.credential_type.managed_by_tower is True and
- 'ssh_key_unlock' in defined_fields
- ):
+ if model_instance.credential_type.managed_by_tower is True and 'ssh_key_unlock' in defined_fields:
# in order to properly test the necessity of `ssh_key_unlock`, we
# need to know the real value of `ssh_key_data`; for a payload like:
@@ -702,25 +652,23 @@ class CredentialInputField(JSONSchemaField):
# }
# ...we have to fetch the actual key value from the database
if model_instance.pk and model_instance.inputs.get('ssh_key_data') == '$encrypted$':
- model_instance.inputs['ssh_key_data'] = model_instance.__class__.objects.get(
- pk=model_instance.pk
- ).inputs.get('ssh_key_data')
+ model_instance.inputs['ssh_key_data'] = model_instance.__class__.objects.get(pk=model_instance.pk).inputs.get('ssh_key_data')
if model_instance.has_encrypted_ssh_key_data and not value.get('ssh_key_unlock'):
errors['ssh_key_unlock'] = [_('must be set when SSH key is encrypted.')]
-
- if all([
- model_instance.inputs.get('ssh_key_data'),
- value.get('ssh_key_unlock'),
- not model_instance.has_encrypted_ssh_key_data,
- 'ssh_key_data' not in errors
- ]):
+
+ if all(
+ [
+ model_instance.inputs.get('ssh_key_data'),
+ value.get('ssh_key_unlock'),
+ not model_instance.has_encrypted_ssh_key_data,
+ 'ssh_key_data' not in errors,
+ ]
+ ):
errors['ssh_key_unlock'] = [_('should not be set when SSH key is not encrypted.')]
if errors:
- raise serializers.ValidationError({
- 'inputs': errors
- })
+ raise serializers.ValidationError({'inputs': errors})
class CredentialTypeInputField(JSONSchemaField):
@@ -734,23 +682,15 @@ class CredentialTypeInputField(JSONSchemaField):
'type': 'object',
'additionalProperties': False,
'properties': {
- 'required': {
- 'type': 'array',
- 'items': {'type': 'string'}
- },
- 'fields': {
+ 'required': {'type': 'array', 'items': {'type': 'string'}},
+ 'fields': {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'type': {'enum': ['string', 'boolean']},
'format': {'enum': ['ssh_private_key', 'url']},
- 'choices': {
- 'type': 'array',
- 'minItems': 1,
- 'items': {'type': 'string'},
- 'uniqueItems': True
- },
+ 'choices': {'type': 'array', 'minItems': 1, 'items': {'type': 'string'}, 'uniqueItems': True},
'id': {
'type': 'string',
'pattern': '^[a-zA-Z_]+[a-zA-Z0-9_]*$',
@@ -765,23 +705,20 @@ class CredentialTypeInputField(JSONSchemaField):
},
'additionalProperties': False,
'required': ['id', 'label'],
- }
- }
- }
+ },
+ },
+ },
}
def validate(self, value, model_instance):
- if isinstance(value, dict) and 'dependencies' in value and \
- not model_instance.managed_by_tower:
+ if isinstance(value, dict) and 'dependencies' in value and not model_instance.managed_by_tower:
raise django_exceptions.ValidationError(
_("'dependencies' is not supported for custom credentials."),
code='invalid',
params={'value': value},
)
- super(CredentialTypeInputField, self).validate(
- value, model_instance
- )
+ super(CredentialTypeInputField, self).validate(value, model_instance)
ids = {}
for field in value.get('fields', []):
@@ -809,21 +746,26 @@ class CredentialTypeInputField(JSONSchemaField):
default = field['default']
_type = {'string': str, 'boolean': bool}[field['type']]
if type(default) != _type:
- raise django_exceptions.ValidationError(
- _('{} is not a {}').format(default, field['type'])
- )
-
- for key in ('choices', 'multiline', 'format', 'secret',):
+ raise django_exceptions.ValidationError(_('{} is not a {}').format(default, field['type']))
+
+ for key in (
+ 'choices',
+ 'multiline',
+ 'format',
+ 'secret',
+ ):
if key in field and field['type'] != 'string':
raise django_exceptions.ValidationError(
- _('{sub_key} not allowed for {element_type} type ({element_id})'.format(
- sub_key=key, element_type=field['type'], element_id=field['id'])),
+ _(
+ '{sub_key} not allowed for {element_type} type ({element_id})'.format(
+ sub_key=key, element_type=field['type'], element_id=field['id']
+ )
+ ),
code='invalid',
params={'value': value},
)
-
class CredentialTypeInjectorField(JSONSchemaField):
"""
Used to validate JSON for
@@ -854,7 +796,7 @@ class CredentialTypeInjectorField(JSONSchemaField):
'type': 'string',
# The environment variable _value_ can be any ascii,
# but pexpect will choke on any unicode
- 'pattern': '^[\x00-\x7F]*$'
+ 'pattern': '^[\x00-\x7F]*$',
},
},
'additionalProperties': False,
@@ -868,26 +810,25 @@ class CredentialTypeInjectorField(JSONSchemaField):
'additionalProperties': False,
},
},
- 'additionalProperties': False
+ 'additionalProperties': False,
}
def validate_env_var_allowed(self, env_var):
if env_var.startswith('ANSIBLE_'):
raise django_exceptions.ValidationError(
- _('Environment variable {} may affect Ansible configuration so its '
- 'use is not allowed in credentials.').format(env_var),
- code='invalid', params={'value': env_var},
+ _('Environment variable {} may affect Ansible configuration so its ' 'use is not allowed in credentials.').format(env_var),
+ code='invalid',
+ params={'value': env_var},
)
if env_var in ENV_BLOCKLIST:
raise django_exceptions.ValidationError(
_('Environment variable {} is not allowed to be used in credentials.').format(env_var),
- code='invalid', params={'value': env_var},
+ code='invalid',
+ params={'value': env_var},
)
def validate(self, value, model_instance):
- super(CredentialTypeInjectorField, self).validate(
- value, model_instance
- )
+ super(CredentialTypeInjectorField, self).validate(value, model_instance)
# make sure the inputs are valid first
try:
@@ -901,10 +842,7 @@ class CredentialTypeInjectorField(JSONSchemaField):
# In addition to basic schema validation, search the injector fields
# for template variables and make sure they match the fields defined in
# the inputs
- valid_namespace = dict(
- (field, 'EXAMPLE')
- for field in model_instance.defined_fields
- )
+ valid_namespace = dict((field, 'EXAMPLE') for field in model_instance.defined_fields)
class ExplodingNamespace:
def __str__(self):
@@ -940,24 +878,18 @@ class CredentialTypeInjectorField(JSONSchemaField):
self.validate_env_var_allowed(key)
for key, tmpl in injector.items():
try:
- sandbox.ImmutableSandboxedEnvironment(
- undefined=StrictUndefined
- ).from_string(tmpl).render(valid_namespace)
+ sandbox.ImmutableSandboxedEnvironment(undefined=StrictUndefined).from_string(tmpl).render(valid_namespace)
except UndefinedError as e:
raise django_exceptions.ValidationError(
- _('{sub_key} uses an undefined field ({error_msg})').format(
- sub_key=key, error_msg=e),
+ _('{sub_key} uses an undefined field ({error_msg})').format(sub_key=key, error_msg=e),
code='invalid',
params={'value': value},
)
except SecurityError as e:
- raise django_exceptions.ValidationError(
- _('Encountered unsafe code execution: {}').format(e)
- )
+ raise django_exceptions.ValidationError(_('Encountered unsafe code execution: {}').format(e))
except TemplateSyntaxError as e:
raise django_exceptions.ValidationError(
- _('Syntax error rendering template for {sub_key} inside of {type} ({error_msg})').format(
- sub_key=key, type=type_, error_msg=e),
+ _('Syntax error rendering template for {sub_key} inside of {type} ({error_msg})').format(sub_key=key, type=type_, error_msg=e),
code='invalid',
params={'value': value},
)
@@ -967,6 +899,7 @@ class AskForField(models.BooleanField):
"""
Denotes whether to prompt on launch for another field on the same template
"""
+
def __init__(self, allows_field=None, **kwargs):
super(AskForField, self).__init__(**kwargs)
self._allows_field = allows_field
@@ -975,7 +908,7 @@ class AskForField(models.BooleanField):
def allows_field(self):
if self._allows_field is None:
try:
- return self.name[len('ask_'):-len('_on_launch')]
+ return self.name[len('ask_') : -len('_on_launch')]
except AttributeError:
# self.name will be set by the model metaclass, not this field
raise Exception('Corresponding allows_field cannot be accessed until model is initialized.')
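
black treats the colon in a slice as a binary operator when its bounds are expressions, hence the spaced [len('ask_') : -len('_on_launch')] above. The behavior is unchanged:

name = 'ask_limit_on_launch'
# Spaced and unspaced slices are identical; only the formatting differs.
assert name[len('ask_') : -len('_on_launch')] == name[4:-10] == 'limit'
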
@@ -983,11 +916,8 @@ class AskForField(models.BooleanField):
class OAuth2ClientSecretField(models.CharField):
-
def get_db_prep_value(self, value, connection, prepared=False):
- return super(OAuth2ClientSecretField, self).get_db_prep_value(
- encrypt_value(value), connection, prepared
- )
+ return super(OAuth2ClientSecretField, self).get_db_prep_value(encrypt_value(value), connection, prepared)
def from_db_value(self, value, expression, connection):
if value and value.startswith('$encrypted$'):
@@ -1021,9 +951,7 @@ class OrderedManyToManyDescriptor(ManyToManyDescriptor):
def add_custom_queryset_to_many_related_manager(many_related_manage_cls):
class OrderedManyRelatedManager(many_related_manage_cls):
def get_queryset(self):
- return super(OrderedManyRelatedManager, self).get_queryset().order_by(
- '%s__position' % self.through._meta.model_name
- )
+ return super(OrderedManyRelatedManager, self).get_queryset().order_by('%s__position' % self.through._meta.model_name)
return OrderedManyRelatedManager
@@ -1046,30 +974,19 @@ class OrderedManyToManyField(models.ManyToManyField):
if kwargs.get('action') in ('post_add', 'post_remove'):
order_with_respect_to = None
for field in sender._meta.local_fields:
- if (
- isinstance(field, models.ForeignKey) and
- isinstance(kwargs['instance'], field.related_model)
- ):
+ if isinstance(field, models.ForeignKey) and isinstance(kwargs['instance'], field.related_model):
order_with_respect_to = field.name
- for i, ig in enumerate(sender.objects.filter(**{
- order_with_respect_to: kwargs['instance'].pk}
- )):
+ for i, ig in enumerate(sender.objects.filter(**{order_with_respect_to: kwargs['instance'].pk})):
if ig.position != i:
ig.position = i
ig.save()
def contribute_to_class(self, cls, name, **kwargs):
super(OrderedManyToManyField, self).contribute_to_class(cls, name, **kwargs)
- setattr(
- cls, name,
- OrderedManyToManyDescriptor(self.remote_field, reverse=False)
- )
+ setattr(cls, name, OrderedManyToManyDescriptor(self.remote_field, reverse=False))
through = getattr(cls, name).through
if isinstance(through, str) and "." not in through:
# support lazy loading of string model names
through = '.'.join([cls._meta.app_label, through])
- m2m_changed.connect(
- self._update_m2m_position,
- sender=through
- )
+ m2m_changed.connect(self._update_m2m_position, sender=through)
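
_update_m2m_position above renumbers the through-table rows after a post_add or post_remove so the position column stays contiguous. A plain-Python sketch of that renumbering, with dicts standing in for through-model rows:

def renumber(rows):
    # Walk the rows in position order and rewrite any position that has
    # drifted from its index; return the ids that changed.
    changed = []
    for i, row in enumerate(sorted(rows, key=lambda r: r['position'])):
        if row['position'] != i:
            row['position'] = i
            changed.append(row['id'])
    return changed

rows = [{'id': 1, 'position': 0}, {'id': 2, 'position': 3}, {'id': 3, 'position': 5}]
assert renumber(rows) == [2, 3]
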
diff --git a/awx/main/isolated/manager.py b/awx/main/isolated/manager.py
index abcd41c5c1..79dac4445f 100644
--- a/awx/main/isolated/manager.py
+++ b/awx/main/isolated/manager.py
@@ -12,9 +12,7 @@ from django.conf import settings
import ansible_runner
import awx
-from awx.main.utils import (
- get_system_task_capacity
-)
+from awx.main.utils import get_system_task_capacity
logger = logging.getLogger('awx.isolated.manager')
playbook_logger = logging.getLogger('awx.isolated.manager.playbooks')
@@ -30,7 +28,6 @@ def set_pythonpath(venv_libdir, env):
class IsolatedManager(object):
-
def __init__(self, event_handler, canceled_callback=None, check_callback=None):
"""
:param event_handler: a callable used to persist event data from isolated nodes
@@ -46,10 +43,7 @@ class IsolatedManager(object):
self.instance = None
def build_inventory(self, hosts):
- inventory = '\n'.join([
- '{} ansible_ssh_user={}'.format(host, settings.AWX_ISOLATED_USERNAME)
- for host in hosts
- ])
+ inventory = '\n'.join(['{} ansible_ssh_user={}'.format(host, settings.AWX_ISOLATED_USERNAME) for host in hosts])
return inventory
@@ -83,10 +77,7 @@ class IsolatedManager(object):
playbook_logger.info(runner_obj.stdout.read())
return {
- 'project_dir': os.path.abspath(os.path.join(
- os.path.dirname(awx.__file__),
- 'playbooks'
- )),
+ 'project_dir': os.path.abspath(os.path.join(os.path.dirname(awx.__file__), 'playbooks')),
'inventory': self.build_inventory(hosts),
'envvars': env,
'finished_callback': finished_callback,
@@ -102,10 +93,7 @@ class IsolatedManager(object):
return os.path.join(self.private_data_dir, *args)
def run_management_playbook(self, playbook, private_data_dir, idle_timeout=None, **kw):
- iso_dir = tempfile.mkdtemp(
- prefix=playbook,
- dir=private_data_dir
- )
+ iso_dir = tempfile.mkdtemp(prefix=playbook, dir=private_data_dir)
params = self.runner_params.copy()
params.get('envvars', dict())['ANSIBLE_CALLBACK_WHITELIST'] = 'profile_tasks'
params['playbook'] = playbook
@@ -115,17 +103,14 @@ class IsolatedManager(object):
else:
params['settings'].pop('idle_timeout', None)
params.update(**kw)
- if all([
- getattr(settings, 'AWX_ISOLATED_KEY_GENERATION', False) is True,
- getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', None)
- ]):
+ if all([getattr(settings, 'AWX_ISOLATED_KEY_GENERATION', False) is True, getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', None)]):
params['ssh_key'] = settings.AWX_ISOLATED_PRIVATE_KEY
return ansible_runner.interface.run(**params)
def dispatch(self, playbook=None, module=None, module_args=None):
- '''
+ """
Ship the runner payload to a remote host for isolated execution.
- '''
+ """
self.handled_events = set()
self.started_at = time.time()
@@ -139,12 +124,10 @@ class IsolatedManager(object):
# don't rsync the ssh_key FIFO
'- /env/ssh_key',
# don't rsync kube config files
- '- .kubeconfig*'
+ '- .kubeconfig*',
]
- for filename, data in (
- ['.rsync-filter', '\n'.join(rsync_exclude)],
- ):
+ for filename, data in (['.rsync-filter', '\n'.join(rsync_exclude)],):
path = self.path_to(filename)
with open(path, 'w') as f:
f.write(data)
@@ -163,10 +146,9 @@ class IsolatedManager(object):
extravars['module_args'] = module_args
logger.debug('Starting job {} on isolated host with `run_isolated.yml` playbook.'.format(self.instance.id))
- runner_obj = self.run_management_playbook('run_isolated.yml',
- self.private_data_dir,
- idle_timeout=max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT),
- extravars=extravars)
+ runner_obj = self.run_management_playbook(
+ 'run_isolated.yml', self.private_data_dir, idle_timeout=max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT), extravars=extravars
+ )
if runner_obj.status == 'failed':
self.instance.result_traceback = runner_obj.stdout.read()
@@ -190,10 +172,7 @@ class IsolatedManager(object):
:param interval: an interval (in seconds) to wait between status polls
"""
interval = interval if interval is not None else settings.AWX_ISOLATED_CHECK_INTERVAL
- extravars = {
- 'src': self.private_data_dir,
- 'job_id': self.instance.id
- }
+ extravars = {'src': self.private_data_dir, 'job_id': self.instance.id}
status = 'failed'
rc = None
last_check = time.time()
@@ -210,9 +189,7 @@ class IsolatedManager(object):
logger.debug('Checking on isolated job {} with `check_isolated.yml`.'.format(self.instance.id))
time_start = datetime.datetime.now()
- runner_obj = self.run_management_playbook('check_isolated.yml',
- self.private_data_dir,
- extravars=extravars)
+ runner_obj = self.run_management_playbook('check_isolated.yml', self.private_data_dir, extravars=extravars)
time_end = datetime.datetime.now()
time_diff = time_end - time_start
logger.debug('Finished checking on isolated job {} with `check_isolated.yml` took {} seconds.'.format(self.instance.id, time_diff.total_seconds()))
@@ -274,9 +251,7 @@ class IsolatedManager(object):
path = os.path.join(events_path, event)
if os.path.exists(path) and os.path.isfile(path):
try:
- event_data = json.load(
- open(os.path.join(events_path, event), 'r')
- )
+ event_data = json.load(open(os.path.join(events_path, event), 'r'))
except json.decoder.JSONDecodeError:
# This means the event we got back isn't valid JSON
# that can happen if runner is still partially
@@ -290,7 +265,6 @@ class IsolatedManager(object):
self.event_handler(event_data)
self.handled_events.add(event)
-
def cleanup(self):
extravars = {
'private_data_dir': self.private_data_dir,
@@ -299,11 +273,7 @@ class IsolatedManager(object):
],
}
logger.debug('Cleaning up job {} on isolated host with `clean_isolated.yml` playbook.'.format(self.instance.id))
- self.run_management_playbook(
- 'clean_isolated.yml',
- self.private_data_dir,
- extravars=extravars
- )
+ self.run_management_playbook('clean_isolated.yml', self.private_data_dir, extravars=extravars)
@classmethod
def update_capacity(cls, instance, task_result):
@@ -315,13 +285,13 @@ class IsolatedManager(object):
instance.memory = int(task_result['mem'])
instance.cpu_capacity = int(task_result['capacity_cpu'])
instance.mem_capacity = int(task_result['capacity_mem'])
- instance.capacity = get_system_task_capacity(scale=instance.capacity_adjustment,
- cpu_capacity=int(task_result['capacity_cpu']),
- mem_capacity=int(task_result['capacity_mem']))
+ instance.capacity = get_system_task_capacity(
+ scale=instance.capacity_adjustment, cpu_capacity=int(task_result['capacity_cpu']), mem_capacity=int(task_result['capacity_mem'])
+ )
instance.save(update_fields=['cpu', 'memory', 'cpu_capacity', 'mem_capacity', 'capacity', 'version', 'modified'])
def health_check(self, instance_qs):
- '''
+ """
:param instance_qs: List of Django objects representing the
isolated instances to manage
Runs playbook that will
@@ -329,24 +299,16 @@ class IsolatedManager(object):
- find the instance capacity
- clean up orphaned private files
Performs save on each instance to update its capacity.
- '''
+ """
instance_qs = [i for i in instance_qs if i.enabled]
if not len(instance_qs):
return
try:
- private_data_dir = tempfile.mkdtemp(
- prefix='awx_iso_heartbeat_',
- dir=settings.AWX_PROOT_BASE_PATH
- )
- self.runner_params = self.build_runner_params([
- instance.hostname for instance in instance_qs
- ])
+ private_data_dir = tempfile.mkdtemp(prefix='awx_iso_heartbeat_', dir=settings.AWX_PROOT_BASE_PATH)
+ self.runner_params = self.build_runner_params([instance.hostname for instance in instance_qs])
self.runner_params['private_data_dir'] = private_data_dir
self.runner_params['forks'] = len(instance_qs)
- runner_obj = self.run_management_playbook(
- 'heartbeat_isolated.yml',
- private_data_dir
- )
+ runner_obj = self.run_management_playbook('heartbeat_isolated.yml', private_data_dir)
for instance in instance_qs:
task_result = {}
@@ -360,20 +322,18 @@ class IsolatedManager(object):
'mem': task_result['awx_mem'],
'capacity_cpu': task_result['awx_capacity_cpu'],
'capacity_mem': task_result['awx_capacity_mem'],
- 'version': task_result['awx_capacity_version']
+ 'version': task_result['awx_capacity_version'],
}
IsolatedManager.update_capacity(instance, task_result)
logger.debug('Isolated instance {} successful heartbeat'.format(instance.hostname))
elif instance.capacity == 0:
- logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(
- instance.hostname))
+ logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(instance.hostname))
else:
logger.warning('Could not update status of isolated instance {}'.format(instance.hostname))
if instance.is_lost(isolated=True):
instance.capacity = 0
instance.save(update_fields=['capacity'])
- logger.error('Isolated instance {} last checked in at {}, marked as lost.'.format(
- instance.hostname, instance.modified))
+ logger.error('Isolated instance {} last checked in at {}, marked as lost.'.format(instance.hostname, instance.modified))
finally:
if os.path.exists(private_data_dir):
shutil.rmtree(private_data_dir)
@@ -397,10 +357,7 @@ class IsolatedManager(object):
self.ident = ident
self.instance = instance
self.private_data_dir = private_data_dir
- self.runner_params = self.build_runner_params(
- [instance.execution_node],
- verbosity=min(5, self.instance.verbosity)
- )
+ self.runner_params = self.build_runner_params([instance.execution_node], verbosity=min(5, self.instance.verbosity))
status, rc = self.dispatch(playbook, module, module_args)
if status == 'successful':
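
build_inventory above emits one INI inventory line per isolated host. A standalone sketch, with 'awx' as a stand-in for settings.AWX_ISOLATED_USERNAME:

AWX_ISOLATED_USERNAME = 'awx'  # stand-in for the Django setting

def build_inventory(hosts):
    # One 'host ansible_ssh_user=...' line per isolated instance.
    return '\n'.join('{} ansible_ssh_user={}'.format(host, AWX_ISOLATED_USERNAME) for host in hosts)

print(build_inventory(['iso1.example.org', 'iso2.example.org']))
# iso1.example.org ansible_ssh_user=awx
# iso2.example.org ansible_ssh_user=awx
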
diff --git a/awx/main/management/commands/bottleneck.py b/awx/main/management/commands/bottleneck.py
index beac1d0745..a2a472e3e7 100644
--- a/awx/main/management/commands/bottleneck.py
+++ b/awx/main/management/commands/bottleneck.py
@@ -8,12 +8,11 @@ class Command(BaseCommand):
help = "Find the slowest tasks and hosts for a Job Template's most recent runs."
def add_arguments(self, parser):
- parser.add_argument('--template', dest='jt', type=int,
- help='ID of the Job Template to profile')
- parser.add_argument('--threshold', dest='threshold', type=float, default=30,
- help='Only show tasks that took at least this many seconds (defaults to 30)')
- parser.add_argument('--history', dest='history', type=float, default=25,
- help='The number of historic jobs to look at')
+ parser.add_argument('--template', dest='jt', type=int, help='ID of the Job Template to profile')
+ parser.add_argument(
+ '--threshold', dest='threshold', type=float, default=30, help='Only show tasks that took at least this many seconds (defaults to 30)'
+ )
+ parser.add_argument('--history', dest='history', type=float, default=25, help='The number of historic jobs to look at')
parser.add_argument('--ignore', action='append', help='ignore a specific action (e.g., --ignore git)')
def handle(self, *args, **options):
diff --git a/awx/main/management/commands/callback_stats.py b/awx/main/management/commands/callback_stats.py
index 0a61089607..76c9618bb2 100644
--- a/awx/main/management/commands/callback_stats.py
+++ b/awx/main/management/commands/callback_stats.py
@@ -6,28 +6,19 @@ from django.core.management.base import BaseCommand
class Command(BaseCommand):
-
def handle(self, *args, **options):
with connection.cursor() as cursor:
start = {}
- for relation in (
- 'main_jobevent', 'main_inventoryupdateevent',
- 'main_projectupdateevent', 'main_adhoccommandevent'
- ):
+ for relation in ('main_jobevent', 'main_inventoryupdateevent', 'main_projectupdateevent', 'main_adhoccommandevent'):
cursor.execute(f"SELECT MAX(id) FROM {relation};")
start[relation] = cursor.fetchone()[0] or 0
clear = False
while True:
lines = []
- for relation in (
- 'main_jobevent', 'main_inventoryupdateevent',
- 'main_projectupdateevent', 'main_adhoccommandevent'
- ):
+ for relation in ('main_jobevent', 'main_inventoryupdateevent', 'main_projectupdateevent', 'main_adhoccommandevent'):
lines.append(relation)
minimum = start[relation]
- cursor.execute(
- f"SELECT MAX(id) - MIN(id) FROM {relation} WHERE id > {minimum} AND modified > now() - '1 minute'::interval;"
- )
+ cursor.execute(f"SELECT MAX(id) - MIN(id) FROM {relation} WHERE id > {minimum} AND modified > now() - '1 minute'::interval;")
events = cursor.fetchone()[0] or 0
lines.append(f'↳ last minute {events}')
lines.append('')
@@ -37,4 +28,4 @@ class Command(BaseCommand):
for line in lines:
print(line)
clear = True
- time.sleep(.25)
+ time.sleep(0.25)
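
Besides joining the SQL call onto one line, black normalizes the bare float literal that ends the polling loop; .25 gains a leading zero with no change in value:

import time

time.sleep(.25)   # before: flagged by black
time.sleep(0.25)  # after: normalized literal, identical value
assert .25 == 0.25
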
diff --git a/awx/main/management/commands/check_license.py b/awx/main/management/commands/check_license.py
index 356ab42249..80585b76db 100644
--- a/awx/main/management/commands/check_license.py
+++ b/awx/main/management/commands/check_license.py
@@ -11,8 +11,7 @@ class Command(BaseCommand):
"""Returns license type, e.g., 'enterprise', 'open', 'none'"""
def add_arguments(self, parser):
- parser.add_argument('--data', dest='data', action='store_true',
- help='verbose, prints the actual (sanitized) license')
+ parser.add_argument('--data', dest='data', action='store_true', help='verbose, prints the actual (sanitized) license')
def handle(self, *args, **options):
super(Command, self).__init__()
diff --git a/awx/main/management/commands/check_migrations.py b/awx/main/management/commands/check_migrations.py
index 6f9cfc7727..b48d130bad 100644
--- a/awx/main/management/commands/check_migrations.py
+++ b/awx/main/management/commands/check_migrations.py
@@ -4,7 +4,6 @@ from django.core.management.commands.makemigrations import Command as MakeMigrat
class Command(MakeMigrations):
-
def execute(self, *args, **options):
settings = connections['default'].settings_dict.copy()
settings['ENGINE'] = 'sqlite3'
diff --git a/awx/main/management/commands/cleanup_activitystream.py b/awx/main/management/commands/cleanup_activitystream.py
index ac790a2e1b..8d42b626db 100644
--- a/awx/main/management/commands/cleanup_activitystream.py
+++ b/awx/main/management/commands/cleanup_activitystream.py
@@ -15,22 +15,18 @@ from awx.main.models import ActivityStream
class Command(BaseCommand):
- '''
+ """
Management command to purge old activity stream events.
- '''
+ """
help = 'Remove old activity stream events from the database'
def add_arguments(self, parser):
- parser.add_argument('--days', dest='days', type=int, default=90, metavar='N',
- help='Remove activity stream events more than N days old')
- parser.add_argument('--dry-run', dest='dry_run', action='store_true',
- default=False, help='Dry run mode (show items that would '
- 'be removed)')
+ parser.add_argument('--days', dest='days', type=int, default=90, metavar='N', help='Remove activity stream events more than N days old')
+ parser.add_argument('--dry-run', dest='dry_run', action='store_true', default=False, help='Dry run mode (show items that would ' 'be removed)')
def init_logging(self):
- log_levels = dict(enumerate([logging.ERROR, logging.INFO,
- logging.DEBUG, 0]))
+ log_levels = dict(enumerate([logging.ERROR, logging.INFO, logging.DEBUG, 0]))
self.logger = logging.getLogger('awx.main.commands.cleanup_activitystream')
self.logger.setLevel(log_levels.get(self.verbosity, 0))
handler = logging.StreamHandler()
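
The init_logging pattern shared by these commands maps the --verbosity flag to a log level, with verbosity 3 falling through to 0 (logging.NOTSET, i.e. log everything). Sketch:

import logging

# {0: ERROR, 1: INFO, 2: DEBUG, 3: 0} -- built exactly as in the hunk above.
log_levels = dict(enumerate([logging.ERROR, logging.INFO, logging.DEBUG, 0]))
assert log_levels[1] == logging.INFO
assert log_levels.get(3, 0) == 0  # verbosity 3: NOTSET, everything passes
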
diff --git a/awx/main/management/commands/cleanup_jobs.py b/awx/main/management/commands/cleanup_jobs.py
index 66953acde9..85136caa08 100644
--- a/awx/main/management/commands/cleanup_jobs.py
+++ b/awx/main/management/commands/cleanup_jobs.py
@@ -12,53 +12,29 @@ from django.db import transaction
from django.utils.timezone import now
# AWX
-from awx.main.models import (
- Job, AdHocCommand, ProjectUpdate, InventoryUpdate,
- SystemJob, WorkflowJob, Notification
-)
-from awx.main.signals import (
- disable_activity_stream,
- disable_computed_fields
-)
+from awx.main.models import Job, AdHocCommand, ProjectUpdate, InventoryUpdate, SystemJob, WorkflowJob, Notification
+from awx.main.signals import disable_activity_stream, disable_computed_fields
from awx.main.utils.deletion import AWXCollector, pre_delete
class Command(BaseCommand):
- '''
+ """
Management command to cleanup old jobs and project updates.
- '''
+ """
help = 'Remove old jobs, project and inventory updates from the database.'
def add_arguments(self, parser):
- parser.add_argument('--days', dest='days', type=int, default=90, metavar='N',
- help='Remove jobs/updates executed more than N days ago. Defaults to 90.')
- parser.add_argument('--dry-run', dest='dry_run', action='store_true',
- default=False, help='Dry run mode (show items that would '
- 'be removed)')
- parser.add_argument('--jobs', dest='only_jobs', action='store_true',
- default=False,
- help='Remove jobs')
- parser.add_argument('--ad-hoc-commands', dest='only_ad_hoc_commands',
- action='store_true', default=False,
- help='Remove ad hoc commands')
- parser.add_argument('--project-updates', dest='only_project_updates',
- action='store_true', default=False,
- help='Remove project updates')
- parser.add_argument('--inventory-updates', dest='only_inventory_updates',
- action='store_true', default=False,
- help='Remove inventory updates')
- parser.add_argument('--management-jobs', default=False,
- action='store_true', dest='only_management_jobs',
- help='Remove management jobs')
- parser.add_argument('--notifications', dest='only_notifications',
- action='store_true', default=False,
- help='Remove notifications')
- parser.add_argument('--workflow-jobs', default=False,
- action='store_true', dest='only_workflow_jobs',
- help='Remove workflow jobs')
-
+ parser.add_argument('--days', dest='days', type=int, default=90, metavar='N', help='Remove jobs/updates executed more than N days ago. Defaults to 90.')
+ parser.add_argument('--dry-run', dest='dry_run', action='store_true', default=False, help='Dry run mode (show items that would ' 'be removed)')
+ parser.add_argument('--jobs', dest='only_jobs', action='store_true', default=False, help='Remove jobs')
+ parser.add_argument('--ad-hoc-commands', dest='only_ad_hoc_commands', action='store_true', default=False, help='Remove ad hoc commands')
+ parser.add_argument('--project-updates', dest='only_project_updates', action='store_true', default=False, help='Remove project updates')
+ parser.add_argument('--inventory-updates', dest='only_inventory_updates', action='store_true', default=False, help='Remove inventory updates')
+ parser.add_argument('--management-jobs', default=False, action='store_true', dest='only_management_jobs', help='Remove management jobs')
+ parser.add_argument('--notifications', dest='only_notifications', action='store_true', default=False, help='Remove notifications')
+ parser.add_argument('--workflow-jobs', default=False, action='store_true', dest='only_workflow_jobs', help='Remove workflow jobs')
def cleanup_jobs(self):
skipped, deleted = 0, 0
@@ -83,7 +59,7 @@ class Command(BaseCommand):
just_deleted = models_deleted['main.Job']
deleted += just_deleted
else:
- just_deleted = 0 # break from loop, this is dry run
+ just_deleted = 0 # break from loop, this is dry run
deleted = qs.count()
if just_deleted == 0:
@@ -96,9 +72,7 @@ class Command(BaseCommand):
skipped, deleted = 0, 0
ad_hoc_commands = AdHocCommand.objects.filter(created__lt=self.cutoff)
for ad_hoc_command in ad_hoc_commands.iterator():
- ad_hoc_command_display = '"%s" (%d events)' % \
- (str(ad_hoc_command),
- ad_hoc_command.ad_hoc_command_events.count())
+ ad_hoc_command_display = '"%s" (%d events)' % (str(ad_hoc_command), ad_hoc_command.ad_hoc_command_events.count())
if ad_hoc_command.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
self.logger.debug('%s %s ad hoc command %s', action_text, ad_hoc_command.status, ad_hoc_command_display)
@@ -179,8 +153,7 @@ class Command(BaseCommand):
return skipped, deleted
def init_logging(self):
- log_levels = dict(enumerate([logging.ERROR, logging.INFO,
- logging.DEBUG, 0]))
+ log_levels = dict(enumerate([logging.ERROR, logging.INFO, logging.DEBUG, 0]))
self.logger = logging.getLogger('awx.main.commands.cleanup_jobs')
self.logger.setLevel(log_levels.get(self.verbosity, 0))
handler = logging.StreamHandler()
@@ -192,9 +165,7 @@ class Command(BaseCommand):
skipped, deleted = 0, 0
workflow_jobs = WorkflowJob.objects.filter(created__lt=self.cutoff)
for workflow_job in workflow_jobs.iterator():
- workflow_job_display = '"{}" ({} nodes)'.format(
- str(workflow_job),
- workflow_job.workflow_nodes.count())
+ workflow_job_display = '"{}" ({} nodes)'.format(str(workflow_job), workflow_job.workflow_nodes.count())
if workflow_job.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
self.logger.debug('%s %s job %s', action_text, workflow_job.status, workflow_job_display)
@@ -214,8 +185,8 @@ class Command(BaseCommand):
notifications = Notification.objects.filter(created__lt=self.cutoff)
for notification in notifications.iterator():
notification_display = '"{}" (started {}, {} type, {} sent)'.format(
- str(notification), str(notification.created),
- notification.notification_type, notification.notifications_sent)
+ str(notification), str(notification.created), notification.notification_type, notification.notifications_sent
+ )
if notification.status in ('pending',):
action_text = 'would skip' if self.dry_run else 'skipping'
self.logger.debug('%s %s notification %s', action_text, notification.status, notification_display)
@@ -240,8 +211,7 @@ class Command(BaseCommand):
self.cutoff = now() - datetime.timedelta(days=self.days)
except OverflowError:
raise CommandError('--days specified is too large. Try something less than 99999 (about 270 years).')
- model_names = ('jobs', 'ad_hoc_commands', 'project_updates', 'inventory_updates',
- 'management_jobs', 'workflow_jobs', 'notifications')
+ model_names = ('jobs', 'ad_hoc_commands', 'project_updates', 'inventory_updates', 'management_jobs', 'workflow_jobs', 'notifications')
models_to_cleanup = set()
for m in model_names:
if options.get('only_%s' % m, False):
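
The consolidated add_arguments above is plain argparse: one store_true switch per model type plus a numeric --days. A self-contained sketch of the pattern:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--days', dest='days', type=int, default=90, metavar='N', help='Remove jobs/updates executed more than N days ago. Defaults to 90.')
parser.add_argument('--jobs', dest='only_jobs', action='store_true', default=False, help='Remove jobs')
args = parser.parse_args(['--days', '30', '--jobs'])
assert args.days == 30 and args.only_jobs is True
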
diff --git a/awx/main/management/commands/cleanup_sessions.py b/awx/main/management/commands/cleanup_sessions.py
index 3f4fc16912..c21af71ffd 100644
--- a/awx/main/management/commands/cleanup_sessions.py
+++ b/awx/main/management/commands/cleanup_sessions.py
@@ -6,10 +6,8 @@ from django.contrib.sessions.models import Session
class Command(BaseCommand):
-
def init_logging(self):
- log_levels = dict(enumerate([logging.ERROR, logging.INFO,
- logging.DEBUG, 0]))
+ log_levels = dict(enumerate([logging.ERROR, logging.INFO, logging.DEBUG, 0]))
self.logger = logging.getLogger('awx.main.commands.cleanup_sessions')
self.logger.setLevel(log_levels.get(self.verbosity, 0))
handler = logging.StreamHandler()
diff --git a/awx/main/management/commands/cleanup_tokens.py b/awx/main/management/commands/cleanup_tokens.py
index c5dfde145f..2deefd3790 100644
--- a/awx/main/management/commands/cleanup_tokens.py
+++ b/awx/main/management/commands/cleanup_tokens.py
@@ -7,10 +7,8 @@ from oauth2_provider.models import RefreshToken
class Command(BaseCommand):
-
def init_logging(self):
- log_levels = dict(enumerate([logging.ERROR, logging.INFO,
- logging.DEBUG, 0]))
+ log_levels = dict(enumerate([logging.ERROR, logging.INFO, logging.DEBUG, 0]))
self.logger = logging.getLogger('awx.main.commands.cleanup_tokens')
self.logger.setLevel(log_levels.get(self.verbosity, 0))
handler = logging.StreamHandler()
diff --git a/awx/main/management/commands/create_oauth2_token.py b/awx/main/management/commands/create_oauth2_token.py
index bd4a86da2d..62d89db1df 100644
--- a/awx/main/management/commands/create_oauth2_token.py
+++ b/awx/main/management/commands/create_oauth2_token.py
@@ -9,7 +9,8 @@ from awx.api.serializers import OAuth2TokenSerializer
class Command(BaseCommand):
"""Command that creates an OAuth2 token for a certain user. Returns the value of created token."""
- help='Creates an OAuth2 token for a user.'
+
+ help = 'Creates an OAuth2 token for a user.'
def add_arguments(self, parser):
parser.add_argument('--user', dest='user', type=str)
@@ -22,7 +23,7 @@ class Command(BaseCommand):
user = User.objects.get(username=options['user'])
except ObjectDoesNotExist:
raise CommandError('The user does not exist.')
- config = {'user': user, 'scope':'write'}
+ config = {'user': user, 'scope': 'write'}
serializer_obj = OAuth2TokenSerializer()
class FakeRequest(object):
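
Two of these commands had help='...' glued to the equals sign at class scope; black, following PEP 8, spaces out assignments while leaving keyword arguments tight. Both spellings side by side:

class Command:
    help = 'Creates an OAuth2 token for a user.'  # class attribute: spaces around '='

def make_config(scope='write'):  # keyword default: no spaces around '='
    return {'scope': scope}

assert make_config() == {'scope': 'write'}
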
diff --git a/awx/main/management/commands/create_preload_data.py b/awx/main/management/commands/create_preload_data.py
index 05ed18b96c..af5d8d9d9b 100644
--- a/awx/main/management/commands/create_preload_data.py
+++ b/awx/main/management/commands/create_preload_data.py
@@ -4,16 +4,13 @@
from django.core.management.base import BaseCommand
from django.conf import settings
from crum import impersonate
-from awx.main.models import (
- User, Organization, Project, Inventory, CredentialType,
- Credential, Host, JobTemplate, ExecutionEnvironment
-)
+from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate, ExecutionEnvironment
from awx.main.signals import disable_computed_fields
class Command(BaseCommand):
- """Create preloaded data, intended for new installs
- """
+ """Create preloaded data, intended for new installs"""
+
help = 'Creates a preload tower data if there is none.'
def handle(self, *args, **kwargs):
@@ -29,44 +26,42 @@ class Command(BaseCommand):
if not Organization.objects.exists():
o = Organization.objects.create(name='Default')
- p = Project(name='Demo Project',
- scm_type='git',
- scm_url='https://github.com/ansible/ansible-tower-samples',
- scm_update_on_launch=True,
- scm_update_cache_timeout=0,
- organization=o)
+ p = Project(
+ name='Demo Project',
+ scm_type='git',
+ scm_url='https://github.com/ansible/ansible-tower-samples',
+ scm_update_on_launch=True,
+ scm_update_cache_timeout=0,
+ organization=o,
+ )
p.save(skip_update=True)
ssh_type = CredentialType.objects.filter(namespace='ssh').first()
- c = Credential.objects.create(credential_type=ssh_type,
- name='Demo Credential',
- inputs={
- 'username': superuser.username
- },
- created_by=superuser)
+ c = Credential.objects.create(
+ credential_type=ssh_type, name='Demo Credential', inputs={'username': superuser.username}, created_by=superuser
+ )
c.admin_role.members.add(superuser)
- public_galaxy_credential = Credential(name='Ansible Galaxy',
- managed_by_tower=True,
- credential_type=CredentialType.objects.get(kind='galaxy'),
- inputs={'url': 'https://galaxy.ansible.com/'})
+ public_galaxy_credential = Credential(
+ name='Ansible Galaxy',
+ managed_by_tower=True,
+ credential_type=CredentialType.objects.get(kind='galaxy'),
+ inputs={'url': 'https://galaxy.ansible.com/'},
+ )
public_galaxy_credential.save()
o.galaxy_credentials.add(public_galaxy_credential)
- i = Inventory.objects.create(name='Demo Inventory',
- organization=o,
- created_by=superuser)
+ i = Inventory.objects.create(name='Demo Inventory', organization=o, created_by=superuser)
- Host.objects.create(name='localhost',
- inventory=i,
- variables="ansible_connection: local\nansible_python_interpreter: '{{ ansible_playbook_python }}'",
- created_by=superuser)
+ Host.objects.create(
+ name='localhost',
+ inventory=i,
+ variables="ansible_connection: local\nansible_python_interpreter: '{{ ansible_playbook_python }}'",
+ created_by=superuser,
+ )
- jt = JobTemplate.objects.create(name='Demo Job Template',
- playbook='hello_world.yml',
- project=p,
- inventory=i)
+ jt = JobTemplate.objects.create(name='Demo Job Template', playbook='hello_world.yml', project=p, inventory=i)
jt.credentials.add(c)
print('Default organization added.')
@@ -74,8 +69,7 @@ class Command(BaseCommand):
changed = True
default_ee = settings.AWX_EXECUTION_ENVIRONMENT_DEFAULT_IMAGE
- ee, created = ExecutionEnvironment.objects.get_or_create(name='Default EE', defaults={'image': default_ee,
- 'managed_by_tower': True})
+ ee, created = ExecutionEnvironment.objects.get_or_create(name='Default EE', defaults={'image': default_ee, 'managed_by_tower': True})
if created:
changed = True
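
get_or_create above either fetches the 'Default EE' row or inserts it with the defaults merged in. A plain-dict sketch of those semantics (the image URL here is a placeholder, not the real default):

def get_or_create(store, name, defaults):
    # Fetch by key if present, else insert the key merged with defaults;
    # the second return value reports whether a row was created.
    if name in store:
        return store[name], False
    store[name] = dict(defaults, name=name)
    return store[name], True

store = {}
ee, created = get_or_create(store, 'Default EE', {'image': 'quay.io/example/ee:latest', 'managed_by_tower': True})
assert created
ee, created = get_or_create(store, 'Default EE', {'image': 'ignored'})
assert not created and ee['image'] == 'quay.io/example/ee:latest'
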
diff --git a/awx/main/management/commands/deprovision_instance.py b/awx/main/management/commands/deprovision_instance.py
index 248e1e7684..6bbffc712f 100644
--- a/awx/main/management/commands/deprovision_instance.py
+++ b/awx/main/management/commands/deprovision_instance.py
@@ -13,14 +13,10 @@ class Command(BaseCommand):
Deprovision a Tower cluster node
"""
- help = (
- 'Remove instance from the database. '
- 'Specify `--hostname` to use this command.'
- )
+ help = 'Remove instance from the database. ' 'Specify `--hostname` to use this command.'
def add_arguments(self, parser):
- parser.add_argument('--hostname', dest='hostname', type=str,
- help='Hostname used during provisioning')
+ parser.add_argument('--hostname', dest='hostname', type=str, help='Hostname used during provisioning')
@transaction.atomic
def handle(self, *args, **options):
@@ -37,4 +33,3 @@ class Command(BaseCommand):
print('(changed: True)')
else:
print('No instance found matching name {}'.format(hostname))
-
diff --git a/awx/main/management/commands/expire_sessions.py b/awx/main/management/commands/expire_sessions.py
index 04abd492db..65053e3e50 100644
--- a/awx/main/management/commands/expire_sessions.py
+++ b/awx/main/management/commands/expire_sessions.py
@@ -12,7 +12,8 @@ from django.core.exceptions import ObjectDoesNotExist
class Command(BaseCommand):
"""Expire Django auth sessions for a user/all users"""
- help='Expire Django auth sessions. Will expire all auth sessions if --user option is not supplied.'
+
+ help = 'Expire Django auth sessions. Will expire all auth sessions if --user option is not supplied.'
def add_arguments(self, parser):
parser.add_argument('--user', dest='user', type=str)
diff --git a/awx/main/management/commands/gather_analytics.py b/awx/main/management/commands/gather_analytics.py
index b5e8427955..5099d4d0d1 100644
--- a/awx/main/management/commands/gather_analytics.py
+++ b/awx/main/management/commands/gather_analytics.py
@@ -7,21 +7,19 @@ from django.utils.timezone import now
class Command(BaseCommand):
- '''
+ """
Gather AWX analytics data
- '''
+ """
help = 'Gather AWX analytics data'
def add_arguments(self, parser):
- parser.add_argument('--dry-run', dest='dry-run', action='store_true',
- help='Gather analytics without shipping. Works even if analytics are disabled in settings.')
- parser.add_argument('--ship', dest='ship', action='store_true',
- help='Enable to ship metrics to the Red Hat Cloud')
- parser.add_argument('--since', dest='since', action='store',
- help='Start date for collection')
- parser.add_argument('--until', dest='until', action='store',
- help='End date for collection')
+ parser.add_argument(
+ '--dry-run', dest='dry-run', action='store_true', help='Gather analytics without shipping. Works even if analytics are disabled in settings.'
+ )
+ parser.add_argument('--ship', dest='ship', action='store_true', help='Enable to ship metrics to the Red Hat Cloud')
+ parser.add_argument('--since', dest='since', action='store', help='Start date for collection')
+ parser.add_argument('--until', dest='until', action='store', help='End date for collection')
def init_logging(self):
self.logger = logging.getLogger('awx.main.analytics')
@@ -50,7 +48,7 @@ class Command(BaseCommand):
if opt_ship and opt_dry_run:
self.logger.error('Both --ship and --dry-run cannot be processed at the same time.')
return
- tgzfiles = gather(collection_type='manual' if not opt_dry_run else 'dry-run', since = since, until = until)
+ tgzfiles = gather(collection_type='manual' if not opt_dry_run else 'dry-run', since=since, until=until)
if tgzfiles:
for tgz in tgzfiles:
self.logger.info(tgz)
diff --git a/awx/main/management/commands/generate_isolated_key.py b/awx/main/management/commands/generate_isolated_key.py
index e2b06bd21e..51112ea3d7 100644
--- a/awx/main/management/commands/generate_isolated_key.py
+++ b/awx/main/management/commands/generate_isolated_key.py
@@ -14,6 +14,7 @@ from awx.conf.models import Setting
class Command(BaseCommand):
"""Generate and store a randomized RSA key for SSH traffic to isolated instances"""
+
help = 'Generates and stores a randomized RSA key for SSH traffic to isolated instances'
def handle(self, *args, **kwargs):
@@ -21,25 +22,17 @@ class Command(BaseCommand):
print(settings.AWX_ISOLATED_PUBLIC_KEY)
return
- key = rsa.generate_private_key(
- public_exponent=65537,
- key_size=4096,
- backend=default_backend()
- )
+ key = rsa.generate_private_key(public_exponent=65537, key_size=4096, backend=default_backend())
Setting.objects.create(
key='AWX_ISOLATED_PRIVATE_KEY',
value=key.private_bytes(
- encoding=serialization.Encoding.PEM,
- format=serialization.PrivateFormat.TraditionalOpenSSL,
- encryption_algorithm=serialization.NoEncryption()
- )
+ encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=serialization.NoEncryption()
+ ),
).save()
pemfile = Setting.objects.create(
key='AWX_ISOLATED_PUBLIC_KEY',
- value=smart_str(key.public_key().public_bytes(
- encoding=serialization.Encoding.OpenSSH,
- format=serialization.PublicFormat.OpenSSH
- )) + " generated-by-awx@%s" % datetime.datetime.utcnow().isoformat()
+ value=smart_str(key.public_key().public_bytes(encoding=serialization.Encoding.OpenSSH, format=serialization.PublicFormat.OpenSSH))
+ + " generated-by-awx@%s" % datetime.datetime.utcnow().isoformat(),
)
pemfile.save()
print(pemfile.value)
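
For reference, the cryptography-library calls this command is built on, as a self-contained sketch (the suffix appended to the public key mirrors the command's format, but the exact value is illustrative):

    import datetime

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import rsa

    key = rsa.generate_private_key(public_exponent=65537, key_size=4096, backend=default_backend())
    # PEM-encoded private key, stored unencrypted by the command
    pem = key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=serialization.NoEncryption(),
    )
    # OpenSSH-format public key with a human-readable comment appended
    pub = key.public_key().public_bytes(
        encoding=serialization.Encoding.OpenSSH, format=serialization.PublicFormat.OpenSSH
    ).decode() + ' generated-by-awx@%s' % datetime.datetime.utcnow().isoformat()
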
diff --git a/awx/main/management/commands/graph_jobs.py b/awx/main/management/commands/graph_jobs.py
index f1c8ad75e1..2af9389036 100644
--- a/awx/main/management/commands/graph_jobs.py
+++ b/awx/main/management/commands/graph_jobs.py
@@ -9,10 +9,7 @@ from django.db.models import Count
from django.core.management.base import BaseCommand
# AWX
-from awx.main.models import (
- Job,
- Instance
-)
+from awx.main.models import Job, Instance
DEFAULT_WIDTH = 100
@@ -27,7 +24,7 @@ def clear_screen():
print(chr(27) + "[2J")
-class JobStatus():
+class JobStatus:
def __init__(self, status, color, width):
self.status = status
self.color = color
@@ -44,16 +41,12 @@ class JobStatusController:
RESET = chart_color_lookup('reset')
def __init__(self, width):
- self.plots = [
- JobStatus('pending', 'red', width),
- JobStatus('waiting', 'blue', width),
- JobStatus('running', 'green', width)
- ]
+ self.plots = [JobStatus('pending', 'red', width), JobStatus('waiting', 'blue', width), JobStatus('running', 'green', width)]
self.ts_start = int(time.time())
def tick(self):
ts = int(time.time()) - self.ts_start
- q = Job.objects.filter(status__in=['pending','waiting','running']).values_list('status').order_by().annotate(Count('status'))
+ q = Job.objects.filter(status__in=['pending', 'waiting', 'running']).values_list('status').order_by().annotate(Count('status'))
status_count = dict(pending=0, waiting=0, running=0)
for status, count in q:
status_count[status] = count
@@ -86,12 +79,11 @@ class Command(BaseCommand):
help = "Plot pending, waiting, running jobs over time on the terminal"
def add_arguments(self, parser):
- parser.add_argument('--refresh', dest='refresh', type=float, default=1.0,
- help='Time between refreshes of the graph and data in seconds (defaults to 1.0)')
- parser.add_argument('--width', dest='width', type=int, default=DEFAULT_WIDTH,
- help=f'Width of the graph (defaults to {DEFAULT_WIDTH})')
- parser.add_argument('--height', dest='height', type=int, default=DEFAULT_HEIGHT,
- help=f'Height of the graph (defaults to {DEFAULT_HEIGHT})')
+ parser.add_argument(
+ '--refresh', dest='refresh', type=float, default=1.0, help='Time between refreshes of the graph and data in seconds (defaults to 1.0)'
+ )
+ parser.add_argument('--width', dest='width', type=int, default=DEFAULT_WIDTH, help=f'Width of the graph (defaults to {DEFAULT_WIDTH})')
+ parser.add_argument('--height', dest='height', type=int, default=DEFAULT_HEIGHT, help=f'Height of the graph (defaults to {DEFAULT_HEIGHT})')
def handle(self, *args, **options):
refresh_seconds = options['refresh']
@@ -114,4 +106,3 @@ class Command(BaseCommand):
print(draw)
sys.stdout.write(status_line)
time.sleep(refresh_seconds)
-
diff --git a/awx/main/management/commands/inventory_import.py b/awx/main/management/commands/inventory_import.py
index 292323fe38..0a2a19937d 100644
--- a/awx/main/management/commands/inventory_import.py
+++ b/awx/main/management/commands/inventory_import.py
@@ -22,21 +22,13 @@ from django.utils.encoding import smart_text
from rest_framework.exceptions import PermissionDenied
# AWX inventory imports
-from awx.main.models.inventory import (
- Inventory,
- InventorySource,
- InventoryUpdate,
- Host
-)
+from awx.main.models.inventory import Inventory, InventorySource, InventoryUpdate, Host
from awx.main.utils.mem_inventory import MemInventory, dict_to_mem_data
from awx.main.utils.safe_yaml import sanitize_jinja
# other AWX imports
from awx.main.models.rbac import batch_role_ancestor_rebuilding
-from awx.main.utils import (
- ignore_inventory_computed_fields,
- get_licenser
-)
+from awx.main.utils import ignore_inventory_computed_fields, get_licenser
from awx.main.signals import disable_activity_stream
from awx.main.constants import STANDARD_INVENTORY_UPDATE_ENV
from awx.main.utils.pglock import advisory_lock
@@ -67,12 +59,12 @@ def functioning_dir(path):
class AnsibleInventoryLoader(object):
- '''
+ """
Given executable `source` (directory, executable, or file) this will
use the ansible-inventory CLI utility to convert it into in-memory
representational objects. Example:
/usr/bin/ansible/ansible-inventory -i hosts --list
- '''
+ """
def __init__(self, source, venv_path=None, verbosity=0):
self.source = source
@@ -88,17 +80,11 @@ class AnsibleInventoryLoader(object):
venv_exe = os.path.join(self.venv_path, 'bin', 'ansible-inventory')
if os.path.exists(venv_exe):
return venv_exe
- elif os.path.exists(
- os.path.join(self.venv_path, 'bin', 'ansible')
- ):
+ elif os.path.exists(os.path.join(self.venv_path, 'bin', 'ansible')):
# if bin/ansible exists but bin/ansible-inventory doesn't, it's
# probably a really old version of ansible that doesn't support
# ansible-inventory
- raise RuntimeError(
- "{} does not exist (please upgrade to ansible >= 2.4)".format(
- venv_exe
- )
- )
+ raise RuntimeError("{} does not exist (please upgrade to ansible >= 2.4)".format(venv_exe))
return shutil.which('ansible-inventory')
def get_base_args(self):
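
In outline, what AnsibleInventoryLoader does is shell out to ansible-inventory and parse the JSON it prints. A rough sketch, not the class's actual implementation (which also resolves virtualenv paths and logs stderr):

    import json
    import subprocess

    def load_inventory(source):
        # equivalent of: ansible-inventory -i <source> --list
        proc = subprocess.run(
            ['ansible-inventory', '-i', source, '--list'],
            capture_output=True, text=True, check=True,
        )
        return json.loads(proc.stdout)
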
@@ -126,8 +112,7 @@ class AnsibleInventoryLoader(object):
stderr = smart_text(stderr)
if proc.returncode != 0:
- raise RuntimeError('%s failed (rc=%d) with stdout:\n%s\nstderr:\n%s' % (
- 'ansible-inventory', proc.returncode, stdout, stderr))
+ raise RuntimeError('%s failed (rc=%d) with stdout:\n%s\nstderr:\n%s' % ('ansible-inventory', proc.returncode, stdout, stderr))
for line in stderr.splitlines():
logger.error(line)
@@ -149,63 +134,78 @@ class AnsibleInventoryLoader(object):
class Command(BaseCommand):
- '''
+ """
Management command to import inventory from a directory, ini file, or
dynamic inventory script.
- '''
+ """
help = 'Import or sync external inventory sources'
def add_arguments(self, parser):
- parser.add_argument('--inventory-name', dest='inventory_name',
- type=str, default=None, metavar='n',
- help='name of inventory to sync')
- parser.add_argument('--inventory-id', dest='inventory_id', type=int,
- default=None, metavar='i',
- help='id of inventory to sync')
- parser.add_argument('--venv', dest='venv', type=str, default=None,
- help='absolute path to the AWX custom virtualenv to use')
- parser.add_argument('--overwrite', dest='overwrite', action='store_true', default=False,
- help='overwrite the destination hosts and groups')
- parser.add_argument('--overwrite-vars', dest='overwrite_vars',
- action='store_true', default=False,
- help='overwrite (rather than merge) variables')
- parser.add_argument('--keep-vars', dest='keep_vars', action='store_true', default=False,
- help='DEPRECATED legacy option, has no effect')
- parser.add_argument('--custom', dest='custom', action='store_true', default=False,
- help='DEPRECATED indicates a custom inventory script, no longer used')
- parser.add_argument('--source', dest='source', type=str, default=None,
- metavar='s', help='inventory directory, file, or script to load')
- parser.add_argument('--enabled-var', dest='enabled_var', type=str,
- default=None, metavar='v', help='host variable used to '
- 'set/clear enabled flag when host is online/offline, may '
- 'be specified as "foo.bar" to traverse nested dicts.')
- parser.add_argument('--enabled-value', dest='enabled_value', type=str,
- default=None, metavar='v', help='value of host variable '
- 'specified by --enabled-var that indicates host is '
- 'enabled/online.')
- parser.add_argument('--group-filter', dest='group_filter', type=str,
- default=None, metavar='regex', help='regular expression '
- 'to filter group name(s); only matches are imported.')
- parser.add_argument('--host-filter', dest='host_filter', type=str,
- default=None, metavar='regex', help='regular expression '
- 'to filter host name(s); only matches are imported.')
- parser.add_argument('--exclude-empty-groups', dest='exclude_empty_groups',
- action='store_true', default=False, help='when set, '
- 'exclude all groups that have no child groups, hosts, or '
- 'variables.')
- parser.add_argument('--instance-id-var', dest='instance_id_var', type=str,
- default=None, metavar='v', help='host variable that '
- 'specifies the unique, immutable instance ID, may be '
- 'specified as "foo.bar" to traverse nested dicts.')
+ parser.add_argument('--inventory-name', dest='inventory_name', type=str, default=None, metavar='n', help='name of inventory to sync')
+ parser.add_argument('--inventory-id', dest='inventory_id', type=int, default=None, metavar='i', help='id of inventory to sync')
+ parser.add_argument('--venv', dest='venv', type=str, default=None, help='absolute path to the AWX custom virtualenv to use')
+ parser.add_argument('--overwrite', dest='overwrite', action='store_true', default=False, help='overwrite the destination hosts and groups')
+ parser.add_argument('--overwrite-vars', dest='overwrite_vars', action='store_true', default=False, help='overwrite (rather than merge) variables')
+ parser.add_argument('--keep-vars', dest='keep_vars', action='store_true', default=False, help='DEPRECATED legacy option, has no effect')
+ parser.add_argument(
+ '--custom', dest='custom', action='store_true', default=False, help='DEPRECATED indicates a custom inventory script, no longer used'
+ )
+ parser.add_argument('--source', dest='source', type=str, default=None, metavar='s', help='inventory directory, file, or script to load')
+ parser.add_argument(
+ '--enabled-var',
+ dest='enabled_var',
+ type=str,
+ default=None,
+ metavar='v',
+ help='host variable used to set/clear enabled flag when host is online/offline, may be specified as "foo.bar" to traverse nested dicts.',
+ )
+ parser.add_argument(
+ '--enabled-value',
+ dest='enabled_value',
+ type=str,
+ default=None,
+ metavar='v',
+ help='value of host variable specified by --enabled-var that indicates host is enabled/online.',
+ )
+ parser.add_argument(
+ '--group-filter',
+ dest='group_filter',
+ type=str,
+ default=None,
+ metavar='regex',
+ help='regular expression to filter group name(s); only matches are imported.',
+ )
+ parser.add_argument(
+ '--host-filter',
+ dest='host_filter',
+ type=str,
+ default=None,
+ metavar='regex',
+ help='regular expression to filter host name(s); only matches are imported.',
+ )
+ parser.add_argument(
+ '--exclude-empty-groups',
+ dest='exclude_empty_groups',
+ action='store_true',
+ default=False,
+ help='when set, exclude all groups that have no child groups, hosts, or variables.',
+ )
+ parser.add_argument(
+ '--instance-id-var',
+ dest='instance_id_var',
+ type=str,
+ default=None,
+ metavar='v',
+ help='host variable that specifies the unique, immutable instance ID, may be specified as "foo.bar" to traverse nested dicts.',
+ )
def set_logging_level(self, verbosity):
- log_levels = dict(enumerate([logging.WARNING, logging.INFO,
- logging.DEBUG, 0]))
+ log_levels = dict(enumerate([logging.WARNING, logging.INFO, logging.DEBUG, 0]))
logger.setLevel(log_levels.get(verbosity, 0))
def _get_instance_id(self, variables, default=''):
- '''
+ """
Retrieve the instance ID from the given dict of host variables.
The instance ID variable may be specified as 'foo.bar', in which case
@@ -216,7 +216,7 @@ class Command(BaseCommand):
Multiple ID variables may be specified as 'foo.bar,foobar', so that
it will first try to find 'bar' inside of 'foo', and if unable,
will try to find 'foobar' as a fallback
- '''
+ """
instance_id = default
if getattr(self, 'instance_id_var', None):
for single_instance_id in self.instance_id_var.split(','):
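
The dotted-path lookup described in the docstring, reduced to a standalone sketch:

    def traverse(variables, path, default=''):
        # 'foo.bar' walks variables['foo']['bar'], falling back to default
        # whenever a key is missing or a non-dict is hit along the way
        value = variables
        for key in path.split('.'):
            if isinstance(value, dict) and key in value:
                value = value[key]
            else:
                return default
        return value

    # traverse({'foo': {'bar': 'i-123'}}, 'foo.bar') returns 'i-123'
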
@@ -232,14 +232,14 @@ class Command(BaseCommand):
return smart_text(instance_id)
def _get_enabled(self, from_dict, default=None):
- '''
+ """
Retrieve the enabled state from the given dict of host variables.
The enabled variable may be specified as 'foo.bar', in which case
the lookup will traverse into nested dicts, equivalent to:
from_dict.get('foo', {}).get('bar', default)
- '''
+ """
enabled = default
if getattr(self, 'enabled_var', None):
default = object()
@@ -266,8 +266,7 @@ class Command(BaseCommand):
def get_source_absolute_path(source):
if not os.path.exists(source):
raise IOError('Source does not exist: %s' % source)
- source = os.path.join(os.getcwd(), os.path.dirname(source),
- os.path.basename(source))
+ source = os.path.join(os.getcwd(), os.path.dirname(source), os.path.basename(source))
source = os.path.normpath(os.path.abspath(source))
return source
@@ -284,15 +283,14 @@ class Command(BaseCommand):
self._batch_add_m2m_cache[key] = []
def _build_db_instance_id_map(self):
- '''
+ """
Find any hosts in the database without an instance_id set that may
still have one available via host variables.
- '''
+ """
self.db_instance_id_map = {}
if self.instance_id_var:
host_qs = self.inventory_source.hosts.all()
- host_qs = host_qs.filter(instance_id='',
- variables__contains=self.instance_id_var.split('.')[0])
+ host_qs = host_qs.filter(instance_id='', variables__contains=self.instance_id_var.split('.')[0])
for host in host_qs:
instance_id = self._get_instance_id(host.variables_dict)
if not instance_id:
@@ -300,38 +298,36 @@ class Command(BaseCommand):
self.db_instance_id_map[instance_id] = host.pk
def _build_mem_instance_id_map(self):
- '''
+ """
Update instance ID for each imported host and define a mapping of
instance IDs to MemHost instances.
- '''
+ """
self.mem_instance_id_map = {}
if self.instance_id_var:
for mem_host in self.all_group.all_hosts.values():
instance_id = self._get_instance_id(mem_host.variables)
if not instance_id:
- logger.warning('Host "%s" has no "%s" variable(s)',
- mem_host.name, self.instance_id_var)
+ logger.warning('Host "%s" has no "%s" variable(s)', mem_host.name, self.instance_id_var)
continue
mem_host.instance_id = instance_id
self.mem_instance_id_map[instance_id] = mem_host.name
def _existing_host_pks(self):
- '''Returns cached set of existing / previous host primary key values
+ """Returns cached set of existing / previous host primary key values
this is the starting set, meaning that it is pre-modification
by deletions and other things done in the course of this import
- '''
+ """
if not hasattr(self, '_cached_host_pk_set'):
- self._cached_host_pk_set = frozenset(
- self.inventory_source.hosts.values_list('pk', flat=True))
+ self._cached_host_pk_set = frozenset(self.inventory_source.hosts.values_list('pk', flat=True))
return self._cached_host_pk_set
def _delete_hosts(self):
- '''
+ """
For each host in the database that is NOT in the local list, delete
it. When importing from a cloud inventory source attached to a
specific group, only delete hosts beneath that group. Delete each
host individually so signal handlers will run.
- '''
+ """
if settings.SQL_DEBUG:
queries_before = len(connection.queries)
hosts_qs = self.inventory_source.hosts
@@ -341,38 +337,36 @@ class Command(BaseCommand):
all_instance_ids = list(self.mem_instance_id_map.keys())
instance_ids = []
for offset in range(0, len(all_instance_ids), self._batch_size):
- instance_ids = all_instance_ids[offset:(offset + self._batch_size)]
+ instance_ids = all_instance_ids[offset : (offset + self._batch_size)]
for host_pk in hosts_qs.filter(instance_id__in=instance_ids).values_list('pk', flat=True):
del_host_pks.discard(host_pk)
- for host_pk in set([v for k,v in self.db_instance_id_map.items() if k in instance_ids]):
+ for host_pk in set([v for k, v in self.db_instance_id_map.items() if k in instance_ids]):
del_host_pks.discard(host_pk)
all_host_names = list(set(self.mem_instance_id_map.values()) - set(self.all_group.all_hosts.keys()))
else:
all_host_names = list(self.all_group.all_hosts.keys())
for offset in range(0, len(all_host_names), self._batch_size):
- host_names = all_host_names[offset:(offset + self._batch_size)]
+ host_names = all_host_names[offset : (offset + self._batch_size)]
for host_pk in hosts_qs.filter(name__in=host_names).values_list('pk', flat=True):
del_host_pks.discard(host_pk)
# Now delete all remaining hosts in batches.
all_del_pks = sorted(list(del_host_pks))
for offset in range(0, len(all_del_pks), self._batch_size):
- del_pks = all_del_pks[offset:(offset + self._batch_size)]
+ del_pks = all_del_pks[offset : (offset + self._batch_size)]
for host in hosts_qs.filter(pk__in=del_pks):
host_name = host.name
host.delete()
logger.debug('Deleted host "%s"', host_name)
if settings.SQL_DEBUG:
- logger.warning('host deletions took %d queries for %d hosts',
- len(connection.queries) - queries_before,
- len(all_del_pks))
+ logger.warning('host deletions took %d queries for %d hosts', len(connection.queries) - queries_before, len(all_del_pks))
def _delete_groups(self):
- '''
+ """
# If overwrite is set, for each group in the database that is NOT in
# the local list, delete it. When importing from a cloud inventory
# source attached to a specific group, only delete children of that
# group. Delete each group individually so signal handlers will run.
- '''
+ """
if settings.SQL_DEBUG:
queries_before = len(connection.queries)
groups_qs = self.inventory_source.groups.all()
@@ -380,30 +374,28 @@ class Command(BaseCommand):
del_group_pks = set(groups_qs.values_list('pk', flat=True))
all_group_names = list(self.all_group.all_groups.keys())
for offset in range(0, len(all_group_names), self._batch_size):
- group_names = all_group_names[offset:(offset + self._batch_size)]
+ group_names = all_group_names[offset : (offset + self._batch_size)]
for group_pk in groups_qs.filter(name__in=group_names).values_list('pk', flat=True):
del_group_pks.discard(group_pk)
# Now delete all remaining groups in batches.
all_del_pks = sorted(list(del_group_pks))
for offset in range(0, len(all_del_pks), self._batch_size):
- del_pks = all_del_pks[offset:(offset + self._batch_size)]
+ del_pks = all_del_pks[offset : (offset + self._batch_size)]
for group in groups_qs.filter(pk__in=del_pks):
group_name = group.name
with ignore_inventory_computed_fields():
group.delete()
logger.debug('Group "%s" deleted', group_name)
if settings.SQL_DEBUG:
- logger.warning('group deletions took %d queries for %d groups',
- len(connection.queries) - queries_before,
- len(all_del_pks))
+ logger.warning('group deletions took %d queries for %d groups', len(connection.queries) - queries_before, len(all_del_pks))
def _delete_group_children_and_hosts(self):
- '''
+ """
Clear all invalid child relationships for groups and all invalid host
memberships. When importing from a cloud inventory source attached to
a specific group, only clear relationships for hosts and groups that
are beneath the inventory source group.
- '''
+ """
# FIXME: Optimize performance!
if settings.SQL_DEBUG:
queries_before = len(connection.queries)
@@ -432,12 +424,11 @@ class Command(BaseCommand):
# Removal list is complete - now perform the removals
del_child_group_pks = list(set(db_children_name_pk_map.values()))
for offset in range(0, len(del_child_group_pks), self._batch_size):
- child_group_pks = del_child_group_pks[offset:(offset + self._batch_size)]
+ child_group_pks = del_child_group_pks[offset : (offset + self._batch_size)]
for db_child in db_children.filter(pk__in=child_group_pks):
group_group_count += 1
db_group.children.remove(db_child)
- logger.debug('Group "%s" removed from group "%s"',
- db_child.name, db_group.name)
+ logger.debug('Group "%s" removed from group "%s"', db_child.name, db_group.name)
# FIXME: Inventory source group relationships
# Delete group/host relationships not present in imported data.
db_hosts = db_group.hosts
@@ -451,37 +442,38 @@ class Command(BaseCommand):
mem_hosts = self.all_group.all_groups[db_group.name].hosts
all_mem_host_names = [h.name for h in mem_hosts if not h.instance_id]
for offset in range(0, len(all_mem_host_names), self._batch_size):
- mem_host_names = all_mem_host_names[offset:(offset + self._batch_size)]
+ mem_host_names = all_mem_host_names[offset : (offset + self._batch_size)]
for db_host_pk in db_hosts.filter(name__in=mem_host_names).values_list('pk', flat=True):
del_host_pks.discard(db_host_pk)
all_mem_instance_ids = [h.instance_id for h in mem_hosts if h.instance_id]
for offset in range(0, len(all_mem_instance_ids), self._batch_size):
- mem_instance_ids = all_mem_instance_ids[offset:(offset + self._batch_size)]
+ mem_instance_ids = all_mem_instance_ids[offset : (offset + self._batch_size)]
for db_host_pk in db_hosts.filter(instance_id__in=mem_instance_ids).values_list('pk', flat=True):
del_host_pks.discard(db_host_pk)
- all_db_host_pks = [v for k,v in self.db_instance_id_map.items() if k in all_mem_instance_ids]
+ all_db_host_pks = [v for k, v in self.db_instance_id_map.items() if k in all_mem_instance_ids]
for db_host_pk in all_db_host_pks:
del_host_pks.discard(db_host_pk)
# Removal list is complete - now perform the removals
del_host_pks = list(del_host_pks)
for offset in range(0, len(del_host_pks), self._batch_size):
- del_pks = del_host_pks[offset:(offset + self._batch_size)]
+ del_pks = del_host_pks[offset : (offset + self._batch_size)]
for db_host in db_hosts.filter(pk__in=del_pks):
group_host_count += 1
if db_host not in db_group.hosts.all():
continue
db_group.hosts.remove(db_host)
- logger.debug('Host "%s" removed from group "%s"',
- db_host.name, db_group.name)
+ logger.debug('Host "%s" removed from group "%s"', db_host.name, db_group.name)
if settings.SQL_DEBUG:
- logger.warning('group-group and group-host deletions took %d queries for %d relationships',
- len(connection.queries) - queries_before,
- group_group_count + group_host_count)
+ logger.warning(
+ 'group-group and group-host deletions took %d queries for %d relationships',
+ len(connection.queries) - queries_before,
+ group_group_count + group_host_count,
+ )
def _update_inventory(self):
- '''
+ """
Update inventory variables from "all" group.
- '''
+ """
# TODO: We disable variable overwrite here in case user-defined inventory variables get
# mangled. But we still need to figure out a better way of processing multiple inventory
# update variables mixing with each other.
@@ -496,24 +488,24 @@ class Command(BaseCommand):
logger.debug('Inventory variables unmodified')
def _create_update_groups(self):
- '''
+ """
For each group in the local list, create it if it doesn't exist in the
database. Otherwise, update/replace database variables from the
imported data. Associate with the inventory source group if importing
from cloud inventory source.
- '''
+ """
if settings.SQL_DEBUG:
queries_before = len(connection.queries)
all_group_names = sorted(self.all_group.all_groups.keys())
root_group_names = set()
- for k,v in self.all_group.all_groups.items():
+ for k, v in self.all_group.all_groups.items():
if not v.parents:
root_group_names.add(k)
if len(v.parents) == 1 and v.parents[0].name == 'all':
root_group_names.add(k)
existing_group_names = set()
for offset in range(0, len(all_group_names), self._batch_size):
- group_names = all_group_names[offset:(offset + self._batch_size)]
+ group_names = all_group_names[offset : (offset + self._batch_size)]
for group in self.inventory.groups.filter(name__in=group_names):
mem_group = self.all_group.all_groups[group.name]
db_variables = group.variables_dict
@@ -537,20 +529,14 @@ class Command(BaseCommand):
continue
mem_group = self.all_group.all_groups[group_name]
group_desc = mem_group.variables.pop('_awx_description', 'imported')
- group = self.inventory.groups.update_or_create(
- name=group_name,
- defaults={
- 'variables':json.dumps(mem_group.variables),
- 'description':group_desc
- }
- )[0]
+ group, _ = self.inventory.groups.update_or_create(name=group_name, defaults={'variables': json.dumps(mem_group.variables), 'description': group_desc})
logger.debug('Group "%s" added', group.name)
self._batch_add_m2m(self.inventory_source.groups, group)
self._batch_add_m2m(self.inventory_source.groups, flush=True)
if settings.SQL_DEBUG:
- logger.warning('group updates took %d queries for %d groups',
- len(connection.queries) - queries_before,
- len(self.all_group.all_groups))
+ logger.warning('group updates took %d queries for %d groups', len(connection.queries) - queries_before, len(self.all_group.all_groups))
def _update_db_host_from_mem_host(self, db_host, mem_host):
# Update host variables.
@@ -604,12 +590,12 @@ class Command(BaseCommand):
self._batch_add_m2m(self.inventory_source.hosts, db_host)
def _create_update_hosts(self):
- '''
+ """
For each host in the local list, create it if it doesn't exist in the
database. Otherwise, update/replace database variables from the
imported data. Associate with the inventory source group if importing
from cloud inventory source.
- '''
+ """
if settings.SQL_DEBUG:
queries_before = len(connection.queries)
host_pks_updated = set()
@@ -617,7 +603,7 @@ class Command(BaseCommand):
mem_host_instance_id_map = {}
mem_host_name_map = {}
mem_host_names_to_update = set(self.all_group.all_hosts.keys())
- for k,v in self.all_group.all_hosts.items():
+ for k, v in self.all_group.all_hosts.items():
mem_host_name_map[k] = v
instance_id = self._get_instance_id(v.variables)
if instance_id in self.db_instance_id_map:
@@ -628,8 +614,8 @@ class Command(BaseCommand):
# Update all existing hosts where we know the PK based on instance_id.
all_host_pks = sorted(mem_host_pk_map.keys())
for offset in range(0, len(all_host_pks), self._batch_size):
- host_pks = all_host_pks[offset:(offset + self._batch_size)]
- for db_host in self.inventory.hosts.filter( pk__in=host_pks):
+ host_pks = all_host_pks[offset : (offset + self._batch_size)]
+ for db_host in self.inventory.hosts.filter(pk__in=host_pks):
if db_host.pk in host_pks_updated:
continue
mem_host = mem_host_pk_map[db_host.pk]
@@ -640,8 +626,8 @@ class Command(BaseCommand):
# Update all existing hosts where we know the instance_id.
all_instance_ids = sorted(mem_host_instance_id_map.keys())
for offset in range(0, len(all_instance_ids), self._batch_size):
- instance_ids = all_instance_ids[offset:(offset + self._batch_size)]
- for db_host in self.inventory.hosts.filter( instance_id__in=instance_ids):
+ instance_ids = all_instance_ids[offset : (offset + self._batch_size)]
+ for db_host in self.inventory.hosts.filter(instance_id__in=instance_ids):
if db_host.pk in host_pks_updated:
continue
mem_host = mem_host_instance_id_map[db_host.instance_id]
@@ -652,8 +638,8 @@ class Command(BaseCommand):
# Update all existing hosts by name.
all_host_names = sorted(mem_host_name_map.keys())
for offset in range(0, len(all_host_names), self._batch_size):
- host_names = all_host_names[offset:(offset + self._batch_size)]
- for db_host in self.inventory.hosts.filter( name__in=host_names):
+ host_names = all_host_names[offset : (offset + self._batch_size)]
+ for db_host in self.inventory.hosts.filter(name__in=host_names):
if db_host.pk in host_pks_updated:
continue
mem_host = mem_host_name_map[db_host.name]
@@ -687,27 +673,25 @@ class Command(BaseCommand):
self._batch_add_m2m(self.inventory_source.hosts, flush=True)
if settings.SQL_DEBUG:
- logger.warning('host updates took %d queries for %d hosts',
- len(connection.queries) - queries_before,
- len(self.all_group.all_hosts))
+ logger.warning('host updates took %d queries for %d hosts', len(connection.queries) - queries_before, len(self.all_group.all_hosts))
@transaction.atomic
def _create_update_group_children(self):
- '''
+ """
For each imported group, create all parent-child group relationships.
- '''
+ """
if settings.SQL_DEBUG:
queries_before = len(connection.queries)
- all_group_names = sorted([k for k,v in self.all_group.all_groups.items() if v.children])
+ all_group_names = sorted([k for k, v in self.all_group.all_groups.items() if v.children])
group_group_count = 0
for offset in range(0, len(all_group_names), self._batch_size):
- group_names = all_group_names[offset:(offset + self._batch_size)]
+ group_names = all_group_names[offset : (offset + self._batch_size)]
for db_group in self.inventory.groups.filter(name__in=group_names):
mem_group = self.all_group.all_groups[db_group.name]
group_group_count += len(mem_group.children)
all_child_names = sorted([g.name for g in mem_group.children])
for offset2 in range(0, len(all_child_names), self._batch_size):
- child_names = all_child_names[offset2:(offset2 + self._batch_size)]
+ child_names = all_child_names[offset2 : (offset2 + self._batch_size)]
db_children_qs = self.inventory.groups.filter(name__in=child_names)
for db_child in db_children_qs.filter(children__id=db_group.id):
logger.debug('Group "%s" already child of group "%s"', db_child.name, db_group.name)
@@ -716,8 +700,7 @@ class Command(BaseCommand):
logger.debug('Group "%s" added as child of "%s"', db_child.name, db_group.name)
self._batch_add_m2m(db_group.children, flush=True)
if settings.SQL_DEBUG:
- logger.warning('Group-group updates took %d queries for %d group-group relationships',
- len(connection.queries) - queries_before, group_group_count)
+ logger.warning('Group-group updates took %d queries for %d group-group relationships', len(connection.queries) - queries_before, group_group_count)
@transaction.atomic
def _create_update_group_hosts(self):
@@ -725,16 +708,16 @@ class Command(BaseCommand):
# belongs.
if settings.SQL_DEBUG:
queries_before = len(connection.queries)
- all_group_names = sorted([k for k,v in self.all_group.all_groups.items() if v.hosts])
+ all_group_names = sorted([k for k, v in self.all_group.all_groups.items() if v.hosts])
group_host_count = 0
for offset in range(0, len(all_group_names), self._batch_size):
- group_names = all_group_names[offset:(offset + self._batch_size)]
+ group_names = all_group_names[offset : (offset + self._batch_size)]
for db_group in self.inventory.groups.filter(name__in=group_names):
mem_group = self.all_group.all_groups[db_group.name]
group_host_count += len(mem_group.hosts)
all_host_names = sorted([h.name for h in mem_group.hosts if not h.instance_id])
for offset2 in range(0, len(all_host_names), self._batch_size):
- host_names = all_host_names[offset2:(offset2 + self._batch_size)]
+ host_names = all_host_names[offset2 : (offset2 + self._batch_size)]
db_hosts_qs = self.inventory.hosts.filter(name__in=host_names)
for db_host in db_hosts_qs.filter(groups__id=db_group.id):
logger.debug('Host "%s" already in group "%s"', db_host.name, db_group.name)
@@ -743,7 +726,7 @@ class Command(BaseCommand):
logger.debug('Host "%s" added to group "%s"', db_host.name, db_group.name)
all_instance_ids = sorted([h.instance_id for h in mem_group.hosts if h.instance_id])
for offset2 in range(0, len(all_instance_ids), self._batch_size):
- instance_ids = all_instance_ids[offset2:(offset2 + self._batch_size)]
+ instance_ids = all_instance_ids[offset2 : (offset2 + self._batch_size)]
db_hosts_qs = self.inventory.hosts.filter(instance_id__in=instance_ids)
for db_host in db_hosts_qs.filter(groups__id=db_group.id):
logger.debug('Host "%s" already in group "%s"', db_host.name, db_group.name)
@@ -752,14 +735,13 @@ class Command(BaseCommand):
logger.debug('Host "%s" added to group "%s"', db_host.name, db_group.name)
self._batch_add_m2m(db_group.hosts, flush=True)
if settings.SQL_DEBUG:
- logger.warning('Group-host updates took %d queries for %d group-host relationships',
- len(connection.queries) - queries_before, group_host_count)
+ logger.warning('Group-host updates took %d queries for %d group-host relationships', len(connection.queries) - queries_before, group_host_count)
def load_into_database(self):
- '''
+ """
Load inventory from in-memory groups to the database, overwriting or
merging as appropriate.
- '''
+ """
# FIXME: Attribute changes to superuser?
# Perform __in queries in batches (mainly for unit tests using SQLite).
self._batch_size = 500
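
The offset slicing that appears throughout this file is plain chunking: it keeps each `__in` clause below the bound-parameter limits of backends such as SQLite. As a standalone generator:

    def batches(items, size=500):
        # yields consecutive slices of at most `size` elements
        for offset in range(0, len(items), size):
            yield items[offset : offset + size]

    # e.g. for pks in batches(sorted(del_host_pks)): qs.filter(pk__in=pks)...
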
@@ -782,9 +764,7 @@ class Command(BaseCommand):
if remote_license_type is None:
raise PermissionDenied('Unexpected Error: Tower inventory plugin missing needed metadata!')
if local_license_type != remote_license_type:
- raise PermissionDenied('Tower server licenses must match: source: {} local: {}'.format(
- remote_license_type, local_license_type
- ))
+ raise PermissionDenied('Tower server licenses must match: source: {} local: {}'.format(remote_license_type, local_license_type))
def check_license(self):
license_info = get_licenser().validate()
@@ -875,7 +855,6 @@ class Command(BaseCommand):
raise CommandError('Inventory with %s = %s returned multiple results' % list(q.items())[0])
logger.info('Updating inventory %d: %s' % (inventory.pk, inventory.name))
-
# Create ad-hoc inventory source and inventory update objects
with ignore_inventory_computed_fields():
source = Command.get_source_absolute_path(raw_source)
@@ -888,15 +867,10 @@ class Command(BaseCommand):
overwrite_vars=bool(options.get('overwrite_vars', False)),
)
inventory_update = inventory_source.create_inventory_update(
- _eager_fields=dict(
- job_args=json.dumps(sys.argv),
- job_env=dict(os.environ.items()),
- job_cwd=os.getcwd())
+ _eager_fields=dict(job_args=json.dumps(sys.argv), job_env=dict(os.environ.items()), job_cwd=os.getcwd())
)
- data = AnsibleInventoryLoader(
- source=source, venv_path=venv_path, verbosity=verbosity
- ).load()
+ data = AnsibleInventoryLoader(source=source, venv_path=venv_path, verbosity=verbosity).load()
logger.debug('Finished loading from source: %s', source)
@@ -992,12 +966,10 @@ class Command(BaseCommand):
self.inventory_update.save()
logger.info('Processing JSON output...')
- inventory = MemInventory(
- group_filter_re=self.group_filter_re, host_filter_re=self.host_filter_re)
+ inventory = MemInventory(group_filter_re=self.group_filter_re, host_filter_re=self.host_filter_re)
inventory = dict_to_mem_data(data, inventory=inventory)
- logger.info('Loaded %d groups, %d hosts', len(inventory.all_group.all_groups),
- len(inventory.all_group.all_hosts))
+ logger.info('Loaded %d groups, %d hosts', len(inventory.all_group.all_groups), len(inventory.all_group.all_hosts))
if self.exclude_empty_groups:
inventory.delete_empty_groups()
@@ -1036,8 +1008,7 @@ class Command(BaseCommand):
queries_before2 = len(connection.queries)
self.inventory.update_computed_fields()
if settings.SQL_DEBUG:
- logger.warning('update computed fields took %d queries',
- len(connection.queries) - queries_before2)
+ logger.warning('update computed fields took %d queries', len(connection.queries) - queries_before2)
# Check if the license is valid.
# If the license is not valid, a CommandError will be thrown,
@@ -1057,17 +1028,13 @@ class Command(BaseCommand):
raise e
if settings.SQL_DEBUG:
- logger.warning('Inventory import completed for %s in %0.1fs',
- self.inventory_source.name, time.time() - begin)
+ logger.warning('Inventory import completed for %s in %0.1fs', self.inventory_source.name, time.time() - begin)
else:
- logger.info('Inventory import completed for %s in %0.1fs',
- self.inventory_source.name, time.time() - begin)
+ logger.info('Inventory import completed for %s in %0.1fs', self.inventory_source.name, time.time() - begin)
# If we're in debug mode, then log the queries and time
# used to do the operation.
if settings.SQL_DEBUG:
queries_this_import = connection.queries[queries_before:]
sqltime = sum(float(x['time']) for x in queries_this_import)
- logger.warning('Inventory import required %d queries '
- 'taking %0.3fs', len(queries_this_import),
- sqltime)
+ logger.warning('Inventory import required %d queries taking %0.3fs', len(queries_this_import), sqltime)
diff --git a/awx/main/management/commands/list_instances.py b/awx/main/management/commands/list_instances.py
index 3067f96fce..95807cb5a9 100644
--- a/awx/main/management/commands/list_instances.py
+++ b/awx/main/management/commands/list_instances.py
@@ -22,8 +22,7 @@ class Ungrouped(object):
class Command(BaseCommand):
- """List instances from the Tower database
- """
+ """List instances from the Tower database"""
def handle(self, *args, **options):
super(Command, self).__init__()
diff --git a/awx/main/management/commands/profile_sql.py b/awx/main/management/commands/profile_sql.py
index 585fb3d706..2853b072ff 100644
--- a/awx/main/management/commands/profile_sql.py
+++ b/awx/main/management/commands/profile_sql.py
@@ -10,18 +10,18 @@ class Command(BaseCommand):
"""
def add_arguments(self, parser):
- parser.add_argument('--threshold', dest='threshold', type=float, default=2.0,
- help='The minimum query duration in seconds (default=2). Use 0 to disable.')
- parser.add_argument('--minutes', dest='minutes', type=float, default=5,
- help='How long to record for in minutes (default=5)')
+ parser.add_argument(
+ '--threshold', dest='threshold', type=float, default=2.0, help='The minimum query duration in seconds (default=2). Use 0 to disable.'
+ )
+ parser.add_argument('--minutes', dest='minutes', type=float, default=5, help='How long to record for in minutes (default=5)')
def handle(self, **options):
- profile_sql.delay(
- threshold=options['threshold'], minutes=options['minutes']
- )
+ profile_sql.delay(threshold=options['threshold'], minutes=options['minutes'])
if options['threshold'] > 0:
- print(f"SQL profiling initiated with a threshold of {options['threshold']} second(s) and a"
- f" duration of {options['minutes']} minute(s), any queries that meet criteria can"
- f" be found in /var/log/tower/profile/.")
+ print(
+ f"SQL profiling initiated with a threshold of {options['threshold']} second(s) and a"
+ f" duration of {options['minutes']} minute(s), any queries that meet criteria can"
+ f" be found in /var/log/tower/profile/."
+ )
else:
print("SQL profiling disabled.")
diff --git a/awx/main/management/commands/provision_instance.py b/awx/main/management/commands/provision_instance.py
index 4d7655821a..02435ee167 100644
--- a/awx/main/management/commands/provision_instance.py
+++ b/awx/main/management/commands/provision_instance.py
@@ -16,16 +16,11 @@ class Command(BaseCommand):
Register this instance with the database for HA tracking.
"""
- help = (
- 'Add instance to the database. '
- 'Specify `--hostname` to use this command.'
- )
+ help = 'Add instance to the database. Specify `--hostname` to use this command.'
def add_arguments(self, parser):
- parser.add_argument('--hostname', dest='hostname', type=str,
- help='Hostname used during provisioning')
- parser.add_argument('--is-isolated', dest='is_isolated', action='store_true',
- help='Specify whether the instance is isolated')
+ parser.add_argument('--hostname', dest='hostname', type=str, help='Hostname used during provisioning')
+ parser.add_argument('--is-isolated', dest='is_isolated', action='store_true', help='Specify whether the instance is isolated')
def _register_hostname(self, hostname):
if not hostname:
diff --git a/awx/main/management/commands/regenerate_secret_key.py b/awx/main/management/commands/regenerate_secret_key.py
index 61a2c46b4c..68a1102daa 100644
--- a/awx/main/management/commands/regenerate_secret_key.py
+++ b/awx/main/management/commands/regenerate_secret_key.py
@@ -10,13 +10,8 @@ from django.db.models.signals import post_save
from awx.conf import settings_registry
from awx.conf.models import Setting
from awx.conf.signals import on_post_save_setting
-from awx.main.models import (
- UnifiedJob, Credential, NotificationTemplate, Job, JobTemplate, WorkflowJob,
- WorkflowJobTemplate, OAuth2Application
-)
-from awx.main.utils.encryption import (
- encrypt_field, decrypt_field, encrypt_value, decrypt_value, get_encryption_key
-)
+from awx.main.models import UnifiedJob, Credential, NotificationTemplate, Job, JobTemplate, WorkflowJob, WorkflowJobTemplate, OAuth2Application
+from awx.main.utils.encryption import encrypt_field, decrypt_field, encrypt_value, decrypt_value, get_encryption_key
class Command(BaseCommand):
@@ -41,8 +36,7 @@ class Command(BaseCommand):
for nt in NotificationTemplate.objects.iterator():
CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NotificationTemplate.NOTIFICATION_TYPES])
notification_class = CLASS_FOR_NOTIFICATION_TYPE[nt.notification_type]
- for field in filter(lambda x: notification_class.init_parameters[x]['type'] == "password",
- notification_class.init_parameters):
+ for field in filter(lambda x: notification_class.init_parameters[x]['type'] == "password", notification_class.init_parameters):
nt.notification_configuration[field] = decrypt_field(nt, 'notification_configuration', subfield=field, secret_key=self.old_key)
nt.notification_configuration[field] = encrypt_field(nt, 'notification_configuration', subfield=field, secret_key=self.new_key)
nt.save()
@@ -51,26 +45,14 @@ class Command(BaseCommand):
for credential in Credential.objects.iterator():
for field_name in credential.credential_type.secret_fields:
if field_name in credential.inputs:
- credential.inputs[field_name] = decrypt_field(
- credential,
- field_name,
- secret_key=self.old_key
- )
- credential.inputs[field_name] = encrypt_field(
- credential,
- field_name,
- secret_key=self.new_key
- )
+ credential.inputs[field_name] = decrypt_field(credential, field_name, secret_key=self.old_key)
+ credential.inputs[field_name] = encrypt_field(credential, field_name, secret_key=self.new_key)
credential.save()
def _unified_jobs(self):
for uj in UnifiedJob.objects.iterator():
if uj.start_args:
- uj.start_args = decrypt_field(
- uj,
- 'start_args',
- secret_key=self.old_key
- )
+ uj.start_args = decrypt_field(uj, 'start_args', secret_key=self.old_key)
uj.start_args = encrypt_field(uj, 'start_args', secret_key=self.new_key)
uj.save()
@@ -97,15 +79,8 @@ class Command(BaseCommand):
if jt.survey_spec.get('spec', []):
for field in jt.survey_spec['spec']:
if field.get('type') == 'password' and field.get('default', ''):
- raw = decrypt_value(
- get_encryption_key('value', None, secret_key=self.old_key),
- field['default']
- )
- field['default'] = encrypt_value(
- raw,
- pk=None,
- secret_key=self.new_key
- )
+ raw = decrypt_value(get_encryption_key('value', None, secret_key=self.old_key), field['default'])
+ field['default'] = encrypt_value(raw, pk=None, secret_key=self.new_key)
changed = True
if changed:
jt.save(update_fields=["survey_spec"])
@@ -118,10 +93,7 @@ class Command(BaseCommand):
extra_vars = json.loads(job.extra_vars)
if not extra_vars.get(key):
continue
- raw = decrypt_value(
- get_encryption_key('value', None, secret_key=self.old_key),
- extra_vars[key]
- )
+ raw = decrypt_value(get_encryption_key('value', None, secret_key=self.old_key), extra_vars[key])
extra_vars[key] = encrypt_value(raw, pk=None, secret_key=self.new_key)
job.extra_vars = json.dumps(extra_vars)
changed = True
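
Every branch of this command applies the same re-keying pattern: write the value decrypted with the retiring key back onto the object, then re-encrypt it in place with the replacement key. A generic sketch using the helper signatures seen above:

    from awx.main.utils.encryption import decrypt_field, encrypt_field

    def rekey(objects, field, old_key, new_key):
        for obj in objects:
            setattr(obj, field, decrypt_field(obj, field, secret_key=old_key))
            setattr(obj, field, encrypt_field(obj, field, secret_key=new_key))
            obj.save()
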
diff --git a/awx/main/management/commands/register_queue.py b/awx/main/management/commands/register_queue.py
index 15891a771f..5369e4fe06 100644
--- a/awx/main/management/commands/register_queue.py
+++ b/awx/main/management/commands/register_queue.py
@@ -112,19 +112,22 @@ class RegisterQueue:
class Command(BaseCommand):
-
def add_arguments(self, parser):
- parser.add_argument('--queuename', dest='queuename', type=str,
- help='Queue to create/update')
- parser.add_argument('--hostnames', dest='hostnames', type=str,
- help='Comma-Delimited Hosts to add to the Queue (will not remove already assigned instances)')
- parser.add_argument('--controller', dest='controller', type=str,
- default='', help='The controlling group (makes this an isolated group)')
- parser.add_argument('--instance_percent', dest='instance_percent', type=int, default=0,
- help='The percentage of active instances that will be assigned to this group'),
- parser.add_argument('--instance_minimum', dest='instance_minimum', type=int, default=0,
- help='The minimum number of instance that will be retained for this group from available instances')
-
+ parser.add_argument('--queuename', dest='queuename', type=str, help='Queue to create/update')
+ parser.add_argument(
+ '--hostnames', dest='hostnames', type=str, help='Comma-delimited hosts to add to the queue (will not remove already assigned instances)'
+ )
+ parser.add_argument('--controller', dest='controller', type=str, default='', help='The controlling group (makes this an isolated group)')
+ parser.add_argument(
+ '--instance_percent', dest='instance_percent', type=int, default=0, help='The percentage of active instances that will be assigned to this group'
+ )
+ parser.add_argument(
+ '--instance_minimum',
+ dest='instance_minimum',
+ type=int,
+ default=0,
+ help='The minimum number of instances that will be retained for this group from available instances',
+ )
def handle(self, **options):
queuename = options.get('queuename')
diff --git a/awx/main/management/commands/remove_from_queue.py b/awx/main/management/commands/remove_from_queue.py
index b249749219..35c83fe298 100644
--- a/awx/main/management/commands/remove_from_queue.py
+++ b/awx/main/management/commands/remove_from_queue.py
@@ -10,13 +10,12 @@ class Command(BaseCommand):
help = (
"Remove an instance (specified by --hostname) from the specified queue (instance group).\n"
- "In order remove the queue, use the `unregister_queue` command.")
+ "In order remove the queue, use the `unregister_queue` command."
+ )
def add_arguments(self, parser):
- parser.add_argument('--queuename', dest='queuename', type=str,
- help='Queue to be removed from')
- parser.add_argument('--hostname', dest='hostname', type=str,
- help='Host to remove from queue')
+ parser.add_argument('--queuename', dest='queuename', type=str, help='Queue to be removed from')
+ parser.add_argument('--hostname', dest='hostname', type=str, help='Host to remove from queue')
def handle(self, *arg, **options):
if not options.get('queuename'):
diff --git a/awx/main/management/commands/replay_job_events.py b/awx/main/management/commands/replay_job_events.py
index ad8d7a5f5f..d8817681ea 100644
--- a/awx/main/management/commands/replay_job_events.py
+++ b/awx/main/management/commands/replay_job_events.py
@@ -10,17 +10,10 @@ from django.utils import timezone
from django.core.management.base import BaseCommand
from awx.main.models.events import emit_event_detail
-from awx.main.models import (
- UnifiedJob,
- Job,
- AdHocCommand,
- ProjectUpdate,
- InventoryUpdate,
- SystemJob
-)
+from awx.main.models import UnifiedJob, Job, AdHocCommand, ProjectUpdate, InventoryUpdate, SystemJob
-class JobStatusLifeCycle():
+class JobStatusLifeCycle:
def emit_job_status(self, job, status):
# {"status": "successful", "project_id": 13, "unified_job_id": 659, "group_name": "jobs"}
job.websocket_emit_status(status)
@@ -65,10 +58,10 @@ class ReplayJobEvents(JobStatusLifeCycle):
time.sleep(seconds)
def replay_elapsed(self):
- return (self.now() - self.replay_start)
+ return self.now() - self.replay_start
def recording_elapsed(self, created):
- return (created - self.recording_start)
+ return created - self.recording_start
def replay_offset(self, created, speed):
return self.replay_elapsed().total_seconds() - (self.recording_elapsed(created).total_seconds() * (1.0 / speed))
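
To make the replay_offset() arithmetic concrete (values invented): with speed 2.0, an event recorded 10 seconds into the original run maps to the 5-second mark of the replay, so 6 seconds into the replay its offset is 6.0 - 10.0 * (1 / 2.0) = 1.0; a positive result means more replay wall-clock time has elapsed than the speed-scaled recording time for that event:

    replay_elapsed = 6.0      # seconds since the replay started
    recording_elapsed = 10.0  # seconds into the recording that the event occurred
    speed = 2.0               # speedup factor
    offset = replay_elapsed - recording_elapsed * (1.0 / speed)  # 1.0
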
@@ -156,12 +149,12 @@ class ReplayJobEvents(JobStatusLifeCycle):
self.sleep(replay_diff)
else:
stats['events_late']['total'] += 1
- stats['events_late']['lateness_total'] += (replay_diff * -1)
+ stats['events_late']['lateness_total'] += replay_diff * -1
if verbosity >= 3:
print("\treplay: too far behind to sleep {} seconds".format(replay_diff))
else:
replay_offset = self.replay_offset(je_current.created, speed)
- stats['events_late']['lateness_total'] += (replay_offset * -1)
+ stats['events_late']['lateness_total'] += replay_offset * -1
stats['events_late']['total'] += 1
if verbosity >= 3:
print("\treplay: behind by {} seconds".format(replay_offset))
@@ -211,18 +204,23 @@ class Command(BaseCommand):
return range(start, stop, step)
def add_arguments(self, parser):
- parser.add_argument('--job_id', dest='job_id', type=int, metavar='j',
- help='Id of the job to replay (job or adhoc)')
- parser.add_argument('--speed', dest='speed', type=float, metavar='s',
- help='Speedup factor.')
- parser.add_argument('--skip-range', dest='skip_range', type=str, metavar='k',
- default='0:-1:1', help='Range of events to skip')
- parser.add_argument('--random-seed', dest='random_seed', type=int, metavar='r',
- default=0, help='Random number generator seed to use when determining job_event index to emit final job status')
- parser.add_argument('--final-status-delay', dest='final_status_delay', type=float, metavar='f',
- default=0, help='Delay between event and final status emit')
- parser.add_argument('--debug', dest='debug', type=bool, metavar='d',
- default=False, help='Enable step mode to control emission of job events one at a time.')
+ parser.add_argument('--job_id', dest='job_id', type=int, metavar='j', help='ID of the job to replay (job or adhoc)')
+ parser.add_argument('--speed', dest='speed', type=float, metavar='s', help='Speedup factor.')
+ parser.add_argument('--skip-range', dest='skip_range', type=str, metavar='k', default='0:-1:1', help='Range of events to skip')
+ parser.add_argument(
+ '--random-seed',
+ dest='random_seed',
+ type=int,
+ metavar='r',
+ default=0,
+ help='Random number generator seed to use when determining job_event index to emit final job status',
+ )
+ parser.add_argument(
+ '--final-status-delay', dest='final_status_delay', type=float, metavar='f', default=0, help='Delay between event and final status emit'
+ )
+ parser.add_argument(
+ '--debug', dest='debug', type=bool, metavar='d', default=False, help='Enable step mode to control emission of job events one at a time.'
+ )
def handle(self, *args, **options):
job_id = options.get('job_id')
@@ -234,5 +232,4 @@ class Command(BaseCommand):
skip = self._parse_slice_range(options.get('skip_range'))
replayer = ReplayJobEvents()
- replayer.run(job_id, speed=speed, verbosity=verbosity, skip_range=skip, random_seed=random_seed,
- final_status_delay=final_status_delay, debug=debug)
+ replayer.run(job_id, speed=speed, verbosity=verbosity, skip_range=skip, random_seed=random_seed, final_status_delay=final_status_delay, debug=debug)
diff --git a/awx/main/management/commands/revoke_oauth2_tokens.py b/awx/main/management/commands/revoke_oauth2_tokens.py
index ff6af19a37..1cc128afdf 100644
--- a/awx/main/management/commands/revoke_oauth2_tokens.py
+++ b/awx/main/management/commands/revoke_oauth2_tokens.py
@@ -16,7 +16,8 @@ def revoke_tokens(token_list):
class Command(BaseCommand):
"""Command that revokes OAuth2 access tokens."""
- help='Revokes OAuth2 access tokens. Use --all to revoke access and refresh tokens.'
+
+ help = 'Revokes OAuth2 access tokens. Use --all to revoke access and refresh tokens.'
def add_arguments(self, parser):
parser.add_argument('--user', dest='user', type=str, help='revoke OAuth2 tokens for a specific username')
diff --git a/awx/main/management/commands/run_callback_receiver.py b/awx/main/management/commands/run_callback_receiver.py
index 23922a7537..cb3ab781b5 100644
--- a/awx/main/management/commands/run_callback_receiver.py
+++ b/awx/main/management/commands/run_callback_receiver.py
@@ -9,16 +9,16 @@ from awx.main.dispatch.worker import AWXConsumerRedis, CallbackBrokerWorker
class Command(BaseCommand):
- '''
+ """
Save Job Callback receiver
Runs as a management command and receives job save events. It then hands
them off to worker processors (see Worker) which writes them to the database
- '''
+ """
+
help = 'Launch the job callback receiver'
def add_arguments(self, parser):
- parser.add_argument('--status', dest='status', action='store_true',
- help='print the internal state of any running dispatchers')
+ parser.add_argument('--status', dest='status', action='store_true', help='print the internal state of any running dispatchers')
def handle(self, *arg, **options):
if options.get('status'):
diff --git a/awx/main/management/commands/run_dispatcher.py b/awx/main/management/commands/run_dispatcher.py
index fb8c1b4a6b..bafe27cdaf 100644
--- a/awx/main/management/commands/run_dispatcher.py
+++ b/awx/main/management/commands/run_dispatcher.py
@@ -24,13 +24,14 @@ class Command(BaseCommand):
help = 'Launch the task dispatcher'
def add_arguments(self, parser):
- parser.add_argument('--status', dest='status', action='store_true',
- help='print the internal state of any running dispatchers')
- parser.add_argument('--running', dest='running', action='store_true',
- help='print the UUIDs of any tasked managed by this dispatcher')
- parser.add_argument('--reload', dest='reload', action='store_true',
- help=('cause the dispatcher to recycle all of its worker processes;'
- 'running jobs will run to completion first'))
+ parser.add_argument('--status', dest='status', action='store_true', help='print the internal state of any running dispatchers')
+ parser.add_argument('--running', dest='running', action='store_true', help='print the UUIDs of any tasks managed by this dispatcher')
+ parser.add_argument(
+ '--reload',
+ dest='reload',
+ action='store_true',
+ help='cause the dispatcher to recycle all of its worker processes; running jobs will run to completion first',
+ )
def handle(self, *arg, **options):
if options.get('status'):
@@ -57,12 +58,7 @@ class Command(BaseCommand):
try:
queues = ['tower_broadcast_all', get_local_queuename()]
- consumer = AWXConsumerPG(
- 'dispatcher',
- TaskWorker(),
- queues,
- AutoscalePool(min_workers=4)
- )
+ consumer = AWXConsumerPG('dispatcher', TaskWorker(), queues, AutoscalePool(min_workers=4))
consumer.run()
except KeyboardInterrupt:
logger.debug('Terminating Task Dispatcher')
diff --git a/awx/main/management/commands/run_wsbroadcast.py b/awx/main/management/commands/run_wsbroadcast.py
index 5801f4e5d0..60f262c86c 100644
--- a/awx/main/management/commands/run_wsbroadcast.py
+++ b/awx/main/management/commands/run_wsbroadcast.py
@@ -27,8 +27,7 @@ class Command(BaseCommand):
help = 'Launch the websocket broadcaster'
def add_arguments(self, parser):
- parser.add_argument('--status', dest='status', action='store_true',
- help='print the internal state of any running broadcast websocket')
+ parser.add_argument('--status', dest='status', action='store_true', help='print the internal state of any running broadcast websocket')
@classmethod
def display_len(cls, s):
@@ -58,7 +57,7 @@ class Command(BaseCommand):
def get_connection_status(cls, me, hostnames, data):
host_stats = [('hostname', 'state', 'start time', 'duration (sec)')]
for h in hostnames:
- connection_color = '91' # red
+ connection_color = '91' # red
h_safe = safe_name(h)
prefix = f'awx_{h_safe}'
connection_state = data.get(f'{prefix}_connection', 'N/A')
@@ -67,7 +66,7 @@ class Command(BaseCommand):
if connection_state is None:
connection_state = 'unknown'
if connection_state == 'connected':
- connection_color = '92' # green
+ connection_color = '92' # green
connection_started = data.get(f'{prefix}_connection_start', 'Error')
if connection_started != 'Error':
connection_started = datetime.datetime.fromtimestamp(connection_started)
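
The only change in the two hunks above is comment spacing: PEP 8 requires at least two spaces before an inline comment (pycodestyle E261), and black normalizes this. A two-line sketch:

connection_color = '91' # one space before the comment: flagged as E261
connection_color = '91'  # two spaces: the form black emits
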
diff --git a/awx/main/management/commands/stats.py b/awx/main/management/commands/stats.py
index b588acb558..4594e6dcfa 100644
--- a/awx/main/management/commands/stats.py
+++ b/awx/main/management/commands/stats.py
@@ -9,19 +9,14 @@ from awx.main.models import UnifiedJob
class Command(BaseCommand):
- '''
+ """
Emits some simple statistics suitable for external monitoring
- '''
+ """
help = 'Display some simple statistics'
def add_arguments(self, parser):
- parser.add_argument('--stat',
- action='store',
- dest='stat',
- type=str,
- default="jobs_running",
- help='Select which stat to get information for')
+ parser.add_argument('--stat', action='store', dest='stat', type=str, default="jobs_running", help='Select which stat to get information for')
def job_stats(self, state):
return UnifiedJob.objects.filter(status=state).count()
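
The transformation applied throughout this commit can be reproduced with black's Python API. A minimal sketch; the 160-character line length and the disabled string normalization (the -S flag) are assumptions inferred from the output in this diff, not read from the project's Makefile:

import black

src = """\
parser.add_argument('--stat',
                    action='store',
                    dest='stat',
                    type=str,
                    default="jobs_running",
                    help='Select which stat to get information for')
"""
# Under the assumed settings the call fits on one line, so black collapses it;
# string_normalization=False keeps the single quotes seen throughout this diff.
print(black.format_str(src, mode=black.FileMode(line_length=160, string_normalization=False)))
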
diff --git a/awx/main/management/commands/test_isolated_connection.py b/awx/main/management/commands/test_isolated_connection.py
index 1c6ecc5393..3983967251 100644
--- a/awx/main/management/commands/test_isolated_connection.py
+++ b/awx/main/management/commands/test_isolated_connection.py
@@ -13,11 +13,11 @@ from awx.main.isolated.manager import set_pythonpath
class Command(BaseCommand):
"""Tests SSH connectivity between a controller and target isolated node"""
+
help = 'Tests SSH connectivity between a controller and target isolated node'
def add_arguments(self, parser):
- parser.add_argument('--hostname', dest='hostname', type=str,
- help='Hostname of an isolated node')
+ parser.add_argument('--hostname', dest='hostname', type=str, help='Hostname of an isolated node')
def handle(self, *args, **options):
hostname = options.get('hostname')
@@ -27,10 +27,7 @@ class Command(BaseCommand):
try:
path = tempfile.mkdtemp(prefix='awx_isolated_ssh', dir=settings.AWX_PROOT_BASE_PATH)
ssh_key = None
- if all([
- getattr(settings, 'AWX_ISOLATED_KEY_GENERATION', False) is True,
- getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', None)
- ]):
+ if all([getattr(settings, 'AWX_ISOLATED_KEY_GENERATION', False) is True, getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', None)]):
ssh_key = settings.AWX_ISOLATED_PRIVATE_KEY
env = dict(os.environ.items())
env['ANSIBLE_HOST_KEY_CHECKING'] = str(settings.AWX_ISOLATED_HOST_KEY_CHECKING)
diff --git a/awx/main/management/commands/unregister_queue.py b/awx/main/management/commands/unregister_queue.py
index 8b5466d3b5..5cf4ee3ff9 100644
--- a/awx/main/management/commands/unregister_queue.py
+++ b/awx/main/management/commands/unregister_queue.py
@@ -14,11 +14,11 @@ class Command(BaseCommand):
help = (
"Remove specified queue (instance group) from database.\n"
"Instances inside of queue will continue to exist, \n"
- "but jobs will no longer be processed by queue.")
+ "but jobs will no longer be processed by queue."
+ )
def add_arguments(self, parser):
- parser.add_argument('--queuename', dest='queuename', type=str,
- help='Queue to remove')
+ parser.add_argument('--queuename', dest='queuename', type=str, help='Queue to remove')
@transaction.atomic
def handle(self, *args, **options):
diff --git a/awx/main/management/commands/update_password.py b/awx/main/management/commands/update_password.py
index d97a7c80f5..7d0a5364ba 100644
--- a/awx/main/management/commands/update_password.py
+++ b/awx/main/management/commands/update_password.py
@@ -23,10 +23,8 @@ class UpdatePassword(object):
class Command(BaseCommand):
def add_arguments(self, parser):
- parser.add_argument('--username', dest='username', action='store', type=str, default=None,
- help='username to change the password for')
- parser.add_argument('--password', dest='password', action='store', type=str, default=None,
- help='new password for user')
+ parser.add_argument('--username', dest='username', action='store', type=str, default=None, help='username to change the password for')
+ parser.add_argument('--password', dest='password', action='store', type=str, default=None, help='new password for user')
def handle(self, *args, **options):
if not options['username']:
diff --git a/awx/main/managers.py b/awx/main/managers.py
index 0d36515628..473ff6523b 100644
--- a/awx/main/managers.py
+++ b/awx/main/managers.py
@@ -40,24 +40,24 @@ class HostManager(models.Manager):
- Only consider results that are unique
- Return the count of this query
"""
- return self.order_by().exclude(
- inventory_sources__source='tower'
- ).filter(inventory__organization=org_id).values('name').distinct().count()
+ return self.order_by().exclude(inventory_sources__source='tower').filter(inventory__organization=org_id).values('name').distinct().count()
def get_queryset(self):
"""When the parent instance of the host query set has a `kind=smart` and a `host_filter`
set. Use the `host_filter` to generate the queryset for the hosts.
"""
- qs = super(HostManager, self).get_queryset().defer(
- 'last_job__extra_vars',
- 'last_job_host_summary__job__extra_vars',
- 'last_job__artifacts',
- 'last_job_host_summary__job__artifacts',
+ qs = (
+ super(HostManager, self)
+ .get_queryset()
+ .defer(
+ 'last_job__extra_vars',
+ 'last_job_host_summary__job__extra_vars',
+ 'last_job__artifacts',
+ 'last_job_host_summary__job__artifacts',
+ )
)
- if (hasattr(self, 'instance') and
- hasattr(self.instance, 'host_filter') and
- hasattr(self.instance, 'kind')):
+ if hasattr(self, 'instance') and hasattr(self.instance, 'host_filter') and hasattr(self.instance, 'kind'):
if self.instance.kind == 'smart' and self.instance.host_filter is not None:
q = SmartFilter.query_from_string(self.instance.host_filter)
if self.instance.organization_id:
@@ -94,13 +94,12 @@ class InstanceManager(models.Manager):
Provides "table-level" methods including getting the currently active
instance or role.
"""
+
def me(self):
"""Return the currently active instance."""
# If we are running unit tests, return a stub record.
if settings.IS_TESTING(sys.argv) or hasattr(sys, '_called_from_test'):
- return self.model(id=1,
- hostname='localhost',
- uuid='00000000-0000-0000-0000-000000000000')
+ return self.model(id=1, hostname='localhost', uuid='00000000-0000-0000-0000-000000000000')
node = self.filter(hostname=settings.CLUSTER_HOST_ID)
if node.exists():
@@ -133,15 +132,13 @@ class InstanceManager(models.Manager):
return (True, instance)
else:
return (False, instance)
- instance = self.create(uuid=uuid,
- hostname=hostname,
- ip_address=ip_address,
- capacity=0)
+ instance = self.create(uuid=uuid, hostname=hostname, ip_address=ip_address, capacity=0)
return (True, instance)
def get_or_register(self):
if settings.AWX_AUTO_DEPROVISION_INSTANCES:
from awx.main.management.commands.register_queue import RegisterQueue
+
pod_ip = os.environ.get('MY_POD_IP')
registered = self.register(ip_address=pod_ip)
is_container_group = settings.IS_K8S
@@ -178,10 +175,7 @@ class InstanceGroupManager(models.Manager):
ig_instance_mapping = {}
# Create dictionaries that represent basic m2m memberships
for group in qs:
- ig_instance_mapping[group.name] = set(
- instance.hostname for instance in group.instances.all() if
- instance.capacity != 0
- )
+ ig_instance_mapping[group.name] = set(instance.hostname for instance in group.instances.all() if instance.capacity != 0)
for inst in group.instances.all():
if inst.capacity == 0:
continue
@@ -210,8 +204,7 @@ class InstanceGroupManager(models.Manager):
instance_ig_mapping, ig_ig_mapping = self.capacity_mapping(qs=qs)
if tasks is None:
- tasks = self.model.unifiedjob_set.related.related_model.objects.filter(
- status__in=('running', 'waiting'))
+ tasks = self.model.unifiedjob_set.related.related_model.objects.filter(status__in=('running', 'waiting'))
if graph is None:
graph = {group.name: {} for group in qs}
@@ -239,8 +232,7 @@ class InstanceGroupManager(models.Manager):
# Subtract capacity from all groups that contain the instance
if t.execution_node not in instance_ig_mapping:
if not t.is_container_group_task:
- logger.warning('Detected %s running inside lost instance, '
- 'may still be waiting for reaper.', t.log_format)
+ logger.warning('Detected %s running inside lost instance, ' 'may still be waiting for reaper.', t.log_format)
if t.instance_group:
impacted_groups = [t.instance_group.name]
else:
diff --git a/awx/main/middleware.py b/awx/main/middleware.py
index 2d509c9c61..05c4564ffa 100644
--- a/awx/main/middleware.py
+++ b/awx/main/middleware.py
@@ -47,7 +47,7 @@ class TimingMiddleware(threading.local, MiddlewareMixin):
response['X-API-Profile-File'] = self.prof.stop()
perf_logger.info(
f'request: {request}, response_time: {response["X-API-Total-Time"]}',
- extra=dict(python_objects=dict(request=request, response=response, X_API_TOTAL_TIME=response["X-API-Total-Time"]))
+ extra=dict(python_objects=dict(request=request, response=response, X_API_TOTAL_TIME=response["X-API-Total-Time"])),
)
return response
@@ -73,6 +73,7 @@ class SessionTimeoutMiddleware(MiddlewareMixin):
def _customize_graph():
from awx.main.models import Instance, Schedule, UnifiedJobTemplate
+
for model in [Schedule, UnifiedJobTemplate]:
if model in settings.NAMED_URL_GRAPH:
settings.NAMED_URL_GRAPH[model].remove_bindings()
@@ -86,7 +87,6 @@ def _customize_graph():
class URLModificationMiddleware(MiddlewareMixin):
-
def __init__(self, get_response=None):
models = [m for m in apps.get_app_config('main').get_models() if hasattr(m, 'get_absolute_url')]
generate_graph(models)
@@ -96,8 +96,7 @@ class URLModificationMiddleware(MiddlewareMixin):
field_class=fields.DictField,
read_only=True,
label=_('Formats of all available named urls'),
- help_text=_('Read-only list of key-value pairs that shows the standard format of all '
- 'available named URLs.'),
+ help_text=_('Read-only list of key-value pairs that shows the standard format of all ' 'available named URLs.'),
category=_('Named URL'),
category_slug='named-url',
)
@@ -106,8 +105,10 @@ class URLModificationMiddleware(MiddlewareMixin):
field_class=fields.DictField,
read_only=True,
label=_('List of all named url graph nodes.'),
- help_text=_('Read-only list of key-value pairs that exposes named URL graph topology.'
- ' Use this list to programmatically generate named URLs for resources'),
+ help_text=_(
+ 'Read-only list of key-value pairs that exposes named URL graph topology.'
+ ' Use this list to programmatically generate named URLs for resources'
+ ),
category=_('Named URL'),
category_slug='named-url',
)
@@ -159,15 +160,13 @@ class URLModificationMiddleware(MiddlewareMixin):
return url_path
resource = url_units[3]
if resource in settings.NAMED_URL_MAPPINGS:
- url_units[4] = cls._named_url_to_pk(
- settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]],
- resource, url_units[4])
+ url_units[4] = cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, url_units[4])
return '/'.join(url_units)
def process_request(self, request):
if hasattr(request, 'environ') and 'REQUEST_URI' in request.environ:
old_path = urllib.parse.urlsplit(request.environ['REQUEST_URI']).path
- old_path = old_path[request.path.find(request.path_info):]
+ old_path = old_path[request.path.find(request.path_info) :]
else:
old_path = request.path_info
new_path = self._convert_named_url(old_path)
@@ -178,10 +177,8 @@ class URLModificationMiddleware(MiddlewareMixin):
class MigrationRanCheckMiddleware(MiddlewareMixin):
-
def process_request(self, request):
executor = MigrationExecutor(connection)
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
- if bool(plan) and \
- getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
+ if bool(plan) and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
return redirect(reverse("ui_next:migrations_notran"))
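
The slice change in process_request above is also purely mechanical: PEP 8 treats the colon in a slice like a binary operator, and black pads it with a space when a bound is anything more complex than a simple name or number. A short sketch:

path = '/api/v2/hosts/localhost/'
start = 5
print(path[start:])                # simple name as the bound: no padding
print(path[path.find('hosts') :])  # complex expression: space before the colon
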
diff --git a/awx/main/migrations/0001_initial.py b/awx/main/migrations/0001_initial.py
index e14624911a..7ce9911546 100644
--- a/awx/main/migrations/0001_initial.py
+++ b/awx/main/migrations/0001_initial.py
@@ -27,13 +27,28 @@ class Migration(migrations.Migration):
name='ActivityStream',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('operation', models.CharField(max_length=13, choices=[('create', 'Entity Created'), ('update', 'Entity Updated'), ('delete', 'Entity Deleted'), ('associate', 'Entity Associated with another Entity'), ('disassociate', 'Entity was Disassociated with another Entity')])),
+ (
+ 'operation',
+ models.CharField(
+ max_length=13,
+ choices=[
+ ('create', 'Entity Created'),
+ ('update', 'Entity Updated'),
+ ('delete', 'Entity Deleted'),
+ ('associate', 'Entity Associated with another Entity'),
+ ('disassociate', 'Entity was Disassociated with another Entity'),
+ ],
+ ),
+ ),
('timestamp', models.DateTimeField(auto_now_add=True)),
('changes', models.TextField(blank=True)),
('object_relationship_type', models.TextField(blank=True)),
('object1', models.TextField()),
('object2', models.TextField()),
- ('actor', models.ForeignKey(related_name='activity_stream', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, null=True)),
+ (
+ 'actor',
+ models.ForeignKey(related_name='activity_stream', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, null=True),
+ ),
],
),
migrations.CreateModel(
@@ -43,7 +58,18 @@ class Migration(migrations.Migration):
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
('host_name', models.CharField(default='', max_length=1024, editable=False)),
- ('event', models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_skipped', 'Host Skipped')])),
+ (
+ 'event',
+ models.CharField(
+ max_length=100,
+ choices=[
+ ('runner_on_failed', 'Host Failed'),
+ ('runner_on_ok', 'Host OK'),
+ ('runner_on_unreachable', 'Host Unreachable'),
+ ('runner_on_skipped', 'Host Skipped'),
+ ],
+ ),
+ ),
('event_data', jsonfield.fields.JSONField(default=dict, blank=True)),
('failed', models.BooleanField(default=False, editable=False)),
('changed', models.BooleanField(default=False, editable=False)),
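
The migration hunks in this file balloon because of how black handles long collections: a call or literal that exceeds the line limit is rewritten one element per line with a trailing comma appended, and that "magic trailing comma" keeps the structure exploded on every later run even when it would fit. A minimal sketch:

# Fits within the limit, no trailing comma: black leaves it on one line.
compact = [('run', 'Run'), ('check', 'Check')]

# Over the limit, or already carrying a trailing comma: one element per line.
exploded = [
    ('run', 'Run'),
    ('check', 'Check'),
]
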
@@ -74,22 +100,98 @@ class Migration(migrations.Migration):
('description', models.TextField(default='', blank=True)),
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(max_length=512)),
- ('kind', models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('openstack', 'OpenStack')])),
+ (
+ 'kind',
+ models.CharField(
+ default='ssh',
+ max_length=32,
+ choices=[
+ ('ssh', 'Machine'),
+ ('scm', 'Source Control'),
+ ('aws', 'Amazon Web Services'),
+ ('rax', 'Rackspace'),
+ ('vmware', 'VMware vCenter'),
+ ('gce', 'Google Compute Engine'),
+ ('azure', 'Microsoft Azure'),
+ ('openstack', 'OpenStack'),
+ ],
+ ),
+ ),
('cloud', models.BooleanField(default=False, editable=False)),
('host', models.CharField(default='', help_text='The hostname or IP address to use.', max_length=1024, verbose_name='Host', blank=True)),
('username', models.CharField(default='', help_text='Username for this credential.', max_length=1024, verbose_name='Username', blank=True)),
- ('password', models.CharField(default='', help_text='Password for this credential (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='Password', blank=True)),
- ('security_token', models.CharField(default='', help_text='Security Token for this credential', max_length=1024, verbose_name='Security Token', blank=True)),
+ (
+ 'password',
+ models.CharField(
+ default='',
+ help_text='Password for this credential (or "ASK" to prompt the user for machine credentials).',
+ max_length=1024,
+ verbose_name='Password',
+ blank=True,
+ ),
+ ),
+ (
+ 'security_token',
+ models.CharField(default='', help_text='Security Token for this credential', max_length=1024, verbose_name='Security Token', blank=True),
+ ),
('project', models.CharField(default='', help_text='The identifier for the project.', max_length=100, verbose_name='Project', blank=True)),
- ('ssh_key_data', models.TextField(default='', help_text='RSA or DSA private key to be used instead of password.', verbose_name='SSH private key', blank=True)),
- ('ssh_key_unlock', models.CharField(default='', help_text='Passphrase to unlock SSH private key if encrypted (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='SSH key unlock', blank=True)),
- ('become_method', models.CharField(default='', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[('', 'None'), ('sudo', 'Sudo'), ('su', 'Su'), ('pbrun', 'Pbrun'), ('pfexec', 'Pfexec')])),
+ (
+ 'ssh_key_data',
+ models.TextField(
+ default='', help_text='RSA or DSA private key to be used instead of password.', verbose_name='SSH private key', blank=True
+ ),
+ ),
+ (
+ 'ssh_key_unlock',
+ models.CharField(
+ default='',
+ help_text='Passphrase to unlock SSH private key if encrypted (or "ASK" to prompt the user for machine credentials).',
+ max_length=1024,
+ verbose_name='SSH key unlock',
+ blank=True,
+ ),
+ ),
+ (
+ 'become_method',
+ models.CharField(
+ default='',
+ help_text='Privilege escalation method.',
+ max_length=32,
+ blank=True,
+ choices=[('', 'None'), ('sudo', 'Sudo'), ('su', 'Su'), ('pbrun', 'Pbrun'), ('pfexec', 'Pfexec')],
+ ),
+ ),
('become_username', models.CharField(default='', help_text='Privilege escalation username.', max_length=1024, blank=True)),
('become_password', models.CharField(default='', help_text='Password for privilege escalation method.', max_length=1024, blank=True)),
('vault_password', models.CharField(default='', help_text='Vault password (or "ASK" to prompt the user).', max_length=1024, blank=True)),
- ('created_by', models.ForeignKey(related_name="{u'class': 'credential', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
- ('modified_by', models.ForeignKey(related_name="{u'class': 'credential', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
- ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
+ (
+ 'created_by',
+ models.ForeignKey(
+ related_name="{u'class': 'credential', u'app_label': 'main'}(class)s_created+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
+ (
+ 'modified_by',
+ models.ForeignKey(
+ related_name="{u'class': 'credential', u'app_label': 'main'}(class)s_modified+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
+ (
+ 'tags',
+ taggit.managers.TaggableManager(
+ to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'
+ ),
+ ),
],
options={
'ordering': ('kind', 'name'),
@@ -105,8 +207,28 @@ class Migration(migrations.Migration):
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(max_length=512)),
('script', models.TextField(default='', help_text='Inventory script contents', blank=True)),
- ('created_by', models.ForeignKey(related_name="{u'class': 'custominventoryscript', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
- ('modified_by', models.ForeignKey(related_name="{u'class': 'custominventoryscript', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ (
+ 'created_by',
+ models.ForeignKey(
+ related_name="{u'class': 'custominventoryscript', u'app_label': 'main'}(class)s_created+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
+ (
+ 'modified_by',
+ models.ForeignKey(
+ related_name="{u'class': 'custominventoryscript', u'app_label': 'main'}(class)s_modified+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
],
options={
'ordering': ('name',),
@@ -122,13 +244,40 @@ class Migration(migrations.Migration):
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(max_length=512)),
('variables', models.TextField(default='', help_text='Group variables in JSON or YAML format.', blank=True)),
- ('total_hosts', models.PositiveIntegerField(default=0, help_text='Total number of hosts directly or indirectly in this group.', editable=False)),
- ('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether this group has any hosts with active failures.', editable=False)),
- ('hosts_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of hosts in this group with active failures.', editable=False)),
+ (
+ 'total_hosts',
+ models.PositiveIntegerField(default=0, help_text='Total number of hosts directly or indirectly in this group.', editable=False),
+ ),
+ (
+ 'has_active_failures',
+ models.BooleanField(default=False, help_text='Flag indicating whether this group has any hosts with active failures.', editable=False),
+ ),
+ (
+ 'hosts_with_active_failures',
+ models.PositiveIntegerField(default=0, help_text='Number of hosts in this group with active failures.', editable=False),
+ ),
('total_groups', models.PositiveIntegerField(default=0, help_text='Total number of child groups contained within this group.', editable=False)),
- ('groups_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of child groups within this group that have active failures.', editable=False)),
- ('has_inventory_sources', models.BooleanField(default=False, help_text='Flag indicating whether this group was created/updated from any external inventory sources.', editable=False)),
- ('created_by', models.ForeignKey(related_name="{u'class': 'group', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ (
+ 'groups_with_active_failures',
+ models.PositiveIntegerField(default=0, help_text='Number of child groups within this group that have active failures.', editable=False),
+ ),
+ (
+ 'has_inventory_sources',
+ models.BooleanField(
+ default=False, help_text='Flag indicating whether this group was created/updated from any external inventory sources.', editable=False
+ ),
+ ),
+ (
+ 'created_by',
+ models.ForeignKey(
+ related_name="{u'class': 'group', u'app_label': 'main'}(class)s_created+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
],
options={
'ordering': ('name',),
@@ -146,9 +295,27 @@ class Migration(migrations.Migration):
('enabled', models.BooleanField(default=True, help_text='Is this host online and available for running jobs?')),
('instance_id', models.CharField(default='', max_length=100, blank=True)),
('variables', models.TextField(default='', help_text='Host variables in JSON or YAML format.', blank=True)),
- ('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether the last job failed for this host.', editable=False)),
- ('has_inventory_sources', models.BooleanField(default=False, help_text='Flag indicating whether this host was created/updated from any external inventory sources.', editable=False)),
- ('created_by', models.ForeignKey(related_name="{u'class': 'host', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ (
+ 'has_active_failures',
+ models.BooleanField(default=False, help_text='Flag indicating whether the last job failed for this host.', editable=False),
+ ),
+ (
+ 'has_inventory_sources',
+ models.BooleanField(
+ default=False, help_text='Flag indicating whether this host was created/updated from any external inventory sources.', editable=False
+ ),
+ ),
+ (
+ 'created_by',
+ models.ForeignKey(
+ related_name="{u'class': 'host', u'app_label': 'main'}(class)s_created+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
],
options={
'ordering': ('inventory', 'name'),
@@ -175,16 +342,56 @@ class Migration(migrations.Migration):
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(unique=True, max_length=512)),
('variables', models.TextField(default='', help_text='Inventory variables in JSON or YAML format.', blank=True)),
- ('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether any hosts in this inventory have failed.', editable=False)),
+ (
+ 'has_active_failures',
+ models.BooleanField(default=False, help_text='Flag indicating whether any hosts in this inventory have failed.', editable=False),
+ ),
('total_hosts', models.PositiveIntegerField(default=0, help_text='Total number of hosts in this inventory.', editable=False)),
- ('hosts_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of hosts in this inventory with active failures.', editable=False)),
+ (
+ 'hosts_with_active_failures',
+ models.PositiveIntegerField(default=0, help_text='Number of hosts in this inventory with active failures.', editable=False),
+ ),
('total_groups', models.PositiveIntegerField(default=0, help_text='Total number of groups in this inventory.', editable=False)),
- ('groups_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of groups in this inventory with active failures.', editable=False)),
- ('has_inventory_sources', models.BooleanField(default=False, help_text='Flag indicating whether this inventory has any external inventory sources.', editable=False)),
- ('total_inventory_sources', models.PositiveIntegerField(default=0, help_text='Total number of external inventory sources configured within this inventory.', editable=False)),
- ('inventory_sources_with_failures', models.PositiveIntegerField(default=0, help_text='Number of external inventory sources in this inventory with failures.', editable=False)),
- ('created_by', models.ForeignKey(related_name="{u'class': 'inventory', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
- ('modified_by', models.ForeignKey(related_name="{u'class': 'inventory', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ (
+ 'groups_with_active_failures',
+ models.PositiveIntegerField(default=0, help_text='Number of groups in this inventory with active failures.', editable=False),
+ ),
+ (
+ 'has_inventory_sources',
+ models.BooleanField(default=False, help_text='Flag indicating whether this inventory has any external inventory sources.', editable=False),
+ ),
+ (
+ 'total_inventory_sources',
+ models.PositiveIntegerField(
+ default=0, help_text='Total number of external inventory sources configured within this inventory.', editable=False
+ ),
+ ),
+ (
+ 'inventory_sources_with_failures',
+ models.PositiveIntegerField(default=0, help_text='Number of external inventory sources in this inventory with failures.', editable=False),
+ ),
+ (
+ 'created_by',
+ models.ForeignKey(
+ related_name="{u'class': 'inventory', u'app_label': 'main'}(class)s_created+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
+ (
+ 'modified_by',
+ models.ForeignKey(
+ related_name="{u'class': 'inventory', u'app_label': 'main'}(class)s_modified+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
],
options={
'ordering': ('name',),
@@ -197,7 +404,35 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('event', models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete')])),
+ (
+ 'event',
+ models.CharField(
+ max_length=100,
+ choices=[
+ ('runner_on_failed', 'Host Failed'),
+ ('runner_on_ok', 'Host OK'),
+ ('runner_on_error', 'Host Failure'),
+ ('runner_on_skipped', 'Host Skipped'),
+ ('runner_on_unreachable', 'Host Unreachable'),
+ ('runner_on_no_hosts', 'No Hosts Remaining'),
+ ('runner_on_async_poll', 'Host Polling'),
+ ('runner_on_async_ok', 'Host Async OK'),
+ ('runner_on_async_failed', 'Host Async Failure'),
+ ('runner_on_file_diff', 'File Difference'),
+ ('playbook_on_start', 'Playbook Started'),
+ ('playbook_on_notify', 'Running Handlers'),
+ ('playbook_on_no_hosts_matched', 'No Hosts Matched'),
+ ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'),
+ ('playbook_on_task_start', 'Task Started'),
+ ('playbook_on_vars_prompt', 'Variables Prompted'),
+ ('playbook_on_setup', 'Gathering Facts'),
+ ('playbook_on_import_for_host', 'internal: on Import for Host'),
+ ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'),
+ ('playbook_on_play_start', 'Play Started'),
+ ('playbook_on_stats', 'Playbook Complete'),
+ ],
+ ),
+ ),
('event_data', jsonfield.fields.JSONField(default=dict, blank=True)),
('failed', models.BooleanField(default=False, editable=False)),
('changed', models.BooleanField(default=False, editable=False)),
@@ -206,9 +441,24 @@ class Migration(migrations.Migration):
('role', models.CharField(default='', max_length=1024, editable=False)),
('task', models.CharField(default='', max_length=1024, editable=False)),
('counter', models.PositiveIntegerField(default=0)),
- ('host', models.ForeignKey(related_name='job_events_as_primary_host', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Host', null=True)),
+ (
+ 'host',
+ models.ForeignKey(
+ related_name='job_events_as_primary_host',
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to='main.Host',
+ null=True,
+ ),
+ ),
('hosts', models.ManyToManyField(related_name='job_events', editable=False, to='main.Host')),
- ('parent', models.ForeignKey(related_name='children', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.JobEvent', null=True)),
+ (
+ 'parent',
+ models.ForeignKey(
+ related_name='children', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.JobEvent', null=True
+ ),
+ ),
],
options={
'ordering': ('pk',),
@@ -228,7 +478,12 @@ class Migration(migrations.Migration):
('processed', models.PositiveIntegerField(default=0, editable=False)),
('skipped', models.PositiveIntegerField(default=0, editable=False)),
('failed', models.BooleanField(default=False, editable=False)),
- ('host', models.ForeignKey(related_name='job_host_summaries', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Host', null=True)),
+ (
+ 'host',
+ models.ForeignKey(
+ related_name='job_host_summaries', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Host', null=True
+ ),
+ ),
],
options={
'ordering': ('-pk',),
@@ -254,9 +509,34 @@ class Migration(migrations.Migration):
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(unique=True, max_length=512)),
('admins', models.ManyToManyField(related_name='admin_of_organizations', to=settings.AUTH_USER_MODEL, blank=True)),
- ('created_by', models.ForeignKey(related_name="{u'class': 'organization', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
- ('modified_by', models.ForeignKey(related_name="{u'class': 'organization', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
- ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
+ (
+ 'created_by',
+ models.ForeignKey(
+ related_name="{u'class': 'organization', u'app_label': 'main'}(class)s_created+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
+ (
+ 'modified_by',
+ models.ForeignKey(
+ related_name="{u'class': 'organization', u'app_label': 'main'}(class)s_modified+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
+ (
+ 'tags',
+ taggit.managers.TaggableManager(
+ to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'
+ ),
+ ),
('users', models.ManyToManyField(related_name='organizations', to=settings.AUTH_USER_MODEL, blank=True)),
],
options={
@@ -272,12 +552,51 @@ class Migration(migrations.Migration):
('description', models.TextField(default='', blank=True)),
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(max_length=512)),
- ('permission_type', models.CharField(max_length=64, choices=[('read', 'Read Inventory'), ('write', 'Edit Inventory'), ('admin', 'Administrate Inventory'), ('run', 'Deploy To Inventory'), ('check', 'Deploy To Inventory (Dry Run)'), ('scan', 'Scan an Inventory'), ('create', 'Create a Job Template')])),
+ (
+ 'permission_type',
+ models.CharField(
+ max_length=64,
+ choices=[
+ ('read', 'Read Inventory'),
+ ('write', 'Edit Inventory'),
+ ('admin', 'Administrate Inventory'),
+ ('run', 'Deploy To Inventory'),
+ ('check', 'Deploy To Inventory (Dry Run)'),
+ ('scan', 'Scan an Inventory'),
+ ('create', 'Create a Job Template'),
+ ],
+ ),
+ ),
('run_ad_hoc_commands', models.BooleanField(default=False, help_text='Execute Commands on the Inventory')),
- ('created_by', models.ForeignKey(related_name="{u'class': 'permission', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ (
+ 'created_by',
+ models.ForeignKey(
+ related_name="{u'class': 'permission', u'app_label': 'main'}(class)s_created+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
('inventory', models.ForeignKey(related_name='permissions', on_delete=django.db.models.deletion.SET_NULL, to='main.Inventory', null=True)),
- ('modified_by', models.ForeignKey(related_name="{u'class': 'permission', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
- ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
+ (
+ 'modified_by',
+ models.ForeignKey(
+ related_name="{u'class': 'permission', u'app_label': 'main'}(class)s_modified+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
+ (
+ 'tags',
+ taggit.managers.TaggableManager(
+ to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'
+ ),
+ ),
],
),
migrations.CreateModel(
@@ -305,9 +624,34 @@ class Migration(migrations.Migration):
('rrule', models.CharField(max_length=255)),
('next_run', models.DateTimeField(default=None, null=True, editable=False)),
('extra_data', jsonfield.fields.JSONField(default=dict, blank=True)),
- ('created_by', models.ForeignKey(related_name="{u'class': 'schedule', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
- ('modified_by', models.ForeignKey(related_name="{u'class': 'schedule', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
- ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
+ (
+ 'created_by',
+ models.ForeignKey(
+ related_name="{u'class': 'schedule', u'app_label': 'main'}(class)s_created+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
+ (
+ 'modified_by',
+ models.ForeignKey(
+ related_name="{u'class': 'schedule', u'app_label': 'main'}(class)s_modified+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
+ (
+ 'tags',
+ taggit.managers.TaggableManager(
+ to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'
+ ),
+ ),
],
options={
'ordering': ['-next_run'],
@@ -322,10 +666,35 @@ class Migration(migrations.Migration):
('description', models.TextField(default='', blank=True)),
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(max_length=512)),
- ('created_by', models.ForeignKey(related_name="{u'class': 'team', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
- ('modified_by', models.ForeignKey(related_name="{u'class': 'team', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ (
+ 'created_by',
+ models.ForeignKey(
+ related_name="{u'class': 'team', u'app_label': 'main'}(class)s_created+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
+ (
+ 'modified_by',
+ models.ForeignKey(
+ related_name="{u'class': 'team', u'app_label': 'main'}(class)s_modified+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
('organization', models.ForeignKey(related_name='teams', on_delete=django.db.models.deletion.SET_NULL, to='main.Organization', null=True)),
- ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
+ (
+ 'tags',
+ taggit.managers.TaggableManager(
+ to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'
+ ),
+ ),
('users', models.ManyToManyField(related_name='teams', to=settings.AUTH_USER_MODEL, blank=True)),
],
options={
@@ -342,9 +711,40 @@ class Migration(migrations.Migration):
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(max_length=512)),
('old_pk', models.PositiveIntegerField(default=None, null=True, editable=False)),
- ('launch_type', models.CharField(default='manual', max_length=20, editable=False, choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency')])),
+ (
+ 'launch_type',
+ models.CharField(
+ default='manual',
+ max_length=20,
+ editable=False,
+ choices=[
+ ('manual', 'Manual'),
+ ('relaunch', 'Relaunch'),
+ ('callback', 'Callback'),
+ ('scheduled', 'Scheduled'),
+ ('dependency', 'Dependency'),
+ ],
+ ),
+ ),
('cancel_flag', models.BooleanField(blank=True, default=False, editable=False)),
- ('status', models.CharField(default='new', max_length=20, editable=False, choices=[('new', 'New'), ('pending', 'Pending'), ('waiting', 'Waiting'), ('running', 'Running'), ('successful', 'Successful'), ('failed', 'Failed'), ('error', 'Error'), ('canceled', 'Canceled')])),
+ (
+ 'status',
+ models.CharField(
+ default='new',
+ max_length=20,
+ editable=False,
+ choices=[
+ ('new', 'New'),
+ ('pending', 'Pending'),
+ ('waiting', 'Waiting'),
+ ('running', 'Running'),
+ ('successful', 'Successful'),
+ ('failed', 'Failed'),
+ ('error', 'Error'),
+ ('canceled', 'Canceled'),
+ ],
+ ),
+ ),
('failed', models.BooleanField(default=False, editable=False)),
('started', models.DateTimeField(default=None, null=True, editable=False)),
('finished', models.DateTimeField(default=None, null=True, editable=False)),
@@ -374,19 +774,65 @@ class Migration(migrations.Migration):
('last_job_run', models.DateTimeField(default=None, null=True, editable=False)),
('has_schedules', models.BooleanField(default=False, editable=False)),
('next_job_run', models.DateTimeField(default=None, null=True, editable=False)),
- ('status', models.CharField(default='ok', max_length=32, editable=False, choices=[('new', 'New'), ('pending', 'Pending'), ('waiting', 'Waiting'), ('running', 'Running'), ('successful', 'Successful'), ('failed', 'Failed'), ('error', 'Error'), ('canceled', 'Canceled'), ('never updated', 'Never Updated'), ('ok', 'OK'), ('missing', 'Missing'), ('none', 'No External Source'), ('updating', 'Updating')])),
+ (
+ 'status',
+ models.CharField(
+ default='ok',
+ max_length=32,
+ editable=False,
+ choices=[
+ ('new', 'New'),
+ ('pending', 'Pending'),
+ ('waiting', 'Waiting'),
+ ('running', 'Running'),
+ ('successful', 'Successful'),
+ ('failed', 'Failed'),
+ ('error', 'Error'),
+ ('canceled', 'Canceled'),
+ ('never updated', 'Never Updated'),
+ ('ok', 'OK'),
+ ('missing', 'Missing'),
+ ('none', 'No External Source'),
+ ('updating', 'Updating'),
+ ],
+ ),
+ ),
],
),
migrations.CreateModel(
name='AdHocCommand',
fields=[
- ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, on_delete=django.db.models.deletion.CASCADE, serialize=False, to='main.UnifiedJob')),
+ (
+ 'unifiedjob_ptr',
+ models.OneToOneField(
+ parent_link=True,
+ auto_created=True,
+ primary_key=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ serialize=False,
+ to='main.UnifiedJob',
+ ),
+ ),
('job_type', models.CharField(default='run', max_length=64, choices=[('run', 'Run'), ('check', 'Check')])),
('limit', models.CharField(default='', max_length=1024, blank=True)),
('module_name', models.CharField(default='', max_length=1024, blank=True)),
('module_args', models.TextField(default='', blank=True)),
('forks', models.PositiveIntegerField(default=0, blank=True)),
- ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, '0 (Normal)'), (1, '1 (Verbose)'), (2, '2 (More Verbose)'), (3, '3 (Debug)'), (4, '4 (Connection Debug)'), (5, '5 (WinRM Debug)')])),
+ (
+ 'verbosity',
+ models.PositiveIntegerField(
+ default=0,
+ blank=True,
+ choices=[
+ (0, '0 (Normal)'),
+ (1, '1 (Verbose)'),
+ (2, '2 (More Verbose)'),
+ (3, '3 (Debug)'),
+ (4, '4 (Connection Debug)'),
+ (5, '5 (WinRM Debug)'),
+ ],
+ ),
+ ),
('become_enabled', models.BooleanField(default=False)),
],
bases=('main.unifiedjob',),
@@ -394,13 +840,52 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='InventorySource',
fields=[
- ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, on_delete=django.db.models.deletion.CASCADE, serialize=False, to='main.UnifiedJobTemplate')),
- ('source', models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')])),
+ (
+ 'unifiedjobtemplate_ptr',
+ models.OneToOneField(
+ parent_link=True,
+ auto_created=True,
+ primary_key=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ serialize=False,
+ to='main.UnifiedJobTemplate',
+ ),
+ ),
+ (
+ 'source',
+ models.CharField(
+ default='',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('', 'Manual'),
+ ('file', 'Local File, Directory or Script'),
+ ('rax', 'Rackspace Cloud Servers'),
+ ('ec2', 'Amazon EC2'),
+ ('gce', 'Google Compute Engine'),
+ ('azure', 'Microsoft Azure'),
+ ('vmware', 'VMware vCenter'),
+ ('openstack', 'OpenStack'),
+ ('custom', 'Custom Script'),
+ ],
+ ),
+ ),
('source_path', models.CharField(default='', max_length=1024, editable=False, blank=True)),
('source_vars', models.TextField(default='', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
('source_regions', models.CharField(default='', max_length=1024, blank=True)),
- ('instance_filters', models.CharField(default='', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
- ('group_by', models.CharField(default='', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
+ (
+ 'instance_filters',
+ models.CharField(
+ default='',
+ help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.',
+ max_length=1024,
+ blank=True,
+ ),
+ ),
+ (
+ 'group_by',
+ models.CharField(default='', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True),
+ ),
('overwrite', models.BooleanField(default=False, help_text='Overwrite local groups and hosts from remote inventory source.')),
('overwrite_vars', models.BooleanField(default=False, help_text='Overwrite local variables from remote inventory source.')),
('update_on_launch', models.BooleanField(default=False)),
@@ -411,13 +896,52 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='InventoryUpdate',
fields=[
- ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, on_delete=django.db.models.deletion.CASCADE, serialize=False, to='main.UnifiedJob')),
- ('source', models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')])),
+ (
+ 'unifiedjob_ptr',
+ models.OneToOneField(
+ parent_link=True,
+ auto_created=True,
+ primary_key=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ serialize=False,
+ to='main.UnifiedJob',
+ ),
+ ),
+ (
+ 'source',
+ models.CharField(
+ default='',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('', 'Manual'),
+ ('file', 'Local File, Directory or Script'),
+ ('rax', 'Rackspace Cloud Servers'),
+ ('ec2', 'Amazon EC2'),
+ ('gce', 'Google Compute Engine'),
+ ('azure', 'Microsoft Azure'),
+ ('vmware', 'VMware vCenter'),
+ ('openstack', 'OpenStack'),
+ ('custom', 'Custom Script'),
+ ],
+ ),
+ ),
('source_path', models.CharField(default='', max_length=1024, editable=False, blank=True)),
('source_vars', models.TextField(default='', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
('source_regions', models.CharField(default='', max_length=1024, blank=True)),
- ('instance_filters', models.CharField(default='', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
- ('group_by', models.CharField(default='', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
+ (
+ 'instance_filters',
+ models.CharField(
+ default='',
+ help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.',
+ max_length=1024,
+ blank=True,
+ ),
+ ),
+ (
+ 'group_by',
+ models.CharField(default='', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True),
+ ),
('overwrite', models.BooleanField(default=False, help_text='Overwrite local groups and hosts from remote inventory source.')),
('overwrite_vars', models.BooleanField(default=False, help_text='Overwrite local variables from remote inventory source.')),
('license_error', models.BooleanField(default=False, editable=False)),
@@ -427,12 +951,36 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='Job',
fields=[
- ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, on_delete=django.db.models.deletion.CASCADE, serialize=False, to='main.UnifiedJob')),
+ (
+ 'unifiedjob_ptr',
+ models.OneToOneField(
+ parent_link=True,
+ auto_created=True,
+ primary_key=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ serialize=False,
+ to='main.UnifiedJob',
+ ),
+ ),
('job_type', models.CharField(default='run', max_length=64, choices=[('run', 'Run'), ('check', 'Check'), ('scan', 'Scan')])),
('playbook', models.CharField(default='', max_length=1024, blank=True)),
('forks', models.PositiveIntegerField(default=0, blank=True)),
('limit', models.CharField(default='', max_length=1024, blank=True)),
- ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, '0 (Normal)'), (1, '1 (Verbose)'), (2, '2 (More Verbose)'), (3, '3 (Debug)'), (4, '4 (Connection Debug)'), (5, '5 (WinRM Debug)')])),
+ (
+ 'verbosity',
+ models.PositiveIntegerField(
+ default=0,
+ blank=True,
+ choices=[
+ (0, '0 (Normal)'),
+ (1, '1 (Verbose)'),
+ (2, '2 (More Verbose)'),
+ (3, '3 (Debug)'),
+ (4, '4 (Connection Debug)'),
+ (5, '5 (WinRM Debug)'),
+ ],
+ ),
+ ),
('extra_vars', models.TextField(default='', blank=True)),
('job_tags', models.CharField(default='', max_length=1024, blank=True)),
('force_handlers', models.BooleanField(blank=True, default=False)),
@@ -448,12 +996,36 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='JobTemplate',
fields=[
- ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, on_delete=django.db.models.deletion.CASCADE, serialize=False, to='main.UnifiedJobTemplate')),
+ (
+ 'unifiedjobtemplate_ptr',
+ models.OneToOneField(
+ parent_link=True,
+ auto_created=True,
+ primary_key=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ serialize=False,
+ to='main.UnifiedJobTemplate',
+ ),
+ ),
('job_type', models.CharField(default='run', max_length=64, choices=[('run', 'Run'), ('check', 'Check'), ('scan', 'Scan')])),
('playbook', models.CharField(default='', max_length=1024, blank=True)),
('forks', models.PositiveIntegerField(default=0, blank=True)),
('limit', models.CharField(default='', max_length=1024, blank=True)),
- ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, '0 (Normal)'), (1, '1 (Verbose)'), (2, '2 (More Verbose)'), (3, '3 (Debug)'), (4, '4 (Connection Debug)'), (5, '5 (WinRM Debug)')])),
+ (
+ 'verbosity',
+ models.PositiveIntegerField(
+ default=0,
+ blank=True,
+ choices=[
+ (0, '0 (Normal)'),
+ (1, '1 (Verbose)'),
+ (2, '2 (More Verbose)'),
+ (3, '3 (Debug)'),
+ (4, '4 (Connection Debug)'),
+ (5, '5 (WinRM Debug)'),
+ ],
+ ),
+ ),
('extra_vars', models.TextField(default='', blank=True)),
('job_tags', models.CharField(default='', max_length=1024, blank=True)),
('force_handlers', models.BooleanField(blank=True, default=False)),
@@ -473,11 +1045,40 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='Project',
fields=[
- ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, on_delete=django.db.models.deletion.CASCADE, serialize=False, to='main.UnifiedJobTemplate')),
- ('local_path', models.CharField(help_text='Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.', max_length=1024, blank=True)),
- ('scm_type', models.CharField(default='', max_length=8, verbose_name='SCM Type', blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')])),
+ (
+ 'unifiedjobtemplate_ptr',
+ models.OneToOneField(
+ parent_link=True,
+ auto_created=True,
+ primary_key=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ serialize=False,
+ to='main.UnifiedJobTemplate',
+ ),
+ ),
+ (
+ 'local_path',
+ models.CharField(
+ help_text='Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.', max_length=1024, blank=True
+ ),
+ ),
+ (
+ 'scm_type',
+ models.CharField(
+ default='',
+ max_length=8,
+ verbose_name='SCM Type',
+ blank=True,
+ choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')],
+ ),
+ ),
('scm_url', models.CharField(default='', max_length=1024, verbose_name='SCM URL', blank=True)),
- ('scm_branch', models.CharField(default='', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
+ (
+ 'scm_branch',
+ models.CharField(
+ default='', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True
+ ),
+ ),
('scm_clean', models.BooleanField(default=False)),
('scm_delete_on_update', models.BooleanField(default=False)),
('scm_delete_on_next_update', models.BooleanField(default=False, editable=False)),
@@ -492,11 +1093,40 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='ProjectUpdate',
fields=[
- ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, on_delete=django.db.models.deletion.CASCADE, serialize=False, to='main.UnifiedJob')),
- ('local_path', models.CharField(help_text='Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.', max_length=1024, blank=True)),
- ('scm_type', models.CharField(default='', max_length=8, verbose_name='SCM Type', blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')])),
+ (
+ 'unifiedjob_ptr',
+ models.OneToOneField(
+ parent_link=True,
+ auto_created=True,
+ primary_key=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ serialize=False,
+ to='main.UnifiedJob',
+ ),
+ ),
+ (
+ 'local_path',
+ models.CharField(
+ help_text='Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.', max_length=1024, blank=True
+ ),
+ ),
+ (
+ 'scm_type',
+ models.CharField(
+ default='',
+ max_length=8,
+ verbose_name='SCM Type',
+ blank=True,
+ choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')],
+ ),
+ ),
('scm_url', models.CharField(default='', max_length=1024, verbose_name='SCM URL', blank=True)),
- ('scm_branch', models.CharField(default='', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
+ (
+ 'scm_branch',
+ models.CharField(
+ default='', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True
+ ),
+ ),
('scm_clean', models.BooleanField(default=False)),
('scm_delete_on_update', models.BooleanField(default=False)),
],
@@ -505,8 +1135,31 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='SystemJob',
fields=[
- ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, on_delete=django.db.models.deletion.CASCADE, serialize=False, to='main.UnifiedJob')),
- ('job_type', models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_deleted', 'Purge previously deleted items from the database'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
+ (
+ 'unifiedjob_ptr',
+ models.OneToOneField(
+ parent_link=True,
+ auto_created=True,
+ primary_key=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ serialize=False,
+ to='main.UnifiedJob',
+ ),
+ ),
+ (
+ 'job_type',
+ models.CharField(
+ default='',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('cleanup_jobs', 'Remove jobs older than a certain number of days'),
+ ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'),
+ ('cleanup_deleted', 'Purge previously deleted items from the database'),
+ ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data'),
+ ],
+ ),
+ ),
('extra_vars', models.TextField(default='', blank=True)),
],
options={
@@ -517,50 +1170,123 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='SystemJobTemplate',
fields=[
- ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, on_delete=django.db.models.deletion.CASCADE, serialize=False, to='main.UnifiedJobTemplate')),
- ('job_type', models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_deleted', 'Purge previously deleted items from the database'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
+ (
+ 'unifiedjobtemplate_ptr',
+ models.OneToOneField(
+ parent_link=True,
+ auto_created=True,
+ primary_key=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ serialize=False,
+ to='main.UnifiedJobTemplate',
+ ),
+ ),
+ (
+ 'job_type',
+ models.CharField(
+ default='',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('cleanup_jobs', 'Remove jobs older than a certain number of days'),
+ ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'),
+ ('cleanup_deleted', 'Purge previously deleted items from the database'),
+ ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data'),
+ ],
+ ),
+ ),
],
bases=('main.unifiedjobtemplate', models.Model),
),
migrations.AddField(
model_name='unifiedjobtemplate',
name='created_by',
- field=models.ForeignKey(related_name="{u'class': 'unifiedjobtemplate', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
+ field=models.ForeignKey(
+ related_name="{u'class': 'unifiedjobtemplate', u'app_label': 'main'}(class)s_created+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
),
migrations.AddField(
model_name='unifiedjobtemplate',
name='current_job',
- field=models.ForeignKey(related_name='unifiedjobtemplate_as_current_job+', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.UnifiedJob', null=True),
+ field=models.ForeignKey(
+ related_name='unifiedjobtemplate_as_current_job+',
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to='main.UnifiedJob',
+ null=True,
+ ),
),
migrations.AddField(
model_name='unifiedjobtemplate',
name='last_job',
- field=models.ForeignKey(related_name='unifiedjobtemplate_as_last_job+', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.UnifiedJob', null=True),
+ field=models.ForeignKey(
+ related_name='unifiedjobtemplate_as_last_job+',
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to='main.UnifiedJob',
+ null=True,
+ ),
),
migrations.AddField(
model_name='unifiedjobtemplate',
name='modified_by',
- field=models.ForeignKey(related_name="{u'class': 'unifiedjobtemplate', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
+ field=models.ForeignKey(
+ related_name="{u'class': 'unifiedjobtemplate', u'app_label': 'main'}(class)s_modified+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
),
migrations.AddField(
model_name='unifiedjobtemplate',
name='next_schedule',
- field=models.ForeignKey(related_name='unifiedjobtemplate_as_next_schedule+', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Schedule', null=True),
+ field=models.ForeignKey(
+ related_name='unifiedjobtemplate_as_next_schedule+',
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to='main.Schedule',
+ null=True,
+ ),
),
migrations.AddField(
model_name='unifiedjobtemplate',
name='polymorphic_ctype',
- field=models.ForeignKey(related_name='polymorphic_main.unifiedjobtemplate_set+', editable=False, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType', null=True),
+ field=models.ForeignKey(
+ related_name='polymorphic_main.unifiedjobtemplate_set+',
+ editable=False,
+ on_delete=django.db.models.deletion.CASCADE,
+ to='contenttypes.ContentType',
+ null=True,
+ ),
),
migrations.AddField(
model_name='unifiedjobtemplate',
name='tags',
- field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
+ field=taggit.managers.TaggableManager(
+ to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'
+ ),
),
migrations.AddField(
model_name='unifiedjob',
name='created_by',
- field=models.ForeignKey(related_name="{u'class': 'unifiedjob', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
+ field=models.ForeignKey(
+ related_name="{u'class': 'unifiedjob', u'app_label': 'main'}(class)s_created+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
),
migrations.AddField(
model_name='unifiedjob',
@@ -570,12 +1296,25 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='unifiedjob',
name='modified_by',
- field=models.ForeignKey(related_name="{u'class': 'unifiedjob', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
+ field=models.ForeignKey(
+ related_name="{u'class': 'unifiedjob', u'app_label': 'main'}(class)s_modified+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
),
migrations.AddField(
model_name='unifiedjob',
name='polymorphic_ctype',
- field=models.ForeignKey(related_name='polymorphic_main.unifiedjob_set+', editable=False, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType', null=True),
+ field=models.ForeignKey(
+ related_name='polymorphic_main.unifiedjob_set+',
+ editable=False,
+ on_delete=django.db.models.deletion.CASCADE,
+ to='contenttypes.ContentType',
+ null=True,
+ ),
),
migrations.AddField(
model_name='unifiedjob',
@@ -585,12 +1324,21 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='unifiedjob',
name='tags',
- field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
+ field=taggit.managers.TaggableManager(
+ to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'
+ ),
),
migrations.AddField(
model_name='unifiedjob',
name='unified_job_template',
- field=models.ForeignKey(related_name='unifiedjob_unified_jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.UnifiedJobTemplate', null=True),
+ field=models.ForeignKey(
+ related_name='unifiedjob_unified_jobs',
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to='main.UnifiedJobTemplate',
+ null=True,
+ ),
),
migrations.AddField(
model_name='schedule',
@@ -605,7 +1353,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='permission',
name='user',
- field=models.ForeignKey(related_name='permissions', on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.AUTH_USER_MODEL, null=True),
+ field=models.ForeignKey(
+ related_name='permissions', on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.AUTH_USER_MODEL, null=True
+ ),
),
migrations.AddField(
model_name='joborigin',
@@ -615,12 +1365,19 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='inventory',
name='organization',
- field=models.ForeignKey(related_name='inventories', on_delete=django.db.models.deletion.CASCADE, to='main.Organization', help_text='Organization containing this inventory.'),
+ field=models.ForeignKey(
+ related_name='inventories',
+ on_delete=django.db.models.deletion.CASCADE,
+ to='main.Organization',
+ help_text='Organization containing this inventory.',
+ ),
),
migrations.AddField(
model_name='inventory',
name='tags',
- field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
+ field=taggit.managers.TaggableManager(
+ to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'
+ ),
),
migrations.AddField(
model_name='host',
@@ -630,17 +1387,34 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='host',
name='last_job_host_summary',
- field=models.ForeignKey(related_name='hosts_as_last_job_summary+', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, editable=False, to='main.JobHostSummary', null=True),
+ field=models.ForeignKey(
+ related_name='hosts_as_last_job_summary+',
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ blank=True,
+ editable=False,
+ to='main.JobHostSummary',
+ null=True,
+ ),
),
migrations.AddField(
model_name='host',
name='modified_by',
- field=models.ForeignKey(related_name="{u'class': 'host', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
+ field=models.ForeignKey(
+ related_name="{u'class': 'host', u'app_label': 'main'}(class)s_modified+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
),
migrations.AddField(
model_name='host',
name='tags',
- field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
+ field=taggit.managers.TaggableManager(
+ to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'
+ ),
),
migrations.AddField(
model_name='group',
@@ -655,7 +1429,14 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='group',
name='modified_by',
- field=models.ForeignKey(related_name="{u'class': 'group', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
+ field=models.ForeignKey(
+ related_name="{u'class': 'group', u'app_label': 'main'}(class)s_modified+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
),
migrations.AddField(
model_name='group',
@@ -665,32 +1446,48 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='group',
name='tags',
- field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
+ field=taggit.managers.TaggableManager(
+ to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'
+ ),
),
migrations.AddField(
model_name='custominventoryscript',
name='organization',
- field=models.ForeignKey(related_name='custom_inventory_scripts', on_delete=django.db.models.deletion.SET_NULL, to='main.Organization', help_text='Organization owning this inventory script', null=True),
+ field=models.ForeignKey(
+ related_name='custom_inventory_scripts',
+ on_delete=django.db.models.deletion.SET_NULL,
+ to='main.Organization',
+ help_text='Organization owning this inventory script',
+ null=True,
+ ),
),
migrations.AddField(
model_name='custominventoryscript',
name='tags',
- field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
+ field=taggit.managers.TaggableManager(
+ to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'
+ ),
),
migrations.AddField(
model_name='credential',
name='team',
- field=models.ForeignKey(related_name='credentials', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Team', null=True),
+ field=models.ForeignKey(
+ related_name='credentials', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Team', null=True
+ ),
),
migrations.AddField(
model_name='credential',
name='user',
- field=models.ForeignKey(related_name='credentials', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to=settings.AUTH_USER_MODEL, null=True),
+ field=models.ForeignKey(
+ related_name='credentials', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to=settings.AUTH_USER_MODEL, null=True
+ ),
),
migrations.AddField(
model_name='adhoccommandevent',
name='host',
- field=models.ForeignKey(related_name='ad_hoc_command_events', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Host', null=True),
+ field=models.ForeignKey(
+ related_name='ad_hoc_command_events', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Host', null=True
+ ),
),
migrations.AddField(
model_name='activitystream',
@@ -764,12 +1561,16 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='systemjob',
name='system_job_template',
- field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.SystemJobTemplate', null=True),
+ field=models.ForeignKey(
+ related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.SystemJobTemplate', null=True
+ ),
),
migrations.AddField(
model_name='projectupdate',
name='credential',
- field=models.ForeignKey(related_name='projectupdates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ field=models.ForeignKey(
+ related_name='projectupdates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True
+ ),
),
migrations.AddField(
model_name='projectupdate',
@@ -779,7 +1580,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='project',
name='credential',
- field=models.ForeignKey(related_name='projects', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ field=models.ForeignKey(
+ related_name='projects', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True
+ ),
),
migrations.AddField(
model_name='permission',
@@ -794,12 +1597,21 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='jobtemplate',
name='cloud_credential',
- field=models.ForeignKey(related_name='jobtemplates_as_cloud_credential+', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ field=models.ForeignKey(
+ related_name='jobtemplates_as_cloud_credential+',
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ blank=True,
+ to='main.Credential',
+ null=True,
+ ),
),
migrations.AddField(
model_name='jobtemplate',
name='credential',
- field=models.ForeignKey(related_name='jobtemplates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ field=models.ForeignKey(
+ related_name='jobtemplates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True
+ ),
),
migrations.AddField(
model_name='jobtemplate',
@@ -809,7 +1621,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='jobtemplate',
name='project',
- field=models.ForeignKey(related_name='jobtemplates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Project', null=True),
+ field=models.ForeignKey(
+ related_name='jobtemplates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Project', null=True
+ ),
),
migrations.AddField(
model_name='jobhostsummary',
@@ -824,12 +1638,21 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='job',
name='cloud_credential',
- field=models.ForeignKey(related_name='jobs_as_cloud_credential+', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ field=models.ForeignKey(
+ related_name='jobs_as_cloud_credential+',
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ blank=True,
+ to='main.Credential',
+ null=True,
+ ),
),
migrations.AddField(
model_name='job',
name='credential',
- field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ field=models.ForeignKey(
+ related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True
+ ),
),
migrations.AddField(
model_name='job',
@@ -844,7 +1667,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='job',
name='job_template',
- field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.JobTemplate', null=True),
+ field=models.ForeignKey(
+ related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.JobTemplate', null=True
+ ),
),
migrations.AddField(
model_name='job',
@@ -854,7 +1679,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='inventoryupdate',
name='credential',
- field=models.ForeignKey(related_name='inventoryupdates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ field=models.ForeignKey(
+ related_name='inventoryupdates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True
+ ),
),
migrations.AddField(
model_name='inventoryupdate',
@@ -869,17 +1696,23 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='inventorysource',
name='credential',
- field=models.ForeignKey(related_name='inventorysources', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ field=models.ForeignKey(
+ related_name='inventorysources', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True
+ ),
),
migrations.AddField(
model_name='inventorysource',
name='group',
- field=awx.main.fields.AutoOneToOneField(related_name='inventory_source', on_delete=django.db.models.deletion.SET_NULL, null=True, default=None, editable=False, to='main.Group'),
+ field=awx.main.fields.AutoOneToOneField(
+ related_name='inventory_source', on_delete=django.db.models.deletion.SET_NULL, null=True, default=None, editable=False, to='main.Group'
+ ),
),
migrations.AddField(
model_name='inventorysource',
name='inventory',
- field=models.ForeignKey(related_name='inventory_sources', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Inventory', null=True),
+ field=models.ForeignKey(
+ related_name='inventory_sources', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Inventory', null=True
+ ),
),
migrations.AddField(
model_name='inventorysource',
@@ -893,17 +1726,23 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='host',
name='inventory_sources',
- field=models.ManyToManyField(help_text='Inventory source(s) that created or modified this host.', related_name='hosts', editable=False, to='main.InventorySource'),
+ field=models.ManyToManyField(
+ help_text='Inventory source(s) that created or modified this host.', related_name='hosts', editable=False, to='main.InventorySource'
+ ),
),
migrations.AddField(
model_name='host',
name='last_job',
- field=models.ForeignKey(related_name='hosts_as_last_job+', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Job', null=True),
+ field=models.ForeignKey(
+ related_name='hosts_as_last_job+', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Job', null=True
+ ),
),
migrations.AddField(
model_name='group',
name='inventory_sources',
- field=models.ManyToManyField(help_text='Inventory source(s) that created or modified this group.', related_name='groups', editable=False, to='main.InventorySource'),
+ field=models.ManyToManyField(
+ help_text='Inventory source(s) that created or modified this group.', related_name='groups', editable=False, to='main.InventorySource'
+ ),
),
migrations.AlterUniqueTogether(
name='custominventoryscript',
@@ -921,7 +1760,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='adhoccommand',
name='credential',
- field=models.ForeignKey(related_name='ad_hoc_commands', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.Credential', null=True),
+ field=models.ForeignKey(
+ related_name='ad_hoc_commands', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.Credential', null=True
+ ),
),
migrations.AddField(
model_name='adhoccommand',
diff --git a/awx/main/migrations/0002_squashed_v300_release.py b/awx/main/migrations/0002_squashed_v300_release.py
index 89fce679ea..2afdef1845 100644
--- a/awx/main/migrations/0002_squashed_v300_release.py
+++ b/awx/main/migrations/0002_squashed_v300_release.py
@@ -17,10 +17,10 @@ import taggit.managers
def create_system_job_templates(apps, schema_editor):
- '''
+ """
Create default system job templates if not present. Create default schedules
only if new system job templates were created (i.e. new database).
- '''
+ """
SystemJobTemplate = apps.get_model('main', 'SystemJobTemplate')
Schedule = apps.get_model('main', 'Schedule')
@@ -104,24 +104,26 @@ def create_system_job_templates(apps, schema_editor):
class Migration(migrations.Migration):
- replaces = [('main', '0002_v300_tower_settings_changes'),
- ('main', '0003_v300_notification_changes'),
- ('main', '0004_v300_fact_changes'),
- ('main', '0005_v300_migrate_facts'),
- ('main', '0006_v300_active_flag_cleanup'),
- ('main', '0007_v300_active_flag_removal'),
- ('main', '0008_v300_rbac_changes'),
- ('main', '0009_v300_rbac_migrations'),
- ('main', '0010_v300_create_system_job_templates'),
- ('main', '0011_v300_credential_domain_field'),
- ('main', '0012_v300_create_labels'),
- ('main', '0013_v300_label_changes'),
- ('main', '0014_v300_invsource_cred'),
- ('main', '0015_v300_label_changes'),
- ('main', '0016_v300_prompting_changes'),
- ('main', '0017_v300_prompting_migrations'),
- ('main', '0018_v300_host_ordering'),
- ('main', '0019_v300_new_azure_credential'),]
+ replaces = [
+ ('main', '0002_v300_tower_settings_changes'),
+ ('main', '0003_v300_notification_changes'),
+ ('main', '0004_v300_fact_changes'),
+ ('main', '0005_v300_migrate_facts'),
+ ('main', '0006_v300_active_flag_cleanup'),
+ ('main', '0007_v300_active_flag_removal'),
+ ('main', '0008_v300_rbac_changes'),
+ ('main', '0009_v300_rbac_migrations'),
+ ('main', '0010_v300_create_system_job_templates'),
+ ('main', '0011_v300_credential_domain_field'),
+ ('main', '0012_v300_create_labels'),
+ ('main', '0013_v300_label_changes'),
+ ('main', '0014_v300_invsource_cred'),
+ ('main', '0015_v300_label_changes'),
+ ('main', '0016_v300_prompting_changes'),
+ ('main', '0017_v300_prompting_migrations'),
+ ('main', '0018_v300_host_ordering'),
+ ('main', '0019_v300_new_azure_credential'),
+ ]
dependencies = [
('taggit', '0002_auto_20150616_2121'),
@@ -142,8 +144,25 @@ class Migration(migrations.Migration):
('description', models.TextField()),
('category', models.CharField(max_length=128)),
('value', models.TextField(blank=True)),
- ('value_type', models.CharField(max_length=12, choices=[('string', 'String'), ('int', 'Integer'), ('float', 'Decimal'), ('json', 'JSON'), ('bool', 'Boolean'), ('password', 'Password'), ('list', 'List')])),
- ('user', models.ForeignKey(related_name='settings', default=None, editable=False, to=settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True)),
+ (
+ 'value_type',
+ models.CharField(
+ max_length=12,
+ choices=[
+ ('string', 'String'),
+ ('int', 'Integer'),
+ ('float', 'Decimal'),
+ ('json', 'JSON'),
+ ('bool', 'Boolean'),
+ ('password', 'Password'),
+ ('list', 'List'),
+ ],
+ ),
+ ),
+ (
+ 'user',
+ models.ForeignKey(related_name='settings', default=None, editable=False, to=settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True),
+ ),
],
),
# Notification changes
@@ -153,10 +172,31 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('status', models.CharField(default='pending', max_length=20, editable=False, choices=[('pending', 'Pending'), ('successful', 'Successful'), ('failed', 'Failed')])),
+ (
+ 'status',
+ models.CharField(
+ default='pending', max_length=20, editable=False, choices=[('pending', 'Pending'), ('successful', 'Successful'), ('failed', 'Failed')]
+ ),
+ ),
('error', models.TextField(default='', editable=False, blank=True)),
('notifications_sent', models.IntegerField(default=0, editable=False)),
- ('notification_type', models.CharField(max_length=32, choices=[('email', 'Email'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('pagerduty', 'Pagerduty'), ('hipchat', 'HipChat'), ('webhook', 'Webhook'), ('mattermost', 'Mattermost'), ('rocketchat', 'Rocket.Chat'), ('irc', 'IRC')])),
+ (
+ 'notification_type',
+ models.CharField(
+ max_length=32,
+ choices=[
+ ('email', 'Email'),
+ ('slack', 'Slack'),
+ ('twilio', 'Twilio'),
+ ('pagerduty', 'Pagerduty'),
+ ('hipchat', 'HipChat'),
+ ('webhook', 'Webhook'),
+ ('mattermost', 'Mattermost'),
+ ('rocketchat', 'Rocket.Chat'),
+ ('irc', 'IRC'),
+ ],
+ ),
+ ),
('recipients', models.TextField(default='', editable=False, blank=True)),
('subject', models.TextField(default='', editable=False, blank=True)),
('body', jsonfield.fields.JSONField(default=dict, blank=True)),
@@ -173,12 +213,56 @@ class Migration(migrations.Migration):
('modified', models.DateTimeField(default=None, editable=False)),
('description', models.TextField(default='', blank=True)),
('name', models.CharField(unique=True, max_length=512)),
- ('notification_type', models.CharField(max_length=32, choices=[('email', 'Email'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('pagerduty', 'Pagerduty'), ('hipchat', 'HipChat'), ('webhook', 'Webhook'), ('mattermost', 'Mattermost'), ('rocketchat', 'Rocket.Chat'), ('irc', 'IRC')])),
+ (
+ 'notification_type',
+ models.CharField(
+ max_length=32,
+ choices=[
+ ('email', 'Email'),
+ ('slack', 'Slack'),
+ ('twilio', 'Twilio'),
+ ('pagerduty', 'Pagerduty'),
+ ('hipchat', 'HipChat'),
+ ('webhook', 'Webhook'),
+ ('mattermost', 'Mattermost'),
+ ('rocketchat', 'Rocket.Chat'),
+ ('irc', 'IRC'),
+ ],
+ ),
+ ),
('notification_configuration', jsonfield.fields.JSONField(default=dict)),
- ('created_by', models.ForeignKey(related_name="{u'class': 'notificationtemplate', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
- ('modified_by', models.ForeignKey(related_name="{u'class': 'notificationtemplate', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
- ('organization', models.ForeignKey(related_name='notification_templates', on_delete=django.db.models.deletion.SET_NULL, to='main.Organization', null=True)),
- ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
+ (
+ 'created_by',
+ models.ForeignKey(
+ related_name="{u'class': 'notificationtemplate', u'app_label': 'main'}(class)s_created+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
+ (
+ 'modified_by',
+ models.ForeignKey(
+ related_name="{u'class': 'notificationtemplate', u'app_label': 'main'}(class)s_modified+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
+ (
+ 'organization',
+ models.ForeignKey(related_name='notification_templates', on_delete=django.db.models.deletion.SET_NULL, to='main.Organization', null=True),
+ ),
+ (
+ 'tags',
+ taggit.managers.TaggableManager(
+ to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'
+ ),
+ ),
],
),
migrations.AddField(
@@ -238,8 +322,18 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('timestamp', models.DateTimeField(default=None, help_text='Date and time of the corresponding fact scan gathering time.', editable=False)),
('module', models.CharField(max_length=128)),
- ('facts', awx.main.fields.JSONBField(default=dict, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True)),
- ('host', models.ForeignKey(related_name='facts', to='main.Host', on_delete=models.CASCADE, help_text='Host for the facts that the fact scan captured.')),
+ (
+ 'facts',
+ awx.main.fields.JSONBField(
+ default=dict, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True
+ ),
+ ),
+ (
+ 'host',
+ models.ForeignKey(
+ related_name='facts', to='main.Host', on_delete=models.CASCADE, help_text='Host for the facts that the fact scan captured.'
+ ),
+ ),
],
),
migrations.AlterIndexTogether(
@@ -291,7 +385,6 @@ class Migration(migrations.Migration):
model_name='unifiedjobtemplate',
name='active',
),
-
# RBAC Changes
# ############
migrations.RenameField(
@@ -368,7 +461,6 @@ class Migration(migrations.Migration):
name='organization',
field=models.ForeignKey(related_name='credentials', on_delete=models.CASCADE, default=None, blank=True, to='main.Organization', null=True),
),
-
#
# New RBAC models and fields
#
@@ -383,7 +475,6 @@ class Migration(migrations.Migration):
('implicit_parents', models.TextField(default='[]')),
('content_type', models.ForeignKey(default=None, to='contenttypes.ContentType', on_delete=models.CASCADE, null=True)),
('object_id', models.PositiveIntegerField(default=None, null=True)),
-
],
options={
'db_table': 'main_rbac_roles',
@@ -431,7 +522,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='credential',
name='read_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_auditor', 'organization.auditor_role', 'use_role', 'admin_role'], to='main.Role', null='True'),
+ field=awx.main.fields.ImplicitRoleField(
+ related_name='+', parent_role=['singleton:system_auditor', 'organization.auditor_role', 'use_role', 'admin_role'], to='main.Role', null='True'
+ ),
),
migrations.AddField(
model_name='custominventoryscript',
@@ -441,7 +534,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='custominventoryscript',
name='read_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.auditor_role', 'organization.member_role', 'admin_role'], to='main.Role', null='True'),
+ field=awx.main.fields.ImplicitRoleField(
+ related_name='+', parent_role=['organization.auditor_role', 'organization.member_role', 'admin_role'], to='main.Role', null='True'
+ ),
),
migrations.AddField(
model_name='inventory',
@@ -466,12 +561,16 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='inventory',
name='read_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.auditor_role', 'update_role', 'use_role', 'admin_role'], to='main.Role', null='True'),
+ field=awx.main.fields.ImplicitRoleField(
+ related_name='+', parent_role=['organization.auditor_role', 'update_role', 'use_role', 'admin_role'], to='main.Role', null='True'
+ ),
),
migrations.AddField(
model_name='jobtemplate',
name='admin_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['project.organization.admin_role', 'inventory.organization.admin_role'], to='main.Role', null='True'),
+ field=awx.main.fields.ImplicitRoleField(
+ related_name='+', parent_role=['project.organization.admin_role', 'inventory.organization.admin_role'], to='main.Role', null='True'
+ ),
),
migrations.AddField(
model_name='jobtemplate',
@@ -481,7 +580,12 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='jobtemplate',
name='read_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['project.organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'], to='main.Role', null='True'),
+ field=awx.main.fields.ImplicitRoleField(
+ related_name='+',
+ parent_role=['project.organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
+ to='main.Role',
+ null='True',
+ ),
),
migrations.AddField(
model_name='organization',
@@ -506,7 +610,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='project',
name='admin_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.admin_role', 'singleton:system_administrator'], to='main.Role', null='True'),
+ field=awx.main.fields.ImplicitRoleField(
+ related_name='+', parent_role=['organization.admin_role', 'singleton:system_administrator'], to='main.Role', null='True'
+ ),
),
migrations.AddField(
model_name='project',
@@ -521,7 +627,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='project',
name='read_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['organization.auditor_role', 'singleton:system_auditor', 'use_role', 'update_role'], to='main.Role', null='True'),
+ field=awx.main.fields.ImplicitRoleField(
+ related_name='+', parent_role=['organization.auditor_role', 'singleton:system_auditor', 'use_role', 'update_role'], to='main.Role', null='True'
+ ),
),
migrations.AddField(
model_name='team',
@@ -536,20 +644,39 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='team',
name='read_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role', 'organization.auditor_role', 'member_role'], to='main.Role', null='True'),
+ field=awx.main.fields.ImplicitRoleField(
+ related_name='+', parent_role=['admin_role', 'organization.auditor_role', 'member_role'], to='main.Role', null='True'
+ ),
),
-
# System Job Templates
migrations.RunPython(create_system_job_templates, migrations.RunPython.noop),
migrations.AlterField(
model_name='systemjob',
name='job_type',
- field=models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')]),
+ field=models.CharField(
+ default='',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('cleanup_jobs', 'Remove jobs older than a certain number of days'),
+ ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'),
+ ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data'),
+ ],
+ ),
),
migrations.AlterField(
model_name='systemjobtemplate',
name='job_type',
- field=models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')]),
+ field=models.CharField(
+ default='',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('cleanup_jobs', 'Remove jobs older than a certain number of days'),
+ ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'),
+ ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data'),
+ ],
+ ),
),
# Credential domain field
migrations.AddField(
@@ -566,10 +693,43 @@ class Migration(migrations.Migration):
('modified', models.DateTimeField(default=None, editable=False)),
('description', models.TextField(default='', blank=True)),
('name', models.CharField(max_length=512)),
- ('created_by', models.ForeignKey(related_name="{u'class': 'label', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
- ('modified_by', models.ForeignKey(related_name="{u'class': 'label', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
- ('organization', models.ForeignKey(related_name='labels', on_delete=django.db.models.deletion.CASCADE, to='main.Organization', help_text='Organization this label belongs to.')),
- ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
+ (
+ 'created_by',
+ models.ForeignKey(
+ related_name="{u'class': 'label', u'app_label': 'main'}(class)s_created+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
+ (
+ 'modified_by',
+ models.ForeignKey(
+ related_name="{u'class': 'label', u'app_label': 'main'}(class)s_modified+",
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
+ (
+ 'organization',
+ models.ForeignKey(
+ related_name='labels',
+ on_delete=django.db.models.deletion.CASCADE,
+ to='main.Organization',
+ help_text='Organization this label belongs to.',
+ ),
+ ),
+ (
+ 'tags',
+ taggit.managers.TaggableManager(
+ to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'
+ ),
+ ),
],
options={
'ordering': ('organization', 'name'),
@@ -598,23 +758,47 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='label',
name='organization',
- field=models.ForeignKey(related_name='labels', on_delete=django.db.models.deletion.CASCADE, default=None, blank=True, to='main.Organization', help_text='Organization this label belongs to.', null=True),
+ field=models.ForeignKey(
+ related_name='labels',
+ on_delete=django.db.models.deletion.CASCADE,
+ default=None,
+ blank=True,
+ to='main.Organization',
+ help_text='Organization this label belongs to.',
+ null=True,
+ ),
),
migrations.AlterField(
model_name='label',
name='organization',
- field=models.ForeignKey(related_name='labels', on_delete=django.db.models.deletion.CASCADE, to='main.Organization', help_text='Organization this label belongs to.'),
+ field=models.ForeignKey(
+ related_name='labels', on_delete=django.db.models.deletion.CASCADE, to='main.Organization', help_text='Organization this label belongs to.'
+ ),
),
# InventorySource Credential
migrations.AddField(
model_name='job',
name='network_credential',
- field=models.ForeignKey(related_name='jobs_as_network_credential+', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ field=models.ForeignKey(
+ related_name='jobs_as_network_credential+',
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ blank=True,
+ to='main.Credential',
+ null=True,
+ ),
),
migrations.AddField(
model_name='jobtemplate',
name='network_credential',
- field=models.ForeignKey(related_name='jobtemplates_as_network_credential+', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ field=models.ForeignKey(
+ related_name='jobtemplates_as_network_credential+',
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ blank=True,
+ to='main.Credential',
+ null=True,
+ ),
),
migrations.AddField(
model_name='credential',
@@ -629,27 +813,86 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='deprecated_team',
- field=models.ForeignKey(related_name='deprecated_credentials', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Team', null=True),
+ field=models.ForeignKey(
+ related_name='deprecated_credentials', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Team', null=True
+ ),
),
migrations.AlterField(
model_name='credential',
name='deprecated_user',
- field=models.ForeignKey(related_name='deprecated_credentials', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to=settings.AUTH_USER_MODEL, null=True),
+ field=models.ForeignKey(
+ related_name='deprecated_credentials',
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ blank=True,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
),
migrations.AlterField(
model_name='credential',
name='kind',
- field=models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('net', 'Network'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('openstack', 'OpenStack')]),
+ field=models.CharField(
+ default='ssh',
+ max_length=32,
+ choices=[
+ ('ssh', 'Machine'),
+ ('net', 'Network'),
+ ('scm', 'Source Control'),
+ ('aws', 'Amazon Web Services'),
+ ('rax', 'Rackspace'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Red Hat Satellite 6'),
+ ('cloudforms', 'Red Hat CloudForms'),
+ ('gce', 'Google Compute Engine'),
+ ('azure', 'Microsoft Azure'),
+ ('openstack', 'OpenStack'),
+ ],
+ ),
),
migrations.AlterField(
model_name='inventorysource',
name='source',
- field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
+ field=models.CharField(
+ default='',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('', 'Manual'),
+ ('file', 'Local File, Directory or Script'),
+ ('rax', 'Rackspace Cloud Servers'),
+ ('ec2', 'Amazon EC2'),
+ ('gce', 'Google Compute Engine'),
+ ('azure', 'Microsoft Azure'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Red Hat Satellite 6'),
+ ('cloudforms', 'Red Hat CloudForms'),
+ ('openstack', 'OpenStack'),
+ ('custom', 'Custom Script'),
+ ],
+ ),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
- field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
+ field=models.CharField(
+ default='',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('', 'Manual'),
+ ('file', 'Local File, Directory or Script'),
+ ('rax', 'Rackspace Cloud Servers'),
+ ('ec2', 'Amazon EC2'),
+ ('gce', 'Google Compute Engine'),
+ ('azure', 'Microsoft Azure'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Red Hat Satellite 6'),
+ ('cloudforms', 'Red Hat CloudForms'),
+ ('openstack', 'OpenStack'),
+ ('custom', 'Custom Script'),
+ ],
+ ),
),
migrations.AlterField(
model_name='team',
@@ -685,12 +928,16 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='job',
name='inventory',
- field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True),
+ field=models.ForeignKey(
+ related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True
+ ),
),
migrations.AlterField(
model_name='jobtemplate',
name='inventory',
- field=models.ForeignKey(related_name='jobtemplates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True),
+ field=models.ForeignKey(
+ related_name='jobtemplates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True
+ ),
),
# Host ordering
migrations.AlterModelOptions(
@@ -721,7 +968,24 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='kind',
- field=models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('net', 'Network'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('satellite6', 'Satellite 6'), ('cloudforms', 'CloudForms'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('openstack', 'OpenStack')]),
+ field=models.CharField(
+ default='ssh',
+ max_length=32,
+ choices=[
+ ('ssh', 'Machine'),
+ ('net', 'Network'),
+ ('scm', 'Source Control'),
+ ('aws', 'Amazon Web Services'),
+ ('rax', 'Rackspace'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Satellite 6'),
+ ('cloudforms', 'CloudForms'),
+ ('gce', 'Google Compute Engine'),
+ ('azure', 'Microsoft Azure Classic (deprecated)'),
+ ('azure_rm', 'Microsoft Azure Resource Manager'),
+ ('openstack', 'OpenStack'),
+ ],
+ ),
),
migrations.AlterField(
model_name='host',
@@ -731,11 +995,47 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='inventorysource',
name='source',
- field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Satellite 6'), ('cloudforms', 'CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
+ field=models.CharField(
+ default='',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('', 'Manual'),
+ ('file', 'Local File, Directory or Script'),
+ ('rax', 'Rackspace Cloud Servers'),
+ ('ec2', 'Amazon EC2'),
+ ('gce', 'Google Compute Engine'),
+ ('azure', 'Microsoft Azure Classic (deprecated)'),
+ ('azure_rm', 'Microsoft Azure Resource Manager'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Satellite 6'),
+ ('cloudforms', 'CloudForms'),
+ ('openstack', 'OpenStack'),
+ ('custom', 'Custom Script'),
+ ],
+ ),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
- field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Satellite 6'), ('cloudforms', 'CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
+ field=models.CharField(
+ default='',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('', 'Manual'),
+ ('file', 'Local File, Directory or Script'),
+ ('rax', 'Rackspace Cloud Servers'),
+ ('ec2', 'Amazon EC2'),
+ ('gce', 'Google Compute Engine'),
+ ('azure', 'Microsoft Azure Classic (deprecated)'),
+ ('azure_rm', 'Microsoft Azure Resource Manager'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Satellite 6'),
+ ('cloudforms', 'CloudForms'),
+ ('openstack', 'OpenStack'),
+ ('custom', 'Custom Script'),
+ ],
+ ),
),
]
diff --git a/awx/main/migrations/0003_squashed_v300_v303_updates.py b/awx/main/migrations/0003_squashed_v300_v303_updates.py
index d58ffc2dfe..493f5532aa 100644
--- a/awx/main/migrations/0003_squashed_v300_v303_updates.py
+++ b/awx/main/migrations/0003_squashed_v300_v303_updates.py
@@ -14,15 +14,17 @@ from ._squashed_30 import SQUASHED_30
class Migration(migrations.Migration):
- replaces = [('main', '0020_v300_labels_changes'),
- ('main', '0021_v300_activity_stream'),
- ('main', '0022_v300_adhoc_extravars'),
- ('main', '0023_v300_activity_stream_ordering'),
- ('main', '0024_v300_jobtemplate_allow_simul'),
- ('main', '0025_v300_update_rbac_parents'),
- ('main', '0026_v300_credential_unique'),
- ('main', '0027_v300_team_migrations'),
- ('main', '0028_v300_org_team_cascade')] + _squashed.replaces(SQUASHED_30, applied=True)
+ replaces = [
+ ('main', '0020_v300_labels_changes'),
+ ('main', '0021_v300_activity_stream'),
+ ('main', '0022_v300_adhoc_extravars'),
+ ('main', '0023_v300_activity_stream_ordering'),
+ ('main', '0024_v300_jobtemplate_allow_simul'),
+ ('main', '0025_v300_update_rbac_parents'),
+ ('main', '0026_v300_credential_unique'),
+ ('main', '0027_v300_team_migrations'),
+ ('main', '0028_v300_org_team_cascade'),
+ ] + _squashed.replaces(SQUASHED_30, applied=True)
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
@@ -68,17 +70,70 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='kind',
- field=models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('net', 'Network'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('openstack', 'OpenStack')]),
+ field=models.CharField(
+ default='ssh',
+ max_length=32,
+ choices=[
+ ('ssh', 'Machine'),
+ ('net', 'Network'),
+ ('scm', 'Source Control'),
+ ('aws', 'Amazon Web Services'),
+ ('rax', 'Rackspace'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Red Hat Satellite 6'),
+ ('cloudforms', 'Red Hat CloudForms'),
+ ('gce', 'Google Compute Engine'),
+ ('azure', 'Microsoft Azure Classic (deprecated)'),
+ ('azure_rm', 'Microsoft Azure Resource Manager'),
+ ('openstack', 'OpenStack'),
+ ],
+ ),
),
migrations.AlterField(
model_name='inventorysource',
name='source',
- field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
+ field=models.CharField(
+ default='',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('', 'Manual'),
+ ('file', 'Local File, Directory or Script'),
+ ('rax', 'Rackspace Cloud Servers'),
+ ('ec2', 'Amazon EC2'),
+ ('gce', 'Google Compute Engine'),
+ ('azure', 'Microsoft Azure Classic (deprecated)'),
+ ('azure_rm', 'Microsoft Azure Resource Manager'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Red Hat Satellite 6'),
+ ('cloudforms', 'Red Hat CloudForms'),
+ ('openstack', 'OpenStack'),
+ ('custom', 'Custom Script'),
+ ],
+ ),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
- field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure Classic (deprecated)'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
+ field=models.CharField(
+ default='',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('', 'Manual'),
+ ('file', 'Local File, Directory or Script'),
+ ('rax', 'Rackspace Cloud Servers'),
+ ('ec2', 'Amazon EC2'),
+ ('gce', 'Google Compute Engine'),
+ ('azure', 'Microsoft Azure Classic (deprecated)'),
+ ('azure_rm', 'Microsoft Azure Resource Manager'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Red Hat Satellite 6'),
+ ('cloudforms', 'Red Hat CloudForms'),
+ ('openstack', 'OpenStack'),
+ ('custom', 'Custom Script'),
+ ],
+ ),
),
# jobtemplate allow simul
migrations.AddField(
@@ -110,7 +165,9 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='read_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_auditor', 'organization.auditor_role', 'use_role', 'admin_role'], to='main.Role', null='True'),
+ field=awx.main.fields.ImplicitRoleField(
+ related_name='+', parent_role=['singleton:system_auditor', 'organization.auditor_role', 'use_role', 'admin_role'], to='main.Role', null='True'
+ ),
),
# Team cascade
migrations.AlterField(
diff --git a/awx/main/migrations/0004_squashed_v310_release.py b/awx/main/migrations/0004_squashed_v310_release.py
index 88a33d146c..06fd3aeed3 100644
--- a/awx/main/migrations/0004_squashed_v310_release.py
+++ b/awx/main/migrations/0004_squashed_v310_release.py
@@ -63,18 +63,42 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='become_method',
- field=models.CharField(default='', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[('', 'None'), ('sudo', 'Sudo'), ('su', 'Su'), ('pbrun', 'Pbrun'), ('pfexec', 'Pfexec'), ('dzdo', 'DZDO'), ('pmrun', 'Pmrun')]),
+ field=models.CharField(
+ default='',
+ help_text='Privilege escalation method.',
+ max_length=32,
+ blank=True,
+ choices=[('', 'None'), ('sudo', 'Sudo'), ('su', 'Su'), ('pbrun', 'Pbrun'), ('pfexec', 'Pfexec'), ('dzdo', 'DZDO'), ('pmrun', 'Pmrun')],
+ ),
),
# Add Workflows
migrations.AlterField(
model_name='unifiedjob',
name='launch_type',
- field=models.CharField(default='manual', max_length=20, editable=False, choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency'), ('workflow', 'Workflow'), ('sync', 'Sync')]),
+ field=models.CharField(
+ default='manual',
+ max_length=20,
+ editable=False,
+ choices=[
+ ('manual', 'Manual'),
+ ('relaunch', 'Relaunch'),
+ ('callback', 'Callback'),
+ ('scheduled', 'Scheduled'),
+ ('dependency', 'Dependency'),
+ ('workflow', 'Workflow'),
+ ('sync', 'Sync'),
+ ],
+ ),
),
migrations.CreateModel(
name='WorkflowJob',
fields=[
- ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, on_delete=models.CASCADE, primary_key=True, serialize=False, to='main.UnifiedJob')),
+ (
+ 'unifiedjob_ptr',
+ models.OneToOneField(
+ parent_link=True, auto_created=True, on_delete=models.CASCADE, primary_key=True, serialize=False, to='main.UnifiedJob'
+ ),
+ ),
('extra_vars', models.TextField(default='', blank=True)),
],
options={
@@ -90,7 +114,12 @@ class Migration(migrations.Migration):
('modified', models.DateTimeField(default=None, editable=False)),
('always_nodes', models.ManyToManyField(related_name='workflowjobnodes_always', to='main.WorkflowJobNode', blank=True)),
('failure_nodes', models.ManyToManyField(related_name='workflowjobnodes_failure', to='main.WorkflowJobNode', blank=True)),
- ('job', models.OneToOneField(related_name='unified_job_node', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True)),
+ (
+ 'job',
+ models.OneToOneField(
+ related_name='unified_job_node', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True
+ ),
+ ),
('success_nodes', models.ManyToManyField(related_name='workflowjobnodes_success', to='main.WorkflowJobNode', blank=True)),
],
options={
@@ -100,7 +129,12 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='WorkflowJobTemplate',
fields=[
- ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, on_delete=models.CASCADE, serialize=False, to='main.UnifiedJobTemplate')),
+ (
+ 'unifiedjobtemplate_ptr',
+ models.OneToOneField(
+ parent_link=True, auto_created=True, primary_key=True, on_delete=models.CASCADE, serialize=False, to='main.UnifiedJobTemplate'
+ ),
+ ),
('extra_vars', models.TextField(default='', blank=True)),
('admin_role', awx.main.fields.ImplicitRoleField(related_name='+', parent_role='singleton:system_administrator', to='main.Role', null='True')),
],
@@ -115,8 +149,28 @@ class Migration(migrations.Migration):
('always_nodes', models.ManyToManyField(related_name='workflowjobtemplatenodes_always', to='main.WorkflowJobTemplateNode', blank=True)),
('failure_nodes', models.ManyToManyField(related_name='workflowjobtemplatenodes_failure', to='main.WorkflowJobTemplateNode', blank=True)),
('success_nodes', models.ManyToManyField(related_name='workflowjobtemplatenodes_success', to='main.WorkflowJobTemplateNode', blank=True)),
- ('unified_job_template', models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJobTemplate', null=True)),
- ('workflow_job_template', models.ForeignKey(related_name='workflow_job_template_nodes', on_delete=models.SET_NULL, default=None, blank=True, to='main.WorkflowJobTemplate', null=True)),
+ (
+ 'unified_job_template',
+ models.ForeignKey(
+ related_name='workflowjobtemplatenodes',
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ blank=True,
+ to='main.UnifiedJobTemplate',
+ null=True,
+ ),
+ ),
+ (
+ 'workflow_job_template',
+ models.ForeignKey(
+ related_name='workflow_job_template_nodes',
+ on_delete=models.SET_NULL,
+ default=None,
+ blank=True,
+ to='main.WorkflowJobTemplate',
+ null=True,
+ ),
+ ),
],
options={
'abstract': False,
@@ -125,17 +179,23 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowjobnode',
name='unified_job_template',
- field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJobTemplate', null=True),
+ field=models.ForeignKey(
+ related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJobTemplate', null=True
+ ),
),
migrations.AddField(
model_name='workflowjobnode',
name='workflow_job',
- field=models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJob', null=True),
+ field=models.ForeignKey(
+ related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJob', null=True
+ ),
),
migrations.AddField(
model_name='workflowjob',
name='workflow_job_template',
- field=models.ForeignKey(related_name='workflow_jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJobTemplate', null=True),
+ field=models.ForeignKey(
+ related_name='workflow_jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJobTemplate', null=True
+ ),
),
migrations.AddField(
model_name='activitystream',
@@ -166,12 +226,16 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowjobnode',
name='credential',
- field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ field=models.ForeignKey(
+ related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True
+ ),
),
migrations.AddField(
model_name='workflowjobnode',
name='inventory',
- field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True),
+ field=models.ForeignKey(
+ related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True
+ ),
),
migrations.AddField(
model_name='workflowjobtemplate',
@@ -186,7 +250,12 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowjobtemplate',
name='read_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role'], to='main.Role', null='True'),
+ field=awx.main.fields.ImplicitRoleField(
+ related_name='+',
+ parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role'],
+ to='main.Role',
+ null='True',
+ ),
),
migrations.AddField(
model_name='workflowjobtemplatenode',
@@ -196,32 +265,44 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowjobtemplatenode',
name='credential',
- field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ field=models.ForeignKey(
+ related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True
+ ),
),
migrations.AddField(
model_name='workflowjobtemplatenode',
name='inventory',
- field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True),
+ field=models.ForeignKey(
+ related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True
+ ),
),
migrations.AlterField(
model_name='workflowjobnode',
name='unified_job_template',
- field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.UnifiedJobTemplate', null=True),
+ field=models.ForeignKey(
+ related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.UnifiedJobTemplate', null=True
+ ),
),
migrations.AlterField(
model_name='workflowjobnode',
name='workflow_job',
- field=models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.CASCADE, default=None, blank=True, to='main.WorkflowJob', null=True),
+ field=models.ForeignKey(
+ related_name='workflow_job_nodes', on_delete=django.db.models.deletion.CASCADE, default=None, blank=True, to='main.WorkflowJob', null=True
+ ),
),
migrations.AlterField(
model_name='workflowjobtemplate',
name='admin_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_administrator', 'organization.admin_role'], to='main.Role', null='True'),
+ field=awx.main.fields.ImplicitRoleField(
+ related_name='+', parent_role=['singleton:system_administrator', 'organization.admin_role'], to='main.Role', null='True'
+ ),
),
migrations.AlterField(
model_name='workflowjobtemplatenode',
name='unified_job_template',
- field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.UnifiedJobTemplate', null=True),
+ field=models.ForeignKey(
+ related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.UnifiedJobTemplate', null=True
+ ),
),
# Job artifacts
migrations.AddField(
@@ -275,7 +356,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='project',
name='scm_revision',
- field=models.CharField(default='', editable=False, max_length=1024, blank=True, help_text='The last revision fetched by a project update', verbose_name='SCM Revision'),
+ field=models.CharField(
+ default='', editable=False, max_length=1024, blank=True, help_text='The last revision fetched by a project update', verbose_name='SCM Revision'
+ ),
),
migrations.AddField(
model_name='projectupdate',
@@ -285,13 +368,22 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='job',
name='scm_revision',
- field=models.CharField(default='', editable=False, max_length=1024, blank=True, help_text='The SCM Revision from the Project used for this job, if available', verbose_name='SCM Revision'),
+ field=models.CharField(
+ default='',
+ editable=False,
+ max_length=1024,
+ blank=True,
+ help_text='The SCM Revision from the Project used for this job, if available',
+ verbose_name='SCM Revision',
+ ),
),
# Project Playbook Files
migrations.AddField(
model_name='project',
name='playbook_files',
- field=jsonfield.fields.JSONField(default=[], help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True),
+ field=jsonfield.fields.JSONField(
+ default=[], help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True
+ ),
),
# Job events to stdout
migrations.AddField(
@@ -352,7 +444,21 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='adhoccommandevent',
name='event',
- field=models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_skipped', 'Host Skipped'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')]),
+ field=models.CharField(
+ max_length=100,
+ choices=[
+ ('runner_on_failed', 'Host Failed'),
+ ('runner_on_ok', 'Host OK'),
+ ('runner_on_unreachable', 'Host Unreachable'),
+ ('runner_on_skipped', 'Host Skipped'),
+ ('debug', 'Debug'),
+ ('verbose', 'Verbose'),
+ ('deprecated', 'Deprecated'),
+ ('warning', 'Warning'),
+ ('system_warning', 'System Warning'),
+ ('error', 'Error'),
+ ],
+ ),
),
migrations.AlterField(
model_name='jobevent',
@@ -362,7 +468,43 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='jobevent',
name='event',
- field=models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_item_on_ok', 'Item OK'), ('runner_item_on_failed', 'Item Failed'), ('runner_item_on_skipped', 'Item Skipped'), ('runner_retry', 'Host Retry'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_include', 'Including File'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')]),
+ field=models.CharField(
+ max_length=100,
+ choices=[
+ ('runner_on_failed', 'Host Failed'),
+ ('runner_on_ok', 'Host OK'),
+ ('runner_on_error', 'Host Failure'),
+ ('runner_on_skipped', 'Host Skipped'),
+ ('runner_on_unreachable', 'Host Unreachable'),
+ ('runner_on_no_hosts', 'No Hosts Remaining'),
+ ('runner_on_async_poll', 'Host Polling'),
+ ('runner_on_async_ok', 'Host Async OK'),
+ ('runner_on_async_failed', 'Host Async Failure'),
+ ('runner_item_on_ok', 'Item OK'),
+ ('runner_item_on_failed', 'Item Failed'),
+ ('runner_item_on_skipped', 'Item Skipped'),
+ ('runner_retry', 'Host Retry'),
+ ('runner_on_file_diff', 'File Difference'),
+ ('playbook_on_start', 'Playbook Started'),
+ ('playbook_on_notify', 'Running Handlers'),
+ ('playbook_on_include', 'Including File'),
+ ('playbook_on_no_hosts_matched', 'No Hosts Matched'),
+ ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'),
+ ('playbook_on_task_start', 'Task Started'),
+ ('playbook_on_vars_prompt', 'Variables Prompted'),
+ ('playbook_on_setup', 'Gathering Facts'),
+ ('playbook_on_import_for_host', 'internal: on Import for Host'),
+ ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'),
+ ('playbook_on_play_start', 'Play Started'),
+ ('playbook_on_stats', 'Playbook Complete'),
+ ('debug', 'Debug'),
+ ('verbose', 'Verbose'),
+ ('deprecated', 'Deprecated'),
+ ('warning', 'Warning'),
+ ('system_warning', 'System Warning'),
+ ('error', 'Error'),
+ ],
+ ),
),
migrations.AlterUniqueTogether(
name='adhoccommandevent',
@@ -448,7 +590,9 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='project',
name='playbook_files',
- field=awx.main.fields.JSONField(default=[], help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True),
+ field=awx.main.fields.JSONField(
+ default=[], help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True
+ ),
),
migrations.AlterField(
model_name='schedule',
@@ -489,7 +633,14 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='job',
name='project_update',
- field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.ProjectUpdate', help_text='The SCM Refresh task used to make sure the playbooks were available for the job run', null=True),
+ field=models.ForeignKey(
+ on_delete=django.db.models.deletion.SET_NULL,
+ default=None,
+ blank=True,
+ to='main.ProjectUpdate',
+ help_text='The SCM Refresh task used to make sure the playbooks were available for the job run',
+ null=True,
+ ),
),
# Inventory, non-unique name
migrations.AlterField(
@@ -505,7 +656,9 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='host',
name='instance_id',
- field=models.CharField(default='', help_text='The value used by the remote inventory source to uniquely identify the host', max_length=1024, blank=True),
+ field=models.CharField(
+ default='', help_text='The value used by the remote inventory source to uniquely identify the host', max_length=1024, blank=True
+ ),
),
migrations.AlterField(
model_name='project',
@@ -520,12 +673,23 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='project',
name='scm_type',
- field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
+ field=models.CharField(
+ default='',
+ choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')],
+ max_length=8,
+ blank=True,
+ help_text='Specifies the source control system used to store the project.',
+ verbose_name='SCM Type',
+ ),
),
migrations.AlterField(
model_name='project',
name='scm_update_cache_timeout',
- field=models.PositiveIntegerField(default=0, help_text='The number of seconds after the last project update ran that a new project update will be launched as a job dependency.', blank=True),
+ field=models.PositiveIntegerField(
+ default=0,
+ help_text='The number of seconds after the last project update ran that a new project update will be launched as a job dependency.',
+ blank=True,
+ ),
),
migrations.AlterField(
model_name='project',
@@ -555,7 +719,14 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='projectupdate',
name='scm_type',
- field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
+ field=models.CharField(
+ default='',
+ choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')],
+ max_length=8,
+ blank=True,
+ help_text='Specifies the source control system used to store the project.',
+ verbose_name='SCM Type',
+ ),
),
migrations.AlterField(
model_name='projectupdate',
@@ -570,7 +741,12 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='schedule',
name='dtend',
- field=models.DateTimeField(default=None, help_text='The last occurrence of the schedule occurs before this time, afterwards the schedule expires.', null=True, editable=False),
+ field=models.DateTimeField(
+ default=None,
+ help_text='The last occurrence of the schedule occurs before this time, afterwards the schedule expires.',
+ null=True,
+ editable=False,
+ ),
),
migrations.AlterField(
model_name='schedule',
@@ -610,7 +786,9 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='unifiedjob',
name='job_explanation',
- field=models.TextField(default='', help_text="A status field to indicate the state of the job if it wasn't able to run and capture stdout", editable=False, blank=True),
+ field=models.TextField(
+ default='', help_text="A status field to indicate the state of the job if it wasn't able to run and capture stdout", editable=False, blank=True
+ ),
),
migrations.AlterField(
model_name='unifiedjob',
diff --git a/awx/main/migrations/0006_v320_release.py b/awx/main/migrations/0006_v320_release.py
index 69b95f03e7..1f755f94ce 100644
--- a/awx/main/migrations/0006_v320_release.py
+++ b/awx/main/migrations/0006_v320_release.py
@@ -6,13 +6,7 @@ from __future__ import unicode_literals
from psycopg2.extensions import AsIs
# Django
-from django.db import (
- connection,
- migrations,
- models,
- OperationalError,
- ProgrammingError
-)
+from django.db import connection, migrations, models, OperationalError, ProgrammingError
from django.conf import settings
import taggit.managers
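The parenthesized import collapses back to one line only because the project raises black's default 88-character line length; the roughly 150-character lines kept elsewhere in this patch point the same way, and the preserved single quotes suggest string normalization is switched off. A hypothetical configuration consistent with what the diff shows (the actual AWX settings may differ):

    # pyproject.toml (assumed; shown as comments):
    #   [tool.black]
    #   line-length = 160                  # inferred from the long lines this patch keeps
    #   skip-string-normalization = true   # inferred: black would otherwise rewrite to double quotes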
@@ -45,13 +39,8 @@ class Migration(migrations.Migration):
name='unifiedjobtemplate',
unique_together=set([]),
),
-
# Inventory Refresh
- migrations.RenameField(
- 'InventorySource',
- 'group',
- 'deprecated_group'
- ),
+ migrations.RenameField('InventorySource', 'group', 'deprecated_group'),
migrations.AlterField(
model_name='inventorysource',
name='deprecated_group',
@@ -62,17 +51,24 @@ class Migration(migrations.Migration):
name='inventory',
field=models.ForeignKey(related_name='inventory_sources', default=None, to='main.Inventory', on_delete=models.CASCADE, null=True),
),
-
# Smart Inventory
migrations.AddField(
model_name='inventory',
name='host_filter',
- field=awx.main.fields.SmartFilterField(default=None, help_text='Filter that will be applied to the hosts of this inventory.', null=True, blank=True),
+ field=awx.main.fields.SmartFilterField(
+ default=None, help_text='Filter that will be applied to the hosts of this inventory.', null=True, blank=True
+ ),
),
migrations.AddField(
model_name='inventory',
name='kind',
- field=models.CharField(default='', help_text='Kind of inventory being represented.', max_length=32, blank=True, choices=[('', 'Hosts have a direct link to this inventory.'), ('smart', 'Hosts for inventory generated using the host_filter property.')]),
+ field=models.CharField(
+ default='',
+ help_text='Kind of inventory being represented.',
+ max_length=32,
+ blank=True,
+ choices=[('', 'Hosts have a direct link to this inventory.'), ('smart', 'Hosts for inventory generated using the host_filter property.')],
+ ),
),
migrations.CreateModel(
name='SmartInventoryMembership',
@@ -95,7 +91,6 @@ class Migration(migrations.Migration):
name='smartinventorymembership',
unique_together=set([('host', 'inventory')]),
),
-
# Background Inventory deletion
migrations.AddField(
model_name='inventory',
@@ -105,14 +100,17 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='inventory',
name='organization',
- field=models.ForeignKey(related_name='inventories', on_delete=models.SET_NULL, to='main.Organization', help_text='Organization containing this inventory.', null=True),
+ field=models.ForeignKey(
+ related_name='inventories', on_delete=models.SET_NULL, to='main.Organization', help_text='Organization containing this inventory.', null=True
+ ),
),
-
# Facts
migrations.AlterField(
model_name='fact',
name='facts',
- field=awx.main.fields.JSONBField(default=dict, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True),
+ field=awx.main.fields.JSONBField(
+ default=dict, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True
+ ),
),
migrations.AddField(
model_name='host',
@@ -127,18 +125,23 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='job',
name='use_fact_cache',
- field=models.BooleanField(default=False, help_text='If enabled, Tower will act as an Ansible Fact Cache Plugin; persisting facts at the end of a playbook run to the database and caching facts for use by Ansible.'),
+ field=models.BooleanField(
+ default=False,
+ help_text='If enabled, Tower will act as an Ansible Fact Cache Plugin; persisting facts at the end of a playbook run to the database and caching facts for use by Ansible.',
+ ),
),
migrations.AddField(
model_name='jobtemplate',
name='use_fact_cache',
- field=models.BooleanField(default=False, help_text='If enabled, Tower will act as an Ansible Fact Cache Plugin; persisting facts at the end of a playbook run to the database and caching facts for use by Ansible.'),
+ field=models.BooleanField(
+ default=False,
+ help_text='If enabled, Tower will act as an Ansible Fact Cache Plugin; persisting facts at the end of a playbook run to the database and caching facts for use by Ansible.',
+ ),
+ ),
+ migrations.RunSQL(
+ [("CREATE INDEX host_ansible_facts_default_gin ON %s USING gin" "(ansible_facts jsonb_path_ops);", [AsIs(Host._meta.db_table)])],
+ [('DROP INDEX host_ansible_facts_default_gin;', None)],
),
- migrations.RunSQL([("CREATE INDEX host_ansible_facts_default_gin ON %s USING gin"
- "(ansible_facts jsonb_path_ops);", [AsIs(Host._meta.db_table)])],
- [('DROP INDEX host_ansible_facts_default_gin;', None)]),
-
-
# SCM file-based inventories
migrations.AddField(
model_name='inventorysource',
@@ -148,27 +151,83 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='inventorysource',
name='source_project',
- field=models.ForeignKey(related_name='scm_inventory_sources', on_delete=models.CASCADE, default=None, blank=True, to='main.Project', help_text='Project containing inventory file used as source.', null=True),
+ field=models.ForeignKey(
+ related_name='scm_inventory_sources',
+ on_delete=models.CASCADE,
+ default=None,
+ blank=True,
+ to='main.Project',
+ help_text='Project containing inventory file used as source.',
+ null=True,
+ ),
),
migrations.AddField(
model_name='inventoryupdate',
name='source_project_update',
- field=models.ForeignKey(related_name='scm_inventory_updates', on_delete=models.CASCADE, default=None, blank=True, to='main.ProjectUpdate', help_text='Inventory files from this Project Update were used for the inventory update.', null=True),
+ field=models.ForeignKey(
+ related_name='scm_inventory_updates',
+ on_delete=models.CASCADE,
+ default=None,
+ blank=True,
+ to='main.ProjectUpdate',
+ help_text='Inventory files from this Project Update were used for the inventory update.',
+ null=True,
+ ),
),
migrations.AddField(
model_name='project',
name='inventory_files',
- field=awx.main.fields.JSONField(default=[], help_text='Suggested list of content that could be Ansible inventory in the project', verbose_name='Inventory Files', editable=False, blank=True),
+ field=awx.main.fields.JSONField(
+ default=[],
+ help_text='Suggested list of content that could be Ansible inventory in the project',
+ verbose_name='Inventory Files',
+ editable=False,
+ blank=True,
+ ),
),
migrations.AlterField(
model_name='inventorysource',
name='source',
- field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
+ field=models.CharField(
+ default='',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('', 'Manual'),
+ ('file', 'File, Directory or Script'),
+ ('scm', 'Sourced from a Project'),
+ ('ec2', 'Amazon EC2'),
+ ('gce', 'Google Compute Engine'),
+ ('azure_rm', 'Microsoft Azure Resource Manager'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Red Hat Satellite 6'),
+ ('cloudforms', 'Red Hat CloudForms'),
+ ('openstack', 'OpenStack'),
+ ('custom', 'Custom Script'),
+ ],
+ ),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
- field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')]),
+ field=models.CharField(
+ default='',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('', 'Manual'),
+ ('file', 'File, Directory or Script'),
+ ('scm', 'Sourced from a Project'),
+ ('ec2', 'Amazon EC2'),
+ ('gce', 'Google Compute Engine'),
+ ('azure_rm', 'Microsoft Azure Resource Manager'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Red Hat Satellite 6'),
+ ('cloudforms', 'Red Hat CloudForms'),
+ ('openstack', 'OpenStack'),
+ ('custom', 'Custom Script'),
+ ],
+ ),
),
migrations.AlterField(
model_name='inventorysource',
@@ -183,14 +242,27 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='unifiedjob',
name='launch_type',
- field=models.CharField(default='manual', max_length=20, editable=False, choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency'), ('workflow', 'Workflow'), ('sync', 'Sync'), ('scm', 'SCM Update')]),
+ field=models.CharField(
+ default='manual',
+ max_length=20,
+ editable=False,
+ choices=[
+ ('manual', 'Manual'),
+ ('relaunch', 'Relaunch'),
+ ('callback', 'Callback'),
+ ('scheduled', 'Scheduled'),
+ ('dependency', 'Dependency'),
+ ('workflow', 'Workflow'),
+ ('sync', 'Sync'),
+ ('scm', 'SCM Update'),
+ ],
+ ),
),
migrations.AddField(
model_name='inventorysource',
name='update_on_project_update',
field=models.BooleanField(default=False),
),
-
# Named URL
migrations.AlterField(
model_name='notificationtemplate',
@@ -206,7 +278,6 @@ class Migration(migrations.Migration):
name='notificationtemplate',
unique_together=set([('organization', 'name')]),
),
-
# Add verbosity option to inventory updates
migrations.AddField(
model_name='inventorysource',
@@ -218,14 +289,12 @@ class Migration(migrations.Migration):
name='verbosity',
field=models.PositiveIntegerField(default=1, blank=True, choices=[(0, '0 (WARNING)'), (1, '1 (INFO)'), (2, '2 (DEBUG)')]),
),
-
# Job Templates
migrations.AddField(
model_name='jobtemplate',
name='ask_verbosity_on_launch',
field=models.BooleanField(default=False),
),
-
# Workflows
migrations.AddField(
model_name='workflowjob',
@@ -237,7 +306,6 @@ class Migration(migrations.Migration):
name='allow_simultaneous',
field=models.BooleanField(default=False),
),
-
# Permission and Deprecated Field Removal
migrations.RemoveField(
model_name='permission',
@@ -302,7 +370,6 @@ class Migration(migrations.Migration):
migrations.DeleteModel(
name='Permission',
),
-
# Insights
migrations.AddField(
model_name='host',
@@ -312,14 +379,27 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='inventory',
name='insights_credential',
- field=models.ForeignKey(related_name='insights_inventories', on_delete=models.SET_NULL, default=None, blank=True, to='main.Credential', help_text='Credentials to be used by hosts belonging to this inventory when accessing Red Hat Insights API.', null=True),
+ field=models.ForeignKey(
+ related_name='insights_inventories',
+ on_delete=models.SET_NULL,
+ default=None,
+ blank=True,
+ to='main.Credential',
+ help_text='Credentials to be used by hosts belonging to this inventory when accessing Red Hat Insights API.',
+ null=True,
+ ),
),
migrations.AlterField(
model_name='inventory',
name='kind',
- field=models.CharField(default='', help_text='Kind of inventory being represented.', max_length=32, blank=True, choices=[('', 'Hosts have a direct link to this inventory.'), ('smart', 'Hosts for inventory generated using the host_filter property.')]),
+ field=models.CharField(
+ default='',
+ help_text='Kind of inventory being represented.',
+ max_length=32,
+ blank=True,
+ choices=[('', 'Hosts have a direct link to this inventory.'), ('smart', 'Hosts for inventory generated using the host_filter property.')],
+ ),
),
-
# Timeout help text update
migrations.AlterField(
model_name='inventorysource',
@@ -364,14 +444,17 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='job',
name='diff_mode',
- field=models.BooleanField(default=False, help_text='If enabled, textual changes made to any templated files on the host are shown in the standard output'),
+ field=models.BooleanField(
+ default=False, help_text='If enabled, textual changes made to any templated files on the host are shown in the standard output'
+ ),
),
migrations.AddField(
model_name='jobtemplate',
name='diff_mode',
- field=models.BooleanField(default=False, help_text='If enabled, textual changes made to any templated files on the host are shown in the standard output'),
+ field=models.BooleanField(
+ default=False, help_text='If enabled, textual changes made to any templated files on the host are shown in the standard output'
+ ),
),
-
migrations.CreateModel(
name='CredentialType',
fields=[
@@ -380,13 +463,65 @@ class Migration(migrations.Migration):
('modified', models.DateTimeField(default=None, editable=False)),
('description', models.TextField(default='', blank=True)),
('name', models.CharField(max_length=512)),
- ('kind', models.CharField(max_length=32, choices=[('ssh', 'Machine'), ('vault', 'Vault'), ('net', 'Network'), ('scm', 'Source Control'), ('cloud', 'Cloud'), ('insights', 'Insights')])),
+ (
+ 'kind',
+ models.CharField(
+ max_length=32,
+ choices=[
+ ('ssh', 'Machine'),
+ ('vault', 'Vault'),
+ ('net', 'Network'),
+ ('scm', 'Source Control'),
+ ('cloud', 'Cloud'),
+ ('insights', 'Insights'),
+ ],
+ ),
+ ),
('managed_by_tower', models.BooleanField(default=False, editable=False)),
- ('inputs', awx.main.fields.CredentialTypeInputField(default=dict, blank=True, help_text='Enter inputs using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax.')),
- ('injectors', awx.main.fields.CredentialTypeInjectorField(default=dict, blank=True, help_text='Enter injectors using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax.')),
- ('created_by', models.ForeignKey(related_name="{u'class': 'credentialtype', u'app_label': 'main'}(class)s_created+", on_delete=models.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
- ('modified_by', models.ForeignKey(related_name="{u'class': 'credentialtype', u'app_label': 'main'}(class)s_modified+", on_delete=models.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
- ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
+ (
+ 'inputs',
+ awx.main.fields.CredentialTypeInputField(
+ default=dict,
+ blank=True,
+ help_text='Enter inputs using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax.',
+ ),
+ ),
+ (
+ 'injectors',
+ awx.main.fields.CredentialTypeInjectorField(
+ default=dict,
+ blank=True,
+ help_text='Enter injectors using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax.',
+ ),
+ ),
+ (
+ 'created_by',
+ models.ForeignKey(
+ related_name="{u'class': 'credentialtype', u'app_label': 'main'}(class)s_created+",
+ on_delete=models.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
+ (
+ 'modified_by',
+ models.ForeignKey(
+ related_name="{u'class': 'credentialtype', u'app_label': 'main'}(class)s_modified+",
+ on_delete=models.SET_NULL,
+ default=None,
+ editable=False,
+ to=settings.AUTH_USER_MODEL,
+ null=True,
+ ),
+ ),
+ (
+ 'tags',
+ taggit.managers.TaggableManager(
+ to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'
+ ),
+ ),
],
options={
'ordering': ('kind', 'name'),
@@ -410,12 +545,16 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='job',
name='vault_credential',
- field=models.ForeignKey(related_name='jobs_as_vault_credential+', on_delete=models.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ field=models.ForeignKey(
+ related_name='jobs_as_vault_credential+', on_delete=models.SET_NULL, default=None, blank=True, to='main.Credential', null=True
+ ),
),
migrations.AddField(
model_name='jobtemplate',
name='vault_credential',
- field=models.ForeignKey(related_name='jobtemplates_as_vault_credential+', on_delete=models.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ field=models.ForeignKey(
+ related_name='jobtemplates_as_vault_credential+', on_delete=models.SET_NULL, default=None, blank=True, to='main.Credential', null=True
+ ),
),
migrations.AddField(
model_name='job',
@@ -431,20 +570,32 @@ class Migration(migrations.Migration):
name='credential',
unique_together=set([('organization', 'name', 'credential_type')]),
),
-
migrations.AlterField(
model_name='credential',
name='become_method',
- field=models.CharField(default='', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[('', 'None'), ('sudo', 'Sudo'), ('su', 'Su'), ('pbrun', 'Pbrun'), ('pfexec', 'Pfexec'), ('dzdo', 'DZDO'), ('pmrun', 'Pmrun'), ('runas', 'Runas')]),
+ field=models.CharField(
+ default='',
+ help_text='Privilege escalation method.',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('', 'None'),
+ ('sudo', 'Sudo'),
+ ('su', 'Su'),
+ ('pbrun', 'Pbrun'),
+ ('pfexec', 'Pfexec'),
+ ('dzdo', 'DZDO'),
+ ('pmrun', 'Pmrun'),
+ ('runas', 'Runas'),
+ ],
+ ),
),
-
# Connecting activity stream
migrations.AddField(
model_name='activitystream',
name='credential_type',
field=models.ManyToManyField(to='main.CredentialType', blank=True),
),
-
migrations.CreateModel(
name='InstanceGroup',
fields=[
@@ -452,8 +603,24 @@ class Migration(migrations.Migration):
('name', models.CharField(unique=True, max_length=250)),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
- ('controller', models.ForeignKey(related_name='controlled_groups', on_delete=models.CASCADE, default=None, editable=False, to='main.InstanceGroup', help_text='Instance Group to remotely control this group.', null=True)),
- ('instances', models.ManyToManyField(help_text='Instances that are members of this InstanceGroup', related_name='rampart_groups', editable=False, to='main.Instance')),
+ (
+ 'controller',
+ models.ForeignKey(
+ related_name='controlled_groups',
+ on_delete=models.CASCADE,
+ default=None,
+ editable=False,
+ to='main.InstanceGroup',
+ help_text='Instance Group to remotely control this group.',
+ null=True,
+ ),
+ ),
+ (
+ 'instances',
+ models.ManyToManyField(
+ help_text='Instances that are members of this InstanceGroup', related_name='rampart_groups', editable=False, to='main.Instance'
+ ),
+ ),
],
),
migrations.AddField(
@@ -464,7 +631,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='unifiedjob',
name='instance_group',
- field=models.ForeignKey(on_delete=models.SET_NULL, default=None, blank=True, to='main.InstanceGroup', help_text='The Instance group the job was run under', null=True),
+ field=models.ForeignKey(
+ on_delete=models.SET_NULL, default=None, blank=True, to='main.InstanceGroup', help_text='The Instance group the job was run under', null=True
+ ),
),
migrations.AddField(
model_name='unifiedjobtemplate',
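The RunSQL operation above pairs the forward CREATE INDEX with the DROP INDEX that undoes it, which is what keeps the migration reversible. A self-contained sketch of the same pattern with placeholder table and index names:

    from django.db import migrations

    class Migration(migrations.Migration):

        dependencies = [('main', '0005_previous')]  # placeholder dependency

        operations = [
            migrations.RunSQL(
                # forward: a GIN index tuned for JSONB containment queries
                'CREATE INDEX example_facts_gin ON example_host USING gin (facts jsonb_path_ops);',
                # reverse: drop it so the migration can be unapplied
                'DROP INDEX example_facts_gin;',
            ),
        ]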
diff --git a/awx/main/migrations/0008_v320_drop_v1_credential_fields.py b/awx/main/migrations/0008_v320_drop_v1_credential_fields.py
index f2fc44397e..8103e0327c 100644
--- a/awx/main/migrations/0008_v320_drop_v1_credential_fields.py
+++ b/awx/main/migrations/0008_v320_drop_v1_credential_fields.py
@@ -5,6 +5,7 @@ from __future__ import unicode_literals
# Django
from django.db import migrations
from django.db import models
+
# AWX
import awx.main.fields
@@ -103,12 +104,22 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='credential_type',
- field=models.ForeignKey(related_name='credentials', to='main.CredentialType', on_delete=models.CASCADE, null=False, help_text='Specify the type of credential you want to create. Refer to the Ansible Tower documentation for details on each type.')
+ field=models.ForeignKey(
+ related_name='credentials',
+ to='main.CredentialType',
+ on_delete=models.CASCADE,
+ null=False,
+ help_text='Specify the type of credential you want to create. Refer to the Ansible Tower documentation for details on each type.',
+ ),
),
migrations.AlterField(
model_name='credential',
name='inputs',
- field=awx.main.fields.CredentialInputField(default=dict, help_text='Enter inputs using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax.', blank=True),
+ field=awx.main.fields.CredentialInputField(
+ default=dict,
+ help_text='Enter inputs using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax.',
+ blank=True,
+ ),
),
migrations.RemoveField(
model_name='job',
diff --git a/awx/main/migrations/0010_v322_add_ovirt4_tower_inventory.py b/awx/main/migrations/0010_v322_add_ovirt4_tower_inventory.py
index 68933162c9..a8504fbad0 100644
--- a/awx/main/migrations/0010_v322_add_ovirt4_tower_inventory.py
+++ b/awx/main/migrations/0010_v322_add_ovirt4_tower_inventory.py
@@ -18,11 +18,49 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='inventorysource',
name='source',
- field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('rhv', 'Red Hat Virtualization'), ('tower', 'Ansible Tower'), ('custom', 'Custom Script')]),
+ field=models.CharField(
+ default='',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('', 'Manual'),
+ ('file', 'File, Directory or Script'),
+ ('scm', 'Sourced from a Project'),
+ ('ec2', 'Amazon EC2'),
+ ('gce', 'Google Compute Engine'),
+ ('azure_rm', 'Microsoft Azure Resource Manager'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Red Hat Satellite 6'),
+ ('cloudforms', 'Red Hat CloudForms'),
+ ('openstack', 'OpenStack'),
+ ('rhv', 'Red Hat Virtualization'),
+ ('tower', 'Ansible Tower'),
+ ('custom', 'Custom Script'),
+ ],
+ ),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
- field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('rhv', 'Red Hat Virtualization'), ('tower', 'Ansible Tower'), ('custom', 'Custom Script')]),
+ field=models.CharField(
+ default='',
+ max_length=32,
+ blank=True,
+ choices=[
+ ('', 'Manual'),
+ ('file', 'File, Directory or Script'),
+ ('scm', 'Sourced from a Project'),
+ ('ec2', 'Amazon EC2'),
+ ('gce', 'Google Compute Engine'),
+ ('azure_rm', 'Microsoft Azure Resource Manager'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Red Hat Satellite 6'),
+ ('cloudforms', 'Red Hat CloudForms'),
+ ('openstack', 'OpenStack'),
+ ('rhv', 'Red Hat Virtualization'),
+ ('tower', 'Ansible Tower'),
+ ('custom', 'Custom Script'),
+ ],
+ ),
),
]
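The same `source` list reappears in migration after migration because choices are frozen into each field definition: adding 'rhv' and 'tower' to the model forces a fresh AlterField here while earlier migrations keep their historical lists. A hypothetical model edit (not AWX's actual models file) that would make makemigrations emit such an operation:

    from django.db import models

    class InventorySource(models.Model):  # illustrative stand-in
        source = models.CharField(
            default='',
            max_length=32,
            blank=True,
            choices=[
                ('', 'Manual'),
                ('rhv', 'Red Hat Virtualization'),  # newly added choice
                ('tower', 'Ansible Tower'),         # newly added choice
            ],
        )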
diff --git a/awx/main/migrations/0013_v330_multi_credential.py b/awx/main/migrations/0013_v330_multi_credential.py
index 437ade51aa..1ab940ff31 100644
--- a/awx/main/migrations/0013_v330_multi_credential.py
+++ b/awx/main/migrations/0013_v330_multi_credential.py
@@ -59,5 +59,5 @@ class Migration(migrations.Migration):
name='vault_credential',
),
migrations.RunPython(migration_utils.set_current_apps_for_migrations, credentialtypes.remove_vault_id_field),
- migrations.RunPython(credentialtypes.add_vault_id_field, migration_utils.set_current_apps_for_migrations)
+ migrations.RunPython(credentialtypes.add_vault_id_field, migration_utils.set_current_apps_for_migrations),
]
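The RunPython operations above each pair a forward callable with a reverse one, so the data migration can be unapplied; black's only change here is the trailing comma. A minimal self-contained sketch of the pattern, with placeholder names:

    from django.db import migrations

    def forwards(apps, schema_editor):
        # Fetch models through `apps` so the historical schema is used.
        Example = apps.get_model('main', 'Example')  # placeholder model
        Example.objects.filter(flag__isnull=True).update(flag=False)

    def backwards(apps, schema_editor):
        Example = apps.get_model('main', 'Example')
        Example.objects.filter(flag=False).update(flag=None)

    operation = migrations.RunPython(forwards, backwards)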
diff --git a/awx/main/migrations/0014_v330_saved_launchtime_configs.py b/awx/main/migrations/0014_v330_saved_launchtime_configs.py
index d9c7b105d9..d120166218 100644
--- a/awx/main/migrations/0014_v330_saved_launchtime_configs.py
+++ b/awx/main/migrations/0014_v330_saved_launchtime_configs.py
@@ -30,7 +30,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='schedule',
name='inventory',
- field=models.ForeignKey(related_name='schedules', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True),
+ field=models.ForeignKey(
+ related_name='schedules', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True
+ ),
),
migrations.AddField(
model_name='schedule',
@@ -87,7 +89,12 @@ class Migration(migrations.Migration):
('survey_passwords', awx.main.fields.JSONField(blank=True, default=dict, editable=False)),
('char_prompts', awx.main.fields.JSONField(blank=True, default=dict)),
('credentials', models.ManyToManyField(related_name='joblaunchconfigs', to='main.Credential')),
- ('inventory', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='joblaunchconfigs', to='main.Inventory')),
+ (
+ 'inventory',
+ models.ForeignKey(
+ blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='joblaunchconfigs', to='main.Inventory'
+ ),
+ ),
('job', models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='launch_config', to='main.UnifiedJob')),
],
),
diff --git a/awx/main/migrations/0016_v330_non_blank_workflow.py b/awx/main/migrations/0016_v330_non_blank_workflow.py
index 1b3880f8d4..f715f0635c 100644
--- a/awx/main/migrations/0016_v330_non_blank_workflow.py
+++ b/awx/main/migrations/0016_v330_non_blank_workflow.py
@@ -17,7 +17,9 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='workflowjobtemplatenode',
name='workflow_job_template',
- field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='workflow_job_template_nodes', to='main.WorkflowJobTemplate'),
+ field=models.ForeignKey(
+ default=None, on_delete=django.db.models.deletion.CASCADE, related_name='workflow_job_template_nodes', to='main.WorkflowJobTemplate'
+ ),
preserve_default=False,
),
]
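preserve_default=False above tells Django that the supplied default only exists to fill existing rows while the migration runs; the field definition recorded afterwards carries no default. An illustrative standalone operation (model and app names are placeholders):

    from django.db import migrations, models

    operation = migrations.AlterField(
        model_name='examplenode',
        name='workflow',
        # default=None populates existing rows during the ALTER ...
        field=models.ForeignKey(default=None, on_delete=models.CASCADE, to='main.ExampleWorkflow'),
        # ... but is dropped from the recorded field state afterwards.
        preserve_default=False,
    )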
diff --git a/awx/main/migrations/0017_v330_move_deprecated_stdout.py b/awx/main/migrations/0017_v330_move_deprecated_stdout.py
index 5b7f8fc027..e7c3250aa2 100644
--- a/awx/main/migrations/0017_v330_move_deprecated_stdout.py
+++ b/awx/main/migrations/0017_v330_move_deprecated_stdout.py
@@ -20,12 +20,14 @@ class Migration(migrations.Migration):
# Using SeparateDatabaseAndState here allows us to update the migration
# state so that Django thinks the UnifiedJob.result_stdout_text field
# is gone _without_ actually deleting the underlying column/data
- migrations.SeparateDatabaseAndState(state_operations=[
- migrations.RemoveField(
- model_name='unifiedjob',
- name='result_stdout_text',
- ),
- ]),
+ migrations.SeparateDatabaseAndState(
+ state_operations=[
+ migrations.RemoveField(
+ model_name='unifiedjob',
+ name='result_stdout_text',
+ ),
+ ]
+ ),
# On other side of the equation, this migration introduces a new model
# which is *unmanaged* (meaning, a new table is not created for it);
# instead, this sort of "virtual" model is used to maintain an ORM
@@ -34,7 +36,7 @@ class Migration(migrations.Migration):
name='UnifiedJobDeprecatedStdout',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('result_stdout_text', models.TextField(editable=False, null=True))
+ ('result_stdout_text', models.TextField(editable=False, null=True)),
],
options={
'db_table': 'main_unifiedjob',
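The comments in 0017 describe the trick: SeparateDatabaseAndState removes the field from Django's migration state without touching the underlying column, and the unmanaged model then re-exposes that surviving table to the ORM. A simplified sketch under placeholder names:

    from django.db import migrations, models

    class Migration(migrations.Migration):

        dependencies = [('main', '0016_previous')]  # placeholder dependency

        operations = [
            migrations.SeparateDatabaseAndState(
                state_operations=[
                    migrations.RemoveField(model_name='examplejob', name='old_stdout'),
                ],
                # no database_operations: the column and its data stay in place
            ),
            migrations.CreateModel(
                name='ExampleJobDeprecatedStdout',
                fields=[
                    ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)),
                    ('old_stdout', models.TextField(editable=False, null=True)),
                ],
                options={
                    'db_table': 'main_examplejob',  # points at the existing table
                    'managed': False,               # so no CREATE TABLE is emitted
                },
            ),
        ]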
diff --git a/awx/main/migrations/0018_v330_add_additional_stdout_events.py b/awx/main/migrations/0018_v330_add_additional_stdout_events.py
index 33abaf0eee..c9b026eeb5 100644
--- a/awx/main/migrations/0018_v330_add_additional_stdout_events.py
+++ b/awx/main/migrations/0018_v330_add_additional_stdout_events.py
@@ -27,7 +27,12 @@ class Migration(migrations.Migration):
('verbosity', models.PositiveIntegerField(default=0, editable=False)),
('start_line', models.PositiveIntegerField(default=0, editable=False)),
('end_line', models.PositiveIntegerField(default=0, editable=False)),
- ('inventory_update', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='inventory_update_events', to='main.InventoryUpdate')),
+ (
+ 'inventory_update',
+ models.ForeignKey(
+ editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='inventory_update_events', to='main.InventoryUpdate'
+ ),
+ ),
],
options={
'ordering': ('-pk',),
@@ -39,7 +44,46 @@ class Migration(migrations.Migration):
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)),
- ('event', models.CharField(choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_item_on_ok', 'Item OK'), ('runner_item_on_failed', 'Item Failed'), ('runner_item_on_skipped', 'Item Skipped'), ('runner_retry', 'Host Retry'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_include', 'Including File'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')], max_length=100)),
+ (
+ 'event',
+ models.CharField(
+ choices=[
+ ('runner_on_failed', 'Host Failed'),
+ ('runner_on_ok', 'Host OK'),
+ ('runner_on_error', 'Host Failure'),
+ ('runner_on_skipped', 'Host Skipped'),
+ ('runner_on_unreachable', 'Host Unreachable'),
+ ('runner_on_no_hosts', 'No Hosts Remaining'),
+ ('runner_on_async_poll', 'Host Polling'),
+ ('runner_on_async_ok', 'Host Async OK'),
+ ('runner_on_async_failed', 'Host Async Failure'),
+ ('runner_item_on_ok', 'Item OK'),
+ ('runner_item_on_failed', 'Item Failed'),
+ ('runner_item_on_skipped', 'Item Skipped'),
+ ('runner_retry', 'Host Retry'),
+ ('runner_on_file_diff', 'File Difference'),
+ ('playbook_on_start', 'Playbook Started'),
+ ('playbook_on_notify', 'Running Handlers'),
+ ('playbook_on_include', 'Including File'),
+ ('playbook_on_no_hosts_matched', 'No Hosts Matched'),
+ ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'),
+ ('playbook_on_task_start', 'Task Started'),
+ ('playbook_on_vars_prompt', 'Variables Prompted'),
+ ('playbook_on_setup', 'Gathering Facts'),
+ ('playbook_on_import_for_host', 'internal: on Import for Host'),
+ ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'),
+ ('playbook_on_play_start', 'Play Started'),
+ ('playbook_on_stats', 'Playbook Complete'),
+ ('debug', 'Debug'),
+ ('verbose', 'Verbose'),
+ ('deprecated', 'Deprecated'),
+ ('warning', 'Warning'),
+ ('system_warning', 'System Warning'),
+ ('error', 'Error'),
+ ],
+ max_length=100,
+ ),
+ ),
('event_data', awx.main.fields.JSONField(blank=True, default=dict)),
('failed', models.BooleanField(default=False, editable=False)),
('changed', models.BooleanField(default=False, editable=False)),
@@ -53,7 +97,12 @@ class Migration(migrations.Migration):
('verbosity', models.PositiveIntegerField(default=0, editable=False)),
('start_line', models.PositiveIntegerField(default=0, editable=False)),
('end_line', models.PositiveIntegerField(default=0, editable=False)),
- ('project_update', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='project_update_events', to='main.ProjectUpdate')),
+ (
+ 'project_update',
+ models.ForeignKey(
+ editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='project_update_events', to='main.ProjectUpdate'
+ ),
+ ),
],
options={
'ordering': ('pk',),
@@ -72,7 +121,10 @@ class Migration(migrations.Migration):
('verbosity', models.PositiveIntegerField(default=0, editable=False)),
('start_line', models.PositiveIntegerField(default=0, editable=False)),
('end_line', models.PositiveIntegerField(default=0, editable=False)),
- ('system_job', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='system_job_events', to='main.SystemJob')),
+ (
+ 'system_job',
+ models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='system_job_events', to='main.SystemJob'),
+ ),
],
options={
'ordering': ('-pk',),
diff --git a/awx/main/migrations/0020_v330_instancegroup_policies.py b/awx/main/migrations/0020_v330_instancegroup_policies.py
index a6716352e9..e2dc677b44 100644
--- a/awx/main/migrations/0020_v330_instancegroup_policies.py
+++ b/awx/main/migrations/0020_v330_instancegroup_policies.py
@@ -16,8 +16,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='instancegroup',
name='policy_instance_list',
- field=awx.main.fields.JSONField(default=[], help_text='List of exact-match Instances that will always be automatically assigned to this group',
- blank=True),
+ field=awx.main.fields.JSONField(
+ default=[], help_text='List of exact-match Instances that will always be automatically assigned to this group', blank=True
+ ),
),
migrations.AddField(
model_name='instancegroup',
@@ -34,29 +35,9 @@ class Migration(migrations.Migration):
name='capacity_adjustment',
field=models.DecimalField(decimal_places=2, default=Decimal('1.0'), max_digits=3),
),
- migrations.AddField(
- model_name='instance',
- name='cpu',
- field=models.IntegerField(default=0, editable=False)
- ),
- migrations.AddField(
- model_name='instance',
- name='memory',
- field=models.BigIntegerField(default=0, editable=False)
- ),
- migrations.AddField(
- model_name='instance',
- name='cpu_capacity',
- field=models.IntegerField(default=0, editable=False)
- ),
- migrations.AddField(
- model_name='instance',
- name='mem_capacity',
- field=models.IntegerField(default=0, editable=False)
- ),
- migrations.AddField(
- model_name='instance',
- name='enabled',
- field=models.BooleanField(default=True)
- )
+ migrations.AddField(model_name='instance', name='cpu', field=models.IntegerField(default=0, editable=False)),
+ migrations.AddField(model_name='instance', name='memory', field=models.BigIntegerField(default=0, editable=False)),
+ migrations.AddField(model_name='instance', name='cpu_capacity', field=models.IntegerField(default=0, editable=False)),
+ migrations.AddField(model_name='instance', name='mem_capacity', field=models.IntegerField(default=0, editable=False)),
+ migrations.AddField(model_name='instance', name='enabled', field=models.BooleanField(default=True)),
]
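Every role declared in the 0021 hunks below uses AWX's ImplicitRoleField, whose parent_role argument names the roles that implicitly inherit the new one: 'singleton:...' entries are global roles, and dotted paths walk to roles on related objects. A hypothetical model-level declaration, based only on the usage visible in these migrations:

    from django.db import models
    import awx.main.fields

    class ExampleResource(models.Model):  # illustrative, not an AWX model
        organization = models.ForeignKey('main.Organization', on_delete=models.CASCADE)
        # System administrators and the parent organization's admins
        # implicitly hold this resource's admin role.
        admin_role = awx.main.fields.ImplicitRoleField(
            parent_role=['singleton:system_administrator', 'organization.admin_role'],
        )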
diff --git a/awx/main/migrations/0021_v330_declare_new_rbac_roles.py b/awx/main/migrations/0021_v330_declare_new_rbac_roles.py
index 20dc7685e0..c1c1521efc 100644
--- a/awx/main/migrations/0021_v330_declare_new_rbac_roles.py
+++ b/awx/main/migrations/0021_v330_declare_new_rbac_roles.py
@@ -18,77 +18,145 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='organization',
name='execute_role',
- field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+ ),
),
migrations.AddField(
model_name='organization',
name='job_template_admin_role',
- field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+ ),
),
migrations.AddField(
model_name='organization',
name='credential_admin_role',
- field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+ ),
),
migrations.AddField(
model_name='organization',
name='inventory_admin_role',
- field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+ ),
),
migrations.AddField(
model_name='organization',
name='project_admin_role',
- field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+ ),
),
migrations.AddField(
model_name='organization',
name='workflow_admin_role',
- field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+ ),
),
migrations.AddField(
model_name='organization',
name='notification_admin_role',
- field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+ ),
),
migrations.AlterField(
model_name='credential',
name='admin_role',
- field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['singleton:system_administrator', 'organization.credential_admin_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ null='True',
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_role=['singleton:system_administrator', 'organization.credential_admin_role'],
+ related_name='+',
+ to='main.Role',
+ ),
),
migrations.AlterField(
model_name='inventory',
name='admin_role',
- field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
+ ),
),
migrations.AlterField(
model_name='project',
name='admin_role',
- field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['organization.project_admin_role', 'singleton:system_administrator'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ null='True',
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_role=['organization.project_admin_role', 'singleton:system_administrator'],
+ related_name='+',
+ to='main.Role',
+ ),
),
migrations.AlterField(
model_name='workflowjobtemplate',
name='admin_role',
- field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ null='True',
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'],
+ related_name='+',
+ to='main.Role',
+ ),
),
migrations.AlterField(
model_name='workflowjobtemplate',
name='execute_role',
- field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role', 'organization.execute_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ null='True',
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_role=['admin_role', 'organization.execute_role'],
+ related_name='+',
+ to='main.Role',
+ ),
),
migrations.AlterField(
model_name='jobtemplate',
name='admin_role',
- field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ editable=False,
+ null='True',
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'],
+ related_name='+',
+ to='main.Role',
+ ),
),
migrations.AlterField(
model_name='jobtemplate',
name='execute_role',
- field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ null='True',
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'],
+ related_name='+',
+ to='main.Role',
+ ),
),
migrations.AlterField(
model_name='organization',
name='member_role',
- field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role', 'execute_role', 'project_admin_role', 'inventory_admin_role', 'workflow_admin_role', 'notification_admin_role', 'credential_admin_role', 'job_template_admin_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ editable=False,
+ null='True',
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_role=[
+ 'admin_role',
+ 'execute_role',
+ 'project_admin_role',
+ 'inventory_admin_role',
+ 'workflow_admin_role',
+ 'notification_admin_role',
+ 'credential_admin_role',
+ 'job_template_admin_role',
+ ],
+ related_name='+',
+ to='main.Role',
+ ),
),
-
]
diff --git a/awx/main/migrations/0023_v330_inventory_multicred.py b/awx/main/migrations/0023_v330_inventory_multicred.py
index fdb95e8ddc..06f35bacd9 100644
--- a/awx/main/migrations/0023_v330_inventory_multicred.py
+++ b/awx/main/migrations/0023_v330_inventory_multicred.py
@@ -5,10 +5,7 @@ from __future__ import unicode_literals
from django.db import migrations
from awx.main.migrations import _migration_utils as migration_utils
-from awx.main.migrations._multi_cred import (
- migrate_inventory_source_cred,
- migrate_inventory_source_cred_reverse
-)
+from awx.main.migrations._multi_cred import migrate_inventory_source_cred, migrate_inventory_source_cred_reverse
class Migration(migrations.Migration):
diff --git a/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py b/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py
index 08a1415951..cc1d1bfeba 100644
--- a/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py
+++ b/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py
@@ -8,6 +8,7 @@ import django.db.models.deletion
import oauth2_provider
import re
+
class Migration(migrations.Migration):
dependencies = [
@@ -21,15 +22,28 @@ class Migration(migrations.Migration):
]
operations = [
-
migrations.CreateModel(
name='OAuth2Application',
fields=[
('id', models.BigAutoField(primary_key=True, serialize=False)),
('client_id', models.CharField(db_index=True, default=oauth2_provider.generators.generate_client_id, max_length=100, unique=True)),
- ('redirect_uris', models.TextField(blank=True, help_text='Allowed URIs list, space separated', validators=[oauth2_provider.validators.validate_uris])),
+ (
+ 'redirect_uris',
+ models.TextField(blank=True, help_text='Allowed URIs list, space separated', validators=[oauth2_provider.validators.validate_uris]),
+ ),
('client_type', models.CharField(choices=[('confidential', 'Confidential'), ('public', 'Public')], max_length=32)),
- ('authorization_grant_type', models.CharField(choices=[('authorization-code', 'Authorization code'), ('implicit', 'Implicit'), ('password', 'Resource owner password-based'), ('client-credentials', 'Client credentials')], max_length=32)),
+ (
+ 'authorization_grant_type',
+ models.CharField(
+ choices=[
+ ('authorization-code', 'Authorization code'),
+ ('implicit', 'Implicit'),
+ ('password', 'Resource owner password-based'),
+ ('client-credentials', 'Client credentials'),
+ ],
+ max_length=32,
+ ),
+ ),
('client_secret', models.CharField(blank=True, db_index=True, default=oauth2_provider.generators.generate_client_secret, max_length=255)),
('name', models.CharField(blank=True, max_length=255)),
('skip_authorization', models.BooleanField(default=False)),
@@ -37,7 +51,12 @@ class Migration(migrations.Migration):
('updated', models.DateTimeField(auto_now=True)),
('description', models.TextField(blank=True, default='')),
('logo_data', models.TextField(default='', editable=False, validators=[django.core.validators.RegexValidator(re.compile('.*'))])),
- ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='main_oauth2application', to=settings.AUTH_USER_MODEL)),
+ (
+ 'user',
+ models.ForeignKey(
+ blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='main_oauth2application', to=settings.AUTH_USER_MODEL
+ ),
+ ),
],
options={
'verbose_name': 'application',
@@ -54,8 +73,16 @@ class Migration(migrations.Migration):
('updated', models.DateTimeField(auto_now=True)),
('description', models.CharField(blank=True, default='', max_length=200)),
('last_used', models.DateTimeField(default=None, editable=False, null=True)),
- ('application', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.OAUTH2_PROVIDER_APPLICATION_MODEL)),
- ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='main_oauth2accesstoken', to=settings.AUTH_USER_MODEL)),
+ (
+ 'application',
+ models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.OAUTH2_PROVIDER_APPLICATION_MODEL),
+ ),
+ (
+ 'user',
+ models.ForeignKey(
+ blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='main_oauth2accesstoken', to=settings.AUTH_USER_MODEL
+ ),
+ ),
],
options={
'verbose_name': 'access token',
@@ -71,5 +98,4 @@ class Migration(migrations.Migration):
name='o_auth2_application',
field=models.ManyToManyField(to='main.OAuth2Application', blank=True),
),
-
]
diff --git a/awx/main/migrations/0026_v330_delete_authtoken.py b/awx/main/migrations/0026_v330_delete_authtoken.py
index c1a8fe19d4..d20d3d54e4 100644
--- a/awx/main/migrations/0026_v330_delete_authtoken.py
+++ b/awx/main/migrations/0026_v330_delete_authtoken.py
@@ -9,6 +9,7 @@ import django.db.models.deletion
# TODO: Squash all of these migrations with '0024_v330_add_oauth_activity_stream_registrar'
+
class Migration(migrations.Migration):
dependencies = [
diff --git a/awx/main/migrations/0030_v330_modify_application.py b/awx/main/migrations/0030_v330_modify_application.py
index 32b4fdd5a3..dcdfe074e0 100644
--- a/awx/main/migrations/0030_v330_modify_application.py
+++ b/awx/main/migrations/0030_v330_modify_application.py
@@ -18,7 +18,13 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='oauth2application',
name='organization',
- field=models.ForeignKey(help_text='Organization containing this application.', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='applications', to='main.Organization'),
+ field=models.ForeignKey(
+ help_text='Organization containing this application.',
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name='applications',
+ to='main.Organization',
+ ),
),
migrations.AlterUniqueTogether(
name='oauth2application',
diff --git a/awx/main/migrations/0031_v330_encrypt_oauth2_secret.py b/awx/main/migrations/0031_v330_encrypt_oauth2_secret.py
index 4bb993f423..d1f2288d4e 100644
--- a/awx/main/migrations/0031_v330_encrypt_oauth2_secret.py
+++ b/awx/main/migrations/0031_v330_encrypt_oauth2_secret.py
@@ -17,6 +17,8 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='oauth2application',
name='client_secret',
- field=awx.main.fields.OAuth2ClientSecretField(blank=True, db_index=True, default=oauth2_provider.generators.generate_client_secret, max_length=1024),
+ field=awx.main.fields.OAuth2ClientSecretField(
+ blank=True, db_index=True, default=oauth2_provider.generators.generate_client_secret, max_length=1024
+ ),
),
]
diff --git a/awx/main/migrations/0032_v330_polymorphic_delete.py b/awx/main/migrations/0032_v330_polymorphic_delete.py
index da351240d8..97539ed3fd 100644
--- a/awx/main/migrations/0032_v330_polymorphic_delete.py
+++ b/awx/main/migrations/0032_v330_polymorphic_delete.py
@@ -16,6 +16,13 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='unifiedjob',
name='instance_group',
- field=models.ForeignKey(blank=True, default=None, help_text='The Instance group the job was run under', null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, to='main.InstanceGroup'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='The Instance group the job was run under',
+ null=True,
+ on_delete=awx.main.utils.polymorphic.SET_NULL,
+ to='main.InstanceGroup',
+ ),
),
]
diff --git a/awx/main/migrations/0033_v330_oauth_help_text.py b/awx/main/migrations/0033_v330_oauth_help_text.py
index 8912978065..606fd8ffd9 100644
--- a/awx/main/migrations/0033_v330_oauth_help_text.py
+++ b/awx/main/migrations/0033_v330_oauth_help_text.py
@@ -10,6 +10,7 @@ import oauth2_provider.generators
# TODO: Squash all of these migrations with '0024_v330_add_oauth_activity_stream_registrar'
+
class Migration(migrations.Migration):
dependencies = [
@@ -25,22 +26,48 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='oauth2accesstoken',
name='user',
- field=models.ForeignKey(blank=True, help_text='The user representing the token owner', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='main_oauth2accesstoken', to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ blank=True,
+ help_text='The user representing the token owner',
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name='main_oauth2accesstoken',
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='oauth2application',
name='authorization_grant_type',
-            field=models.CharField(choices=[('authorization-code', 'Authorization code'), ('implicit', 'Implicit'), ('password', 'Resource owner password-based'), ('client-credentials', 'Client credentials')], help_text='The Grant type the user must use to acquire tokens for this application.', max_length=32),
+ field=models.CharField(
+ choices=[
+ ('authorization-code', 'Authorization code'),
+ ('implicit', 'Implicit'),
+ ('password', 'Resource owner password-based'),
+ ('client-credentials', 'Client credentials'),
+ ],
+                help_text='The Grant type the user must use to acquire tokens for this application.',
+ max_length=32,
+ ),
),
migrations.AlterField(
model_name='oauth2application',
name='client_secret',
- field=awx.main.fields.OAuth2ClientSecretField(blank=True, db_index=True, default=oauth2_provider.generators.generate_client_secret, help_text='Used for more stringent verification of access to an application when creating a token.', max_length=1024),
+ field=awx.main.fields.OAuth2ClientSecretField(
+ blank=True,
+ db_index=True,
+ default=oauth2_provider.generators.generate_client_secret,
+ help_text='Used for more stringent verification of access to an application when creating a token.',
+ max_length=1024,
+ ),
),
migrations.AlterField(
model_name='oauth2application',
name='client_type',
- field=models.CharField(choices=[('confidential', 'Confidential'), ('public', 'Public')], help_text='Set to Public or Confidential depending on how secure the client device is.', max_length=32),
+ field=models.CharField(
+ choices=[('confidential', 'Confidential'), ('public', 'Public')],
+ help_text='Set to Public or Confidential depending on how secure the client device is.',
+ max_length=32,
+ ),
),
migrations.AlterField(
model_name='oauth2application',
diff --git a/awx/main/migrations/0035_v330_more_oauth2_help_text.py b/awx/main/migrations/0035_v330_more_oauth2_help_text.py
index 91d8531730..f8802319e0 100644
--- a/awx/main/migrations/0035_v330_more_oauth2_help_text.py
+++ b/awx/main/migrations/0035_v330_more_oauth2_help_text.py
@@ -6,6 +6,7 @@ from django.db import migrations, models
# TODO: Squash all of these migrations with '0024_v330_add_oauth_activity_stream_registrar'
+
class Migration(migrations.Migration):
dependencies = [
@@ -16,6 +17,10 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='oauth2accesstoken',
name='scope',
- field=models.TextField(blank=True, default='write', help_text="Allowed scopes, further restricts user's permissions. Must be a simple space-separated string with allowed scopes ['read', 'write']."),
+ field=models.TextField(
+ blank=True,
+ default='write',
+ help_text="Allowed scopes, further restricts user's permissions. Must be a simple space-separated string with allowed scopes ['read', 'write'].",
+ ),
),
]
diff --git a/awx/main/migrations/0036_v330_credtype_remove_become_methods.py b/awx/main/migrations/0036_v330_credtype_remove_become_methods.py
index 3a43bd6a8b..73541e79c4 100644
--- a/awx/main/migrations/0036_v330_credtype_remove_become_methods.py
+++ b/awx/main/migrations/0036_v330_credtype_remove_become_methods.py
@@ -11,7 +11,6 @@ class Migration(migrations.Migration):
dependencies = [
('main', '0035_v330_more_oauth2_help_text'),
-
]
operations = [
diff --git a/awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py b/awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py
index 6f79485f3f..71a9a92df7 100644
--- a/awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py
+++ b/awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py
@@ -1,4 +1,4 @@
-#d -*- coding: utf-8 -*-
+# d -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-05-21 19:51
from __future__ import unicode_literals
diff --git a/awx/main/migrations/0039_v330_custom_venv_help_text.py b/awx/main/migrations/0039_v330_custom_venv_help_text.py
index ba68aa158f..86101eb570 100644
--- a/awx/main/migrations/0039_v330_custom_venv_help_text.py
+++ b/awx/main/migrations/0039_v330_custom_venv_help_text.py
@@ -18,16 +18,22 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='jobtemplate',
name='custom_virtualenv',
- field=models.CharField(blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True),
+ field=models.CharField(
+ blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True
+ ),
),
migrations.AlterField(
model_name='organization',
name='custom_virtualenv',
- field=models.CharField(blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True),
+ field=models.CharField(
+ blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True
+ ),
),
migrations.AlterField(
model_name='project',
name='custom_virtualenv',
- field=models.CharField(blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True),
+ field=models.CharField(
+ blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True
+ ),
),
]
diff --git a/awx/main/migrations/0041_v330_update_oauth_refreshtoken.py b/awx/main/migrations/0041_v330_update_oauth_refreshtoken.py
index 6f71563e29..8ad0a32a4c 100644
--- a/awx/main/migrations/0041_v330_update_oauth_refreshtoken.py
+++ b/awx/main/migrations/0041_v330_update_oauth_refreshtoken.py
@@ -18,6 +18,12 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='oauth2accesstoken',
name='source_refresh_token',
- field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='refreshed_access_token', to=settings.OAUTH2_PROVIDER_REFRESH_TOKEN_MODEL),
+ field=models.OneToOneField(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name='refreshed_access_token',
+ to=settings.OAUTH2_PROVIDER_REFRESH_TOKEN_MODEL,
+ ),
),
]
diff --git a/awx/main/migrations/0042_v330_org_member_role_deparent.py b/awx/main/migrations/0042_v330_org_member_role_deparent.py
index 67795e3901..c5be5146b4 100644
--- a/awx/main/migrations/0042_v330_org_member_role_deparent.py
+++ b/awx/main/migrations/0042_v330_org_member_role_deparent.py
@@ -18,12 +18,31 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='organization',
name='member_role',
- field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.Role'
+ ),
),
migrations.AlterField(
model_name='organization',
name='read_role',
- field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['member_role', 'auditor_role', 'execute_role', 'project_admin_role', 'inventory_admin_role', 'workflow_admin_role', 'notification_admin_role', 'credential_admin_role', 'job_template_admin_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ editable=False,
+ null='True',
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_role=[
+ 'member_role',
+ 'auditor_role',
+ 'execute_role',
+ 'project_admin_role',
+ 'inventory_admin_role',
+ 'workflow_admin_role',
+ 'notification_admin_role',
+ 'credential_admin_role',
+ 'job_template_admin_role',
+ ],
+ related_name='+',
+ to='main.Role',
+ ),
),
migrations.RunPython(rebuild_role_hierarchy),
]
diff --git a/awx/main/migrations/0044_v330_add_inventory_update_inventory.py b/awx/main/migrations/0044_v330_add_inventory_update_inventory.py
index 1ec8b838ec..30799955bf 100644
--- a/awx/main/migrations/0044_v330_add_inventory_update_inventory.py
+++ b/awx/main/migrations/0044_v330_add_inventory_update_inventory.py
@@ -16,6 +16,8 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='inventoryupdate',
name='inventory',
- field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='inventory_updates', to='main.Inventory'),
+ field=models.ForeignKey(
+ default=None, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='inventory_updates', to='main.Inventory'
+ ),
),
]
diff --git a/awx/main/migrations/0046_v330_remove_client_credentials_grant.py b/awx/main/migrations/0046_v330_remove_client_credentials_grant.py
index ebb87b3b5a..a02a248c3c 100644
--- a/awx/main/migrations/0046_v330_remove_client_credentials_grant.py
+++ b/awx/main/migrations/0046_v330_remove_client_credentials_grant.py
@@ -15,6 +15,10 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='oauth2application',
name='authorization_grant_type',
-            field=models.CharField(choices=[('authorization-code', 'Authorization code'), ('implicit', 'Implicit'), ('password', 'Resource owner password-based')], help_text='The Grant type the user must use to acquire tokens for this application.', max_length=32),
+ field=models.CharField(
+ choices=[('authorization-code', 'Authorization code'), ('implicit', 'Implicit'), ('password', 'Resource owner password-based')],
+            help_text='The Grant type the user must use to acquire tokens for this application.',
+ max_length=32,
+ ),
),
]
diff --git a/awx/main/migrations/0048_v330_django_created_modified_by_model_name.py b/awx/main/migrations/0048_v330_django_created_modified_by_model_name.py
index 13e04cbec8..d0e135cdb4 100644
--- a/awx/main/migrations/0048_v330_django_created_modified_by_model_name.py
+++ b/awx/main/migrations/0048_v330_django_created_modified_by_model_name.py
@@ -17,131 +17,313 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='created_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credential', 'model_name': 'credential', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'credential', 'model_name': 'credential', 'app_label': 'main'}(class)s_created+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='credential',
name='modified_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credential', 'model_name': 'credential', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'credential', 'model_name': 'credential', 'app_label': 'main'}(class)s_modified+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='credentialtype',
name='created_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credentialtype', 'model_name': 'credentialtype', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'credentialtype', 'model_name': 'credentialtype', 'app_label': 'main'}(class)s_created+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='credentialtype',
name='modified_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credentialtype', 'model_name': 'credentialtype', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'credentialtype', 'model_name': 'credentialtype', 'app_label': 'main'}(class)s_modified+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='custominventoryscript',
name='created_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'custominventoryscript', 'model_name': 'custominventoryscript', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'custominventoryscript', 'model_name': 'custominventoryscript', 'app_label': 'main'}(class)s_created+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='custominventoryscript',
name='modified_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'custominventoryscript', 'model_name': 'custominventoryscript', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'custominventoryscript', 'model_name': 'custominventoryscript', 'app_label': 'main'}(class)s_modified+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='group',
name='created_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'group', 'model_name': 'group', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'group', 'model_name': 'group', 'app_label': 'main'}(class)s_created+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='group',
name='modified_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'group', 'model_name': 'group', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'group', 'model_name': 'group', 'app_label': 'main'}(class)s_modified+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='host',
name='created_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'host', 'model_name': 'host', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'host', 'model_name': 'host', 'app_label': 'main'}(class)s_created+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='host',
name='modified_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'host', 'model_name': 'host', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'host', 'model_name': 'host', 'app_label': 'main'}(class)s_modified+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='inventory',
name='created_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'inventory', 'model_name': 'inventory', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'inventory', 'model_name': 'inventory', 'app_label': 'main'}(class)s_created+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='inventory',
name='modified_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'inventory', 'model_name': 'inventory', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'inventory', 'model_name': 'inventory', 'app_label': 'main'}(class)s_modified+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='label',
name='created_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'label', 'model_name': 'label', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'label', 'model_name': 'label', 'app_label': 'main'}(class)s_created+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='label',
name='modified_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'label', 'model_name': 'label', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'label', 'model_name': 'label', 'app_label': 'main'}(class)s_modified+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='notificationtemplate',
name='created_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'notificationtemplate', 'model_name': 'notificationtemplate', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'notificationtemplate', 'model_name': 'notificationtemplate', 'app_label': 'main'}(class)s_created+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='notificationtemplate',
name='modified_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'notificationtemplate', 'model_name': 'notificationtemplate', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'notificationtemplate', 'model_name': 'notificationtemplate', 'app_label': 'main'}(class)s_modified+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='organization',
name='created_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'organization', 'model_name': 'organization', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'organization', 'model_name': 'organization', 'app_label': 'main'}(class)s_created+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='organization',
name='modified_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'organization', 'model_name': 'organization', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'organization', 'model_name': 'organization', 'app_label': 'main'}(class)s_modified+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='schedule',
name='created_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'schedule', 'model_name': 'schedule', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'schedule', 'model_name': 'schedule', 'app_label': 'main'}(class)s_created+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='schedule',
name='modified_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'schedule', 'model_name': 'schedule', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'schedule', 'model_name': 'schedule', 'app_label': 'main'}(class)s_modified+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='team',
name='created_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'team', 'model_name': 'team', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'team', 'model_name': 'team', 'app_label': 'main'}(class)s_created+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='team',
name='modified_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'team', 'model_name': 'team', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'team', 'model_name': 'team', 'app_label': 'main'}(class)s_modified+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='unifiedjob',
name='created_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'unifiedjob', 'model_name': 'unifiedjob', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'unifiedjob', 'model_name': 'unifiedjob', 'app_label': 'main'}(class)s_created+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='unifiedjob',
name='modified_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'unifiedjob', 'model_name': 'unifiedjob', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'unifiedjob', 'model_name': 'unifiedjob', 'app_label': 'main'}(class)s_modified+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='unifiedjobtemplate',
name='created_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'unifiedjobtemplate', 'model_name': 'unifiedjobtemplate', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'unifiedjobtemplate', 'model_name': 'unifiedjobtemplate', 'app_label': 'main'}(class)s_created+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
migrations.AlterField(
model_name='unifiedjobtemplate',
name='modified_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'unifiedjobtemplate', 'model_name': 'unifiedjobtemplate', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'unifiedjobtemplate', 'model_name': 'unifiedjobtemplate', 'app_label': 'main'}(class)s_modified+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
]
diff --git a/awx/main/migrations/0050_v340_drop_celery_tables.py b/awx/main/migrations/0050_v340_drop_celery_tables.py
index 24fb8b226a..4ea93da0c1 100644
--- a/awx/main/migrations/0050_v340_drop_celery_tables.py
+++ b/awx/main/migrations/0050_v340_drop_celery_tables.py
@@ -11,11 +11,17 @@ class Migration(migrations.Migration):
]
operations = [
- migrations.RunSQL([
- ("DROP TABLE IF EXISTS {} CASCADE;".format(table))
- ])
- for table in ('celery_taskmeta', 'celery_tasksetmeta', 'djcelery_crontabschedule',
- 'djcelery_intervalschedule', 'djcelery_periodictask',
- 'djcelery_periodictasks', 'djcelery_taskstate', 'djcelery_workerstate',
- 'djkombu_message', 'djkombu_queue')
+ migrations.RunSQL([("DROP TABLE IF EXISTS {} CASCADE;".format(table))])
+ for table in (
+ 'celery_taskmeta',
+ 'celery_tasksetmeta',
+ 'djcelery_crontabschedule',
+ 'djcelery_intervalschedule',
+ 'djcelery_periodictask',
+ 'djcelery_periodictasks',
+ 'djcelery_taskstate',
+ 'djcelery_workerstate',
+ 'djkombu_message',
+ 'djkombu_queue',
+ )
]
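
The operations list in 0050 above is a single list comprehension, not ten literal entries; after the reflow, the trailing "for table in (...)" clause is easy to misread as separate statements. A purely illustrative sketch of what the comprehension evaluates to (the parentheses around the format() call are redundant; each RunSQL simply receives a one-item list of SQL):

from django.db import migrations

# One RunSQL operation per legacy celery/djcelery/djkombu table:
operations = [
    migrations.RunSQL(["DROP TABLE IF EXISTS celery_taskmeta CASCADE;"]),
    migrations.RunSQL(["DROP TABLE IF EXISTS celery_tasksetmeta CASCADE;"]),
    # ... six more tables elided ...
    migrations.RunSQL(["DROP TABLE IF EXISTS djkombu_message CASCADE;"]),
    migrations.RunSQL(["DROP TABLE IF EXISTS djkombu_queue CASCADE;"]),
]
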
diff --git a/awx/main/migrations/0051_v340_job_slicing.py b/awx/main/migrations/0051_v340_job_slicing.py
index 0e5c8bd701..28a6ec7aee 100644
--- a/awx/main/migrations/0051_v340_job_slicing.py
+++ b/awx/main/migrations/0051_v340_job_slicing.py
@@ -17,17 +17,27 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='job',
name='job_slice_count',
-            field=models.PositiveIntegerField(blank=True, default=1, help_text='If run as part of sliced jobs, the total number of slices. If 1, job is not part of a sliced job.'),
+ field=models.PositiveIntegerField(
+                blank=True, default=1, help_text='If run as part of sliced jobs, the total number of slices. If 1, job is not part of a sliced job.'
+ ),
),
migrations.AddField(
model_name='job',
name='job_slice_number',
-            field=models.PositiveIntegerField(blank=True, default=0, help_text='If part of a sliced job, the ID of the inventory slice operated on. If not part of a sliced job, this parameter is not used.'),
+ field=models.PositiveIntegerField(
+ blank=True,
+ default=0,
+                help_text='If part of a sliced job, the ID of the inventory slice operated on. If not part of a sliced job, this parameter is not used.',
+ ),
),
migrations.AddField(
model_name='jobtemplate',
name='job_slice_count',
- field=models.PositiveIntegerField(blank=True, default=1, help_text='The number of jobs to slice into at runtime. Will cause the Job Template to launch a workflow if value is greater than 1.'),
+ field=models.PositiveIntegerField(
+ blank=True,
+ default=1,
+ help_text='The number of jobs to slice into at runtime. Will cause the Job Template to launch a workflow if value is greater than 1.',
+ ),
),
migrations.AddField(
model_name='workflowjob',
@@ -37,11 +47,26 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowjob',
name='job_template',
- field=models.ForeignKey(blank=True, default=None, help_text='If automatically created for a sliced job run, the job template the workflow job was created from.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='slice_workflow_jobs', to='main.JobTemplate'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='If automatically created for a sliced job run, the job template the workflow job was created from.',
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name='slice_workflow_jobs',
+ to='main.JobTemplate',
+ ),
),
migrations.AlterField(
model_name='unifiedjob',
name='unified_job_template',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, related_name='unifiedjob_unified_jobs', to='main.UnifiedJobTemplate'),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=awx.main.utils.polymorphic.SET_NULL,
+ related_name='unifiedjob_unified_jobs',
+ to='main.UnifiedJobTemplate',
+ ),
),
]
diff --git a/awx/main/migrations/0053_v340_workflow_inventory.py b/awx/main/migrations/0053_v340_workflow_inventory.py
index c519a27e25..23bede35f7 100644
--- a/awx/main/migrations/0053_v340_workflow_inventory.py
+++ b/awx/main/migrations/0053_v340_workflow_inventory.py
@@ -22,7 +22,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowjob',
name='inventory',
- field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='workflowjobs', to='main.Inventory'),
+ field=models.ForeignKey(
+ blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='workflowjobs', to='main.Inventory'
+ ),
),
migrations.AddField(
model_name='workflowjobtemplate',
@@ -32,6 +34,14 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowjobtemplate',
name='inventory',
- field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied to all job templates in workflow that prompt for inventory.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='workflowjobtemplates', to='main.Inventory'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='Inventory applied to all job templates in workflow that prompt for inventory.',
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name='workflowjobtemplates',
+ to='main.Inventory',
+ ),
),
]
diff --git a/awx/main/migrations/0054_v340_workflow_convergence.py b/awx/main/migrations/0054_v340_workflow_convergence.py
index e0c2f833fb..4a37e4cde1 100644
--- a/awx/main/migrations/0054_v340_workflow_convergence.py
+++ b/awx/main/migrations/0054_v340_workflow_convergence.py
@@ -15,6 +15,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowjobnode',
name='do_not_run',
-            field=models.BooleanField(default=False, help_text='Indicates that a job will not be created when True. Workflow runtime semantics will mark this True if the node is in a path that will decidedly not be run. A value of False means the node may not run.'),
+ field=models.BooleanField(
+ default=False,
+                help_text='Indicates that a job will not be created when True. Workflow runtime semantics will mark this True if the node is in a path that will decidedly not be run. A value of False means the node may not run.',
+ ),
),
]
diff --git a/awx/main/migrations/0055_v340_add_grafana_notification.py b/awx/main/migrations/0055_v340_add_grafana_notification.py
index bac07a7438..de29544231 100644
--- a/awx/main/migrations/0055_v340_add_grafana_notification.py
+++ b/awx/main/migrations/0055_v340_add_grafana_notification.py
@@ -15,11 +15,39 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='notification',
name='notification_type',
- field=models.CharField(choices=[('email', 'Email'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('pagerduty', 'Pagerduty'), ('grafana', 'Grafana'), ('hipchat', 'HipChat'), ('webhook', 'Webhook'), ('mattermost', 'Mattermost'), ('rocketchat', 'Rocket.Chat'), ('irc', 'IRC')], max_length=32),
+ field=models.CharField(
+ choices=[
+ ('email', 'Email'),
+ ('slack', 'Slack'),
+ ('twilio', 'Twilio'),
+ ('pagerduty', 'Pagerduty'),
+ ('grafana', 'Grafana'),
+ ('hipchat', 'HipChat'),
+ ('webhook', 'Webhook'),
+ ('mattermost', 'Mattermost'),
+ ('rocketchat', 'Rocket.Chat'),
+ ('irc', 'IRC'),
+ ],
+ max_length=32,
+ ),
),
migrations.AlterField(
model_name='notificationtemplate',
name='notification_type',
- field=models.CharField(choices=[('email', 'Email'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('pagerduty', 'Pagerduty'), ('grafana', 'Grafana'), ('hipchat', 'HipChat'), ('webhook', 'Webhook'), ('mattermost', 'Mattermost'), ('rocketchat', 'Rocket.Chat'), ('irc', 'IRC')], max_length=32),
+ field=models.CharField(
+ choices=[
+ ('email', 'Email'),
+ ('slack', 'Slack'),
+ ('twilio', 'Twilio'),
+ ('pagerduty', 'Pagerduty'),
+ ('grafana', 'Grafana'),
+ ('hipchat', 'HipChat'),
+ ('webhook', 'Webhook'),
+ ('mattermost', 'Mattermost'),
+ ('rocketchat', 'Rocket.Chat'),
+ ('irc', 'IRC'),
+ ],
+ max_length=32,
+ ),
),
]
diff --git a/awx/main/migrations/0056_v350_custom_venv_history.py b/awx/main/migrations/0056_v350_custom_venv_history.py
index 299b19d6b4..484c31a9eb 100644
--- a/awx/main/migrations/0056_v350_custom_venv_history.py
+++ b/awx/main/migrations/0056_v350_custom_venv_history.py
@@ -15,11 +15,15 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='inventoryupdate',
name='custom_virtualenv',
- field=models.CharField(blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True),
+ field=models.CharField(
+ blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True
+ ),
),
migrations.AddField(
model_name='job',
name='custom_virtualenv',
- field=models.CharField(blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True),
+ field=models.CharField(
+ blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True
+ ),
),
]
diff --git a/awx/main/migrations/0061_v350_track_native_credentialtype_source.py b/awx/main/migrations/0061_v350_track_native_credentialtype_source.py
index 4a06b3bd53..4c624d77bf 100644
--- a/awx/main/migrations/0061_v350_track_native_credentialtype_source.py
+++ b/awx/main/migrations/0061_v350_track_native_credentialtype_source.py
@@ -25,5 +25,5 @@ class Migration(migrations.Migration):
name='namespace',
field=models.CharField(default=None, editable=False, max_length=1024, null=True),
),
- migrations.RunPython(migrate_to_static_inputs)
+ migrations.RunPython(migrate_to_static_inputs),
]
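
The only change to 0061 above is the trailing comma appended to the last element of the operations list. That comma is also load-bearing for future runs: black's "magic trailing comma" (on by default in releases current at the time of this commit) keeps a collection or call exploded even when it would otherwise fit on one line. A small sketch under the same assumed mode as earlier:

import black

mode = black.Mode(line_length=160, string_normalization=False)
# With a magic trailing comma, the call stays exploded across lines:
print(black.format_str("f(\n    a=1,\n    b=2,\n)\n", mode=mode))
# Without one, the same call is collapsed onto a single line:
print(black.format_str("f(a=1, b=2)\n", mode=mode))
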
diff --git a/awx/main/migrations/0065_v350_index_job_status.py b/awx/main/migrations/0065_v350_index_job_status.py
index f6da0be023..d14e1b63cc 100644
--- a/awx/main/migrations/0065_v350_index_job_status.py
+++ b/awx/main/migrations/0065_v350_index_job_status.py
@@ -15,6 +15,21 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='unifiedjob',
name='status',
- field=models.CharField(choices=[('new', 'New'), ('pending', 'Pending'), ('waiting', 'Waiting'), ('running', 'Running'), ('successful', 'Successful'), ('failed', 'Failed'), ('error', 'Error'), ('canceled', 'Canceled')], db_index=True, default='new', editable=False, max_length=20),
+ field=models.CharField(
+ choices=[
+ ('new', 'New'),
+ ('pending', 'Pending'),
+ ('waiting', 'Waiting'),
+ ('running', 'Running'),
+ ('successful', 'Successful'),
+ ('failed', 'Failed'),
+ ('error', 'Error'),
+ ('canceled', 'Canceled'),
+ ],
+ db_index=True,
+ default='new',
+ editable=False,
+ max_length=20,
+ ),
),
]
diff --git a/awx/main/migrations/0066_v350_inventorysource_custom_virtualenv.py b/awx/main/migrations/0066_v350_inventorysource_custom_virtualenv.py
index bc16ded15a..e1196b10da 100644
--- a/awx/main/migrations/0066_v350_inventorysource_custom_virtualenv.py
+++ b/awx/main/migrations/0066_v350_inventorysource_custom_virtualenv.py
@@ -15,6 +15,8 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='inventorysource',
name='custom_virtualenv',
- field=models.CharField(blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True),
+ field=models.CharField(
+ blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True
+ ),
),
]
diff --git a/awx/main/migrations/0067_v350_credential_plugins.py b/awx/main/migrations/0067_v350_credential_plugins.py
index 32190b2bf2..335baef2f7 100644
--- a/awx/main/migrations/0067_v350_credential_plugins.py
+++ b/awx/main/migrations/0067_v350_credential_plugins.py
@@ -35,17 +35,59 @@ class Migration(migrations.Migration):
('description', models.TextField(blank=True, default='')),
('input_field_name', models.CharField(max_length=1024)),
('metadata', awx.main.fields.DynamicCredentialInputField(blank=True, default=dict)),
- ('created_by', models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credentialinputsource', 'model_name': 'credentialinputsource', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL)),
- ('modified_by', models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credentialinputsource', 'model_name': 'credentialinputsource', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL)),
- ('source_credential', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='target_input_sources', to='main.Credential')),
- ('tags', taggit.managers.TaggableManager(blank=True, help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags')),
- ('target_credential', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='input_sources', to='main.Credential')),
+ (
+ 'created_by',
+ models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'credentialinputsource', 'model_name': 'credentialinputsource', 'app_label': 'main'}(class)s_created+",
+ to=settings.AUTH_USER_MODEL,
+ ),
+ ),
+ (
+ 'modified_by',
+ models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'credentialinputsource', 'model_name': 'credentialinputsource', 'app_label': 'main'}(class)s_modified+",
+ to=settings.AUTH_USER_MODEL,
+ ),
+ ),
+ (
+ 'source_credential',
+ models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='target_input_sources', to='main.Credential'),
+ ),
+ (
+ 'tags',
+ taggit.managers.TaggableManager(
+ blank=True, help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags'
+ ),
+ ),
+ (
+ 'target_credential',
+ models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='input_sources', to='main.Credential'),
+ ),
],
),
migrations.AlterField(
model_name='credentialtype',
name='kind',
- field=models.CharField(choices=[('ssh', 'Machine'), ('vault', 'Vault'), ('net', 'Network'), ('scm', 'Source Control'), ('cloud', 'Cloud'), ('insights', 'Insights'), ('external', 'External')], max_length=32),
+ field=models.CharField(
+ choices=[
+ ('ssh', 'Machine'),
+ ('vault', 'Vault'),
+ ('net', 'Network'),
+ ('scm', 'Source Control'),
+ ('cloud', 'Cloud'),
+ ('insights', 'Insights'),
+ ('external', 'External'),
+ ],
+ max_length=32,
+ ),
),
migrations.AlterUniqueTogether(
name='credentialinputsource',
diff --git a/awx/main/migrations/0070_v350_gce_instance_id.py b/awx/main/migrations/0070_v350_gce_instance_id.py
index fe32d930c0..ac0a94b5c0 100644
--- a/awx/main/migrations/0070_v350_gce_instance_id.py
+++ b/awx/main/migrations/0070_v350_gce_instance_id.py
@@ -30,6 +30,4 @@ class Migration(migrations.Migration):
('main', '0069_v350_generate_unique_install_uuid'),
]
- operations = [
- migrations.RunPython(gce_id_forward, gce_id_backward)
- ]
+ operations = [migrations.RunPython(gce_id_forward, gce_id_backward)]
diff --git a/awx/main/migrations/0072_v350_deprecate_fields.py b/awx/main/migrations/0072_v350_deprecate_fields.py
index 923a6d671c..0d87f0bffd 100644
--- a/awx/main/migrations/0072_v350_deprecate_fields.py
+++ b/awx/main/migrations/0072_v350_deprecate_fields.py
@@ -15,71 +15,127 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='group',
name='groups_with_active_failures',
- field=models.PositiveIntegerField(default=0, editable=False, help_text='This field is deprecated and will be removed in a future release. Number of child groups within this group that have active failures.'),
+ field=models.PositiveIntegerField(
+ default=0,
+ editable=False,
+ help_text='This field is deprecated and will be removed in a future release. Number of child groups within this group that have active failures.',
+ ),
),
migrations.AlterField(
model_name='group',
name='has_active_failures',
- field=models.BooleanField(default=False, editable=False, help_text='This field is deprecated and will be removed in a future release. Flag indicating whether this group has any hosts with active failures.'),
+ field=models.BooleanField(
+ default=False,
+ editable=False,
+ help_text='This field is deprecated and will be removed in a future release. Flag indicating whether this group has any hosts with active failures.',
+ ),
),
migrations.AlterField(
model_name='group',
name='has_inventory_sources',
- field=models.BooleanField(default=False, editable=False, help_text='This field is deprecated and will be removed in a future release. Flag indicating whether this group was created/updated from any external inventory sources.'),
+ field=models.BooleanField(
+ default=False,
+ editable=False,
+ help_text='This field is deprecated and will be removed in a future release. Flag indicating whether this group was created/updated from any external inventory sources.',
+ ),
),
migrations.AlterField(
model_name='group',
name='hosts_with_active_failures',
- field=models.PositiveIntegerField(default=0, editable=False, help_text='This field is deprecated and will be removed in a future release. Number of hosts in this group with active failures.'),
+ field=models.PositiveIntegerField(
+ default=0,
+ editable=False,
+ help_text='This field is deprecated and will be removed in a future release. Number of hosts in this group with active failures.',
+ ),
),
migrations.AlterField(
model_name='group',
name='total_groups',
- field=models.PositiveIntegerField(default=0, editable=False, help_text='This field is deprecated and will be removed in a future release. Total number of child groups contained within this group.'),
+ field=models.PositiveIntegerField(
+ default=0,
+ editable=False,
+ help_text='This field is deprecated and will be removed in a future release. Total number of child groups contained within this group.',
+ ),
),
migrations.AlterField(
model_name='group',
name='total_hosts',
- field=models.PositiveIntegerField(default=0, editable=False, help_text='This field is deprecated and will be removed in a future release. Total number of hosts directly or indirectly in this group.'),
+ field=models.PositiveIntegerField(
+ default=0,
+ editable=False,
+ help_text='This field is deprecated and will be removed in a future release. Total number of hosts directly or indirectly in this group.',
+ ),
),
migrations.AlterField(
model_name='host',
name='has_active_failures',
- field=models.BooleanField(default=False, editable=False, help_text='This field is deprecated and will be removed in a future release. Flag indicating whether the last job failed for this host.'),
+ field=models.BooleanField(
+ default=False,
+ editable=False,
+ help_text='This field is deprecated and will be removed in a future release. Flag indicating whether the last job failed for this host.',
+ ),
),
migrations.AlterField(
model_name='host',
name='has_inventory_sources',
- field=models.BooleanField(default=False, editable=False, help_text='This field is deprecated and will be removed in a future release. Flag indicating whether this host was created/updated from any external inventory sources.'),
+ field=models.BooleanField(
+ default=False,
+ editable=False,
+ help_text='This field is deprecated and will be removed in a future release. Flag indicating whether this host was created/updated from any external inventory sources.',
+ ),
),
migrations.AlterField(
model_name='inventory',
name='groups_with_active_failures',
- field=models.PositiveIntegerField(default=0, editable=False, help_text='This field is deprecated and will be removed in a future release. Number of groups in this inventory with active failures.'),
+ field=models.PositiveIntegerField(
+ default=0,
+ editable=False,
+ help_text='This field is deprecated and will be removed in a future release. Number of groups in this inventory with active failures.',
+ ),
),
migrations.AlterField(
model_name='inventory',
name='has_active_failures',
- field=models.BooleanField(default=False, editable=False, help_text='This field is deprecated and will be removed in a future release. Flag indicating whether any hosts in this inventory have failed.'),
+ field=models.BooleanField(
+ default=False,
+ editable=False,
+ help_text='This field is deprecated and will be removed in a future release. Flag indicating whether any hosts in this inventory have failed.',
+ ),
),
migrations.AlterField(
model_name='inventory',
name='has_inventory_sources',
- field=models.BooleanField(default=False, editable=False, help_text='This field is deprecated and will be removed in a future release. Flag indicating whether this inventory has any external inventory sources.'),
+ field=models.BooleanField(
+ default=False,
+ editable=False,
+ help_text='This field is deprecated and will be removed in a future release. Flag indicating whether this inventory has any external inventory sources.',
+ ),
),
migrations.AlterField(
model_name='inventory',
name='hosts_with_active_failures',
- field=models.PositiveIntegerField(default=0, editable=False, help_text='This field is deprecated and will be removed in a future release. Number of hosts in this inventory with active failures.'),
+ field=models.PositiveIntegerField(
+ default=0,
+ editable=False,
+ help_text='This field is deprecated and will be removed in a future release. Number of hosts in this inventory with active failures.',
+ ),
),
migrations.AlterField(
model_name='inventory',
name='total_groups',
- field=models.PositiveIntegerField(default=0, editable=False, help_text='This field is deprecated and will be removed in a future release. Total number of groups in this inventory.'),
+ field=models.PositiveIntegerField(
+ default=0,
+ editable=False,
+ help_text='This field is deprecated and will be removed in a future release. Total number of groups in this inventory.',
+ ),
),
migrations.AlterField(
model_name='inventory',
name='total_hosts',
- field=models.PositiveIntegerField(default=0, editable=False, help_text='This field is deprecated and will be removed in a future release. Total number of hosts in this inventory.'),
+ field=models.PositiveIntegerField(
+ default=0,
+ editable=False,
+ help_text='This field is deprecated and will be removed in a future release. Total number of hosts in this inventory.',
+ ),
),
]
diff --git a/awx/main/migrations/0074_v360_migrate_instance_group_relations.py b/awx/main/migrations/0074_v360_migrate_instance_group_relations.py
index aeaf0c7fe6..234d3ef3f2 100644
--- a/awx/main/migrations/0074_v360_migrate_instance_group_relations.py
+++ b/awx/main/migrations/0074_v360_migrate_instance_group_relations.py
@@ -24,11 +24,7 @@ def create_through_relations(apps, schema_editor):
Target = apps.get_model('main', cls)
for x in Target.objects.iterator():
for i, instance_group in enumerate(x.instance_groups.all()):
- Membership(
- instancegroup=instance_group,
- position=i,
- **{'%s' % cls.lower(): x}
- ).save()
+ Membership(instancegroup=instance_group, position=i, **{'%s' % cls.lower(): x}).save()
class Migration(migrations.Migration):
@@ -37,6 +33,4 @@ class Migration(migrations.Migration):
('main', '0073_v360_create_instance_group_m2m'),
]
- operations = [
- migrations.RunPython(create_through_relations)
- ]
+ operations = [migrations.RunPython(create_through_relations)]
diff --git a/awx/main/migrations/0078_v360_clear_sessions_tokens_jt.py b/awx/main/migrations/0078_v360_clear_sessions_tokens_jt.py
index c6a00a24c1..57d9cca1de 100644
--- a/awx/main/migrations/0078_v360_clear_sessions_tokens_jt.py
+++ b/awx/main/migrations/0078_v360_clear_sessions_tokens_jt.py
@@ -19,11 +19,31 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='systemjob',
name='job_type',
- field=models.CharField(blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_sessions', 'Removes expired browser sessions from the database'), ('cleanup_tokens', 'Removes expired OAuth 2 access tokens and refresh tokens')], default='', max_length=32),
+ field=models.CharField(
+ blank=True,
+ choices=[
+ ('cleanup_jobs', 'Remove jobs older than a certain number of days'),
+ ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'),
+ ('cleanup_sessions', 'Removes expired browser sessions from the database'),
+ ('cleanup_tokens', 'Removes expired OAuth 2 access tokens and refresh tokens'),
+ ],
+ default='',
+ max_length=32,
+ ),
),
migrations.AlterField(
model_name='systemjobtemplate',
name='job_type',
- field=models.CharField(blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_sessions', 'Removes expired browser sessions from the database'), ('cleanup_tokens', 'Removes expired OAuth 2 access tokens and refresh tokens')], default='', max_length=32),
+ field=models.CharField(
+ blank=True,
+ choices=[
+ ('cleanup_jobs', 'Remove jobs older than a certain number of days'),
+ ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'),
+ ('cleanup_sessions', 'Removes expired browser sessions from the database'),
+ ('cleanup_tokens', 'Removes expired OAuth 2 access tokens and refresh tokens'),
+ ],
+ default='',
+ max_length=32,
+ ),
),
]
diff --git a/awx/main/migrations/0079_v360_rm_implicit_oauth2_apps.py b/awx/main/migrations/0079_v360_rm_implicit_oauth2_apps.py
index 7edc1e87dc..c273054da5 100644
--- a/awx/main/migrations/0079_v360_rm_implicit_oauth2_apps.py
+++ b/awx/main/migrations/0079_v360_rm_implicit_oauth2_apps.py
@@ -15,6 +15,10 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='oauth2application',
name='authorization_grant_type',
- field=models.CharField(choices=[('authorization-code', 'Authorization code'), ('password', 'Resource owner password-based')], help_text='The Grant type the user must use to acquire tokens for this application.', max_length=32),
+ field=models.CharField(
+ choices=[('authorization-code', 'Authorization code'), ('password', 'Resource owner password-based')],
+ help_text='The Grant type the user must use to acquire tokens for this application.',
+ max_length=32,
+ ),
),
]
diff --git a/awx/main/migrations/0080_v360_replace_job_origin.py b/awx/main/migrations/0080_v360_replace_job_origin.py
index 4289dd5abd..2df958f2f6 100644
--- a/awx/main/migrations/0080_v360_replace_job_origin.py
+++ b/awx/main/migrations/0080_v360_replace_job_origin.py
@@ -31,5 +31,5 @@ class Migration(migrations.Migration):
migrations.DeleteModel(
name='JobOrigin',
),
- migrations.RunPython(lambda apps, schema_editor: set_current_apps(apps), migrations.RunPython.noop)
+ migrations.RunPython(lambda apps, schema_editor: set_current_apps(apps), migrations.RunPython.noop),
]
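
The trailing comma black adds here sits on a RunPython call worth unpacking: every RunPython callable is invoked as f(apps, schema_editor), so a helper that only needs apps gets adapted with a lambda, and migrations.RunPython.noop supplies a do-nothing reverse that keeps the migration reversible. In sketch form, with set_current_apps standing in for the helper this migration imports:

    from django.db import migrations


    def set_current_apps(apps):
        ...  # the real helper caches the historical app registry


    # The lambda adapts RunPython's two-argument signature to the
    # one-argument helper; noop makes the reverse direction a no-op.
    step = migrations.RunPython(lambda apps, schema_editor: set_current_apps(apps), migrations.RunPython.noop)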
diff --git a/awx/main/migrations/0081_v360_notify_on_start.py b/awx/main/migrations/0081_v360_notify_on_start.py
index 8871a6fd8c..8ac29e53bc 100644
--- a/awx/main/migrations/0081_v360_notify_on_start.py
+++ b/awx/main/migrations/0081_v360_notify_on_start.py
@@ -12,6 +12,7 @@ def forwards_split_unified_job_template_any(apps, schema_editor):
ujt.notification_templates_success.add(ujt_notification)
ujt.notification_templates_error.add(ujt_notification)
+
def forwards_split_organization_any(apps, schema_editor):
Organization = apps.get_model('main', 'organization')
for org in Organization.objects.all():
diff --git a/awx/main/migrations/0083_v360_job_branch_override.py b/awx/main/migrations/0083_v360_job_branch_override.py
index 4e1b00b4a7..4ba9e7c219 100644
--- a/awx/main/migrations/0083_v360_job_branch_override.py
+++ b/awx/main/migrations/0083_v360_job_branch_override.py
@@ -17,18 +17,27 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='project',
name='scm_refspec',
- field=models.CharField(blank=True, default='', help_text='For git projects, an additional refspec to fetch.', max_length=1024, verbose_name='SCM refspec'),
+ field=models.CharField(
+ blank=True, default='', help_text='For git projects, an additional refspec to fetch.', max_length=1024, verbose_name='SCM refspec'
+ ),
),
migrations.AddField(
model_name='projectupdate',
name='scm_refspec',
- field=models.CharField(blank=True, default='', help_text='For git projects, an additional refspec to fetch.', max_length=1024, verbose_name='SCM refspec'),
+ field=models.CharField(
+ blank=True, default='', help_text='For git projects, an additional refspec to fetch.', max_length=1024, verbose_name='SCM refspec'
+ ),
),
# Add fields for job specification of project branch
migrations.AddField(
model_name='job',
name='scm_branch',
- field=models.CharField(blank=True, default='', help_text='Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.', max_length=1024),
+ field=models.CharField(
+ blank=True,
+ default='',
+ help_text='Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.',
+ max_length=1024,
+ ),
),
migrations.AddField(
model_name='jobtemplate',
@@ -38,7 +47,12 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='jobtemplate',
name='scm_branch',
- field=models.CharField(blank=True, default='', help_text='Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.', max_length=1024),
+ field=models.CharField(
+ blank=True,
+ default='',
+ help_text='Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.',
+ max_length=1024,
+ ),
),
migrations.AddField(
model_name='project',
@@ -49,12 +63,23 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='project',
name='scm_update_cache_timeout',
- field=models.PositiveIntegerField(blank=True, default=0, help_text='The number of seconds after the last project update ran that a new project update will be launched as a job dependency.'),
+ field=models.PositiveIntegerField(
+ blank=True,
+ default=0,
+ help_text='The number of seconds after the last project update ran that a new project update will be launched as a job dependency.',
+ ),
),
# Start tracking the fetched revision on project update model
migrations.AddField(
model_name='projectupdate',
name='scm_revision',
- field=models.CharField(blank=True, default='', editable=False, help_text='The SCM Revision discovered by this update for the given project and branch.', max_length=1024, verbose_name='SCM Revision'),
+ field=models.CharField(
+ blank=True,
+ default='',
+ editable=False,
+ help_text='The SCM Revision discovered by this update for the given project and branch.',
+ max_length=1024,
+ verbose_name='SCM Revision',
+ ),
),
]
diff --git a/awx/main/migrations/0084_v360_token_description.py b/awx/main/migrations/0084_v360_token_description.py
index c93e2021cd..07064ca03f 100644
--- a/awx/main/migrations/0084_v360_token_description.py
+++ b/awx/main/migrations/0084_v360_token_description.py
@@ -4,6 +4,7 @@ from django.db import migrations, models
import awx
+
class Migration(migrations.Migration):
dependencies = [
diff --git a/awx/main/migrations/0085_v360_add_notificationtemplate_messages.py b/awx/main/migrations/0085_v360_add_notificationtemplate_messages.py
index 72917aff4c..690989276b 100644
--- a/awx/main/migrations/0085_v360_add_notificationtemplate_messages.py
+++ b/awx/main/migrations/0085_v360_add_notificationtemplate_messages.py
@@ -18,19 +18,49 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='notificationtemplate',
name='messages',
- field=awx.main.fields.JSONField(default=awx.main.models.notifications.NotificationTemplate.default_messages,
- help_text='Optional custom messages for notification template.',
- null=True,
- blank=True),
+ field=awx.main.fields.JSONField(
+ default=awx.main.models.notifications.NotificationTemplate.default_messages,
+ help_text='Optional custom messages for notification template.',
+ null=True,
+ blank=True,
+ ),
),
migrations.AlterField(
model_name='notification',
name='notification_type',
- field=models.CharField(choices=[('email', 'Email'), ('grafana', 'Grafana'), ('hipchat', 'HipChat'), ('irc', 'IRC'), ('mattermost', 'Mattermost'), ('pagerduty', 'Pagerduty'), ('rocketchat', 'Rocket.Chat'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('webhook', 'Webhook')], max_length=32),
+ field=models.CharField(
+ choices=[
+ ('email', 'Email'),
+ ('grafana', 'Grafana'),
+ ('hipchat', 'HipChat'),
+ ('irc', 'IRC'),
+ ('mattermost', 'Mattermost'),
+ ('pagerduty', 'Pagerduty'),
+ ('rocketchat', 'Rocket.Chat'),
+ ('slack', 'Slack'),
+ ('twilio', 'Twilio'),
+ ('webhook', 'Webhook'),
+ ],
+ max_length=32,
+ ),
),
migrations.AlterField(
model_name='notificationtemplate',
name='notification_type',
- field=models.CharField(choices=[('email', 'Email'), ('grafana', 'Grafana'), ('hipchat', 'HipChat'), ('irc', 'IRC'), ('mattermost', 'Mattermost'), ('pagerduty', 'Pagerduty'), ('rocketchat', 'Rocket.Chat'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('webhook', 'Webhook')], max_length=32),
+ field=models.CharField(
+ choices=[
+ ('email', 'Email'),
+ ('grafana', 'Grafana'),
+ ('hipchat', 'HipChat'),
+ ('irc', 'IRC'),
+ ('mattermost', 'Mattermost'),
+ ('pagerduty', 'Pagerduty'),
+ ('rocketchat', 'Rocket.Chat'),
+ ('slack', 'Slack'),
+ ('twilio', 'Twilio'),
+ ('webhook', 'Webhook'),
+ ],
+ max_length=32,
+ ),
),
]
diff --git a/awx/main/migrations/0086_v360_workflow_approval.py b/awx/main/migrations/0086_v360_workflow_approval.py
index fa3cadbc2f..d141e8c955 100644
--- a/awx/main/migrations/0086_v360_workflow_approval.py
+++ b/awx/main/migrations/0086_v360_workflow_approval.py
@@ -15,38 +15,89 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='WorkflowApprovalTemplate',
fields=[
- ('unifiedjobtemplate_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
- ('timeout', models.IntegerField(blank=True, default=0, help_text='The amount of time (in seconds) before the approval node expires and fails.')),
+ (
+ 'unifiedjobtemplate_ptr',
+ models.OneToOneField(
+ auto_created=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_link=True,
+ primary_key=True,
+ serialize=False,
+ to='main.UnifiedJobTemplate',
+ ),
+ ),
+ (
+ 'timeout',
+ models.IntegerField(blank=True, default=0, help_text='The amount of time (in seconds) before the approval node expires and fails.'),
+ ),
],
bases=('main.unifiedjobtemplate',),
),
migrations.AddField(
model_name='organization',
name='approval_role',
- field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+ ),
preserve_default='True',
),
migrations.AddField(
model_name='workflowjobtemplate',
name='approval_role',
- field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['organization.approval_role', 'admin_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ editable=False,
+ null='True',
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_role=['organization.approval_role', 'admin_role'],
+ related_name='+',
+ to='main.Role',
+ ),
preserve_default='True',
),
migrations.AlterField(
model_name='workflowjobnode',
name='unified_job_template',
- field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='workflowjobnodes', to='main.UnifiedJobTemplate'),
+ field=models.ForeignKey(
+ blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='workflowjobnodes', to='main.UnifiedJobTemplate'
+ ),
),
migrations.AlterField(
model_name='workflowjobtemplatenode',
name='unified_job_template',
- field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='workflowjobtemplatenodes', to='main.UnifiedJobTemplate'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name='workflowjobtemplatenodes',
+ to='main.UnifiedJobTemplate',
+ ),
),
migrations.CreateModel(
name='WorkflowApproval',
fields=[
- ('unifiedjob_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
- ('workflow_approval_template', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='approvals', to='main.WorkflowApprovalTemplate')),
+ (
+ 'unifiedjob_ptr',
+ models.OneToOneField(
+ auto_created=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_link=True,
+ primary_key=True,
+ serialize=False,
+ to='main.UnifiedJob',
+ ),
+ ),
+ (
+ 'workflow_approval_template',
+ models.ForeignKey(
+ blank=True,
+ default=None,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name='approvals',
+ to='main.WorkflowApprovalTemplate',
+ ),
+ ),
],
bases=('main.unifiedjob',),
),
@@ -63,12 +114,37 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='organization',
name='read_role',
- field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['member_role', 'auditor_role', 'execute_role', 'project_admin_role', 'inventory_admin_role', 'workflow_admin_role', 'notification_admin_role', 'credential_admin_role', 'job_template_admin_role', 'approval_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ editable=False,
+ null='True',
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_role=[
+ 'member_role',
+ 'auditor_role',
+ 'execute_role',
+ 'project_admin_role',
+ 'inventory_admin_role',
+ 'workflow_admin_role',
+ 'notification_admin_role',
+ 'credential_admin_role',
+ 'job_template_admin_role',
+ 'approval_role',
+ ],
+ related_name='+',
+ to='main.Role',
+ ),
),
migrations.AlterField(
model_name='workflowjobtemplate',
name='read_role',
- field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role', 'approval_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ editable=False,
+ null='True',
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role', 'approval_role'],
+ related_name='+',
+ to='main.Role',
+ ),
),
migrations.AddField(
model_name='workflowapproval',
diff --git a/awx/main/migrations/0087_v360_update_credential_injector_help_text.py b/awx/main/migrations/0087_v360_update_credential_injector_help_text.py
index 6f30daa4ad..78549cd84e 100644
--- a/awx/main/migrations/0087_v360_update_credential_injector_help_text.py
+++ b/awx/main/migrations/0087_v360_update_credential_injector_help_text.py
@@ -14,16 +14,28 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='inputs',
- field=awx.main.fields.CredentialInputField(blank=True, default=dict, help_text='Enter inputs using either JSON or YAML syntax. Refer to the Ansible Tower documentation for example syntax.'),
+ field=awx.main.fields.CredentialInputField(
+ blank=True,
+ default=dict,
+ help_text='Enter inputs using either JSON or YAML syntax. Refer to the Ansible Tower documentation for example syntax.',
+ ),
),
migrations.AlterField(
model_name='credentialtype',
name='injectors',
- field=awx.main.fields.CredentialTypeInjectorField(blank=True, default=dict, help_text='Enter injectors using either JSON or YAML syntax. Refer to the Ansible Tower documentation for example syntax.'),
+ field=awx.main.fields.CredentialTypeInjectorField(
+ blank=True,
+ default=dict,
+ help_text='Enter injectors using either JSON or YAML syntax. Refer to the Ansible Tower documentation for example syntax.',
+ ),
),
migrations.AlterField(
model_name='credentialtype',
name='inputs',
- field=awx.main.fields.CredentialTypeInputField(blank=True, default=dict, help_text='Enter inputs using either JSON or YAML syntax. Refer to the Ansible Tower documentation for example syntax.'),
+ field=awx.main.fields.CredentialTypeInputField(
+ blank=True,
+ default=dict,
+ help_text='Enter inputs using either JSON or YAML syntax. Refer to the Ansible Tower documentation for example syntax.',
+ ),
),
]
diff --git a/awx/main/migrations/0088_v360_dashboard_optimizations.py b/awx/main/migrations/0088_v360_dashboard_optimizations.py
index d79960cdb6..8935bde894 100644
--- a/awx/main/migrations/0088_v360_dashboard_optimizations.py
+++ b/awx/main/migrations/0088_v360_dashboard_optimizations.py
@@ -18,7 +18,22 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='unifiedjob',
name='launch_type',
- field=models.CharField(choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency'), ('workflow', 'Workflow'), ('sync', 'Sync'), ('scm', 'SCM Update')], db_index=True, default='manual', editable=False, max_length=20),
+ field=models.CharField(
+ choices=[
+ ('manual', 'Manual'),
+ ('relaunch', 'Relaunch'),
+ ('callback', 'Callback'),
+ ('scheduled', 'Scheduled'),
+ ('dependency', 'Dependency'),
+ ('workflow', 'Workflow'),
+ ('sync', 'Sync'),
+ ('scm', 'SCM Update'),
+ ],
+ db_index=True,
+ default='manual',
+ editable=False,
+ max_length=20,
+ ),
),
migrations.AlterField(
model_name='unifiedjob',
diff --git a/awx/main/migrations/0089_v360_new_job_event_types.py b/awx/main/migrations/0089_v360_new_job_event_types.py
index 5bab589371..b4d53e9083 100644
--- a/awx/main/migrations/0089_v360_new_job_event_types.py
+++ b/awx/main/migrations/0089_v360_new_job_event_types.py
@@ -13,11 +13,85 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='jobevent',
name='event',
- field=models.CharField(choices=[('runner_on_failed', 'Host Failed'), ('runner_on_start', 'Host Started'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_item_on_ok', 'Item OK'), ('runner_item_on_failed', 'Item Failed'), ('runner_item_on_skipped', 'Item Skipped'), ('runner_retry', 'Host Retry'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_include', 'Including File'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')], max_length=100),
+ field=models.CharField(
+ choices=[
+ ('runner_on_failed', 'Host Failed'),
+ ('runner_on_start', 'Host Started'),
+ ('runner_on_ok', 'Host OK'),
+ ('runner_on_error', 'Host Failure'),
+ ('runner_on_skipped', 'Host Skipped'),
+ ('runner_on_unreachable', 'Host Unreachable'),
+ ('runner_on_no_hosts', 'No Hosts Remaining'),
+ ('runner_on_async_poll', 'Host Polling'),
+ ('runner_on_async_ok', 'Host Async OK'),
+ ('runner_on_async_failed', 'Host Async Failure'),
+ ('runner_item_on_ok', 'Item OK'),
+ ('runner_item_on_failed', 'Item Failed'),
+ ('runner_item_on_skipped', 'Item Skipped'),
+ ('runner_retry', 'Host Retry'),
+ ('runner_on_file_diff', 'File Difference'),
+ ('playbook_on_start', 'Playbook Started'),
+ ('playbook_on_notify', 'Running Handlers'),
+ ('playbook_on_include', 'Including File'),
+ ('playbook_on_no_hosts_matched', 'No Hosts Matched'),
+ ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'),
+ ('playbook_on_task_start', 'Task Started'),
+ ('playbook_on_vars_prompt', 'Variables Prompted'),
+ ('playbook_on_setup', 'Gathering Facts'),
+ ('playbook_on_import_for_host', 'internal: on Import for Host'),
+ ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'),
+ ('playbook_on_play_start', 'Play Started'),
+ ('playbook_on_stats', 'Playbook Complete'),
+ ('debug', 'Debug'),
+ ('verbose', 'Verbose'),
+ ('deprecated', 'Deprecated'),
+ ('warning', 'Warning'),
+ ('system_warning', 'System Warning'),
+ ('error', 'Error'),
+ ],
+ max_length=100,
+ ),
),
migrations.AlterField(
model_name='projectupdateevent',
name='event',
- field=models.CharField(choices=[('runner_on_failed', 'Host Failed'), ('runner_on_start', 'Host Started'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_item_on_ok', 'Item OK'), ('runner_item_on_failed', 'Item Failed'), ('runner_item_on_skipped', 'Item Skipped'), ('runner_retry', 'Host Retry'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_include', 'Including File'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete'), ('debug', 'Debug'), ('verbose', 'Verbose'), ('deprecated', 'Deprecated'), ('warning', 'Warning'), ('system_warning', 'System Warning'), ('error', 'Error')], max_length=100),
+ field=models.CharField(
+ choices=[
+ ('runner_on_failed', 'Host Failed'),
+ ('runner_on_start', 'Host Started'),
+ ('runner_on_ok', 'Host OK'),
+ ('runner_on_error', 'Host Failure'),
+ ('runner_on_skipped', 'Host Skipped'),
+ ('runner_on_unreachable', 'Host Unreachable'),
+ ('runner_on_no_hosts', 'No Hosts Remaining'),
+ ('runner_on_async_poll', 'Host Polling'),
+ ('runner_on_async_ok', 'Host Async OK'),
+ ('runner_on_async_failed', 'Host Async Failure'),
+ ('runner_item_on_ok', 'Item OK'),
+ ('runner_item_on_failed', 'Item Failed'),
+ ('runner_item_on_skipped', 'Item Skipped'),
+ ('runner_retry', 'Host Retry'),
+ ('runner_on_file_diff', 'File Difference'),
+ ('playbook_on_start', 'Playbook Started'),
+ ('playbook_on_notify', 'Running Handlers'),
+ ('playbook_on_include', 'Including File'),
+ ('playbook_on_no_hosts_matched', 'No Hosts Matched'),
+ ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'),
+ ('playbook_on_task_start', 'Task Started'),
+ ('playbook_on_vars_prompt', 'Variables Prompted'),
+ ('playbook_on_setup', 'Gathering Facts'),
+ ('playbook_on_import_for_host', 'internal: on Import for Host'),
+ ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'),
+ ('playbook_on_play_start', 'Play Started'),
+ ('playbook_on_stats', 'Playbook Complete'),
+ ('debug', 'Debug'),
+ ('verbose', 'Verbose'),
+ ('deprecated', 'Deprecated'),
+ ('warning', 'Warning'),
+ ('system_warning', 'System Warning'),
+ ('error', 'Error'),
+ ],
+ max_length=100,
+ ),
),
]
diff --git a/awx/main/migrations/0090_v360_WFJT_prompts.py b/awx/main/migrations/0090_v360_WFJT_prompts.py
index 1fa317e71b..46fb497202 100644
--- a/awx/main/migrations/0090_v360_WFJT_prompts.py
+++ b/awx/main/migrations/0090_v360_WFJT_prompts.py
@@ -29,31 +29,79 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='joblaunchconfig',
name='inventory',
- field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='joblaunchconfigs', to='main.Inventory'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='Inventory applied as a prompt, assuming job template prompts for inventory',
+ null=True,
+ on_delete=models.deletion.SET_NULL,
+ related_name='joblaunchconfigs',
+ to='main.Inventory',
+ ),
),
migrations.AlterField(
model_name='schedule',
name='inventory',
- field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='schedules', to='main.Inventory'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='Inventory applied as a prompt, assuming job template prompts for inventory',
+ null=True,
+ on_delete=models.deletion.SET_NULL,
+ related_name='schedules',
+ to='main.Inventory',
+ ),
),
migrations.AlterField(
model_name='workflowjob',
name='inventory',
- field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='workflowjobs', to='main.Inventory'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='Inventory applied as a prompt, assuming job template prompts for inventory',
+ null=True,
+ on_delete=models.deletion.SET_NULL,
+ related_name='workflowjobs',
+ to='main.Inventory',
+ ),
),
migrations.AlterField(
model_name='workflowjobnode',
name='inventory',
- field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='workflowjobnodes', to='main.Inventory'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='Inventory applied as a prompt, assuming job template prompts for inventory',
+ null=True,
+ on_delete=models.deletion.SET_NULL,
+ related_name='workflowjobnodes',
+ to='main.Inventory',
+ ),
),
migrations.AlterField(
model_name='workflowjobtemplate',
name='inventory',
- field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='workflowjobtemplates', to='main.Inventory'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='Inventory applied as a prompt, assuming job template prompts for inventory',
+ null=True,
+ on_delete=models.deletion.SET_NULL,
+ related_name='workflowjobtemplates',
+ to='main.Inventory',
+ ),
),
migrations.AlterField(
model_name='workflowjobtemplatenode',
name='inventory',
- field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied as a prompt, assuming job template prompts for inventory', null=True, on_delete=models.deletion.SET_NULL, related_name='workflowjobtemplatenodes', to='main.Inventory'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='Inventory applied as a prompt, assuming job template prompts for inventory',
+ null=True,
+ on_delete=models.deletion.SET_NULL,
+ related_name='workflowjobtemplatenodes',
+ to='main.Inventory',
+ ),
),
]
diff --git a/awx/main/migrations/0091_v360_approval_node_notifications.py b/awx/main/migrations/0091_v360_approval_node_notifications.py
index 3963b776b8..26be885cf0 100644
--- a/awx/main/migrations/0091_v360_approval_node_notifications.py
+++ b/awx/main/migrations/0091_v360_approval_node_notifications.py
@@ -23,6 +23,9 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='workflowjobnode',
name='do_not_run',
- field=models.BooleanField(default=False, help_text='Indicates that a job will not be created when True. Workflow runtime semantics will mark this True if the node is in a path that will decidedly not be run. A value of False means the node may not run.'),
+ field=models.BooleanField(
+ default=False,
+ help_text='Indicates that a job will not be created when True. Workflow runtime semantics will mark this True if the node is in a path that will decidedly not be run. A value of False means the node may not run.',
+ ),
),
]
diff --git a/awx/main/migrations/0092_v360_webhook_mixin.py b/awx/main/migrations/0092_v360_webhook_mixin.py
index c2887c6b1b..39adb22cdf 100644
--- a/awx/main/migrations/0092_v360_webhook_mixin.py
+++ b/awx/main/migrations/0092_v360_webhook_mixin.py
@@ -14,7 +14,14 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='jobtemplate',
name='webhook_credential',
- field=models.ForeignKey(blank=True, help_text='Personal Access Token for posting back the status to the service API', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='jobtemplates', to='main.Credential'),
+ field=models.ForeignKey(
+ blank=True,
+ help_text='Personal Access Token for posting back the status to the service API',
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name='jobtemplates',
+ to='main.Credential',
+ ),
),
migrations.AddField(
model_name='jobtemplate',
@@ -24,12 +31,21 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='jobtemplate',
name='webhook_service',
- field=models.CharField(blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16),
+ field=models.CharField(
+ blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16
+ ),
),
migrations.AddField(
model_name='workflowjobtemplate',
name='webhook_credential',
- field=models.ForeignKey(blank=True, help_text='Personal Access Token for posting back the status to the service API', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='workflowjobtemplates', to='main.Credential'),
+ field=models.ForeignKey(
+ blank=True,
+ help_text='Personal Access Token for posting back the status to the service API',
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name='workflowjobtemplates',
+ to='main.Credential',
+ ),
),
migrations.AddField(
model_name='workflowjobtemplate',
@@ -39,11 +55,29 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowjobtemplate',
name='webhook_service',
- field=models.CharField(blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16),
+ field=models.CharField(
+ blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16
+ ),
),
migrations.AlterField(
model_name='unifiedjob',
name='launch_type',
- field=models.CharField(choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency'), ('workflow', 'Workflow'), ('webhook', 'Webhook'), ('sync', 'Sync'), ('scm', 'SCM Update')], db_index=True, default='manual', editable=False, max_length=20),
+ field=models.CharField(
+ choices=[
+ ('manual', 'Manual'),
+ ('relaunch', 'Relaunch'),
+ ('callback', 'Callback'),
+ ('scheduled', 'Scheduled'),
+ ('dependency', 'Dependency'),
+ ('workflow', 'Workflow'),
+ ('webhook', 'Webhook'),
+ ('sync', 'Sync'),
+ ('scm', 'SCM Update'),
+ ],
+ db_index=True,
+ default='manual',
+ editable=False,
+ max_length=20,
+ ),
),
]
diff --git a/awx/main/migrations/0093_v360_personal_access_tokens.py b/awx/main/migrations/0093_v360_personal_access_tokens.py
index 1dd1bbc094..d5910f575a 100644
--- a/awx/main/migrations/0093_v360_personal_access_tokens.py
+++ b/awx/main/migrations/0093_v360_personal_access_tokens.py
@@ -21,7 +21,19 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credentialtype',
name='kind',
- field=models.CharField(choices=[('ssh', 'Machine'), ('vault', 'Vault'), ('net', 'Network'), ('scm', 'Source Control'), ('cloud', 'Cloud'), ('token', 'Personal Access Token'), ('insights', 'Insights'), ('external', 'External')], max_length=32),
+ field=models.CharField(
+ choices=[
+ ('ssh', 'Machine'),
+ ('vault', 'Vault'),
+ ('net', 'Network'),
+ ('scm', 'Source Control'),
+ ('cloud', 'Cloud'),
+ ('token', 'Personal Access Token'),
+ ('insights', 'Insights'),
+ ('external', 'External'),
+ ],
+ max_length=32,
+ ),
),
migrations.RunPython(setup_tower_managed_defaults),
]
diff --git a/awx/main/migrations/0094_v360_webhook_mixin2.py b/awx/main/migrations/0094_v360_webhook_mixin2.py
index 8b9dd17f1a..03e81c30b6 100644
--- a/awx/main/migrations/0094_v360_webhook_mixin2.py
+++ b/awx/main/migrations/0094_v360_webhook_mixin2.py
@@ -14,7 +14,14 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='job',
name='webhook_credential',
- field=models.ForeignKey(blank=True, help_text='Personal Access Token for posting back the status to the service API', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='jobs', to='main.Credential'),
+ field=models.ForeignKey(
+ blank=True,
+ help_text='Personal Access Token for posting back the status to the service API',
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name='jobs',
+ to='main.Credential',
+ ),
),
migrations.AddField(
model_name='job',
@@ -24,12 +31,21 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='job',
name='webhook_service',
- field=models.CharField(blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16),
+ field=models.CharField(
+ blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16
+ ),
),
migrations.AddField(
model_name='workflowjob',
name='webhook_credential',
- field=models.ForeignKey(blank=True, help_text='Personal Access Token for posting back the status to the service API', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='workflowjobs', to='main.Credential'),
+ field=models.ForeignKey(
+ blank=True,
+ help_text='Personal Access Token for posting back the status to the service API',
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name='workflowjobs',
+ to='main.Credential',
+ ),
),
migrations.AddField(
model_name='workflowjob',
@@ -39,6 +55,8 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowjob',
name='webhook_service',
- field=models.CharField(blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16),
+ field=models.CharField(
+ blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16
+ ),
),
]
diff --git a/awx/main/migrations/0096_v360_container_groups.py b/awx/main/migrations/0096_v360_container_groups.py
index d5b5007cb1..23f0f2279c 100644
--- a/awx/main/migrations/0096_v360_container_groups.py
+++ b/awx/main/migrations/0096_v360_container_groups.py
@@ -22,7 +22,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='instancegroup',
name='credential',
- field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='instancegroups', to='main.Credential'),
+ field=models.ForeignKey(
+ blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='instancegroups', to='main.Credential'
+ ),
),
migrations.AddField(
model_name='instancegroup',
@@ -32,7 +34,20 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credentialtype',
name='kind',
- field=models.CharField(choices=[('ssh', 'Machine'), ('vault', 'Vault'), ('net', 'Network'), ('scm', 'Source Control'), ('cloud', 'Cloud'), ('token', 'Personal Access Token'), ('insights', 'Insights'), ('external', 'External'), ('kubernetes', 'Kubernetes')], max_length=32),
+ field=models.CharField(
+ choices=[
+ ('ssh', 'Machine'),
+ ('vault', 'Vault'),
+ ('net', 'Network'),
+ ('scm', 'Source Control'),
+ ('cloud', 'Cloud'),
+ ('token', 'Personal Access Token'),
+ ('insights', 'Insights'),
+ ('external', 'External'),
+ ('kubernetes', 'Kubernetes'),
+ ],
+ max_length=32,
+ ),
),
- migrations.RunPython(create_new_credential_types)
+ migrations.RunPython(create_new_credential_types),
]
diff --git a/awx/main/migrations/0097_v360_workflowapproval_approved_or_denied_by.py b/awx/main/migrations/0097_v360_workflowapproval_approved_or_denied_by.py
index 84bf80c7f6..b48c23fde1 100644
--- a/awx/main/migrations/0097_v360_workflowapproval_approved_or_denied_by.py
+++ b/awx/main/migrations/0097_v360_workflowapproval_approved_or_denied_by.py
@@ -16,6 +16,13 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowapproval',
name='approved_or_denied_by',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'workflowapproval', 'model_name': 'workflowapproval', 'app_label': 'main'}(class)s_approved+", to=settings.AUTH_USER_MODEL),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'workflowapproval', 'model_name': 'workflowapproval', 'app_label': 'main'}(class)s_approved+",
+ to=settings.AUTH_USER_MODEL,
+ ),
),
]
diff --git a/awx/main/migrations/0098_v360_rename_cyberark_aim_credential_type.py b/awx/main/migrations/0098_v360_rename_cyberark_aim_credential_type.py
index 0bd03b94ba..de2b3dc000 100644
--- a/awx/main/migrations/0098_v360_rename_cyberark_aim_credential_type.py
+++ b/awx/main/migrations/0098_v360_rename_cyberark_aim_credential_type.py
@@ -6,17 +6,11 @@ from awx.main.models import CredentialType
def update_cyberark_aim_name(apps, schema_editor):
CredentialType.setup_tower_managed_defaults()
- aim_types = apps.get_model('main', 'CredentialType').objects.filter(
- namespace='aim'
- ).order_by('id')
+ aim_types = apps.get_model('main', 'CredentialType').objects.filter(namespace='aim').order_by('id')
if aim_types.count() == 2:
original, renamed = aim_types.all()
- apps.get_model('main', 'Credential').objects.filter(
- credential_type_id=original.id
- ).update(
- credential_type_id=renamed.id
- )
+ apps.get_model('main', 'Credential').objects.filter(credential_type_id=original.id).update(credential_type_id=renamed.id)
original.delete()
@@ -26,6 +20,4 @@ class Migration(migrations.Migration):
('main', '0097_v360_workflowapproval_approved_or_denied_by'),
]
- operations = [
- migrations.RunPython(update_cyberark_aim_name)
- ]
+ operations = [migrations.RunPython(update_cyberark_aim_name)]
diff --git a/awx/main/migrations/0101_v370_generate_new_uuids_for_iso_nodes.py b/awx/main/migrations/0101_v370_generate_new_uuids_for_iso_nodes.py
index 6db5673419..a14399d30e 100644
--- a/awx/main/migrations/0101_v370_generate_new_uuids_for_iso_nodes.py
+++ b/awx/main/migrations/0101_v370_generate_new_uuids_for_iso_nodes.py
@@ -21,6 +21,4 @@ class Migration(migrations.Migration):
('main', '0100_v370_projectupdate_job_tags'),
]
- operations = [
- migrations.RunPython(_generate_new_uuid_for_iso_nodes)
- ]
+ operations = [migrations.RunPython(_generate_new_uuid_for_iso_nodes)]
diff --git a/awx/main/migrations/0107_v370_workflow_convergence_api_toggle.py b/awx/main/migrations/0107_v370_workflow_convergence_api_toggle.py
index ec22305f03..964f6e3e80 100644
--- a/awx/main/migrations/0107_v370_workflow_convergence_api_toggle.py
+++ b/awx/main/migrations/0107_v370_workflow_convergence_api_toggle.py
@@ -13,11 +13,15 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowjobnode',
name='all_parents_must_converge',
- field=models.BooleanField(default=False, help_text='If enabled then the node will only run if all of the parent nodes have met the criteria to reach this node'),
+ field=models.BooleanField(
+ default=False, help_text='If enabled then the node will only run if all of the parent nodes have met the criteria to reach this node'
+ ),
),
migrations.AddField(
model_name='workflowjobtemplatenode',
name='all_parents_must_converge',
- field=models.BooleanField(default=False, help_text='If enabled then the node will only run if all of the parent nodes have met the criteria to reach this node'),
+ field=models.BooleanField(
+ default=False, help_text='If enabled then the node will only run if all of the parent nodes have met the criteria to reach this node'
+ ),
),
]
diff --git a/awx/main/migrations/0108_v370_unifiedjob_dependencies_processed.py b/awx/main/migrations/0108_v370_unifiedjob_dependencies_processed.py
index 6c10b11083..7a58d77be1 100644
--- a/awx/main/migrations/0108_v370_unifiedjob_dependencies_processed.py
+++ b/awx/main/migrations/0108_v370_unifiedjob_dependencies_processed.py
@@ -13,6 +13,8 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='unifiedjob',
name='dependencies_processed',
- field=models.BooleanField(default=False, editable=False, help_text='If True, the task manager has already processed potential dependencies for this job.'),
+ field=models.BooleanField(
+ default=False, editable=False, help_text='If True, the task manager has already processed potential dependencies for this job.'
+ ),
),
]
diff --git a/awx/main/migrations/0109_v370_job_template_organization_field.py b/awx/main/migrations/0109_v370_job_template_organization_field.py
index 505538594a..46544b8b3c 100644
--- a/awx/main/migrations/0109_v370_job_template_organization_field.py
+++ b/awx/main/migrations/0109_v370_job_template_organization_field.py
@@ -6,9 +6,12 @@ from django.db import migrations, models
import django.db.models.deletion
from awx.main.migrations._rbac import (
- rebuild_role_parentage, rebuild_role_hierarchy,
- migrate_ujt_organization, migrate_ujt_organization_backward,
- restore_inventory_admins, restore_inventory_admins_backward
+ rebuild_role_parentage,
+ rebuild_role_hierarchy,
+ migrate_ujt_organization,
+ migrate_ujt_organization_backward,
+ restore_inventory_admins,
+ restore_inventory_admins_backward,
)
@@ -29,12 +32,26 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='unifiedjob',
name='tmp_organization',
- field=models.ForeignKey(blank=True, help_text='The organization used to determine access to this unified job.', null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, related_name='unifiedjobs', to='main.Organization'),
+ field=models.ForeignKey(
+ blank=True,
+ help_text='The organization used to determine access to this unified job.',
+ null=True,
+ on_delete=awx.main.utils.polymorphic.SET_NULL,
+ related_name='unifiedjobs',
+ to='main.Organization',
+ ),
),
migrations.AddField(
model_name='unifiedjobtemplate',
name='tmp_organization',
- field=models.ForeignKey(blank=True, help_text='The organization used to determine access to this template.', null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, related_name='unifiedjobtemplates', to='main.Organization'),
+ field=models.ForeignKey(
+ blank=True,
+ help_text='The organization used to determine access to this template.',
+ null=True,
+ on_delete=awx.main.utils.polymorphic.SET_NULL,
+ related_name='unifiedjobtemplates',
+ to='main.Organization',
+ ),
),
# while new and old fields exist, copy the organization fields
migrations.RunPython(migrate_ujt_organization, migrate_ujt_organization_backward),
@@ -62,17 +79,38 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='jobtemplate',
name='admin_role',
- field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['organization.job_template_admin_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ editable=False,
+ null='True',
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_role=['organization.job_template_admin_role'],
+ related_name='+',
+ to='main.Role',
+ ),
),
migrations.AlterField(
model_name='jobtemplate',
name='execute_role',
- field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role', 'organization.execute_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ editable=False,
+ null='True',
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_role=['admin_role', 'organization.execute_role'],
+ related_name='+',
+ to='main.Role',
+ ),
),
migrations.AlterField(
model_name='jobtemplate',
name='read_role',
- field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ editable=False,
+ null='True',
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_role=['organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
+ related_name='+',
+ to='main.Role',
+ ),
),
# Re-compute the role parents and ancestors caching
migrations.RunPython(rebuild_jt_parents, migrations.RunPython.noop),
diff --git a/awx/main/migrations/0112_v370_workflow_node_identifier.py b/awx/main/migrations/0112_v370_workflow_node_identifier.py
index dff2a348b3..c10791898a 100644
--- a/awx/main/migrations/0112_v370_workflow_node_identifier.py
+++ b/awx/main/migrations/0112_v370_workflow_node_identifier.py
@@ -29,18 +29,29 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='workflowjobnode',
name='identifier',
- field=models.CharField(blank=True, help_text='An identifier corresponding to the workflow job template node that this node was created from.', max_length=512),
+ field=models.CharField(
+ blank=True, help_text='An identifier corresponding to the workflow job template node that this node was created from.', max_length=512
+ ),
),
migrations.AddField(
model_name='workflowjobtemplatenode',
name='identifier',
- field=models.CharField(blank=True, null=True, help_text='An identifier for this node that is unique within its workflow. It is copied to workflow job nodes corresponding to this node.', max_length=512),
+ field=models.CharField(
+ blank=True,
+ null=True,
+ help_text='An identifier for this node that is unique within its workflow. It is copied to workflow job nodes corresponding to this node.',
+ max_length=512,
+ ),
),
migrations.RunPython(create_uuid, migrations.RunPython.noop), # this fixes the uuid4 issue
migrations.AlterField(
model_name='workflowjobtemplatenode',
name='identifier',
- field=models.CharField(default=uuid.uuid4, help_text='An identifier for this node that is unique within its workflow. It is copied to workflow job nodes corresponding to this node.', max_length=512),
+ field=models.CharField(
+ default=uuid.uuid4,
+ help_text='An identifier for this node that is unique within its workflow. It is copied to workflow job nodes corresponding to this node.',
+ max_length=512,
+ ),
),
migrations.AlterUniqueTogether(
name='workflowjobtemplatenode',
diff --git a/awx/main/migrations/0113_v370_event_bigint.py b/awx/main/migrations/0113_v370_event_bigint.py
index e8b5af664f..214e5e4e28 100644
--- a/awx/main/migrations/0113_v370_event_bigint.py
+++ b/awx/main/migrations/0113_v370_event_bigint.py
@@ -19,37 +19,22 @@ def migrate_event_data(apps, schema_editor):
# *not* immediately show up, but will be repopulated over time progressively
# the trade-off here is not having to wait hours for the full data migration
# before you can start and run AWX again (including new playbook runs)
- for tblname in (
- 'main_jobevent', 'main_inventoryupdateevent',
- 'main_projectupdateevent', 'main_adhoccommandevent',
- 'main_systemjobevent'
- ):
+ for tblname in ('main_jobevent', 'main_inventoryupdateevent', 'main_projectupdateevent', 'main_adhoccommandevent', 'main_systemjobevent'):
with connection.cursor() as cursor:
# rename the current event table
- cursor.execute(
- f'ALTER TABLE {tblname} RENAME TO _old_{tblname};'
- )
+ cursor.execute(f'ALTER TABLE {tblname} RENAME TO _old_{tblname};')
# create a *new* table with the same schema
- cursor.execute(
- f'CREATE TABLE {tblname} (LIKE _old_{tblname} INCLUDING ALL);'
- )
+ cursor.execute(f'CREATE TABLE {tblname} (LIKE _old_{tblname} INCLUDING ALL);')
# alter the *new* table so that the primary key is a big int
- cursor.execute(
- f'ALTER TABLE {tblname} ALTER COLUMN id TYPE bigint USING id::bigint;'
- )
+ cursor.execute(f'ALTER TABLE {tblname} ALTER COLUMN id TYPE bigint USING id::bigint;')
# recreate counter for the new table's primary key to
# start where the *old* table left off (we have to do this because the
# counter changed from an int to a bigint)
cursor.execute(f'DROP SEQUENCE IF EXISTS "{tblname}_id_seq" CASCADE;')
cursor.execute(f'CREATE SEQUENCE "{tblname}_id_seq";')
- cursor.execute(
- f'ALTER TABLE "{tblname}" ALTER COLUMN "id" '
- f"SET DEFAULT nextval('{tblname}_id_seq');"
- )
- cursor.execute(
- f"SELECT setval('{tblname}_id_seq', (SELECT MAX(id) FROM _old_{tblname}), true);"
- )
+ cursor.execute(f'ALTER TABLE "{tblname}" ALTER COLUMN "id" ' f"SET DEFAULT nextval('{tblname}_id_seq');")
+ cursor.execute(f"SELECT setval('{tblname}_id_seq', (SELECT MAX(id) FROM _old_{tblname}), true);")
# replace the BTREE index on main_jobevent.job_id with
# a BRIN index to drastically improve per-UJ lookup performance
@@ -65,7 +50,9 @@ def migrate_event_data(apps, schema_editor):
cursor.execute(f"SELECT indexname, indexdef FROM pg_indexes WHERE tablename='_old_{tblname}' AND indexname != '{tblname}_pkey';")
indexes = cursor.fetchall()
- cursor.execute(f"SELECT conname, contype, pg_catalog.pg_get_constraintdef(r.oid, true) as condef FROM pg_catalog.pg_constraint r WHERE r.conrelid = '_old_{tblname}'::regclass AND conname != '{tblname}_pkey';")
+ cursor.execute(
+ f"SELECT conname, contype, pg_catalog.pg_get_constraintdef(r.oid, true) as condef FROM pg_catalog.pg_constraint r WHERE r.conrelid = '_old_{tblname}'::regclass AND conname != '{tblname}_pkey';"
+ )
constraints = cursor.fetchall()
for indexname, indexdef in indexes:
@@ -75,7 +62,6 @@ def migrate_event_data(apps, schema_editor):
class FakeAlterField(migrations.AlterField):
-
def database_forwards(self, *args):
# this is intentionally left blank, because we're
# going to accomplish the migration with some custom raw SQL
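
FakeAlterField above is the state-only half of this migration: migrate_event_data performs the real schema change with raw SQL, so the operation's database_forwards is deliberately a no-op and only Django's in-memory model state advances. A minimal sketch of the pattern (the commented usage is hypothetical; Django's built-in migrations.SeparateDatabaseAndState achieves a similar split):

from django.db import migrations

class StateOnlyAlterField(migrations.AlterField):
    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        # intentionally blank: the column was already converted to
        # bigint by raw SQL, so no database work should happen here
        pass

# hypothetical use inside Migration.operations:
# StateOnlyAlterField('jobevent', 'id', field=models.BigAutoField(primary_key=True)),
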
diff --git a/awx/main/migrations/0114_v370_remove_deprecated_manual_inventory_sources.py b/awx/main/migrations/0114_v370_remove_deprecated_manual_inventory_sources.py
index f3b796e0ae..501b2d190d 100644
--- a/awx/main/migrations/0114_v370_remove_deprecated_manual_inventory_sources.py
+++ b/awx/main/migrations/0114_v370_remove_deprecated_manual_inventory_sources.py
@@ -4,10 +4,10 @@ from django.db import migrations, models
def remove_manual_inventory_sources(apps, schema_editor):
- '''Previously we would automatically create inventory sources after
+ """Previously we would automatically create inventory sources after
Group creation and we would use the parent Group as our interface for the user.
During that process we would create an InventorySource that had a source of "manual".
- '''
+ """
InventoryUpdate = apps.get_model('main', 'InventoryUpdate')
InventoryUpdate.objects.filter(source='').delete()
InventorySource = apps.get_model('main', 'InventorySource')
@@ -29,11 +29,45 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='inventorysource',
name='source',
- field=models.CharField(choices=[('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('rhv', 'Red Hat Virtualization'), ('tower', 'Ansible Tower'), ('custom', 'Custom Script')], default=None, max_length=32),
+ field=models.CharField(
+ choices=[
+ ('file', 'File, Directory or Script'),
+ ('scm', 'Sourced from a Project'),
+ ('ec2', 'Amazon EC2'),
+ ('gce', 'Google Compute Engine'),
+ ('azure_rm', 'Microsoft Azure Resource Manager'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Red Hat Satellite 6'),
+ ('cloudforms', 'Red Hat CloudForms'),
+ ('openstack', 'OpenStack'),
+ ('rhv', 'Red Hat Virtualization'),
+ ('tower', 'Ansible Tower'),
+ ('custom', 'Custom Script'),
+ ],
+ default=None,
+ max_length=32,
+ ),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
- field=models.CharField(choices=[('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('rhv', 'Red Hat Virtualization'), ('tower', 'Ansible Tower'), ('custom', 'Custom Script')], default=None, max_length=32),
+ field=models.CharField(
+ choices=[
+ ('file', 'File, Directory or Script'),
+ ('scm', 'Sourced from a Project'),
+ ('ec2', 'Amazon EC2'),
+ ('gce', 'Google Compute Engine'),
+ ('azure_rm', 'Microsoft Azure Resource Manager'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Red Hat Satellite 6'),
+ ('cloudforms', 'Red Hat CloudForms'),
+ ('openstack', 'OpenStack'),
+ ('rhv', 'Red Hat Virtualization'),
+ ('tower', 'Ansible Tower'),
+ ('custom', 'Custom Script'),
+ ],
+ default=None,
+ max_length=32,
+ ),
),
]
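
remove_manual_inventory_sources above follows the standard data-migration shape: fetch historical models through apps.get_model (never a direct import, so the code matches the schema at this point in history) and pair the forward function with a no-op reverse so the migration can still be unapplied. A minimal sketch of that shape:

from django.db import migrations

def forwards(apps, schema_editor):
    # historical model, matching the schema at this migration's position
    InventoryUpdate = apps.get_model('main', 'InventoryUpdate')
    InventoryUpdate.objects.filter(source='').delete()

class Migration(migrations.Migration):
    dependencies = [('main', '0113_v370_event_bigint')]  # illustrative
    operations = [
        migrations.RunPython(forwards, migrations.RunPython.noop),
    ]
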
diff --git a/awx/main/migrations/0115_v370_schedule_set_null.py b/awx/main/migrations/0115_v370_schedule_set_null.py
index 10e5798d17..2bb681e3a1 100644
--- a/awx/main/migrations/0115_v370_schedule_set_null.py
+++ b/awx/main/migrations/0115_v370_schedule_set_null.py
@@ -19,6 +19,13 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='unifiedjobtemplate',
name='next_schedule',
- field=models.ForeignKey(default=None, editable=False, null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, related_name='unifiedjobtemplate_as_next_schedule+', to='main.Schedule'),
+ field=models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=awx.main.utils.polymorphic.SET_NULL,
+ related_name='unifiedjobtemplate_as_next_schedule+',
+ to='main.Schedule',
+ ),
),
]
diff --git a/awx/main/migrations/0116_v400_remove_hipchat_notifications.py b/awx/main/migrations/0116_v400_remove_hipchat_notifications.py
index e366436bdc..91444fcd41 100644
--- a/awx/main/migrations/0116_v400_remove_hipchat_notifications.py
+++ b/awx/main/migrations/0116_v400_remove_hipchat_notifications.py
@@ -4,9 +4,9 @@ from django.db import migrations, models
def remove_hipchat_notifications(apps, schema_editor):
- '''
+ """
HipChat notifications are no longer in service, remove any that are found.
- '''
+ """
Notification = apps.get_model('main', 'Notification')
Notification.objects.filter(notification_type='hipchat').delete()
NotificationTemplate = apps.get_model('main', 'NotificationTemplate')
@@ -24,11 +24,37 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='notification',
name='notification_type',
- field=models.CharField(choices=[('email', 'Email'), ('grafana', 'Grafana'), ('irc', 'IRC'), ('mattermost', 'Mattermost'), ('pagerduty', 'Pagerduty'), ('rocketchat', 'Rocket.Chat'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('webhook', 'Webhook')], max_length=32),
+ field=models.CharField(
+ choices=[
+ ('email', 'Email'),
+ ('grafana', 'Grafana'),
+ ('irc', 'IRC'),
+ ('mattermost', 'Mattermost'),
+ ('pagerduty', 'Pagerduty'),
+ ('rocketchat', 'Rocket.Chat'),
+ ('slack', 'Slack'),
+ ('twilio', 'Twilio'),
+ ('webhook', 'Webhook'),
+ ],
+ max_length=32,
+ ),
),
migrations.AlterField(
model_name='notificationtemplate',
name='notification_type',
- field=models.CharField(choices=[('email', 'Email'), ('grafana', 'Grafana'), ('irc', 'IRC'), ('mattermost', 'Mattermost'), ('pagerduty', 'Pagerduty'), ('rocketchat', 'Rocket.Chat'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('webhook', 'Webhook')], max_length=32),
+ field=models.CharField(
+ choices=[
+ ('email', 'Email'),
+ ('grafana', 'Grafana'),
+ ('irc', 'IRC'),
+ ('mattermost', 'Mattermost'),
+ ('pagerduty', 'Pagerduty'),
+ ('rocketchat', 'Rocket.Chat'),
+ ('slack', 'Slack'),
+ ('twilio', 'Twilio'),
+ ('webhook', 'Webhook'),
+ ],
+ max_length=32,
+ ),
),
]
diff --git a/awx/main/migrations/0117_v400_remove_cloudforms_inventory.py b/awx/main/migrations/0117_v400_remove_cloudforms_inventory.py
index 9a94c6b02b..58fa9d574e 100644
--- a/awx/main/migrations/0117_v400_remove_cloudforms_inventory.py
+++ b/awx/main/migrations/0117_v400_remove_cloudforms_inventory.py
@@ -15,11 +15,43 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='inventorysource',
name='source',
- field=models.CharField(choices=[('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('openstack', 'OpenStack'), ('rhv', 'Red Hat Virtualization'), ('tower', 'Ansible Tower'), ('custom', 'Custom Script')], default=None, max_length=32),
+ field=models.CharField(
+ choices=[
+ ('file', 'File, Directory or Script'),
+ ('scm', 'Sourced from a Project'),
+ ('ec2', 'Amazon EC2'),
+ ('gce', 'Google Compute Engine'),
+ ('azure_rm', 'Microsoft Azure Resource Manager'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Red Hat Satellite 6'),
+ ('openstack', 'OpenStack'),
+ ('rhv', 'Red Hat Virtualization'),
+ ('tower', 'Ansible Tower'),
+ ('custom', 'Custom Script'),
+ ],
+ default=None,
+ max_length=32,
+ ),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
- field=models.CharField(choices=[('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('openstack', 'OpenStack'), ('rhv', 'Red Hat Virtualization'), ('tower', 'Ansible Tower'), ('custom', 'Custom Script')], default=None, max_length=32),
+ field=models.CharField(
+ choices=[
+ ('file', 'File, Directory or Script'),
+ ('scm', 'Sourced from a Project'),
+ ('ec2', 'Amazon EC2'),
+ ('gce', 'Google Compute Engine'),
+ ('azure_rm', 'Microsoft Azure Resource Manager'),
+ ('vmware', 'VMware vCenter'),
+ ('satellite6', 'Red Hat Satellite 6'),
+ ('openstack', 'OpenStack'),
+ ('rhv', 'Red Hat Virtualization'),
+ ('tower', 'Ansible Tower'),
+ ('custom', 'Custom Script'),
+ ],
+ default=None,
+ max_length=32,
+ ),
),
]
diff --git a/awx/main/migrations/0118_add_remote_archive_scm_type.py b/awx/main/migrations/0118_add_remote_archive_scm_type.py
index 246ca4c823..ab2a913733 100644
--- a/awx/main/migrations/0118_add_remote_archive_scm_type.py
+++ b/awx/main/migrations/0118_add_remote_archive_scm_type.py
@@ -13,11 +13,39 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='project',
name='scm_type',
- field=models.CharField(blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights'), ('archive', 'Remote Archive')], default='', help_text='Specifies the source control system used to store the project.', max_length=8, verbose_name='SCM Type'),
+ field=models.CharField(
+ blank=True,
+ choices=[
+ ('', 'Manual'),
+ ('git', 'Git'),
+ ('hg', 'Mercurial'),
+ ('svn', 'Subversion'),
+ ('insights', 'Red Hat Insights'),
+ ('archive', 'Remote Archive'),
+ ],
+ default='',
+ help_text='Specifies the source control system used to store the project.',
+ max_length=8,
+ verbose_name='SCM Type',
+ ),
),
migrations.AlterField(
model_name='projectupdate',
name='scm_type',
- field=models.CharField(blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights'), ('archive', 'Remote Archive')], default='', help_text='Specifies the source control system used to store the project.', max_length=8, verbose_name='SCM Type'),
+ field=models.CharField(
+ blank=True,
+ choices=[
+ ('', 'Manual'),
+ ('git', 'Git'),
+ ('hg', 'Mercurial'),
+ ('svn', 'Subversion'),
+ ('insights', 'Red Hat Insights'),
+ ('archive', 'Remote Archive'),
+ ],
+ default='',
+ help_text='Specifies the source control system used to store the project.',
+ max_length=8,
+ verbose_name='SCM Type',
+ ),
),
]
diff --git a/awx/main/migrations/0119_inventory_plugins.py b/awx/main/migrations/0119_inventory_plugins.py
index 670fb7887b..392bab15b8 100644
--- a/awx/main/migrations/0119_inventory_plugins.py
+++ b/awx/main/migrations/0119_inventory_plugins.py
@@ -74,12 +74,20 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='inventorysource',
name='enabled_value',
- field=models.TextField(blank=True, default='', help_text='Only used when enabled_var is set. Value when the host is considered enabled. For example if enabled_var="status.power_state" and enabled_value="powered_on" with host variables:{ "status": { "power_state": "powered_on", "created": "2020-08-04T18:13:04+00:00", "healthy": true }, "name": "foobar", "ip_address": "192.168.2.1"} The host would be marked enabled. If power_state were any value other than powered_on then the host would be disabled when imported into Tower. If the key is not found then the host will be enabled'),
+ field=models.TextField(
+ blank=True,
+ default='',
+ help_text='Only used when enabled_var is set. Value when the host is considered enabled. For example if enabled_var="status.power_state" and enabled_value="powered_on" with host variables:{ "status": { "power_state": "powered_on", "created": "2020-08-04T18:13:04+00:00", "healthy": true }, "name": "foobar", "ip_address": "192.168.2.1"} The host would be marked enabled. If power_state were any value other than powered_on then the host would be disabled when imported into Tower. If the key is not found then the host will be enabled',
+ ),
),
migrations.AddField(
model_name='inventorysource',
name='enabled_var',
- field=models.TextField(blank=True, default='', help_text='Retrieve the enabled state from the given dict of host variables. The enabled variable may be specified as "foo.bar", in which case the lookup will traverse into nested dicts, equivalent to: from_dict.get("foo", {}).get("bar", default)'),
+ field=models.TextField(
+ blank=True,
+ default='',
+ help_text='Retrieve the enabled state from the given dict of host variables. The enabled variable may be specified as "foo.bar", in which case the lookup will traverse into nested dicts, equivalent to: from_dict.get("foo", {}).get("bar", default)',
+ ),
),
migrations.AddField(
model_name='inventorysource',
@@ -89,12 +97,20 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='inventoryupdate',
name='enabled_value',
- field=models.TextField(blank=True, default='', help_text='Only used when enabled_var is set. Value when the host is considered enabled. For example if enabled_var="status.power_state" and enabled_value="powered_on" with host variables:{ "status": { "power_state": "powered_on", "created": "2020-08-04T18:13:04+00:00", "healthy": true }, "name": "foobar", "ip_address": "192.168.2.1"} The host would be marked enabled. If power_state were any value other than powered_on then the host would be disabled when imported into Tower. If the key is not found then the host will be enabled'),
+ field=models.TextField(
+ blank=True,
+ default='',
+ help_text='Only used when enabled_var is set. Value when the host is considered enabled. For example if enabled_var="status.power_state" and enabled_value="powered_on" with host variables:{ "status": { "power_state": "powered_on", "created": "2020-08-04T18:13:04+00:00", "healthy": true }, "name": "foobar", "ip_address": "192.168.2.1"} The host would be marked enabled. If power_state were any value other than powered_on then the host would be disabled when imported into Tower. If the key is not found then the host will be enabled',
+ ),
),
migrations.AddField(
model_name='inventoryupdate',
name='enabled_var',
- field=models.TextField(blank=True, default='', help_text='Retrieve the enabled state from the given dict of host variables. The enabled variable may be specified as "foo.bar", in which case the lookup will traverse into nested dicts, equivalent to: from_dict.get("foo", {}).get("bar", default)'),
+ field=models.TextField(
+ blank=True,
+ default='',
+ help_text='Retrieve the enabled state from the given dict of host variables. The enabled variable may be specified as "foo.bar", in which case the lookup will traverse into nested dicts, equivalent to: from_dict.get("foo", {}).get("bar", default)',
+ ),
),
migrations.AddField(
model_name='inventoryupdate',
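
The enabled_var help text above describes a dotted-path lookup into the host's variables; the same traversal idiom appears later in this diff in _get_instance_id. An illustrative re-implementation:

def lookup_enabled(host_vars, enabled_var, default=None):
    # "status.power_state" walks nested dicts, equivalent to
    # host_vars.get("status", {}).get("power_state", default)
    value = host_vars
    for key in enabled_var.split('.'):
        if not hasattr(value, 'get'):
            return default
        value = value.get(key, default)
    return value

host = {"status": {"power_state": "powered_on"}, "name": "foobar"}
assert lookup_enabled(host, "status.power_state") == "powered_on"
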
diff --git a/awx/main/migrations/0120_galaxy_credentials.py b/awx/main/migrations/0120_galaxy_credentials.py
index a94c22e30b..effff89a14 100644
--- a/awx/main/migrations/0120_galaxy_credentials.py
+++ b/awx/main/migrations/0120_galaxy_credentials.py
@@ -26,7 +26,21 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credentialtype',
name='kind',
- field=models.CharField(choices=[('ssh', 'Machine'), ('vault', 'Vault'), ('net', 'Network'), ('scm', 'Source Control'), ('cloud', 'Cloud'), ('token', 'Personal Access Token'), ('insights', 'Insights'), ('external', 'External'), ('kubernetes', 'Kubernetes'), ('galaxy', 'Galaxy/Automation Hub')], max_length=32),
+ field=models.CharField(
+ choices=[
+ ('ssh', 'Machine'),
+ ('vault', 'Vault'),
+ ('net', 'Network'),
+ ('scm', 'Source Control'),
+ ('cloud', 'Cloud'),
+ ('token', 'Personal Access Token'),
+ ('insights', 'Insights'),
+ ('external', 'External'),
+ ('kubernetes', 'Kubernetes'),
+ ('galaxy', 'Galaxy/Automation Hub'),
+ ],
+ max_length=32,
+ ),
),
migrations.CreateModel(
name='OrganizationGalaxyCredentialMembership',
@@ -40,12 +54,14 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='organization',
name='galaxy_credentials',
- field=awx.main.fields.OrderedManyToManyField(blank=True, related_name='organization_galaxy_credentials', through='main.OrganizationGalaxyCredentialMembership', to='main.Credential'),
+ field=awx.main.fields.OrderedManyToManyField(
+ blank=True, related_name='organization_galaxy_credentials', through='main.OrganizationGalaxyCredentialMembership', to='main.Credential'
+ ),
),
migrations.AddField(
model_name='credential',
name='managed_by_tower',
field=models.BooleanField(default=False, editable=False),
),
- migrations.RunPython(galaxy.migrate_galaxy_settings)
+ migrations.RunPython(galaxy.migrate_galaxy_settings),
]
diff --git a/awx/main/migrations/0123_drop_hg_support.py b/awx/main/migrations/0123_drop_hg_support.py
index 089c6bba6f..2d4860dac9 100644
--- a/awx/main/migrations/0123_drop_hg_support.py
+++ b/awx/main/migrations/0123_drop_hg_support.py
@@ -13,11 +13,25 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='project',
name='scm_type',
- field=models.CharField(blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights'), ('archive', 'Remote Archive')], default='', help_text='Specifies the source control system used to store the project.', max_length=8, verbose_name='SCM Type'),
+ field=models.CharField(
+ blank=True,
+ choices=[('', 'Manual'), ('git', 'Git'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights'), ('archive', 'Remote Archive')],
+ default='',
+ help_text='Specifies the source control system used to store the project.',
+ max_length=8,
+ verbose_name='SCM Type',
+ ),
),
migrations.AlterField(
model_name='projectupdate',
name='scm_type',
- field=models.CharField(blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights'), ('archive', 'Remote Archive')], default='', help_text='Specifies the source control system used to store the project.', max_length=8, verbose_name='SCM Type'),
+ field=models.CharField(
+ blank=True,
+ choices=[('', 'Manual'), ('git', 'Git'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights'), ('archive', 'Remote Archive')],
+ default='',
+ help_text='Specifies the source control system used to store the project.',
+ max_length=8,
+ verbose_name='SCM Type',
+ ),
),
]
diff --git a/awx/main/migrations/0124_execution_environments.py b/awx/main/migrations/0124_execution_environments.py
index 18aad9a174..6679b59f0e 100644
--- a/awx/main/migrations/0124_execution_environments.py
+++ b/awx/main/migrations/0124_execution_environments.py
@@ -25,11 +25,57 @@ class Migration(migrations.Migration):
('description', models.TextField(blank=True, default='')),
('image', models.CharField(help_text='The registry location where the container is stored.', max_length=1024, verbose_name='image location')),
('managed_by_tower', models.BooleanField(default=False, editable=False)),
- ('created_by', models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'executionenvironment', 'model_name': 'executionenvironment', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL)),
- ('credential', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='executionenvironments', to='main.Credential')),
- ('modified_by', models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'executionenvironment', 'model_name': 'executionenvironment', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL)),
- ('organization', models.ForeignKey(blank=True, default=None, help_text='The organization used to determine access to this execution environment.', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='executionenvironments', to='main.Organization')),
- ('tags', taggit.managers.TaggableManager(blank=True, help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags')),
+ (
+ 'created_by',
+ models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'executionenvironment', 'model_name': 'executionenvironment', 'app_label': 'main'}(class)s_created+",
+ to=settings.AUTH_USER_MODEL,
+ ),
+ ),
+ (
+ 'credential',
+ models.ForeignKey(
+ blank=True,
+ default=None,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name='executionenvironments',
+ to='main.Credential',
+ ),
+ ),
+ (
+ 'modified_by',
+ models.ForeignKey(
+ default=None,
+ editable=False,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="{'class': 'executionenvironment', 'model_name': 'executionenvironment', 'app_label': 'main'}(class)s_modified+",
+ to=settings.AUTH_USER_MODEL,
+ ),
+ ),
+ (
+ 'organization',
+ models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='The organization used to determine access to this execution environment.',
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name='executionenvironments',
+ to='main.Organization',
+ ),
+ ),
+ (
+ 'tags',
+ taggit.managers.TaggableManager(
+ blank=True, help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags'
+ ),
+ ),
],
options={
'ordering': (django.db.models.expressions.OrderBy(django.db.models.expressions.F('organization_id'), nulls_first=True), 'image'),
@@ -44,16 +90,40 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='organization',
name='default_environment',
- field=models.ForeignKey(blank=True, default=None, help_text='The default execution environment for jobs run by this organization.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='main.ExecutionEnvironment'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='The default execution environment for jobs run by this organization.',
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name='+',
+ to='main.ExecutionEnvironment',
+ ),
),
migrations.AddField(
model_name='unifiedjob',
name='execution_environment',
- field=models.ForeignKey(blank=True, default=None, help_text='The container image to be used for execution.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='unifiedjobs', to='main.ExecutionEnvironment'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='The container image to be used for execution.',
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name='unifiedjobs',
+ to='main.ExecutionEnvironment',
+ ),
),
migrations.AddField(
model_name='unifiedjobtemplate',
name='execution_environment',
- field=models.ForeignKey(blank=True, default=None, help_text='The container image to be used for execution.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='unifiedjobtemplates', to='main.ExecutionEnvironment'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='The container image to be used for execution.',
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name='unifiedjobtemplates',
+ to='main.ExecutionEnvironment',
+ ),
),
]
diff --git a/awx/main/migrations/0125_more_ee_modeling_changes.py b/awx/main/migrations/0125_more_ee_modeling_changes.py
index be999cbb79..629813a55f 100644
--- a/awx/main/migrations/0125_more_ee_modeling_changes.py
+++ b/awx/main/migrations/0125_more_ee_modeling_changes.py
@@ -26,18 +26,43 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='organization',
name='execution_environment_admin_role',
- field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+ ),
preserve_default='True',
),
migrations.AddField(
model_name='project',
name='default_environment',
- field=models.ForeignKey(blank=True, default=None, help_text='The default execution environment for jobs run using this project.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='main.ExecutionEnvironment'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='The default execution environment for jobs run using this project.',
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name='+',
+ to='main.ExecutionEnvironment',
+ ),
),
migrations.AlterField(
model_name='credentialtype',
name='kind',
- field=models.CharField(choices=[('ssh', 'Machine'), ('vault', 'Vault'), ('net', 'Network'), ('scm', 'Source Control'), ('cloud', 'Cloud'), ('registry', 'Container Registry'), ('token', 'Personal Access Token'), ('insights', 'Insights'), ('external', 'External'), ('kubernetes', 'Kubernetes'), ('galaxy', 'Galaxy/Automation Hub')], max_length=32),
+ field=models.CharField(
+ choices=[
+ ('ssh', 'Machine'),
+ ('vault', 'Vault'),
+ ('net', 'Network'),
+ ('scm', 'Source Control'),
+ ('cloud', 'Cloud'),
+ ('registry', 'Container Registry'),
+ ('token', 'Personal Access Token'),
+ ('insights', 'Insights'),
+ ('external', 'External'),
+ ('kubernetes', 'Kubernetes'),
+ ('galaxy', 'Galaxy/Automation Hub'),
+ ],
+ max_length=32,
+ ),
),
migrations.AlterUniqueTogether(
name='executionenvironment',
diff --git a/awx/main/migrations/0126_executionenvironment_container_options.py b/awx/main/migrations/0126_executionenvironment_container_options.py
index d26fcb9298..8f97b8abc6 100644
--- a/awx/main/migrations/0126_executionenvironment_container_options.py
+++ b/awx/main/migrations/0126_executionenvironment_container_options.py
@@ -13,6 +13,16 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='executionenvironment',
name='pull',
- field=models.CharField(choices=[('always', 'Always pull container before running.'), ('missing', 'No pull option has been selected.'), ('never', 'Never pull container before running.')], blank=True, default='', help_text='Pull image before running?', max_length=16),
+ field=models.CharField(
+ choices=[
+ ('always', 'Always pull container before running.'),
+ ('missing', 'No pull option has been selected.'),
+ ('never', 'Never pull container before running.'),
+ ],
+ blank=True,
+ default='',
+ help_text='Pull image before running?',
+ max_length=16,
+ ),
),
]
diff --git a/awx/main/migrations/0127_reset_pod_spec_override.py b/awx/main/migrations/0127_reset_pod_spec_override.py
index c3ebe0b504..82364579f4 100644
--- a/awx/main/migrations/0127_reset_pod_spec_override.py
+++ b/awx/main/migrations/0127_reset_pod_spec_override.py
@@ -2,6 +2,7 @@
from django.db import migrations
+
def reset_pod_specs(apps, schema_editor):
InstanceGroup = apps.get_model('main', 'InstanceGroup')
InstanceGroup.objects.update(pod_spec_override="")
@@ -13,6 +14,4 @@ class Migration(migrations.Migration):
('main', '0126_executionenvironment_container_options'),
]
- operations = [
- migrations.RunPython(reset_pod_specs)
- ]
+ operations = [migrations.RunPython(reset_pod_specs)]
diff --git a/awx/main/migrations/0128_organiaztion_read_roles_ee_admin.py b/awx/main/migrations/0128_organiaztion_read_roles_ee_admin.py
index f03a4e0ba2..571bc30bb9 100644
--- a/awx/main/migrations/0128_organiaztion_read_roles_ee_admin.py
+++ b/awx/main/migrations/0128_organiaztion_read_roles_ee_admin.py
@@ -15,6 +15,25 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='organization',
name='read_role',
- field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['member_role', 'auditor_role', 'execute_role', 'project_admin_role', 'inventory_admin_role', 'workflow_admin_role', 'notification_admin_role', 'credential_admin_role', 'job_template_admin_role', 'approval_role', 'execution_environment_admin_role'], related_name='+', to='main.Role'),
+ field=awx.main.fields.ImplicitRoleField(
+ editable=False,
+ null='True',
+ on_delete=django.db.models.deletion.CASCADE,
+ parent_role=[
+ 'member_role',
+ 'auditor_role',
+ 'execute_role',
+ 'project_admin_role',
+ 'inventory_admin_role',
+ 'workflow_admin_role',
+ 'notification_admin_role',
+ 'credential_admin_role',
+ 'job_template_admin_role',
+ 'approval_role',
+ 'execution_environment_admin_role',
+ ],
+ related_name='+',
+ to='main.Role',
+ ),
),
]
diff --git a/awx/main/migrations/0129_unifiedjob_installed_collections.py b/awx/main/migrations/0129_unifiedjob_installed_collections.py
index 897708a631..d20c9068d0 100644
--- a/awx/main/migrations/0129_unifiedjob_installed_collections.py
+++ b/awx/main/migrations/0129_unifiedjob_installed_collections.py
@@ -14,6 +14,8 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='unifiedjob',
name='installed_collections',
- field=awx.main.fields.JSONBField(blank=True, default=dict, editable=False, help_text='The Collections names and versions installed in the execution environment.'),
+ field=awx.main.fields.JSONBField(
+ blank=True, default=dict, editable=False, help_text='The Collections names and versions installed in the execution environment.'
+ ),
),
]
diff --git a/awx/main/migrations/0130_ee_polymorphic_set_null.py b/awx/main/migrations/0130_ee_polymorphic_set_null.py
index a9a0b63715..a33e22a017 100644
--- a/awx/main/migrations/0130_ee_polymorphic_set_null.py
+++ b/awx/main/migrations/0130_ee_polymorphic_set_null.py
@@ -14,21 +14,53 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='organization',
name='default_environment',
- field=models.ForeignKey(blank=True, default=None, help_text='The default execution environment for jobs run by this organization.', null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, related_name='+', to='main.ExecutionEnvironment'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='The default execution environment for jobs run by this organization.',
+ null=True,
+ on_delete=awx.main.utils.polymorphic.SET_NULL,
+ related_name='+',
+ to='main.ExecutionEnvironment',
+ ),
),
migrations.AlterField(
model_name='project',
name='default_environment',
- field=models.ForeignKey(blank=True, default=None, help_text='The default execution environment for jobs run using this project.', null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, related_name='+', to='main.ExecutionEnvironment'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='The default execution environment for jobs run using this project.',
+ null=True,
+ on_delete=awx.main.utils.polymorphic.SET_NULL,
+ related_name='+',
+ to='main.ExecutionEnvironment',
+ ),
),
migrations.AlterField(
model_name='unifiedjob',
name='execution_environment',
- field=models.ForeignKey(blank=True, default=None, help_text='The container image to be used for execution.', null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, related_name='unifiedjobs', to='main.ExecutionEnvironment'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='The container image to be used for execution.',
+ null=True,
+ on_delete=awx.main.utils.polymorphic.SET_NULL,
+ related_name='unifiedjobs',
+ to='main.ExecutionEnvironment',
+ ),
),
migrations.AlterField(
model_name='unifiedjobtemplate',
name='execution_environment',
- field=models.ForeignKey(blank=True, default=None, help_text='The container image to be used for execution.', null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, related_name='unifiedjobtemplates', to='main.ExecutionEnvironment'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='The container image to be used for execution.',
+ null=True,
+ on_delete=awx.main.utils.polymorphic.SET_NULL,
+ related_name='unifiedjobtemplates',
+ to='main.ExecutionEnvironment',
+ ),
),
]
diff --git a/awx/main/migrations/0131_undo_org_polymorphic_ee.py b/awx/main/migrations/0131_undo_org_polymorphic_ee.py
index 0805992243..2a3a805bc1 100644
--- a/awx/main/migrations/0131_undo_org_polymorphic_ee.py
+++ b/awx/main/migrations/0131_undo_org_polymorphic_ee.py
@@ -14,6 +14,14 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='organization',
name='default_environment',
- field=models.ForeignKey(blank=True, default=None, help_text='The default execution environment for jobs run by this organization.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='main.ExecutionEnvironment'),
+ field=models.ForeignKey(
+ blank=True,
+ default=None,
+ help_text='The default execution environment for jobs run by this organization.',
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name='+',
+ to='main.ExecutionEnvironment',
+ ),
),
]
diff --git a/awx/main/migrations/0134_unifiedjob_ansible_version.py b/awx/main/migrations/0134_unifiedjob_ansible_version.py
index af5f226f24..6d92dc526b 100644
--- a/awx/main/migrations/0134_unifiedjob_ansible_version.py
+++ b/awx/main/migrations/0134_unifiedjob_ansible_version.py
@@ -13,6 +13,8 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='unifiedjob',
name='ansible_version',
- field=models.CharField(blank=True, default='', editable=False, help_text='The version of Ansible Core installed in the execution environment.', max_length=255),
+ field=models.CharField(
+ blank=True, default='', editable=False, help_text='The version of Ansible Core installed in the execution environment.', max_length=255
+ ),
),
]
diff --git a/awx/main/migrations/__init__.py b/awx/main/migrations/__init__.py
index 2ea54e7880..c3344a1f0e 100644
--- a/awx/main/migrations/__init__.py
+++ b/awx/main/migrations/__init__.py
@@ -5,8 +5,8 @@ from django.db.migrations import Migration
class ActivityStreamDisabledMigration(Migration):
-
def apply(self, project_state, schema_editor, collect_sql=False):
from awx.main.signals import disable_activity_stream
+
with disable_activity_stream():
return Migration.apply(self, project_state, schema_editor, collect_sql)
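
This override suppresses activity-stream signal handlers for every migration that subclasses it; the import happens inside apply() to dodge a circular import at module load time. The general shape, with a stand-in context manager:

from contextlib import contextmanager
from django.db.migrations import Migration

@contextmanager
def signals_disabled():
    # stand-in for awx.main.signals.disable_activity_stream; a real
    # implementation would flag or disconnect the relevant receivers
    yield

class QuietMigration(Migration):
    def apply(self, project_state, schema_editor, collect_sql=False):
        with signals_disabled():
            return super().apply(project_state, schema_editor, collect_sql)
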
diff --git a/awx/main/migrations/_create_system_jobs.py b/awx/main/migrations/_create_system_jobs.py
index 6294fa7799..78f7dd689d 100644
--- a/awx/main/migrations/_create_system_jobs.py
+++ b/awx/main/migrations/_create_system_jobs.py
@@ -40,7 +40,7 @@ def create_clearsessions_jt(apps, schema_editor):
name='Cleanup Expired Sessions',
rrule='DTSTART:%s RRULE:FREQ=WEEKLY;INTERVAL=1;COUNT=1' % schedule_time,
description='Cleans out expired browser sessions',
- enabled=True,
+ enabled=True,
created=now_dt,
modified=now_dt,
extra_data={},
diff --git a/awx/main/migrations/_credentialtypes.py b/awx/main/migrations/_credentialtypes.py
index 98eb62c5c4..75a825b239 100644
--- a/awx/main/migrations/_credentialtypes.py
+++ b/awx/main/migrations/_credentialtypes.py
@@ -7,17 +7,16 @@ DEPRECATED_CRED_KIND = {
'kind': 'cloud',
'name': 'Rackspace',
'inputs': {
- 'fields': [{
- 'id': 'username',
- 'label': 'Username',
- 'type': 'string'
- }, {
- 'id': 'password',
- 'label': 'Password',
- 'type': 'string',
- 'secret': True,
- }],
- 'required': ['username', 'password']
+ 'fields': [
+ {'id': 'username', 'label': 'Username', 'type': 'string'},
+ {
+ 'id': 'password',
+ 'label': 'Password',
+ 'type': 'string',
+ 'secret': True,
+ },
+ ],
+ 'required': ['username', 'password'],
},
'injectors': {
'env': {
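
DEPRECATED_CRED_KIND is an ordinary credential-type definition: inputs declares the form fields (secret ones flagged), and injectors maps field values into the job environment. A minimal illustrative definition of the same shape (names and env vars are hypothetical):

EXAMPLE_CRED_KIND = {
    'kind': 'cloud',
    'name': 'Example Cloud',
    'inputs': {
        'fields': [
            {'id': 'username', 'label': 'Username', 'type': 'string'},
            {'id': 'password', 'label': 'Password', 'type': 'string', 'secret': True},
        ],
        'required': ['username', 'password'],
    },
    'injectors': {
        'env': {
            'EXAMPLE_USERNAME': '{{ username }}',
            'EXAMPLE_PASSWORD': '{{ password }}',
        },
    },
}
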
diff --git a/awx/main/migrations/_galaxy.py b/awx/main/migrations/_galaxy.py
index b85b7b3aaf..eb9454a1bf 100644
--- a/awx/main/migrations/_galaxy.py
+++ b/awx/main/migrations/_galaxy.py
@@ -36,14 +36,7 @@ def migrate_galaxy_settings(apps, schema_editor):
public_galaxy_enabled = False
public_galaxy_credential = Credential(
- created=now(),
- modified=now(),
- name='Ansible Galaxy',
- managed_by_tower=True,
- credential_type=galaxy_type,
- inputs = {
- 'url': 'https://galaxy.ansible.com/'
- }
+ created=now(), modified=now(), name='Ansible Galaxy', managed_by_tower=True, credential_type=galaxy_type, inputs={'url': 'https://galaxy.ansible.com/'}
)
public_galaxy_credential.save()
@@ -59,9 +52,7 @@ def migrate_galaxy_settings(apps, schema_editor):
'Please provide an API token instead after your upgrade '
'has completed',
)
- inputs = {
- 'url': private_galaxy_url.value
- }
+ inputs = {'url': private_galaxy_url.value}
token = Setting.objects.filter(key='PRIMARY_GALAXY_TOKEN').first()
if token and token.value:
inputs['token'] = decrypt_field(token, 'value')
@@ -71,14 +62,7 @@ def migrate_galaxy_settings(apps, schema_editor):
name = f'Private Galaxy ({private_galaxy_url.value})'
if 'cloud.redhat.com' in inputs['url']:
name = f'Ansible Automation Hub ({private_galaxy_url.value})'
- cred = Credential(
- created=now(),
- modified=now(),
- name=name,
- organization=org,
- credential_type=galaxy_type,
- inputs=inputs
- )
+ cred = Credential(created=now(), modified=now(), name=name, organization=org, credential_type=galaxy_type, inputs=inputs)
cred.save()
if token and token.value:
# encrypt based on the primary key from the prior save
@@ -105,14 +89,7 @@ def migrate_galaxy_settings(apps, schema_editor):
inputs['token'] = token
if auth_url:
inputs['auth_url'] = auth_url
- cred = Credential(
- created=now(),
- modified=now(),
- name=f'Ansible Galaxy ({url})',
- organization=org,
- credential_type=galaxy_type,
- inputs=inputs
- )
+ cred = Credential(created=now(), modified=now(), name=f'Ansible Galaxy ({url})', organization=org, credential_type=galaxy_type, inputs=inputs)
cred.save()
if token:
# encrypt based on the primary key from the prior save
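
Note the ordering in migrate_galaxy_settings: each Credential is saved first and its token encrypted afterwards, because the encryption key is derived in part from the row's primary key. A condensed sketch, assuming an encrypt_field helper symmetric to the decrypt_field imported by this module (names are illustrative):

cred = Credential(
    created=now(), modified=now(), name='Example Hub', organization=org, credential_type=galaxy_type, inputs={'url': url, 'token': plaintext}
)
cred.save()  # assigns the primary key the key derivation needs
cred.inputs['token'] = encrypt_field(cred, 'token')  # hypothetical counterpart of decrypt_field
cred.save()
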
diff --git a/awx/main/migrations/_hg_removal.py b/awx/main/migrations/_hg_removal.py
index 70ca0b5a29..e384ea5413 100644
--- a/awx/main/migrations/_hg_removal.py
+++ b/awx/main/migrations/_hg_removal.py
@@ -14,6 +14,4 @@ def delete_hg_scm(apps, schema_editor):
update_ct = Project.objects.filter(scm_type='hg').update(scm_type='')
if update_ct:
- logger.warn('Changed {} mercurial projects to manual, deprecation period ended'.format(
- update_ct
- ))
+ logger.warn('Changed {} mercurial projects to manual, deprecation period ended'.format(update_ct))
diff --git a/awx/main/migrations/_inventory_source.py b/awx/main/migrations/_inventory_source.py
index c53a18f035..a3560a537f 100644
--- a/awx/main/migrations/_inventory_source.py
+++ b/awx/main/migrations/_inventory_source.py
@@ -12,9 +12,9 @@ logger = logging.getLogger('awx.main.migrations')
def _get_instance_id(from_dict, new_id, default=''):
- '''logic mostly duplicated with inventory_import command Command._get_instance_id
+ """logic mostly duplicated with inventory_import command Command._get_instance_id
frozen in time here, for purposes of migrations
- '''
+ """
instance_id = default
for key in new_id.split('.'):
if not hasattr(from_dict, 'get'):
@@ -34,23 +34,19 @@ def _get_instance_id_for_upgrade(host, new_id):
new_id_value = _get_instance_id(host_vars, new_id)
if not new_id_value:
# another source type with overwrite_vars or pesky users could have done this
- logger.info('Host {}-{} has no {} var, probably due to separate modifications'.format(
- host.name, host.pk, new_id
- ))
+ logger.info('Host {}-{} has no {} var, probably due to separate modifications'.format(host.name, host.pk, new_id))
return None
if len(new_id) > 255:
# this should never happen
- logger.warn('Computed instance id "{}" for host {}-{} is too long'.format(
- new_id_value, host.name, host.pk
- ))
+ logger.warn('Computed instance id "{}" for host {}-{} is too long'.format(new_id_value, host.name, host.pk))
return None
return new_id_value
def set_new_instance_id(apps, source, new_id):
- '''This method adds an instance_id in cases where there was not one before
- '''
+ """This method adds an instance_id in cases where there was not one before"""
from django.conf import settings
+
id_from_settings = getattr(settings, '{}_INSTANCE_ID_VAR'.format(source.upper()))
if id_from_settings != new_id:
# User applied an instance ID themselves, so nope on out of there
@@ -67,9 +63,7 @@ def set_new_instance_id(apps, source, new_id):
host.save(update_fields=['instance_id'])
modified_ct += 1
if modified_ct:
- logger.info('Migrated instance ID for {} hosts imported by {} source'.format(
- modified_ct, source
- ))
+ logger.info('Migrated instance ID for {} hosts imported by {} source'.format(modified_ct, source))
def back_out_new_instance_id(apps, source, new_id):
@@ -79,17 +73,13 @@ def back_out_new_instance_id(apps, source, new_id):
host_vars = parse_yaml_or_json(host.variables)
predicted_id_value = _get_instance_id(host_vars, new_id)
if predicted_id_value != host.instance_id:
- logger.debug('Host {}-{} did not get its instance_id from {}, skipping'.format(
- host.name, host.pk, new_id
- ))
+ logger.debug('Host {}-{} did not get its instance_id from {}, skipping'.format(host.name, host.pk, new_id))
continue
host.instance_id = ''
host.save(update_fields=['instance_id'])
modified_ct += 1
if modified_ct:
- logger.info('Reverse migrated instance ID for {} hosts imported by {} source'.format(
- modified_ct, source
- ))
+ logger.info('Reverse migrated instance ID for {} hosts imported by {} source'.format(modified_ct, source))
def delete_cloudforms_inv_source(apps, schema_editor):
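
set_new_instance_id and back_out_new_instance_id are written as a forwards/backwards pair; a migration binds them to a concrete source with small wrappers, since RunPython callables only receive (apps, schema_editor). A sketch with an illustrative source and variable name:

from django.db import migrations

def forwards(apps, schema_editor):
    set_new_instance_id(apps, source='openstack', new_id='openstack.id')

def backwards(apps, schema_editor):
    back_out_new_instance_id(apps, source='openstack', new_id='openstack.id')

class Migration(migrations.Migration):
    operations = [migrations.RunPython(forwards, backwards)]
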
diff --git a/awx/main/migrations/_inventory_source_vars.py b/awx/main/migrations/_inventory_source_vars.py
index 263b5666a2..71c96403a6 100644
--- a/awx/main/migrations/_inventory_source_vars.py
+++ b/awx/main/migrations/_inventory_source_vars.py
@@ -47,21 +47,21 @@ class azure_rm(PluginFileInjector):
'security_group': {'prefix': '', 'separator': '', 'key': 'security_group'},
'resource_group': {'prefix': '', 'separator': '', 'key': 'resource_group'},
# Note, os_family was not documented correctly in script, but defaulted to grouping by it
- 'os_family': {'prefix': '', 'separator': '', 'key': 'os_disk.operating_system_type'}
+ 'os_family': {'prefix': '', 'separator': '', 'key': 'os_disk.operating_system_type'},
}
# by default group by everything
# always respect user setting, if they gave it
- group_by = [
- grouping_name for grouping_name in group_by_hostvar
- if source_vars.get('group_by_{}'.format(grouping_name), True)
- ]
+ group_by = [grouping_name for grouping_name in group_by_hostvar if source_vars.get('group_by_{}'.format(grouping_name), True)]
ret['keyed_groups'] = [group_by_hostvar[grouping_name] for grouping_name in group_by]
if 'tag' in group_by:
# Nasty syntax to reproduce "key_value" group names in addition to "key"
- ret['keyed_groups'].append({
- 'prefix': '', 'separator': '',
- 'key': r'dict(tags.keys() | map("regex_replace", "^(.*)$", "\1_") | list | zip(tags.values() | list)) if tags else []'
- })
+ ret['keyed_groups'].append(
+ {
+ 'prefix': '',
+ 'separator': '',
+ 'key': r'dict(tags.keys() | map("regex_replace", "^(.*)$", "\1_") | list | zip(tags.values() | list)) if tags else []',
+ }
+ )
# Compatibility content
# TODO: add proper support for instance_filters non-specific to compatibility
@@ -99,9 +99,7 @@ class azure_rm(PluginFileInjector):
if len(kv) > 1:
user_filters.append('tags["{}"] != "{}"'.format(kv[0].strip(), kv[1].strip()))
else:
- user_filters.append('{} not in {}'.format(
- loc, value.split(',')
- ))
+ user_filters.append('{} not in {}'.format(loc, value.split(',')))
if user_filters:
ret.setdefault('exclude_host_filters', [])
ret['exclude_host_filters'].extend(user_filters)
@@ -115,7 +113,7 @@ class azure_rm(PluginFileInjector):
'public_ip': 'public_ipv4_addresses[0] if public_ipv4_addresses else None',
'public_ip_name': 'public_ip_name if public_ip_name is defined else None',
'public_ip_id': 'public_ip_id if public_ip_id is defined else None',
- 'tags': 'tags if tags else None'
+ 'tags': 'tags if tags else None',
}
# Special functionality from script
if source_vars.get('use_private_ip', False):
@@ -139,7 +137,6 @@ class ec2(PluginFileInjector):
namespace = 'amazon'
collection = 'aws'
-
def _get_ec2_group_by_choices(self):
return [
('ami_id', _('Image ID')),
@@ -161,15 +158,14 @@ class ec2(PluginFileInjector):
return {
# vars that change
'ec2_block_devices': (
- "dict(block_device_mappings | map(attribute='device_name') | list | zip(block_device_mappings "
- "| map(attribute='ebs.volume_id') | list))"
+ "dict(block_device_mappings | map(attribute='device_name') | list | zip(block_device_mappings " "| map(attribute='ebs.volume_id') | list))"
),
'ec2_dns_name': 'public_dns_name',
'ec2_group_name': 'placement.group_name',
'ec2_instance_profile': 'iam_instance_profile | default("")',
'ec2_ip_address': 'public_ip_address',
'ec2_kernel': 'kernel_id | default("")',
- 'ec2_monitored': "monitoring.state in ['enabled', 'pending']",
+ 'ec2_monitored': "monitoring.state in ['enabled', 'pending']",
'ec2_monitoring_state': 'monitoring.state',
'ec2_placement': 'placement.availability_zone',
'ec2_ramdisk': 'ramdisk_id | default("")',
@@ -209,7 +205,7 @@ class ec2(PluginFileInjector):
# new with https://github.com/ansible/ansible/pull/53645
'ec2_eventsSet': 'events | default("")',
'ec2_persistent': 'persistent | default(false)',
- 'ec2_requester_id': 'requester_id | default("")'
+ 'ec2_requester_id': 'requester_id | default("")',
}
def inventory_as_dict(self, inventory_source, private_data_dir):
@@ -232,10 +228,7 @@ class ec2(PluginFileInjector):
'security_group': {'prefix': 'security_group', 'key': 'security_groups | map(attribute="group_name")', 'parent_group': 'security_groups'},
# tags cannot be parented in exactly the same way as the script due to
# https://github.com/ansible/ansible/pull/53812
- 'tag_keys': [
- {'prefix': 'tag', 'key': 'tags', 'parent_group': 'tags'},
- {'prefix': 'tag', 'key': 'tags.keys()', 'parent_group': 'tags'}
- ],
+ 'tag_keys': [{'prefix': 'tag', 'key': 'tags', 'parent_group': 'tags'}, {'prefix': 'tag', 'key': 'tags.keys()', 'parent_group': 'tags'}],
# 'tag_none': None, # grouping by no tags isn't a different thing with plugin
# naming is redundant, like vpc_id_vpc_8c412cea, but intended
'vpc_id': {'prefix': 'vpc_id', 'key': 'vpc_id', 'parent_group': 'vpcs'},
@@ -262,10 +255,7 @@ class ec2(PluginFileInjector):
# if true, it replaces dashes, but not in region / loc names
replace_dash = bool(source_vars.get('replace_dash_in_groups', True))
# Compatibility content
- legacy_regex = {
- True: r"[^A-Za-z0-9\_]",
- False: r"[^A-Za-z0-9\_\-]" # do not replace dash, dash is allowed
- }[replace_dash]
+ legacy_regex = {True: r"[^A-Za-z0-9\_]", False: r"[^A-Za-z0-9\_\-]"}[replace_dash] # do not replace dash, dash is allowed
list_replacer = 'map("regex_replace", "{rx}", "_") | list'.format(rx=legacy_regex)
# this option, a plugin option, will allow dashes, but not unicode
# when set to False, unicode will be allowed, but it was not allowed by script
@@ -278,9 +268,7 @@ class ec2(PluginFileInjector):
continue
if grouping_data['key'] == 'tags':
# dict jinja2 transformation
- grouping_data['key'] = 'dict(tags.keys() | {replacer} | zip(tags.values() | {replacer}))'.format(
- replacer=list_replacer
- )
+ grouping_data['key'] = 'dict(tags.keys() | {replacer} | zip(tags.values() | {replacer}))'.format(replacer=list_replacer)
elif grouping_data['key'] == 'tags.keys()' or grouping_data['prefix'] == 'security_group':
# list jinja2 transformation
grouping_data['key'] += ' | {replacer}'.format(replacer=list_replacer)
@@ -327,11 +315,7 @@ class ec2(PluginFileInjector):
ret['hostnames'] = hnames
else:
# public_ip as hostname is non-default plugin behavior, script behavior
- ret['hostnames'] = [
- 'network-interface.addresses.association.public-ip',
- 'dns-name',
- 'private-dns-name'
- ]
+ ret['hostnames'] = ['network-interface.addresses.association.public-ip', 'dns-name', 'private-dns-name']
# The script returned only running state by default, the plugin does not
# https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-instances.html#options
# options: pending | running | shutting-down | terminated | stopping | stopped
@@ -394,7 +378,7 @@ class gce(PluginFileInjector):
'gce_image': 'image',
# We need this as long as hostnames is non-default, otherwise hosts
# will not be addressed correctly, was returned in script
- 'ansible_ssh_host': 'networkInterfaces[0].accessConfigs[0].natIP | default(networkInterfaces[0].networkIP)'
+ 'ansible_ssh_host': 'networkInterfaces[0].accessConfigs[0].natIP | default(networkInterfaces[0].networkIP)',
}
def inventory_as_dict(self, inventory_source, private_data_dir):
@@ -476,7 +460,7 @@ class vmware(PluginFileInjector):
"rootSnapshot",
"snapshot", # optional
"triggeredAlarmState",
- "value"
+ "value",
]
NESTED_PROPS = [
"capability",
@@ -537,17 +521,11 @@ class vmware(PluginFileInjector):
if groupby_patterns:
for pattern in groupby_patterns.split(','):
stripped_pattern = pattern.replace('{', '').replace('}', '').strip() # make best effort
- ret['keyed_groups'].append({
- 'prefix': '', 'separator': '',
- 'key': stripped_pattern
- })
+ ret['keyed_groups'].append({'prefix': '', 'separator': '', 'key': stripped_pattern})
else:
# default groups from script
for entry in ('config.guestId', '"templates" if config.template else "guests"'):
- ret['keyed_groups'].append({
- 'prefix': '', 'separator': '',
- 'key': entry
- })
+ ret['keyed_groups'].append({'prefix': '', 'separator': '', 'key': entry})
return ret
@@ -593,8 +571,8 @@ class openstack(PluginFileInjector):
class rhv(PluginFileInjector):
- """ovirt uses the custom credential templating, and that is all
- """
+ """ovirt uses the custom credential templating, and that is all"""
+
plugin_name = 'ovirt'
initial_version = '2.9'
namespace = 'ovirt'
@@ -604,9 +582,7 @@ class rhv(PluginFileInjector):
ret = super(rhv, self).inventory_as_dict(inventory_source, private_data_dir)
ret['ovirt_insecure'] = False # Default changed from script
# TODO: process strict option upstream
- ret['compose'] = {
- 'ansible_host': '(devices.values() | list)[0][0] if devices else None'
- }
+ ret['compose'] = {'ansible_host': '(devices.values() | list)[0][0] if devices else None'}
ret['keyed_groups'] = []
for key in ('cluster', 'status'):
ret['keyed_groups'].append({'prefix': key, 'separator': '_', 'key': key})
@@ -656,24 +632,32 @@ class satellite6(PluginFileInjector):
# Compatibility content
group_by_hostvar = {
- "environment": {"prefix": "{}environment_".format(group_prefix),
- "separator": "",
- "key": "foreman['environment_name'] | lower | regex_replace(' ', '') | "
- "regex_replace('[^A-Za-z0-9_]', '_') | regex_replace('none', '')"},
- "location": {"prefix": "{}location_".format(group_prefix),
- "separator": "",
- "key": "foreman['location_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')"},
- "organization": {"prefix": "{}organization_".format(group_prefix),
- "separator": "",
- "key": "foreman['organization_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')"},
- "lifecycle_environment": {"prefix": "{}lifecycle_environment_".format(group_prefix),
- "separator": "",
- "key": "foreman['content_facet_attributes']['lifecycle_environment_name'] | "
- "lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')"},
- "content_view": {"prefix": "{}content_view_".format(group_prefix),
- "separator": "",
- "key": "foreman['content_facet_attributes']['content_view_name'] | "
- "lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')"}
+ "environment": {
+ "prefix": "{}environment_".format(group_prefix),
+ "separator": "",
+ "key": "foreman['environment_name'] | lower | regex_replace(' ', '') | " "regex_replace('[^A-Za-z0-9_]', '_') | regex_replace('none', '')",
+ },
+ "location": {
+ "prefix": "{}location_".format(group_prefix),
+ "separator": "",
+ "key": "foreman['location_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')",
+ },
+ "organization": {
+ "prefix": "{}organization_".format(group_prefix),
+ "separator": "",
+ "key": "foreman['organization_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')",
+ },
+ "lifecycle_environment": {
+ "prefix": "{}lifecycle_environment_".format(group_prefix),
+ "separator": "",
+ "key": "foreman['content_facet_attributes']['lifecycle_environment_name'] | "
+ "lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')",
+ },
+ "content_view": {
+ "prefix": "{}content_view_".format(group_prefix),
+ "separator": "",
+ "key": "foreman['content_facet_attributes']['content_view_name'] | " "lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')",
+ },
}
ret['legacy_hostvars'] = True # convert hostvar structure to the form used by the script
@@ -715,8 +699,7 @@ class satellite6(PluginFileInjector):
# apply jinja filter to key
key = '"{}" | format({})'.format(key, ', '.join(param_names))
- keyed_group = {'key': key,
- 'separator': ''}
+ keyed_group = {'key': key, 'separator': ''}
return keyed_group
try:
@@ -728,8 +711,7 @@ class satellite6(PluginFileInjector):
if keyed_group:
ret['keyed_groups'].append(keyed_group)
except json.JSONDecodeError:
- logger.warning('Could not parse group_patterns. Expected JSON-formatted string, found: {}'
- .format(group_patterns))
+ logger.warning('Could not parse group_patterns. Expected JSON-formatted string, found: {}'.format(group_patterns))
return ret
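
Each injector's inventory_as_dict returns the dict that is serialized into the inventory plugin's config file; the keyed_groups entries carry most of the script-compatibility behavior reconstructed above. An illustrative (hypothetical) result for an azure_rm source grouped by location and OS family:

ret = {
    'plugin': 'azure_rm',
    'keyed_groups': [
        # one group per distinct value of `key` on each host
        {'prefix': '', 'separator': '', 'key': 'location'},
        {'prefix': '', 'separator': '', 'key': 'os_disk.operating_system_type'},
    ],
    'hostvar_expressions': {
        'tags': 'tags if tags else None',
    },
}
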
diff --git a/awx/main/migrations/_migration_utils.py b/awx/main/migrations/_migration_utils.py
index 232310ca50..23ef8b0713 100644
--- a/awx/main/migrations/_migration_utils.py
+++ b/awx/main/migrations/_migration_utils.py
@@ -2,10 +2,10 @@ from awx.main.utils import set_current_apps
def set_current_apps_for_migrations(apps, schema_editor):
- '''
+ """
This is necessary for migrations which do explicit saves on any model that
has an ImplicitRoleFIeld (which generally means anything that has
some RBAC bindings associated with it). This sets the current 'apps' that
the ImplicitRoleFIeld should be using when creating new roles.
- '''
+ """
set_current_apps(apps)
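
Because ImplicitRoleField consults the "current apps" registry when it creates roles, any migration that saves RBAC-bearing models runs this helper before its other operations. A sketch of the wiring (dependency name illustrative):

from django.db import migrations
from . import _migration_utils as migration_utils

class Migration(migrations.Migration):
    dependencies = [('main', '0130_ee_polymorphic_set_null')]  # illustrative
    operations = [
        # must precede any operation that saves models with ImplicitRoleFields
        migrations.RunPython(migration_utils.set_current_apps_for_migrations),
        # ... data migrations that touch RBAC models ...
    ]
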
diff --git a/awx/main/migrations/_rbac.py b/awx/main/migrations/_rbac.py
index 0b1f81953c..d6aabe49fd 100644
--- a/awx/main/migrations/_rbac.py
+++ b/awx/main/migrations/_rbac.py
@@ -10,7 +10,7 @@ logger = logging.getLogger('rbac_migrations')
def create_roles(apps, schema_editor):
- '''
+ """
Implicit role creation happens in our post_save hook for all of our
resources. Here we iterate through all of our resource types and call
.save() to ensure all that happens for every object in the system.
@@ -18,10 +18,11 @@ def create_roles(apps, schema_editor):
This can be used whenever new roles are introduced in a migration to
create those roles for pre-existing objects that did not previously
have them created via signals.
- '''
+ """
models = [
- apps.get_model('main', m) for m in [
+ apps.get_model('main', m)
+ for m in [
'Organization',
'Team',
'Inventory',
@@ -66,7 +67,7 @@ UNIFIED_ORG_LOOKUPS = {
# Sliced jobs are a special case, but old data is not given special treatment for simplicity
'workflowjob': 'workflow_job_template',
# AdHocCommands do not have a template, but still migrate them
- 'adhoccommand': 'inventory'
+ 'adhoccommand': 'inventory',
}
@@ -90,12 +91,18 @@ def implicit_org_subquery(UnifiedClass, cls, backward=False):
intermediary_field = cls._meta.get_field(source_field)
intermediary_model = intermediary_field.related_model
intermediary_reverse_rel = intermediary_field.remote_field.name
- qs = intermediary_model.objects.filter(**{
- # this filter leverages the fact that the Unified models have same pk as subclasses.
- # For instance... filters projects used in job template, where that job template
- # has same id same as UJT from the outer reference (which it does)
- intermediary_reverse_rel: OuterRef('id')}
- ).order_by().values_list('organization')[:1]
+ qs = (
+ intermediary_model.objects.filter(
+ **{
+                # this filter leverages the fact that the Unified models have the same pk as their subclasses.
+                # For instance... it filters projects used in a job template, where that job template
+                # has the same id as the UJT from the outer reference (which it does)
+ intermediary_reverse_rel: OuterRef('id')
+ }
+ )
+ .order_by()
+ .values_list('organization')[:1]
+ )
return Subquery(qs)
@@ -160,9 +167,15 @@ def _restore_inventory_admins(apps, schema_editor, backward=False):
for jt in jt_qs.iterator():
org = jt.inventory.organization
for jt_role, org_roles in (
- ('admin_role', ('admin_role', 'job_template_admin_role',)),
- ('execute_role', ('execute_role',))
- ):
+ (
+ 'admin_role',
+ (
+ 'admin_role',
+ 'job_template_admin_role',
+ ),
+ ),
+ ('execute_role', ('execute_role',)),
+ ):
role_id = getattr(jt, '{}_id'.format(jt_role))
user_qs = User.objects
@@ -172,9 +185,7 @@ def _restore_inventory_admins(apps, schema_editor, backward=False):
user_qs = user_qs.filter(roles__in=org_role_ids)
                    # bizarre migration behavior - ancestors / descendents of the
                    # migration version of the Role model are reversed, so the current model is used briefly
- ancestor_ids = list(
- Role.objects.filter(descendents=role_id).values_list('id', flat=True)
- )
+ ancestor_ids = list(Role.objects.filter(descendents=role_id).values_list('id', flat=True))
# same as Role.__contains__, filter for "user in jt.admin_role"
user_qs = user_qs.exclude(roles__in=ancestor_ids)
else:
@@ -189,10 +200,9 @@ def _restore_inventory_admins(apps, schema_editor, backward=False):
continue
role = getattr(jt, jt_role)
- logger.debug('{} {} on jt {} for users {} via inventory.organization {}'.format(
- 'Removing' if backward else 'Setting',
- jt_role, jt.pk, user_ids, org.pk
- ))
+ logger.debug(
+ '{} {} on jt {} for users {} via inventory.organization {}'.format('Removing' if backward else 'Setting', jt_role, jt.pk, user_ids, org.pk)
+ )
if not backward:
            # in reverse, explicit role becomes redundant
role.members.add(*user_ids)
@@ -201,10 +211,7 @@ def _restore_inventory_admins(apps, schema_editor, backward=False):
changed_ct += len(user_ids)
if changed_ct:
- logger.info('{} explicit JT permission for {} users in {:.4f} seconds'.format(
- 'Removed' if backward else 'Added',
- changed_ct, time() - start
- ))
+ logger.info('{} explicit JT permission for {} users in {:.4f} seconds'.format('Removed' if backward else 'Added', changed_ct, time() - start))
def restore_inventory_admins(apps, schema_editor):
@@ -216,16 +223,14 @@ def restore_inventory_admins_backward(apps, schema_editor):
def rebuild_role_hierarchy(apps, schema_editor):
- '''
+ """
This should be called in any migration when ownerships are changed.
Ex. I remove a user from the admin_role of a credential.
    Ancestors are cached from parents for performance; this re-computes ancestors.
- '''
+ """
logger.info('Computing role roots..')
start = time()
- roots = Role.objects \
- .all() \
- .values_list('id', flat=True)
+ roots = Role.objects.all().values_list('id', flat=True)
stop = time()
logger.info('Found %d roots in %f seconds, rebuilding ancestry map' % (len(roots), stop - start))
start = time()
@@ -236,7 +241,7 @@ def rebuild_role_hierarchy(apps, schema_editor):
def rebuild_role_parentage(apps, schema_editor, models=None):
- '''
+ """
This should be called in any migration when any parent_role entry
is modified so that the cached parent fields will be updated. Ex:
foo_role = ImplicitRoleField(
@@ -245,7 +250,7 @@ def rebuild_role_parentage(apps, schema_editor, models=None):
This is like rebuild_role_hierarchy, but that method updates ancestors,
whereas this method updates parents.
- '''
+ """
start = time()
seen_models = set()
model_ct = 0
@@ -258,9 +263,7 @@ def rebuild_role_parentage(apps, schema_editor, models=None):
if models:
# update_role_parentage_for_instance is expensive
# if the models have been downselected, ignore those which are not in the list
- ct_ids = list(ContentType.objects.filter(
- model__in=[name.lower() for name in models]
- ).values_list('id', flat=True))
+ ct_ids = list(ContentType.objects.filter(model__in=[name.lower() for name in models]).values_list('id', flat=True))
role_qs = role_qs.filter(content_type__in=ct_ids)
for role in role_qs.iterator():
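
Per their docstrings, both rebuild helpers are meant to be called from data migrations after role ownership or parentage changes. A hedged sketch (the dependency is hypothetical):

    from django.db import migrations
    from awx.main.migrations import _rbac

    class Migration(migrations.Migration):
        dependencies = [('main', '0001_initial')]  # hypothetical
        operations = [
            migrations.RunPython(_rbac.rebuild_role_parentage),  # refresh parent links
            migrations.RunPython(_rbac.rebuild_role_hierarchy),  # then recompute cached ancestors
        ]
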
diff --git a/awx/main/migrations/_save_password_keys.py b/awx/main/migrations/_save_password_keys.py
index a5a231a92f..c7fafd16b6 100644
--- a/awx/main/migrations/_save_password_keys.py
+++ b/awx/main/migrations/_save_password_keys.py
@@ -10,9 +10,9 @@ def survey_password_variables(survey_spec):
def migrate_survey_passwords(apps, schema_editor):
- '''Take the output of the Job Template password list for all that
+ """Take the output of the Job Template password list for all that
have a survey enabled, and then save it into the job model.
- '''
+ """
Job = apps.get_model('main', 'Job')
for job in Job.objects.iterator():
if not job.job_template:
diff --git a/awx/main/migrations/_squashed.py b/awx/main/migrations/_squashed.py
index 6391a5bc3d..3aeac5a1e8 100644
--- a/awx/main/migrations/_squashed.py
+++ b/awx/main/migrations/_squashed.py
@@ -8,8 +8,8 @@ from django.db import (
def squash_data(squashed):
- '''Returns a tuple of the squashed_keys and the key position to begin
- processing replace and operation lists'''
+ """Returns a tuple of the squashed_keys and the key position to begin
+ processing replace and operation lists"""
cm = current_migration()
squashed_keys = sorted(squashed.keys())
@@ -36,13 +36,13 @@ def current_migration(exclude_squashed=True):
def replaces(squashed, applied=False):
- '''Build a list of replacement migrations based on the most recent non-squashed migration
+ """Build a list of replacement migrations based on the most recent non-squashed migration
and the provided list of SQUASHED migrations. If the most recent non-squashed migration
is not present anywhere in the SQUASHED dictionary, assume they have all been applied.
If applied is True, this will return a list of all the migrations that have already
been applied.
- '''
+ """
squashed_keys, key_index = squash_data(squashed)
if applied:
return [('main', key) for key in squashed_keys[:key_index]]
@@ -50,13 +50,13 @@ def replaces(squashed, applied=False):
def operations(squashed, applied=False):
- '''Build a list of migration operations based on the most recent non-squashed migration
+ """Build a list of migration operations based on the most recent non-squashed migration
and the provided list of squashed migrations. If the most recent non-squashed migration
is not present anywhere in the `squashed` dictionary, assume they have all been applied.
If applied is True, this will return a list of all the operations that have
already been applied.
- '''
+ """
squashed_keys, key_index = squash_data(squashed)
op_keys = squashed_keys[:key_index] if applied else squashed_keys[key_index:]
ops = [squashed[op_key] for op_key in op_keys]
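
A hedged sketch of how a squashed migration would consume these helpers, using the SQUASHED_30 dictionary from the next file:

    from django.db import migrations
    from awx.main.migrations import _squashed
    from awx.main.migrations._squashed_30 import SQUASHED_30

    class Migration(migrations.Migration):
        replaces = _squashed.replaces(SQUASHED_30)      # ('main', <key>) pairs
        operations = _squashed.operations(SQUASHED_30)  # only the not-yet-applied ops
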
diff --git a/awx/main/migrations/_squashed_30.py b/awx/main/migrations/_squashed_30.py
index 31ea44e885..c604b95c37 100644
--- a/awx/main/migrations/_squashed_30.py
+++ b/awx/main/migrations/_squashed_30.py
@@ -42,7 +42,9 @@ SQUASHED_30 = {
migrations.AlterField(
model_name='credential',
name='admin_role',
- field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_administrator', 'organization.admin_role'], to='main.Role', null='True'),
+ field=awx.main.fields.ImplicitRoleField(
+ related_name='+', parent_role=['singleton:system_administrator', 'organization.admin_role'], to='main.Role', null='True'
+ ),
),
migrations.AlterField(
model_name='credential',
diff --git a/awx/main/migrations/_squashed_31.py b/awx/main/migrations/_squashed_31.py
index 1362943319..c803b05e48 100644
--- a/awx/main/migrations/_squashed_31.py
+++ b/awx/main/migrations/_squashed_31.py
@@ -13,28 +13,55 @@ SQUASHED_31 = {
migrations.DeleteModel(
name='TowerSettings',
),
-
migrations.AlterField(
model_name='project',
name='scm_type',
- field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
+ field=models.CharField(
+ default='',
+ choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')],
+ max_length=8,
+ blank=True,
+ help_text='Specifies the source control system used to store the project.',
+ verbose_name='SCM Type',
+ ),
),
migrations.AlterField(
model_name='projectupdate',
name='scm_type',
- field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
+ field=models.CharField(
+ default='',
+ choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')],
+ max_length=8,
+ blank=True,
+ help_text='Specifies the source control system used to store the project.',
+ verbose_name='SCM Type',
+ ),
),
],
'0036_v311_insights': [
migrations.AlterField(
model_name='project',
name='scm_type',
- field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
+ field=models.CharField(
+ default='',
+ choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')],
+ max_length=8,
+ blank=True,
+ help_text='Specifies the source control system used to store the project.',
+ verbose_name='SCM Type',
+ ),
),
migrations.AlterField(
model_name='projectupdate',
name='scm_type',
- field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
+ field=models.CharField(
+ default='',
+ choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')],
+ max_length=8,
+ blank=True,
+ help_text='Specifies the source control system used to store the project.',
+ verbose_name='SCM Type',
+ ),
),
],
'0037_v313_instance_version': [
diff --git a/awx/main/models/__init__.py b/awx/main/models/__init__.py
index 52cabf3774..824f0807c4 100644
--- a/awx/main/models/__init__.py
+++ b/awx/main/models/__init__.py
@@ -2,77 +2,79 @@
# All Rights Reserved.
# Django
-from django.conf import settings # noqa
+from django.conf import settings # noqa
from django.db import connection
from django.db.models.signals import pre_delete # noqa
# AWX
-from awx.main.models.base import ( # noqa
- BaseModel, PrimordialModel, prevent_search, accepts_json,
- CLOUD_INVENTORY_SOURCES, VERBOSITY_CHOICES
-)
-from awx.main.models.unified_jobs import ( # noqa
- UnifiedJob, UnifiedJobTemplate, StdoutMaxBytesExceeded
-)
-from awx.main.models.organization import ( # noqa
- Organization, Profile, Team, UserSessionMembership
-)
-from awx.main.models.credential import ( # noqa
- Credential, CredentialType, CredentialInputSource, ManagedCredentialType, build_safe_env
-)
+from awx.main.models.base import BaseModel, PrimordialModel, prevent_search, accepts_json, CLOUD_INVENTORY_SOURCES, VERBOSITY_CHOICES # noqa
+from awx.main.models.unified_jobs import UnifiedJob, UnifiedJobTemplate, StdoutMaxBytesExceeded # noqa
+from awx.main.models.organization import Organization, Profile, Team, UserSessionMembership # noqa
+from awx.main.models.credential import Credential, CredentialType, CredentialInputSource, ManagedCredentialType, build_safe_env # noqa
from awx.main.models.projects import Project, ProjectUpdate # noqa
-from awx.main.models.inventory import ( # noqa
- CustomInventoryScript, Group, Host, Inventory, InventorySource,
- InventoryUpdate, SmartInventoryMembership
-)
+from awx.main.models.inventory import CustomInventoryScript, Group, Host, Inventory, InventorySource, InventoryUpdate, SmartInventoryMembership # noqa
from awx.main.models.jobs import ( # noqa
- Job, JobHostSummary, JobLaunchConfig, JobTemplate, SystemJob,
+ Job,
+ JobHostSummary,
+ JobLaunchConfig,
+ JobTemplate,
+ SystemJob,
SystemJobTemplate,
)
from awx.main.models.events import ( # noqa
- AdHocCommandEvent, InventoryUpdateEvent, JobEvent, ProjectUpdateEvent,
+ AdHocCommandEvent,
+ InventoryUpdateEvent,
+ JobEvent,
+ ProjectUpdateEvent,
SystemJobEvent,
)
-from awx.main.models.ad_hoc_commands import AdHocCommand # noqa
-from awx.main.models.schedules import Schedule # noqa
-from awx.main.models.execution_environments import ExecutionEnvironment # noqa
-from awx.main.models.activity_stream import ActivityStream # noqa
+from awx.main.models.ad_hoc_commands import AdHocCommand # noqa
+from awx.main.models.schedules import Schedule # noqa
+from awx.main.models.execution_environments import ExecutionEnvironment # noqa
+from awx.main.models.activity_stream import ActivityStream # noqa
from awx.main.models.ha import ( # noqa
- Instance, InstanceGroup, TowerScheduleState,
+ Instance,
+ InstanceGroup,
+ TowerScheduleState,
)
from awx.main.models.rbac import ( # noqa
- Role, batch_role_ancestor_rebuilding, get_roles_on_resource,
- role_summary_fields_generator, ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
+ Role,
+ batch_role_ancestor_rebuilding,
+ get_roles_on_resource,
+ role_summary_fields_generator,
+ ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
ROLE_SINGLETON_SYSTEM_AUDITOR,
)
from awx.main.models.mixins import ( # noqa
- CustomVirtualEnvMixin, ExecutionEnvironmentMixin, ResourceMixin, SurveyJobMixin,
- SurveyJobTemplateMixin, TaskManagerInventoryUpdateMixin,
- TaskManagerJobMixin, TaskManagerProjectUpdateMixin,
+ CustomVirtualEnvMixin,
+ ExecutionEnvironmentMixin,
+ ResourceMixin,
+ SurveyJobMixin,
+ SurveyJobTemplateMixin,
+ TaskManagerInventoryUpdateMixin,
+ TaskManagerJobMixin,
+ TaskManagerProjectUpdateMixin,
TaskManagerUnifiedJobMixin,
)
-from awx.main.models.notifications import ( # noqa
- Notification, NotificationTemplate,
- JobNotificationMixin
-)
-from awx.main.models.label import Label # noqa
+from awx.main.models.notifications import Notification, NotificationTemplate, JobNotificationMixin # noqa
+from awx.main.models.label import Label # noqa
from awx.main.models.workflow import ( # noqa
- WorkflowJob, WorkflowJobNode, WorkflowJobOptions, WorkflowJobTemplate,
- WorkflowJobTemplateNode, WorkflowApproval, WorkflowApprovalTemplate,
+ WorkflowJob,
+ WorkflowJobNode,
+ WorkflowJobOptions,
+ WorkflowJobTemplate,
+ WorkflowJobTemplateNode,
+ WorkflowApproval,
+ WorkflowApprovalTemplate,
)
from awx.api.versioning import reverse
-from awx.main.models.oauth import ( # noqa
- OAuth2AccessToken, OAuth2Application
-)
-from oauth2_provider.models import Grant, RefreshToken # noqa -- needed django-oauth-toolkit model migrations
+from awx.main.models.oauth import OAuth2AccessToken, OAuth2Application # noqa
+from oauth2_provider.models import Grant, RefreshToken # noqa -- needed django-oauth-toolkit model migrations
# Add custom methods to User model for permissions checks.
from django.contrib.auth.models import User # noqa
-from awx.main.access import ( # noqa
- get_user_queryset, check_user_access, check_user_access_with_errors,
- user_accessible_objects
-)
+from awx.main.access import get_user_queryset, check_user_access, check_user_access_with_errors, user_accessible_objects # noqa
User.add_to_class('get_queryset', get_user_queryset)
@@ -93,18 +95,12 @@ def enforce_bigint_pk_migration():
# from the *old* int primary key table to the replacement bigint table
# if not, attempt to migrate them in the background
#
- for tblname in (
- 'main_jobevent', 'main_inventoryupdateevent',
- 'main_projectupdateevent', 'main_adhoccommandevent',
- 'main_systemjobevent'
- ):
+ for tblname in ('main_jobevent', 'main_inventoryupdateevent', 'main_projectupdateevent', 'main_adhoccommandevent', 'main_systemjobevent'):
with connection.cursor() as cursor:
- cursor.execute(
- 'SELECT 1 FROM information_schema.tables WHERE table_name=%s',
- (f'_old_{tblname}',)
- )
+ cursor.execute('SELECT 1 FROM information_schema.tables WHERE table_name=%s', (f'_old_{tblname}',))
if bool(cursor.rowcount):
from awx.main.tasks import migrate_legacy_event_data
+
migrate_legacy_event_data.apply_async([tblname])
@@ -150,8 +146,7 @@ User.add_to_class('created', created)
def user_is_system_auditor(user):
if not hasattr(user, '_is_system_auditor'):
if user.pk:
- user._is_system_auditor = user.roles.filter(
- singleton_name='system_auditor', role_field='system_auditor').exists()
+ user._is_system_auditor = user.roles.filter(singleton_name='system_auditor', role_field='system_auditor').exists()
else:
            # Odd case where the user is unsaved; this should never be relied on
return False
@@ -195,8 +190,6 @@ def user_is_in_enterprise_category(user, category):
User.add_to_class('is_in_enterprise_category', user_is_in_enterprise_category)
-
-
def o_auth2_application_get_absolute_url(self, request=None):
return reverse('api:o_auth2_application_detail', kwargs={'pk': self.pk}, request=request)
@@ -210,18 +203,19 @@ def o_auth2_token_get_absolute_url(self, request=None):
OAuth2AccessToken.add_to_class('get_absolute_url', o_auth2_token_get_absolute_url)
-from awx.main.registrar import activity_stream_registrar # noqa
+from awx.main.registrar import activity_stream_registrar # noqa
+
activity_stream_registrar.connect(Organization)
activity_stream_registrar.connect(Inventory)
activity_stream_registrar.connect(Host)
activity_stream_registrar.connect(Group)
activity_stream_registrar.connect(InventorySource)
-#activity_stream_registrar.connect(InventoryUpdate)
+# activity_stream_registrar.connect(InventoryUpdate)
activity_stream_registrar.connect(Credential)
activity_stream_registrar.connect(CredentialType)
activity_stream_registrar.connect(Team)
activity_stream_registrar.connect(Project)
-#activity_stream_registrar.connect(ProjectUpdate)
+# activity_stream_registrar.connect(ProjectUpdate)
activity_stream_registrar.connect(ExecutionEnvironment)
activity_stream_registrar.connect(JobTemplate)
activity_stream_registrar.connect(Job)
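
The enforce_bigint_pk_migration hunk earlier in this file reduces to a simple probe: does a leftover '_old_<event table>' from the int-to-bigint primary key swap still exist? In isolation (table name illustrative):

    from django.db import connection

    with connection.cursor() as cursor:
        cursor.execute('SELECT 1 FROM information_schema.tables WHERE table_name=%s', ('_old_main_jobevent',))
        needs_background_migration = bool(cursor.rowcount)  # True -> dispatch migrate_legacy_event_data
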
diff --git a/awx/main/models/activity_stream.py b/awx/main/models/activity_stream.py
index 1c344692d6..16f3dcb5c2 100644
--- a/awx/main/models/activity_stream.py
+++ b/awx/main/models/activity_stream.py
@@ -16,9 +16,9 @@ __all__ = ['ActivityStream']
class ActivityStream(models.Model):
- '''
+ """
Model used to describe activity stream (audit) events
- '''
+ """
class Meta:
app_label = 'main'
@@ -29,7 +29,7 @@ class ActivityStream(models.Model):
('update', _("Entity Updated")),
('delete', _("Entity Deleted")),
('associate', _("Entity Associated with another Entity")),
- ('disassociate', _("Entity was Disassociated with another Entity"))
+ ('disassociate', _("Entity was Disassociated with another Entity")),
]
actor = models.ForeignKey('auth.User', null=True, on_delete=models.SET_NULL, related_name='activity_stream')
@@ -85,8 +85,6 @@ class ActivityStream(models.Model):
o_auth2_application = models.ManyToManyField("OAuth2Application", blank=True)
o_auth2_access_token = models.ManyToManyField("OAuth2AccessToken", blank=True)
-
-
setting = JSONField(blank=True)
def __str__(self):
diff --git a/awx/main/models/ad_hoc_commands.py b/awx/main/models/ad_hoc_commands.py
index f327e2a7e6..105991a8a0 100644
--- a/awx/main/models/ad_hoc_commands.py
+++ b/awx/main/models/ad_hoc_commands.py
@@ -14,9 +14,7 @@ from django.core.exceptions import ValidationError
# AWX
from awx.api.versioning import reverse
-from awx.main.models.base import (
- prevent_search, AD_HOC_JOB_TYPE_CHOICES, VERBOSITY_CHOICES, VarsDictProperty
-)
+from awx.main.models.base import prevent_search, AD_HOC_JOB_TYPE_CHOICES, VERBOSITY_CHOICES, VarsDictProperty
from awx.main.models.events import AdHocCommandEvent
from awx.main.models.unified_jobs import UnifiedJob
from awx.main.models.notifications import JobNotificationMixin, NotificationTemplate
@@ -27,7 +25,6 @@ __all__ = ['AdHocCommand']
class AdHocCommand(UnifiedJob, JobNotificationMixin):
-
class Meta(object):
app_label = 'main'
ordering = ('id',)
@@ -84,10 +81,12 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
editable=False,
through='AdHocCommandEvent',
)
- extra_vars = prevent_search(models.TextField(
- blank=True,
- default='',
- ))
+ extra_vars = prevent_search(
+ models.TextField(
+ blank=True,
+ default='',
+ )
+ )
extra_vars_dict = VarsDictProperty('extra_vars', True)
@@ -144,6 +143,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
@classmethod
def _get_task_class(cls):
from awx.main.tasks import RunAdHocCommand
+
return RunAdHocCommand
@classmethod
@@ -169,9 +169,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
all_orgs = set()
for h in self.hosts.all():
all_orgs.add(h.inventory.organization)
- active_templates = dict(error=set(),
- success=set(),
- started=set())
+ active_templates = dict(error=set(), success=set(), started=set())
base_notification_templates = NotificationTemplate.objects
for org in all_orgs:
for templ in base_notification_templates.filter(organization_notification_templates_for_errors=org):
@@ -192,14 +190,26 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
def task_impact(self):
# NOTE: We sorta have to assume the host count matches and that forks default to 5
from awx.main.models.inventory import Host
- count_hosts = Host.objects.filter( enabled=True, inventory__ad_hoc_commands__pk=self.pk).count()
+
+ count_hosts = Host.objects.filter(enabled=True, inventory__ad_hoc_commands__pk=self.pk).count()
return min(count_hosts, 5 if self.forks == 0 else self.forks) + 1
def copy(self):
data = {}
- for field in ('job_type', 'inventory_id', 'limit', 'credential_id',
- 'execution_environment_id', 'module_name', 'module_args',
- 'forks', 'verbosity', 'extra_vars', 'become_enabled', 'diff_mode'):
+ for field in (
+ 'job_type',
+ 'inventory_id',
+ 'limit',
+ 'credential_id',
+ 'execution_environment_id',
+ 'module_name',
+ 'module_args',
+ 'forks',
+ 'verbosity',
+ 'extra_vars',
+ 'become_enabled',
+ 'diff_mode',
+ ):
data[field] = getattr(self, field)
return AdHocCommand.objects.create(**data)
@@ -232,6 +242,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
'''
JobNotificationMixin
'''
+
def get_notification_templates(self):
return self.notification_templates
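
The task_impact hunk above is easy to sanity-check numerically; a standalone restatement of its arithmetic:

    def task_impact(count_hosts, forks):
        # forks of 0 falls back to Ansible's default of 5
        return min(count_hosts, 5 if forks == 0 else forks) + 1

    assert task_impact(12, 0) == 6    # capped by the default fork count
    assert task_impact(12, 20) == 13  # capped by the enabled host count
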
diff --git a/awx/main/models/base.py b/awx/main/models/base.py
index 8fb8e2d782..e8c10f5fac 100644
--- a/awx/main/models/base.py
+++ b/awx/main/models/base.py
@@ -17,18 +17,29 @@ from crum import get_current_user
from awx.main.utils import encrypt_field, parse_yaml_or_json
from awx.main.constants import CLOUD_PROVIDERS
-__all__ = ['prevent_search', 'VarsDictProperty', 'BaseModel', 'CreatedModifiedModel',
- 'PasswordFieldsModel', 'PrimordialModel', 'CommonModel',
- 'CommonModelNameNotUnique', 'NotificationFieldsModel',
- 'PERM_INVENTORY_DEPLOY', 'PERM_INVENTORY_SCAN',
- 'PERM_INVENTORY_CHECK', 'JOB_TYPE_CHOICES',
- 'AD_HOC_JOB_TYPE_CHOICES', 'PROJECT_UPDATE_JOB_TYPE_CHOICES',
- 'CLOUD_INVENTORY_SOURCES',
- 'VERBOSITY_CHOICES']
+__all__ = [
+ 'prevent_search',
+ 'VarsDictProperty',
+ 'BaseModel',
+ 'CreatedModifiedModel',
+ 'PasswordFieldsModel',
+ 'PrimordialModel',
+ 'CommonModel',
+ 'CommonModelNameNotUnique',
+ 'NotificationFieldsModel',
+ 'PERM_INVENTORY_DEPLOY',
+ 'PERM_INVENTORY_SCAN',
+ 'PERM_INVENTORY_CHECK',
+ 'JOB_TYPE_CHOICES',
+ 'AD_HOC_JOB_TYPE_CHOICES',
+ 'PROJECT_UPDATE_JOB_TYPE_CHOICES',
+ 'CLOUD_INVENTORY_SOURCES',
+ 'VERBOSITY_CHOICES',
+]
PERM_INVENTORY_DEPLOY = 'run'
-PERM_INVENTORY_CHECK = 'check'
-PERM_INVENTORY_SCAN = 'scan'
+PERM_INVENTORY_CHECK = 'check'
+PERM_INVENTORY_SCAN = 'scan'
JOB_TYPE_CHOICES = [
(PERM_INVENTORY_DEPLOY, _('Run')),
@@ -64,9 +75,9 @@ VERBOSITY_CHOICES = [
class VarsDictProperty(object):
- '''
+ """
Retrieve a string of variables in YAML or JSON as a dictionary.
- '''
+ """
def __init__(self, field='variables', key_value=False):
self.field = field
@@ -86,9 +97,9 @@ class VarsDictProperty(object):
class BaseModel(models.Model):
- '''
+ """
Base model class with common methods for all models.
- '''
+ """
class Meta:
abstract = True
@@ -100,10 +111,10 @@ class BaseModel(models.Model):
return u'%s-%s' % (self._meta.verbose_name, self.pk)
def clean_fields(self, exclude=None):
- '''
+ """
Override default clean_fields to support methods for cleaning
individual model fields.
- '''
+ """
exclude = exclude or []
errors = {}
try:
@@ -134,11 +145,11 @@ class BaseModel(models.Model):
class CreatedModifiedModel(BaseModel):
- '''
+ """
Common model with created/modified timestamp fields. Allows explicitly
specifying created/modified timestamps in certain cases (migrations, job
    events), and calculates them automatically if not specified.
- '''
+ """
class Meta:
abstract = True
@@ -166,10 +177,10 @@ class CreatedModifiedModel(BaseModel):
class PasswordFieldsModel(BaseModel):
- '''
+ """
Abstract base class for a model with password fields that should be stored
as encrypted values.
- '''
+ """
PASSWORD_FIELDS = ()
@@ -177,7 +188,7 @@ class PasswordFieldsModel(BaseModel):
abstract = True
def _password_field_allows_ask(self, field):
- return False # Override in subclasses if needed.
+ return False # Override in subclasses if needed.
def save(self, *args, **kwargs):
new_instance = not bool(self.pk)
@@ -207,6 +218,7 @@ class PasswordFieldsModel(BaseModel):
self.mark_field_for_save(update_fields, field)
from awx.main.signals import disable_activity_stream
+
with disable_activity_stream():
# We've already got an activity stream record for the object
# creation, there's no need to have an extra one for the
@@ -255,18 +267,15 @@ class HasEditsMixin(BaseModel):
return new_values
def _values_have_edits(self, new_values):
- return any(
- new_values.get(fd_name, None) != self._prior_values_store.get(fd_name, None)
- for fd_name in new_values.keys()
- )
+ return any(new_values.get(fd_name, None) != self._prior_values_store.get(fd_name, None) for fd_name in new_values.keys())
class PrimordialModel(HasEditsMixin, CreatedModifiedModel):
- '''
+ """
    Common model for all object types that have these standard fields.
    Use a subclass (CommonModel or CommonModelNameNotUnique), though,
    as this model lacks a name field.
- '''
+ """
class Meta:
abstract = True
@@ -339,12 +348,7 @@ class PrimordialModel(HasEditsMixin, CreatedModifiedModel):
except ObjectDoesNotExist:
continue
if not (self.pk and self.pk == obj.pk):
- errors.append(
- '%s with this (%s) combination already exists.' % (
- model.__name__,
- ', '.join(set(ut) - {'polymorphic_ctype'})
- )
- )
+ errors.append('%s with this (%s) combination already exists.' % (model.__name__, ', '.join(set(ut) - {'polymorphic_ctype'})))
if errors:
raise ValidationError(errors)
@@ -374,27 +378,14 @@ class CommonModelNameNotUnique(PrimordialModel):
class NotificationFieldsModel(BaseModel):
-
class Meta:
abstract = True
- notification_templates_error = models.ManyToManyField(
- "NotificationTemplate",
- blank=True,
- related_name='%(class)s_notification_templates_for_errors'
- )
+ notification_templates_error = models.ManyToManyField("NotificationTemplate", blank=True, related_name='%(class)s_notification_templates_for_errors')
- notification_templates_success = models.ManyToManyField(
- "NotificationTemplate",
- blank=True,
- related_name='%(class)s_notification_templates_for_success'
- )
+ notification_templates_success = models.ManyToManyField("NotificationTemplate", blank=True, related_name='%(class)s_notification_templates_for_success')
- notification_templates_started = models.ManyToManyField(
- "NotificationTemplate",
- blank=True,
- related_name='%(class)s_notification_templates_for_started'
- )
+ notification_templates_started = models.ManyToManyField("NotificationTemplate", blank=True, related_name='%(class)s_notification_templates_for_started')
def prevent_search(relation):
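
VarsDictProperty, whose docstring is reformatted above, is a plain descriptor; a hedged sketch of its use on a model (the model and field names are illustrative, mirroring AdHocCommand.extra_vars_dict earlier in this diff):

    class SomeModel(BaseModel):
        variables = models.TextField(blank=True, default='')
        variables_dict = VarsDictProperty('variables')  # parses the YAML/JSON string on access
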
diff --git a/awx/main/models/credential/__init__.py b/awx/main/models/credential/__init__.py
index 7cdd9898d3..1d29fbb5e7 100644
--- a/awx/main/models/credential/__init__.py
+++ b/awx/main/models/credential/__init__.py
@@ -22,18 +22,17 @@ from django.utils.functional import cached_property
# AWX
from awx.api.versioning import reverse
-from awx.main.fields import (ImplicitRoleField, CredentialInputField,
- CredentialTypeInputField,
- CredentialTypeInjectorField,
- DynamicCredentialInputField,)
+from awx.main.fields import (
+ ImplicitRoleField,
+ CredentialInputField,
+ CredentialTypeInputField,
+ CredentialTypeInjectorField,
+ DynamicCredentialInputField,
+)
from awx.main.utils import decrypt_field, classproperty
from awx.main.utils.safe_yaml import safe_dump
from awx.main.validators import validate_ssh_private_key
-from awx.main.models.base import (
- CommonModelNameNotUnique,
- PasswordFieldsModel,
- PrimordialModel
-)
+from awx.main.models.base import CommonModelNameNotUnique, PasswordFieldsModel, PrimordialModel
from awx.main.models.mixins import ResourceMixin
from awx.main.models.rbac import (
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
@@ -45,19 +44,16 @@ from . import injectors as builtin_injectors
__all__ = ['Credential', 'CredentialType', 'CredentialInputSource', 'build_safe_env']
logger = logging.getLogger('awx.main.models.credential')
-credential_plugins = dict(
- (ep.name, ep.load())
- for ep in iter_entry_points('awx.credential_plugins')
-)
+credential_plugins = dict((ep.name, ep.load()) for ep in iter_entry_points('awx.credential_plugins'))
HIDDEN_PASSWORD = '**********'
def build_safe_env(env):
- '''
+ """
Build environment dictionary, hiding potentially sensitive information
such as passwords or keys.
- '''
+ """
hidden_re = re.compile(r'API|TOKEN|KEY|SECRET|PASS', re.I)
urlpass_re = re.compile(r'^.*?://[^:]+:(.*?)@.*?$')
safe_env = dict(env)
@@ -74,16 +70,16 @@ def build_safe_env(env):
class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
- '''
+ """
    A credential contains information about how to talk to a remote resource.
    Usually this is an SSH key location, and possibly an unlock password.
If used with sudo, a sudo password should be set if required.
- '''
+ """
class Meta:
app_label = 'main'
ordering = ('name',)
- unique_together = (('organization', 'name', 'credential_type'))
+ unique_together = ('organization', 'name', 'credential_type')
PASSWORD_FIELDS = ['inputs']
FIELDS_TO_PRESERVE_AT_COPY = ['input_sources']
@@ -93,13 +89,9 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
related_name='credentials',
null=False,
on_delete=models.CASCADE,
- help_text=_('Specify the type of credential you want to create. Refer '
- 'to the Ansible Tower documentation for details on each type.')
- )
- managed_by_tower = models.BooleanField(
- default=False,
- editable=False
+ help_text=_('Specify the type of credential you want to create. Refer ' 'to the Ansible Tower documentation for details on each type.'),
)
+ managed_by_tower = models.BooleanField(default=False, editable=False)
organization = models.ForeignKey(
'Organization',
null=True,
@@ -109,10 +101,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
related_name='credentials',
)
inputs = CredentialInputField(
- blank=True,
- default=dict,
- help_text=_('Enter inputs using either JSON or YAML syntax. '
- 'Refer to the Ansible Tower documentation for example syntax.')
+ blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. ' 'Refer to the Ansible Tower documentation for example syntax.')
)
admin_role = ImplicitRoleField(
parent_role=[
@@ -125,12 +114,14 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
'admin_role',
]
)
- read_role = ImplicitRoleField(parent_role=[
- 'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
- 'organization.auditor_role',
- 'use_role',
- 'admin_role',
- ])
+ read_role = ImplicitRoleField(
+ parent_role=[
+ 'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
+ 'organization.auditor_role',
+ 'use_role',
+ 'admin_role',
+ ]
+ )
@property
def kind(self):
@@ -242,11 +233,11 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
return field_val
def unique_hash(self, display=False):
- '''
+ """
Credential exclusivity is not defined solely by the related
credential type (due to vault), so this produces a hash
that can be used to evaluate exclusivity
- '''
+ """
if display:
type_alias = self.credential_type.name
else:
@@ -313,12 +304,12 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
class CredentialType(CommonModelNameNotUnique):
- '''
+ """
A reusable schema for a credential.
Used to define a named credential type with fields (e.g., an API key) and
output injectors (i.e., an environment variable that uses the API key).
- '''
+ """
class Meta:
app_label = 'main'
@@ -339,31 +330,16 @@ class CredentialType(CommonModelNameNotUnique):
('galaxy', _('Galaxy/Automation Hub')),
)
- kind = models.CharField(
- max_length=32,
- choices=KIND_CHOICES
- )
- managed_by_tower = models.BooleanField(
- default=False,
- editable=False
- )
- namespace = models.CharField(
- max_length=1024,
- null=True,
- default=None,
- editable=False
- )
+ kind = models.CharField(max_length=32, choices=KIND_CHOICES)
+ managed_by_tower = models.BooleanField(default=False, editable=False)
+ namespace = models.CharField(max_length=1024, null=True, default=None, editable=False)
inputs = CredentialTypeInputField(
- blank=True,
- default=dict,
- help_text=_('Enter inputs using either JSON or YAML syntax. '
- 'Refer to the Ansible Tower documentation for example syntax.')
+ blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. ' 'Refer to the Ansible Tower documentation for example syntax.')
)
injectors = CredentialTypeInjectorField(
blank=True,
default=dict,
- help_text=_('Enter injectors using either JSON or YAML syntax. '
- 'Refer to the Ansible Tower documentation for example syntax.')
+ help_text=_('Enter injectors using either JSON or YAML syntax. ' 'Refer to the Ansible Tower documentation for example syntax.'),
)
@classmethod
@@ -384,26 +360,17 @@ class CredentialType(CommonModelNameNotUnique):
@property
def secret_fields(self):
- return [
- field['id'] for field in self.inputs.get('fields', [])
- if field.get('secret', False) is True
- ]
+ return [field['id'] for field in self.inputs.get('fields', []) if field.get('secret', False) is True]
@property
def askable_fields(self):
- return [
- field['id'] for field in self.inputs.get('fields', [])
- if field.get('ask_at_runtime', False) is True
- ]
+ return [field['id'] for field in self.inputs.get('fields', []) if field.get('ask_at_runtime', False) is True]
@property
def plugin(self):
if self.kind != 'external':
raise AttributeError('plugin')
- [plugin] = [
- plugin for ns, plugin in credential_plugins.items()
- if ns == self.namespace
- ]
+ [plugin] = [plugin for ns, plugin in credential_plugins.items() if ns == self.namespace]
return plugin
def default_for_field(self, field_id):
@@ -415,10 +382,7 @@ class CredentialType(CommonModelNameNotUnique):
@classproperty
def defaults(cls):
- return dict(
- (k, functools.partial(v.create))
- for k, v in ManagedCredentialType.registry.items()
- )
+ return dict((k, functools.partial(v.create)) for k, v in ManagedCredentialType.registry.items())
@classmethod
def setup_tower_managed_defaults(cls):
@@ -430,21 +394,14 @@ class CredentialType(CommonModelNameNotUnique):
existing.injectors = {}
existing.save()
continue
- logger.debug(_(
- "adding %s credential type" % default.name
- ))
+ logger.debug(_("adding %s credential type" % default.name))
created = default.create()
created.inputs = created.injectors = {}
created.save()
@classmethod
def load_plugin(cls, ns, plugin):
- ManagedCredentialType(
- namespace=ns,
- name=plugin.name,
- kind='external',
- inputs=plugin.inputs
- )
+ ManagedCredentialType(namespace=ns, name=plugin.name, kind='external', inputs=plugin.inputs)
def inject_credential(self, credential, env, safe_env, args, private_data_dir):
"""
@@ -474,9 +431,7 @@ class CredentialType(CommonModelNameNotUnique):
if not self.injectors:
if self.managed_by_tower and credential.credential_type.namespace in dir(builtin_injectors):
injected_env = {}
- getattr(builtin_injectors, credential.credential_type.namespace)(
- credential, injected_env, private_data_dir
- )
+ getattr(builtin_injectors, credential.credential_type.namespace)(credential, injected_env, private_data_dir)
env.update(injected_env)
safe_env.update(build_safe_env(injected_env))
return
@@ -530,10 +485,7 @@ class CredentialType(CommonModelNameNotUnique):
f.write(data)
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
# FIXME: develop some better means of referencing paths inside containers
- container_path = os.path.join(
- '/runner',
- os.path.basename(path)
- )
+ container_path = os.path.join('/runner', os.path.basename(path))
# determine if filename indicates single file or many
if file_label.find('.') == -1:
@@ -561,7 +513,7 @@ class CredentialType(CommonModelNameNotUnique):
extra_vars[var_name] = sandbox_env.from_string(tmpl).render(**namespace)
def build_extra_vars_file(vars, private_dir):
- handle, path = tempfile.mkstemp(dir = private_dir)
+ handle, path = tempfile.mkstemp(dir=private_dir)
f = os.fdopen(handle, 'w')
f.write(safe_dump(vars))
f.close()
@@ -571,10 +523,7 @@ class CredentialType(CommonModelNameNotUnique):
if extra_vars:
path = build_extra_vars_file(extra_vars, private_data_dir)
# FIXME: develop some better means of referencing paths inside containers
- container_path = os.path.join(
- '/runner',
- os.path.basename(path)
- )
+ container_path = os.path.join('/runner', os.path.basename(path))
args.extend(['-e', '@%s' % container_path])
@@ -590,8 +539,7 @@ class ManagedCredentialType(SimpleNamespace):
if namespace in ManagedCredentialType.registry:
raise ValueError(
'a ManagedCredentialType with namespace={} is already defined in {}'.format(
- namespace,
- inspect.getsourcefile(ManagedCredentialType.registry[namespace].__class__)
+ namespace, inspect.getsourcefile(ManagedCredentialType.registry[namespace].__class__)
)
)
ManagedCredentialType.registry[namespace] = self
@@ -612,54 +560,34 @@ ManagedCredentialType(
kind='ssh',
name=ugettext_noop('Machine'),
inputs={
- 'fields': [{
- 'id': 'username',
- 'label': ugettext_noop('Username'),
- 'type': 'string'
- }, {
- 'id': 'password',
- 'label': ugettext_noop('Password'),
- 'type': 'string',
- 'secret': True,
- 'ask_at_runtime': True
- }, {
- 'id': 'ssh_key_data',
- 'label': ugettext_noop('SSH Private Key'),
- 'type': 'string',
- 'format': 'ssh_private_key',
- 'secret': True,
- 'multiline': True
- }, {
- 'id': 'ssh_public_key_data',
- 'label': ugettext_noop('Signed SSH Certificate'),
- 'type': 'string',
- 'multiline': True,
- 'secret': True,
- }, {
- 'id': 'ssh_key_unlock',
- 'label': ugettext_noop('Private Key Passphrase'),
- 'type': 'string',
- 'secret': True,
- 'ask_at_runtime': True
- }, {
- 'id': 'become_method',
- 'label': ugettext_noop('Privilege Escalation Method'),
- 'type': 'string',
- 'help_text': ugettext_noop('Specify a method for "become" operations. This is '
- 'equivalent to specifying the --become-method '
- 'Ansible parameter.')
- }, {
- 'id': 'become_username',
- 'label': ugettext_noop('Privilege Escalation Username'),
- 'type': 'string',
- }, {
- 'id': 'become_password',
- 'label': ugettext_noop('Privilege Escalation Password'),
- 'type': 'string',
- 'secret': True,
- 'ask_at_runtime': True
- }],
- }
+ 'fields': [
+ {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'},
+ {'id': 'password', 'label': ugettext_noop('Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True},
+ {'id': 'ssh_key_data', 'label': ugettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True},
+ {
+ 'id': 'ssh_public_key_data',
+ 'label': ugettext_noop('Signed SSH Certificate'),
+ 'type': 'string',
+ 'multiline': True,
+ 'secret': True,
+ },
+ {'id': 'ssh_key_unlock', 'label': ugettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True, 'ask_at_runtime': True},
+ {
+ 'id': 'become_method',
+ 'label': ugettext_noop('Privilege Escalation Method'),
+ 'type': 'string',
+ 'help_text': ugettext_noop(
+ 'Specify a method for "become" operations. This is ' 'equivalent to specifying the --become-method ' 'Ansible parameter.'
+ ),
+ },
+ {
+ 'id': 'become_username',
+ 'label': ugettext_noop('Privilege Escalation Username'),
+ 'type': 'string',
+ },
+ {'id': 'become_password', 'label': ugettext_noop('Privilege Escalation Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True},
+ ],
+ },
)
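
Assuming a CredentialType built from the Machine inputs above (the lookup is hypothetical), the secret_fields property reformatted earlier in this file would report every input flagged secret=True:

    machine = CredentialType.objects.get(kind='ssh', name='Machine')  # hypothetical lookup
    assert machine.secret_fields == ['password', 'ssh_key_data', 'ssh_public_key_data', 'ssh_key_unlock', 'become_password']
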
ManagedCredentialType(
@@ -668,29 +596,13 @@ ManagedCredentialType(
name=ugettext_noop('Source Control'),
managed_by_tower=True,
inputs={
- 'fields': [{
- 'id': 'username',
- 'label': ugettext_noop('Username'),
- 'type': 'string'
- }, {
- 'id': 'password',
- 'label': ugettext_noop('Password'),
- 'type': 'string',
- 'secret': True
- }, {
- 'id': 'ssh_key_data',
- 'label': ugettext_noop('SCM Private Key'),
- 'type': 'string',
- 'format': 'ssh_private_key',
- 'secret': True,
- 'multiline': True
- }, {
- 'id': 'ssh_key_unlock',
- 'label': ugettext_noop('Private Key Passphrase'),
- 'type': 'string',
- 'secret': True
- }],
- }
+ 'fields': [
+ {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'},
+ {'id': 'password', 'label': ugettext_noop('Password'), 'type': 'string', 'secret': True},
+ {'id': 'ssh_key_data', 'label': ugettext_noop('SCM Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True},
+ {'id': 'ssh_key_unlock', 'label': ugettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True},
+ ],
+ },
)
ManagedCredentialType(
@@ -699,25 +611,24 @@ ManagedCredentialType(
name=ugettext_noop('Vault'),
managed_by_tower=True,
inputs={
- 'fields': [{
- 'id': 'vault_password',
- 'label': ugettext_noop('Vault Password'),
- 'type': 'string',
- 'secret': True,
- 'ask_at_runtime': True
- }, {
- 'id': 'vault_id',
- 'label': ugettext_noop('Vault Identifier'),
- 'type': 'string',
- 'format': 'vault_id',
- 'help_text': ugettext_noop('Specify an (optional) Vault ID. This is '
- 'equivalent to specifying the --vault-id '
- 'Ansible parameter for providing multiple Vault '
- 'passwords. Note: this feature only works in '
- 'Ansible 2.4+.')
- }],
+ 'fields': [
+ {'id': 'vault_password', 'label': ugettext_noop('Vault Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True},
+ {
+ 'id': 'vault_id',
+ 'label': ugettext_noop('Vault Identifier'),
+ 'type': 'string',
+ 'format': 'vault_id',
+ 'help_text': ugettext_noop(
+ 'Specify an (optional) Vault ID. This is '
+ 'equivalent to specifying the --vault-id '
+ 'Ansible parameter for providing multiple Vault '
+ 'passwords. Note: this feature only works in '
+ 'Ansible 2.4+.'
+ ),
+ },
+ ],
'required': ['vault_password'],
- }
+ },
)
ManagedCredentialType(
@@ -726,42 +637,38 @@ ManagedCredentialType(
name=ugettext_noop('Network'),
managed_by_tower=True,
inputs={
- 'fields': [{
- 'id': 'username',
- 'label': ugettext_noop('Username'),
- 'type': 'string'
- }, {
- 'id': 'password',
- 'label': ugettext_noop('Password'),
- 'type': 'string',
- 'secret': True,
- }, {
- 'id': 'ssh_key_data',
- 'label': ugettext_noop('SSH Private Key'),
- 'type': 'string',
- 'format': 'ssh_private_key',
- 'secret': True,
- 'multiline': True
- }, {
- 'id': 'ssh_key_unlock',
- 'label': ugettext_noop('Private Key Passphrase'),
- 'type': 'string',
- 'secret': True,
- }, {
- 'id': 'authorize',
- 'label': ugettext_noop('Authorize'),
- 'type': 'boolean',
- }, {
- 'id': 'authorize_password',
- 'label': ugettext_noop('Authorize Password'),
- 'type': 'string',
- 'secret': True,
- }],
+ 'fields': [
+ {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'},
+ {
+ 'id': 'password',
+ 'label': ugettext_noop('Password'),
+ 'type': 'string',
+ 'secret': True,
+ },
+ {'id': 'ssh_key_data', 'label': ugettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True},
+ {
+ 'id': 'ssh_key_unlock',
+ 'label': ugettext_noop('Private Key Passphrase'),
+ 'type': 'string',
+ 'secret': True,
+ },
+ {
+ 'id': 'authorize',
+ 'label': ugettext_noop('Authorize'),
+ 'type': 'boolean',
+ },
+ {
+ 'id': 'authorize_password',
+ 'label': ugettext_noop('Authorize Password'),
+ 'type': 'string',
+ 'secret': True,
+ },
+ ],
'dependencies': {
'authorize_password': ['authorize'],
},
'required': ['username'],
- }
+ },
)
ManagedCredentialType(
@@ -770,27 +677,29 @@ ManagedCredentialType(
name=ugettext_noop('Amazon Web Services'),
managed_by_tower=True,
inputs={
- 'fields': [{
- 'id': 'username',
- 'label': ugettext_noop('Access Key'),
- 'type': 'string'
- }, {
- 'id': 'password',
- 'label': ugettext_noop('Secret Key'),
- 'type': 'string',
- 'secret': True,
- }, {
- 'id': 'security_token',
- 'label': ugettext_noop('STS Token'),
- 'type': 'string',
- 'secret': True,
- 'help_text': ugettext_noop('Security Token Service (STS) is a web service '
- 'that enables you to request temporary, '
- 'limited-privilege credentials for AWS Identity '
- 'and Access Management (IAM) users.'),
- }],
- 'required': ['username', 'password']
- }
+ 'fields': [
+ {'id': 'username', 'label': ugettext_noop('Access Key'), 'type': 'string'},
+ {
+ 'id': 'password',
+ 'label': ugettext_noop('Secret Key'),
+ 'type': 'string',
+ 'secret': True,
+ },
+ {
+ 'id': 'security_token',
+ 'label': ugettext_noop('STS Token'),
+ 'type': 'string',
+ 'secret': True,
+ 'help_text': ugettext_noop(
+ 'Security Token Service (STS) is a web service '
+ 'that enables you to request temporary, '
+ 'limited-privilege credentials for AWS Identity '
+ 'and Access Management (IAM) users.'
+ ),
+ },
+ ],
+ 'required': ['username', 'password'],
+ },
)
ManagedCredentialType(
@@ -799,50 +708,56 @@ ManagedCredentialType(
name=ugettext_noop('OpenStack'),
managed_by_tower=True,
inputs={
- 'fields': [{
- 'id': 'username',
- 'label': ugettext_noop('Username'),
- 'type': 'string'
- }, {
- 'id': 'password',
- 'label': ugettext_noop('Password (API Key)'),
- 'type': 'string',
- 'secret': True,
- }, {
- 'id': 'host',
- 'label': ugettext_noop('Host (Authentication URL)'),
- 'type': 'string',
- 'help_text': ugettext_noop('The host to authenticate with. For example, '
- 'https://openstack.business.com/v2.0/')
- }, {
- 'id': 'project',
- 'label': ugettext_noop('Project (Tenant Name)'),
- 'type': 'string',
- }, {
- 'id': 'project_domain_name',
- 'label': ugettext_noop('Project (Domain Name)'),
- 'type': 'string',
- }, {
- 'id': 'domain',
- 'label': ugettext_noop('Domain Name'),
- 'type': 'string',
- 'help_text': ugettext_noop('OpenStack domains define administrative boundaries. '
- 'It is only needed for Keystone v3 authentication '
- 'URLs. Refer to Ansible Tower documentation for '
- 'common scenarios.')
- }, {
- 'id': 'region',
- 'label': ugettext_noop('Region Name'),
- 'type': 'string',
- 'help_text': ugettext_noop('For some cloud providers, like OVH, region must be specified'),
- }, {
- 'id': 'verify_ssl',
- 'label': ugettext_noop('Verify SSL'),
- 'type': 'boolean',
- 'default': True,
- }],
- 'required': ['username', 'password', 'host', 'project']
- }
+ 'fields': [
+ {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'},
+ {
+ 'id': 'password',
+ 'label': ugettext_noop('Password (API Key)'),
+ 'type': 'string',
+ 'secret': True,
+ },
+ {
+ 'id': 'host',
+ 'label': ugettext_noop('Host (Authentication URL)'),
+ 'type': 'string',
+ 'help_text': ugettext_noop('The host to authenticate with. For example, ' 'https://openstack.business.com/v2.0/'),
+ },
+ {
+ 'id': 'project',
+ 'label': ugettext_noop('Project (Tenant Name)'),
+ 'type': 'string',
+ },
+ {
+ 'id': 'project_domain_name',
+ 'label': ugettext_noop('Project (Domain Name)'),
+ 'type': 'string',
+ },
+ {
+ 'id': 'domain',
+ 'label': ugettext_noop('Domain Name'),
+ 'type': 'string',
+ 'help_text': ugettext_noop(
+ 'OpenStack domains define administrative boundaries. '
+ 'It is only needed for Keystone v3 authentication '
+ 'URLs. Refer to Ansible Tower documentation for '
+ 'common scenarios.'
+ ),
+ },
+ {
+ 'id': 'region',
+ 'label': ugettext_noop('Region Name'),
+ 'type': 'string',
+ 'help_text': ugettext_noop('For some cloud providers, like OVH, region must be specified'),
+ },
+ {
+ 'id': 'verify_ssl',
+ 'label': ugettext_noop('Verify SSL'),
+ 'type': 'boolean',
+ 'default': True,
+ },
+ ],
+ 'required': ['username', 'password', 'host', 'project'],
+ },
)
ManagedCredentialType(
@@ -851,24 +766,23 @@ ManagedCredentialType(
name=ugettext_noop('VMware vCenter'),
managed_by_tower=True,
inputs={
- 'fields': [{
- 'id': 'host',
- 'label': ugettext_noop('VCenter Host'),
- 'type': 'string',
- 'help_text': ugettext_noop('Enter the hostname or IP address that corresponds '
- 'to your VMware vCenter.')
- }, {
- 'id': 'username',
- 'label': ugettext_noop('Username'),
- 'type': 'string'
- }, {
- 'id': 'password',
- 'label': ugettext_noop('Password'),
- 'type': 'string',
- 'secret': True,
- }],
- 'required': ['host', 'username', 'password']
- }
+ 'fields': [
+ {
+ 'id': 'host',
+ 'label': ugettext_noop('VCenter Host'),
+ 'type': 'string',
+ 'help_text': ugettext_noop('Enter the hostname or IP address that corresponds ' 'to your VMware vCenter.'),
+ },
+ {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'},
+ {
+ 'id': 'password',
+ 'label': ugettext_noop('Password'),
+ 'type': 'string',
+ 'secret': True,
+ },
+ ],
+ 'required': ['host', 'username', 'password'],
+ },
)
ManagedCredentialType(
@@ -877,24 +791,23 @@ ManagedCredentialType(
name=ugettext_noop('Red Hat Satellite 6'),
managed_by_tower=True,
inputs={
- 'fields': [{
- 'id': 'host',
- 'label': ugettext_noop('Satellite 6 URL'),
- 'type': 'string',
- 'help_text': ugettext_noop('Enter the URL that corresponds to your Red Hat '
- 'Satellite 6 server. For example, https://satellite.example.org')
- }, {
- 'id': 'username',
- 'label': ugettext_noop('Username'),
- 'type': 'string'
- }, {
- 'id': 'password',
- 'label': ugettext_noop('Password'),
- 'type': 'string',
- 'secret': True,
- }],
+ 'fields': [
+ {
+ 'id': 'host',
+ 'label': ugettext_noop('Satellite 6 URL'),
+ 'type': 'string',
+ 'help_text': ugettext_noop('Enter the URL that corresponds to your Red Hat ' 'Satellite 6 server. For example, https://satellite.example.org'),
+ },
+ {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'},
+ {
+ 'id': 'password',
+ 'label': ugettext_noop('Password'),
+ 'type': 'string',
+ 'secret': True,
+ },
+ ],
'required': ['host', 'username', 'password'],
- }
+ },
)
ManagedCredentialType(
@@ -903,32 +816,36 @@ ManagedCredentialType(
name=ugettext_noop('Google Compute Engine'),
managed_by_tower=True,
inputs={
- 'fields': [{
- 'id': 'username',
- 'label': ugettext_noop('Service Account Email Address'),
- 'type': 'string',
- 'help_text': ugettext_noop('The email address assigned to the Google Compute '
- 'Engine service account.')
- }, {
- 'id': 'project',
- 'label': 'Project',
- 'type': 'string',
- 'help_text': ugettext_noop('The Project ID is the GCE assigned identification. '
- 'It is often constructed as three words or two words '
- 'followed by a three-digit number. Examples: project-id-000 '
- 'and another-project-id')
- }, {
- 'id': 'ssh_key_data',
- 'label': ugettext_noop('RSA Private Key'),
- 'type': 'string',
- 'format': 'ssh_private_key',
- 'secret': True,
- 'multiline': True,
- 'help_text': ugettext_noop('Paste the contents of the PEM file associated '
- 'with the service account email.')
- }],
+ 'fields': [
+ {
+ 'id': 'username',
+ 'label': ugettext_noop('Service Account Email Address'),
+ 'type': 'string',
+ 'help_text': ugettext_noop('The email address assigned to the Google Compute ' 'Engine service account.'),
+ },
+ {
+ 'id': 'project',
+ 'label': 'Project',
+ 'type': 'string',
+ 'help_text': ugettext_noop(
+ 'The Project ID is the GCE assigned identification. '
+ 'It is often constructed as three words or two words '
+ 'followed by a three-digit number. Examples: project-id-000 '
+ 'and another-project-id'
+ ),
+ },
+ {
+ 'id': 'ssh_key_data',
+ 'label': ugettext_noop('RSA Private Key'),
+ 'type': 'string',
+ 'format': 'ssh_private_key',
+ 'secret': True,
+ 'multiline': True,
+ 'help_text': ugettext_noop('Paste the contents of the PEM file associated ' 'with the service account email.'),
+ },
+ ],
'required': ['username', 'ssh_key_data'],
- }
+ },
)
ManagedCredentialType(
@@ -937,43 +854,37 @@ ManagedCredentialType(
name=ugettext_noop('Microsoft Azure Resource Manager'),
managed_by_tower=True,
inputs={
- 'fields': [{
- 'id': 'subscription',
- 'label': ugettext_noop('Subscription ID'),
- 'type': 'string',
- 'help_text': ugettext_noop('Subscription ID is an Azure construct, which is '
- 'mapped to a username.')
- }, {
- 'id': 'username',
- 'label': ugettext_noop('Username'),
- 'type': 'string'
- }, {
- 'id': 'password',
- 'label': ugettext_noop('Password'),
- 'type': 'string',
- 'secret': True,
- }, {
- 'id': 'client',
- 'label': ugettext_noop('Client ID'),
- 'type': 'string'
- }, {
- 'id': 'secret',
- 'label': ugettext_noop('Client Secret'),
- 'type': 'string',
- 'secret': True,
- }, {
- 'id': 'tenant',
- 'label': ugettext_noop('Tenant ID'),
- 'type': 'string'
- }, {
- 'id': 'cloud_environment',
- 'label': ugettext_noop('Azure Cloud Environment'),
- 'type': 'string',
- 'help_text': ugettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when'
- ' using Azure GovCloud or Azure stack.')
- }],
+ 'fields': [
+ {
+ 'id': 'subscription',
+ 'label': ugettext_noop('Subscription ID'),
+ 'type': 'string',
+ 'help_text': ugettext_noop('Subscription ID is an Azure construct, which is ' 'mapped to a username.'),
+ },
+ {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'},
+ {
+ 'id': 'password',
+ 'label': ugettext_noop('Password'),
+ 'type': 'string',
+ 'secret': True,
+ },
+ {'id': 'client', 'label': ugettext_noop('Client ID'), 'type': 'string'},
+ {
+ 'id': 'secret',
+ 'label': ugettext_noop('Client Secret'),
+ 'type': 'string',
+ 'secret': True,
+ },
+ {'id': 'tenant', 'label': ugettext_noop('Tenant ID'), 'type': 'string'},
+ {
+ 'id': 'cloud_environment',
+ 'label': ugettext_noop('Azure Cloud Environment'),
+ 'type': 'string',
+ 'help_text': ugettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when' ' using Azure GovCloud or Azure stack.'),
+ },
+ ],
'required': ['subscription'],
- }
+ },
)
ManagedCredentialType(
@@ -982,13 +893,15 @@ ManagedCredentialType(
name=ugettext_noop('GitHub Personal Access Token'),
managed_by_tower=True,
inputs={
- 'fields': [{
- 'id': 'token',
- 'label': ugettext_noop('Token'),
- 'type': 'string',
- 'secret': True,
- 'help_text': ugettext_noop('This token needs to come from your profile settings in GitHub')
- }],
+ 'fields': [
+ {
+ 'id': 'token',
+ 'label': ugettext_noop('Token'),
+ 'type': 'string',
+ 'secret': True,
+ 'help_text': ugettext_noop('This token needs to come from your profile settings in GitHub'),
+ }
+ ],
'required': ['token'],
},
)
@@ -999,13 +912,15 @@ ManagedCredentialType(
name=ugettext_noop('GitLab Personal Access Token'),
managed_by_tower=True,
inputs={
- 'fields': [{
- 'id': 'token',
- 'label': ugettext_noop('Token'),
- 'type': 'string',
- 'secret': True,
- 'help_text': ugettext_noop('This token needs to come from your profile settings in GitLab')
- }],
+ 'fields': [
+ {
+ 'id': 'token',
+ 'label': ugettext_noop('Token'),
+ 'type': 'string',
+ 'secret': True,
+ 'help_text': ugettext_noop('This token needs to come from your profile settings in GitLab'),
+ }
+ ],
'required': ['token'],
},
)
@@ -1016,16 +931,10 @@ ManagedCredentialType(
name=ugettext_noop('Insights'),
managed_by_tower=True,
inputs={
- 'fields': [{
- 'id': 'username',
- 'label': ugettext_noop('Username'),
- 'type': 'string'
- }, {
- 'id': 'password',
- 'label': ugettext_noop('Password'),
- 'type': 'string',
- 'secret': True
- }],
+ 'fields': [
+ {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'},
+ {'id': 'password', 'label': ugettext_noop('Password'), 'type': 'string', 'secret': True},
+ ],
'required': ['username', 'password'],
},
injectors={
@@ -1042,26 +951,22 @@ ManagedCredentialType(
name=ugettext_noop('Red Hat Virtualization'),
managed_by_tower=True,
inputs={
- 'fields': [{
- 'id': 'host',
- 'label': ugettext_noop('Host (Authentication URL)'),
- 'type': 'string',
- 'help_text': ugettext_noop('The host to authenticate with.')
- }, {
- 'id': 'username',
- 'label': ugettext_noop('Username'),
- 'type': 'string'
- }, {
- 'id': 'password',
- 'label': ugettext_noop('Password'),
- 'type': 'string',
- 'secret': True,
- }, {
- 'id': 'ca_file',
- 'label': ugettext_noop('CA File'),
- 'type': 'string',
- 'help_text': ugettext_noop('Absolute file path to the CA file to use (optional)')
- }],
+ 'fields': [
+ {'id': 'host', 'label': ugettext_noop('Host (Authentication URL)'), 'type': 'string', 'help_text': ugettext_noop('The host to authenticate with.')},
+ {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'},
+ {
+ 'id': 'password',
+ 'label': ugettext_noop('Password'),
+ 'type': 'string',
+ 'secret': True,
+ },
+ {
+ 'id': 'ca_file',
+ 'label': ugettext_noop('CA File'),
+ 'type': 'string',
+ 'help_text': ugettext_noop('Absolute file path to the CA file to use (optional)'),
+ },
+ ],
'required': ['host', 'username', 'password'],
},
injectors={
@@ -1070,19 +975,17 @@ ManagedCredentialType(
# ovirt4 use a separate authentication process that supports
# environment variables; by injecting both, we support both
'file': {
- 'template': '\n'.join([
- '[ovirt]',
- 'ovirt_url={{host}}',
- 'ovirt_username={{username}}',
- 'ovirt_password={{password}}',
- '{% if ca_file %}ovirt_ca_file={{ca_file}}{% endif %}'])
+ 'template': '\n'.join(
+ [
+ '[ovirt]',
+ 'ovirt_url={{host}}',
+ 'ovirt_username={{username}}',
+ 'ovirt_password={{password}}',
+ '{% if ca_file %}ovirt_ca_file={{ca_file}}{% endif %}',
+ ]
+ )
},
- 'env': {
- 'OVIRT_INI_PATH': '{{tower.filename}}',
- 'OVIRT_URL': '{{host}}',
- 'OVIRT_USERNAME': '{{username}}',
- 'OVIRT_PASSWORD': '{{password}}'
- }
+ 'env': {'OVIRT_INI_PATH': '{{tower.filename}}', 'OVIRT_URL': '{{host}}', 'OVIRT_USERNAME': '{{username}}', 'OVIRT_PASSWORD': '{{password}}'},
},
)
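
For orientation, a minimal sketch of what the 'file' injector above produces once credential inputs are substituted; AWX's real machinery renders these templates through a sandboxed Jinja2 environment, and the credential values here are made up:

    from jinja2 import Template

    inputs = {'host': 'https://rhv.example.com', 'username': 'admin', 'password': 's3cret', 'ca_file': ''}
    ini_template = '\n'.join(
        [
            '[ovirt]',
            'ovirt_url={{host}}',
            'ovirt_username={{username}}',
            'ovirt_password={{password}}',
            '{% if ca_file %}ovirt_ca_file={{ca_file}}{% endif %}',
        ]
    )
    # Renders the ovirt.ini handed to the inventory plugin; the ca_file
    # line drops out because that input is empty.
    print(Template(ini_template).render(**inputs))
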
@@ -1092,35 +995,36 @@ ManagedCredentialType(
name=ugettext_noop('Ansible Tower'),
managed_by_tower=True,
inputs={
- 'fields': [{
- 'id': 'host',
- 'label': ugettext_noop('Ansible Tower Hostname'),
- 'type': 'string',
- 'help_text': ugettext_noop('The Ansible Tower base URL to authenticate with.')
- }, {
- 'id': 'username',
- 'label': ugettext_noop('Username'),
- 'type': 'string',
- 'help_text': ugettext_noop('The Ansible Tower user to authenticate as.'
- 'This should not be set if an OAuth token is being used.')
- }, {
- 'id': 'password',
- 'label': ugettext_noop('Password'),
- 'type': 'string',
- 'secret': True,
- }, {
- 'id': 'oauth_token',
- 'label': ugettext_noop('OAuth Token'),
- 'type': 'string',
- 'secret': True,
- 'help_text': ugettext_noop('An OAuth token to use to authenticate to Tower with.'
- 'This should not be set if username/password are being used.')
- }, {
- 'id': 'verify_ssl',
- 'label': ugettext_noop('Verify SSL'),
- 'type': 'boolean',
- 'secret': False
- }],
+ 'fields': [
+ {
+ 'id': 'host',
+ 'label': ugettext_noop('Ansible Tower Hostname'),
+ 'type': 'string',
+ 'help_text': ugettext_noop('The Ansible Tower base URL to authenticate with.'),
+ },
+ {
+ 'id': 'username',
+ 'label': ugettext_noop('Username'),
+ 'type': 'string',
+                'help_text': ugettext_noop('The Ansible Tower user to authenticate as. ' 'This should not be set if an OAuth token is being used.'),
+ },
+ {
+ 'id': 'password',
+ 'label': ugettext_noop('Password'),
+ 'type': 'string',
+ 'secret': True,
+ },
+ {
+ 'id': 'oauth_token',
+ 'label': ugettext_noop('OAuth Token'),
+ 'type': 'string',
+ 'secret': True,
+ 'help_text': ugettext_noop(
+                    'An OAuth token to use to authenticate to Tower with. ' 'This should not be set if username/password are being used.'
+ ),
+ },
+ {'id': 'verify_ssl', 'label': ugettext_noop('Verify SSL'), 'type': 'boolean', 'secret': False},
+ ],
'required': ['host'],
},
injectors={
@@ -1129,7 +1033,7 @@ ManagedCredentialType(
'TOWER_USERNAME': '{{username}}',
'TOWER_PASSWORD': '{{password}}',
'TOWER_VERIFY_SSL': '{{verify_ssl}}',
- 'TOWER_OAUTH_TOKEN': '{{oauth_token}}'
+ 'TOWER_OAUTH_TOKEN': '{{oauth_token}}',
}
},
)
@@ -1139,30 +1043,35 @@ ManagedCredentialType(
kind='kubernetes',
name=ugettext_noop('OpenShift or Kubernetes API Bearer Token'),
inputs={
- 'fields': [{
- 'id': 'host',
- 'label': ugettext_noop('OpenShift or Kubernetes API Endpoint'),
- 'type': 'string',
- 'help_text': ugettext_noop('The OpenShift or Kubernetes API Endpoint to authenticate with.')
- },{
- 'id': 'bearer_token',
- 'label': ugettext_noop('API authentication bearer token'),
- 'type': 'string',
- 'secret': True,
- },{
- 'id': 'verify_ssl',
- 'label': ugettext_noop('Verify SSL'),
- 'type': 'boolean',
- 'default': True,
- },{
- 'id': 'ssl_ca_cert',
- 'label': ugettext_noop('Certificate Authority data'),
- 'type': 'string',
- 'secret': True,
- 'multiline': True,
- }],
+ 'fields': [
+ {
+ 'id': 'host',
+ 'label': ugettext_noop('OpenShift or Kubernetes API Endpoint'),
+ 'type': 'string',
+ 'help_text': ugettext_noop('The OpenShift or Kubernetes API Endpoint to authenticate with.'),
+ },
+ {
+ 'id': 'bearer_token',
+ 'label': ugettext_noop('API authentication bearer token'),
+ 'type': 'string',
+ 'secret': True,
+ },
+ {
+ 'id': 'verify_ssl',
+ 'label': ugettext_noop('Verify SSL'),
+ 'type': 'boolean',
+ 'default': True,
+ },
+ {
+ 'id': 'ssl_ca_cert',
+ 'label': ugettext_noop('Certificate Authority data'),
+ 'type': 'string',
+ 'secret': True,
+ 'multiline': True,
+ },
+ ],
'required': ['host', 'bearer_token'],
- }
+ },
)
ManagedCredentialType(
@@ -1170,30 +1079,34 @@ ManagedCredentialType(
kind='registry',
name=ugettext_noop('Container Registry'),
inputs={
- 'fields': [{
- 'id': 'host',
- 'label': ugettext_noop('Authentication URL'),
- 'type': 'string',
- 'help_text': ugettext_noop('Authentication endpoint for the container registry.'),
- }, {
- 'id': 'username',
- 'label': ugettext_noop('Username'),
- 'type': 'string',
- }, {
- 'id': 'password',
- 'label': ugettext_noop('Password'),
- 'type': 'string',
- 'secret': True,
- }, {
- 'id': 'token',
- 'label': ugettext_noop('Access Token'),
- 'type': 'string',
- 'secret': True,
- 'help_text': ugettext_noop('A token to use to authenticate with. '
- 'This should not be set if username/password are being used.'),
- }],
+ 'fields': [
+ {
+ 'id': 'host',
+ 'label': ugettext_noop('Authentication URL'),
+ 'type': 'string',
+ 'help_text': ugettext_noop('Authentication endpoint for the container registry.'),
+ },
+ {
+ 'id': 'username',
+ 'label': ugettext_noop('Username'),
+ 'type': 'string',
+ },
+ {
+ 'id': 'password',
+ 'label': ugettext_noop('Password'),
+ 'type': 'string',
+ 'secret': True,
+ },
+ {
+ 'id': 'token',
+ 'label': ugettext_noop('Access Token'),
+ 'type': 'string',
+ 'secret': True,
+ 'help_text': ugettext_noop('A token to use to authenticate with. ' 'This should not be set if username/password are being used.'),
+ },
+ ],
'required': ['host'],
- }
+ },
)
@@ -1202,39 +1115,41 @@ ManagedCredentialType(
kind='galaxy',
name=ugettext_noop('Ansible Galaxy/Automation Hub API Token'),
inputs={
- 'fields': [{
- 'id': 'url',
- 'label': ugettext_noop('Galaxy Server URL'),
- 'type': 'string',
- 'help_text': ugettext_noop('The URL of the Galaxy instance to connect to.')
- },{
- 'id': 'auth_url',
- 'label': ugettext_noop('Auth Server URL'),
- 'type': 'string',
- 'help_text': ugettext_noop(
- 'The URL of a Keycloak server token_endpoint, if using '
- 'SSO auth.'
- )
- },{
- 'id': 'token',
- 'label': ugettext_noop('API Token'),
- 'type': 'string',
- 'secret': True,
- 'help_text': ugettext_noop(
- 'A token to use for authentication against the Galaxy instance.'
- )
- }],
+ 'fields': [
+ {
+ 'id': 'url',
+ 'label': ugettext_noop('Galaxy Server URL'),
+ 'type': 'string',
+ 'help_text': ugettext_noop('The URL of the Galaxy instance to connect to.'),
+ },
+ {
+ 'id': 'auth_url',
+ 'label': ugettext_noop('Auth Server URL'),
+ 'type': 'string',
+ 'help_text': ugettext_noop('The URL of a Keycloak server token_endpoint, if using ' 'SSO auth.'),
+ },
+ {
+ 'id': 'token',
+ 'label': ugettext_noop('API Token'),
+ 'type': 'string',
+ 'secret': True,
+ 'help_text': ugettext_noop('A token to use for authentication against the Galaxy instance.'),
+ },
+ ],
'required': ['url'],
- }
+ },
)
class CredentialInputSource(PrimordialModel):
-
class Meta:
app_label = 'main'
unique_together = (('target_credential', 'input_field_name'),)
- ordering = ('target_credential', 'source_credential', 'input_field_name',)
+ ordering = (
+ 'target_credential',
+ 'source_credential',
+ 'input_field_name',
+ )
FIELDS_TO_PRESERVE_AT_COPY = ['source_credential', 'metadata', 'input_field_name']
@@ -1253,10 +1168,7 @@ class CredentialInputSource(PrimordialModel):
input_field_name = models.CharField(
max_length=1024,
)
- metadata = DynamicCredentialInputField(
- blank=True,
- default=dict
- )
+ metadata = DynamicCredentialInputField(blank=True, default=dict)
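
Worth noting in the collapsed field above: default=dict passes the dict constructor as a callable, so every new row gets its own fresh dict rather than sharing one mutable literal. A tiny sketch of the distinction:

    fresh_a, fresh_b = dict(), dict()  # a callable default: a new object per row
    assert fresh_a == fresh_b and fresh_a is not fresh_b

    shared = {}  # a literal default would be a single object shared by every row
    alias = shared
    alias['k'] = 'v'
    assert shared == {'k': 'v'}  # mutation is visible through every reference
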
def clean_target_credential(self):
if self.target_credential.credential_type.kind == 'external':
@@ -1271,11 +1183,7 @@ class CredentialInputSource(PrimordialModel):
def clean_input_field_name(self):
defined_fields = self.target_credential.credential_type.defined_fields
if self.input_field_name not in defined_fields:
- raise ValidationError(_(
- 'Input field must be defined on target credential (options are {}).'.format(
- ', '.join(sorted(defined_fields))
- )
- ))
+ raise ValidationError(_('Input field must be defined on target credential (options are {}).'.format(', '.join(sorted(defined_fields)))))
return self.input_field_name
def get_input_value(self):
diff --git a/awx/main/models/credential/injectors.py b/awx/main/models/credential/injectors.py
index 75a08482cc..b5f7e37fed 100644
--- a/awx/main/models/credential/injectors.py
+++ b/awx/main/models/credential/injectors.py
@@ -19,12 +19,7 @@ def gce(cred, env, private_data_dir):
project = cred.get_input('project', default='')
username = cred.get_input('username', default='')
- json_cred = {
- 'type': 'service_account',
- 'private_key': cred.get_input('ssh_key_data', default=''),
- 'client_email': username,
- 'project_id': project
- }
+ json_cred = {'type': 'service_account', 'private_key': cred.get_input('ssh_key_data', default=''), 'client_email': username, 'project_id': project}
if 'INVENTORY_UPDATE_ID' not in env:
env['GCE_EMAIL'] = username
env['GCE_PROJECT'] = project
@@ -73,10 +68,12 @@ def vmware(cred, env, private_data_dir):
def _openstack_data(cred):
- openstack_auth = dict(auth_url=cred.get_input('host', default=''),
- username=cred.get_input('username', default=''),
- password=cred.get_input('password', default=''),
- project_name=cred.get_input('project', default=''))
+ openstack_auth = dict(
+ auth_url=cred.get_input('host', default=''),
+ username=cred.get_input('username', default=''),
+ password=cred.get_input('password', default=''),
+ project_name=cred.get_input('project', default=''),
+ )
if cred.has_input('project_domain_name'):
openstack_auth['project_domain_name'] = cred.get_input('project_domain_name', default='')
if cred.has_input('domain'):
diff --git a/awx/main/models/events.py b/awx/main/models/events.py
index 90cc6f6094..40f922e1e2 100644
--- a/awx/main/models/events.py
+++ b/awx/main/models/events.py
@@ -24,8 +24,7 @@ analytics_logger = logging.getLogger('awx.analytics.job_events')
logger = logging.getLogger('awx.main.models.events')
-__all__ = ['JobEvent', 'ProjectUpdateEvent', 'AdHocCommandEvent',
- 'InventoryUpdateEvent', 'SystemJobEvent']
+__all__ = ['JobEvent', 'ProjectUpdateEvent', 'AdHocCommandEvent', 'InventoryUpdateEvent', 'SystemJobEvent']
def sanitize_event_keys(kwargs, valid_keys):
@@ -35,9 +34,7 @@ def sanitize_event_keys(kwargs, valid_keys):
kwargs.pop(key)
# Truncate certain values over 1k
- for key in [
- 'play', 'role', 'task', 'playbook'
- ]:
+ for key in ['play', 'role', 'task', 'playbook']:
if isinstance(kwargs.get('event_data', {}).get(key), str):
if len(kwargs['event_data'][key]) > 1024:
kwargs['event_data'][key] = Truncator(kwargs['event_data'][key]).chars(1024)
@@ -59,17 +56,11 @@ def create_host_status_counts(event_data):
return dict(host_status_counts)
-MINIMAL_EVENTS = set([
- 'playbook_on_play_start', 'playbook_on_task_start',
- 'playbook_on_stats', 'EOF'
-])
+MINIMAL_EVENTS = set(['playbook_on_play_start', 'playbook_on_task_start', 'playbook_on_stats', 'EOF'])
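
This set feeds the early return in emit_event_detail just below: with live UI updates disabled, only these coarse playbook events reach the websocket. A standalone sketch of that gate, using the same event names:

    MINIMAL = {'playbook_on_play_start', 'playbook_on_task_start', 'playbook_on_stats', 'EOF'}

    def should_emit(event_name, live_updates_enabled):
        # Mirrors the early-return condition in emit_event_detail.
        return live_updates_enabled or event_name in MINIMAL

    assert should_emit('playbook_on_stats', live_updates_enabled=False)
    assert not should_emit('runner_on_ok', live_updates_enabled=False)
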
def emit_event_detail(event):
- if (
- settings.UI_LIVE_UPDATES_ENABLED is False and
- event.event not in MINIMAL_EVENTS
- ):
+ if settings.UI_LIVE_UPDATES_ENABLED is False and event.event not in MINIMAL_EVENTS:
return
cls = event.__class__
relation = {
@@ -109,21 +100,32 @@ def emit_event_detail(event):
'play': getattr(event, 'play', ''),
'role': getattr(event, 'role', ''),
'task': getattr(event, 'task', ''),
- }
+ },
)
-
-
class BasePlaybookEvent(CreatedModifiedModel):
- '''
+ """
An event/message logged from a playbook callback for each host.
- '''
+ """
VALID_KEYS = [
- 'event', 'event_data', 'playbook', 'play', 'role', 'task', 'created',
- 'counter', 'uuid', 'stdout', 'parent_uuid', 'start_line', 'end_line',
- 'host_id', 'host_name', 'verbosity',
+ 'event',
+ 'event_data',
+ 'playbook',
+ 'play',
+ 'role',
+ 'task',
+ 'created',
+ 'counter',
+ 'uuid',
+ 'stdout',
+ 'parent_uuid',
+ 'start_line',
+ 'end_line',
+ 'host_id',
+ 'host_name',
+ 'verbosity',
]
class Meta:
@@ -191,7 +193,6 @@ class BasePlaybookEvent(CreatedModifiedModel):
(2, 'playbook_on_not_import_for_host', _('internal: on Not Import for Host'), False),
(1, 'playbook_on_play_start', _('Play Started'), False),
(1, 'playbook_on_stats', _('Playbook Complete'), False),
-
# Additional event types for captured stdout not directly related to
# playbook or runner events.
(0, 'debug', _('Debug'), False),
@@ -342,8 +343,7 @@ class BasePlaybookEvent(CreatedModifiedModel):
try:
failures_dict = event_data.get('failures', {})
dark_dict = event_data.get('dark', {})
- self.failed = bool(sum(failures_dict.values()) +
- sum(dark_dict.values()))
+ self.failed = bool(sum(failures_dict.values()) + sum(dark_dict.values()))
changed_dict = event_data.get('changed', {})
self.changed = bool(sum(changed_dict.values()))
except (AttributeError, TypeError):
@@ -364,33 +364,30 @@ class BasePlaybookEvent(CreatedModifiedModel):
logger.exception('Computed fields database error saving event {}'.format(self.pk))
        # find parent links and propagate changed=T and failed=T
- changed = job.job_events.filter(changed=True).exclude(parent_uuid=None).only('parent_uuid').values_list('parent_uuid', flat=True).distinct() # noqa
- failed = job.job_events.filter(failed=True).exclude(parent_uuid=None).only('parent_uuid').values_list('parent_uuid', flat=True).distinct() # noqa
+ changed = (
+ job.job_events.filter(changed=True).exclude(parent_uuid=None).only('parent_uuid').values_list('parent_uuid', flat=True).distinct()
+ ) # noqa
+ failed = (
+ job.job_events.filter(failed=True).exclude(parent_uuid=None).only('parent_uuid').values_list('parent_uuid', flat=True).distinct()
+ ) # noqa
- JobEvent.objects.filter(
- job_id=self.job_id, uuid__in=changed
- ).update(changed=True)
- JobEvent.objects.filter(
- job_id=self.job_id, uuid__in=failed
- ).update(failed=True)
+ JobEvent.objects.filter(job_id=self.job_id, uuid__in=changed).update(changed=True)
+ JobEvent.objects.filter(job_id=self.job_id, uuid__in=failed).update(failed=True)
# send success/failure notifications when we've finished handling the playbook_on_stats event
from awx.main.tasks import handle_success_and_failure_notifications # circular import
def _send_notifications():
handle_success_and_failure_notifications.apply_async([job.id])
- connection.on_commit(_send_notifications)
+ connection.on_commit(_send_notifications)
for field in ('playbook', 'play', 'task', 'role'):
value = force_text(event_data.get(field, '')).strip()
if value != getattr(self, field):
setattr(self, field, value)
if settings.LOG_AGGREGATOR_ENABLED:
- analytics_logger.info(
- 'Event data saved.',
- extra=dict(python_objects=dict(job_event=self))
- )
+ analytics_logger.info('Event data saved.', extra=dict(python_objects=dict(job_event=self)))
@classmethod
def create_from_data(cls, **kwargs):
@@ -443,9 +440,9 @@ class BasePlaybookEvent(CreatedModifiedModel):
class JobEvent(BasePlaybookEvent):
- '''
+ """
An event/message logged from the callback when running a job.
- '''
+ """
VALID_KEYS = BasePlaybookEvent.VALID_KEYS + ['job_id', 'workflow_job_id']
@@ -513,9 +510,8 @@ class JobEvent(BasePlaybookEvent):
job = self.job
from awx.main.models import Host, JobHostSummary # circular import
- all_hosts = Host.objects.filter(
- pk__in=self.host_map.values()
- ).only('id')
+
+ all_hosts = Host.objects.filter(pk__in=self.host_map.values()).only('id')
existing_host_ids = set(h.id for h in all_hosts)
summaries = dict()
@@ -529,9 +525,7 @@ class JobEvent(BasePlaybookEvent):
host_stats[stat] = self.event_data.get(stat, {}).get(host, 0)
except AttributeError: # in case event_data[stat] isn't a dict.
pass
- summary = JobHostSummary(
- created=now(), modified=now(), job_id=job.id, host_id=host_id, host_name=host, **host_stats
- )
+ summary = JobHostSummary(created=now(), modified=now(), job_id=job.id, host_id=host_id, host_name=host, **host_stats)
summary.failed = bool(summary.dark or summary.failures)
summaries[(host_id, host)] = summary
@@ -539,10 +533,7 @@ class JobEvent(BasePlaybookEvent):
# update the last_job_id and last_job_host_summary_id
# in single queries
- host_mapping = dict(
- (summary['host_id'], summary['id'])
- for summary in JobHostSummary.objects.filter(job_id=job.id).values('id', 'host_id')
- )
+ host_mapping = dict((summary['host_id'], summary['id']) for summary in JobHostSummary.objects.filter(job_id=job.id).values('id', 'host_id'))
updated_hosts = set()
for h in all_hosts:
# if the hostname *shows up* in the playbook_on_stats event
@@ -553,12 +544,7 @@ class JobEvent(BasePlaybookEvent):
h.last_job_host_summary_id = host_mapping[h.id]
updated_hosts.add(h)
- Host.objects.bulk_update(
- list(updated_hosts),
- ['last_job_id', 'last_job_host_summary_id'],
- batch_size=100
- )
-
+ Host.objects.bulk_update(list(updated_hosts), ['last_job_id', 'last_job_host_summary_id'], batch_size=100)
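
The bulk_update call above writes both columns for every touched host in batched UPDATE statements instead of issuing one save() per host. An illustrative standalone use of the same pattern, with a hypothetical queryset and values:

    hosts = list(Host.objects.filter(inventory_id=1).only('id'))
    for h in hosts:
        h.last_job_id = 42  # hypothetical ids, for illustration only
        h.last_job_host_summary_id = 7
    # A handful of batched UPDATEs rather than len(hosts) round trips.
    Host.objects.bulk_update(hosts, ['last_job_id', 'last_job_host_summary_id'], batch_size=100)
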
@property
def job_verbosity(self):
@@ -593,14 +579,11 @@ class ProjectUpdateEvent(BasePlaybookEvent):
class BaseCommandEvent(CreatedModifiedModel):
- '''
+ """
An event/message logged from a command for each host.
- '''
+ """
- VALID_KEYS = [
- 'event_data', 'created', 'counter', 'uuid', 'stdout', 'start_line',
- 'end_line', 'verbosity'
- ]
+ VALID_KEYS = ['event_data', 'created', 'counter', 'uuid', 'stdout', 'start_line', 'end_line', 'verbosity']
class Meta:
abstract = True
@@ -671,9 +654,9 @@ class BaseCommandEvent(CreatedModifiedModel):
return event
def get_event_display(self):
- '''
+ """
Needed for __unicode__
- '''
+ """
return self.event
def get_event_display2(self):
@@ -688,9 +671,7 @@ class BaseCommandEvent(CreatedModifiedModel):
class AdHocCommandEvent(BaseCommandEvent):
- VALID_KEYS = BaseCommandEvent.VALID_KEYS + [
- 'ad_hoc_command_id', 'event', 'host_name', 'host_id', 'workflow_job_id'
- ]
+ VALID_KEYS = BaseCommandEvent.VALID_KEYS + ['ad_hoc_command_id', 'event', 'host_name', 'host_id', 'workflow_job_id']
class Meta:
app_label = 'main'
@@ -718,7 +699,6 @@ class AdHocCommandEvent(BaseCommandEvent):
# ('runner_on_async_failed', _('Host Async Failure'), True),
# Tower does not yet support --diff mode.
# ('runner_on_file_diff', _('File Difference'), False),
-
# Additional event types for captured stdout not directly related to
# runner events.
('debug', _('Debug'), False),
@@ -775,10 +755,7 @@ class AdHocCommandEvent(BaseCommandEvent):
if isinstance(res, dict) and res.get('changed', False):
self.changed = True
- analytics_logger.info(
- 'Event data saved.',
- extra=dict(python_objects=dict(job_event=self))
- )
+ analytics_logger.info('Event data saved.', extra=dict(python_objects=dict(job_event=self)))
class InventoryUpdateEvent(BaseCommandEvent):
diff --git a/awx/main/models/execution_environments.py b/awx/main/models/execution_environments.py
index eabd0cce7c..4a95f783b5 100644
--- a/awx/main/models/execution_environments.py
+++ b/awx/main/models/execution_environments.py
@@ -15,7 +15,7 @@ class ExecutionEnvironment(CommonModel):
PULL_CHOICES = [
('always', _("Always pull container before running.")),
('missing', _("No pull option has been selected.")),
- ('never', _("Never pull container before running."))
+ ('never', _("Never pull container before running.")),
]
organization = models.ForeignKey(
diff --git a/awx/main/models/ha.py b/awx/main/models/ha.py
index eda3100b17..b8e5ab27a6 100644
--- a/awx/main/models/ha.py
+++ b/awx/main/models/ha.py
@@ -28,7 +28,6 @@ __all__ = ('Instance', 'InstanceGroup', 'TowerScheduleState')
class HasPolicyEditsMixin(HasEditsMixin):
-
class Meta:
abstract = True
@@ -50,6 +49,7 @@ class HasPolicyEditsMixin(HasEditsMixin):
class Instance(HasPolicyEditsMixin, BaseModel):
"""A model representing an AWX instance running against this database."""
+
objects = InstanceManager()
uuid = models.CharField(max_length=40)
@@ -72,18 +72,9 @@ class Instance(HasPolicyEditsMixin, BaseModel):
default=100,
editable=False,
)
- capacity_adjustment = models.DecimalField(
- default=Decimal(1.0),
- max_digits=3,
- decimal_places=2,
- validators=[MinValueValidator(0)]
- )
- enabled = models.BooleanField(
- default=True
- )
- managed_by_policy = models.BooleanField(
- default=True
- )
+ capacity_adjustment = models.DecimalField(default=Decimal(1.0), max_digits=3, decimal_places=2, validators=[MinValueValidator(0)])
+ enabled = models.BooleanField(default=True)
+ managed_by_policy = models.BooleanField(default=True)
cpu = models.IntegerField(
default=0,
editable=False,
@@ -112,8 +103,7 @@ class Instance(HasPolicyEditsMixin, BaseModel):
@property
def consumed_capacity(self):
- return sum(x.task_impact for x in UnifiedJob.objects.filter(execution_node=self.hostname,
- status__in=('running', 'waiting')))
+ return sum(x.task_impact for x in UnifiedJob.objects.filter(execution_node=self.hostname, status__in=('running', 'waiting')))
@property
def remaining_capacity(self):
@@ -126,7 +116,13 @@ class Instance(HasPolicyEditsMixin, BaseModel):
@property
def jobs_running(self):
- return UnifiedJob.objects.filter(execution_node=self.hostname, status__in=('running', 'waiting',)).count()
+ return UnifiedJob.objects.filter(
+ execution_node=self.hostname,
+ status__in=(
+ 'running',
+ 'waiting',
+ ),
+ ).count()
@property
def jobs_total(self):
@@ -150,8 +146,7 @@ class Instance(HasPolicyEditsMixin, BaseModel):
if settings.IS_K8S:
self.capacity = self.cpu = self.memory = self.cpu_capacity = self.mem_capacity = 0 # noqa
self.version = awx_application_version
- self.save(update_fields=['capacity', 'version', 'modified', 'cpu',
- 'memory', 'cpu_capacity', 'mem_capacity'])
+ self.save(update_fields=['capacity', 'version', 'modified', 'cpu', 'memory', 'cpu_capacity', 'mem_capacity'])
return
cpu = get_cpu_capacity()
@@ -173,12 +168,12 @@ class Instance(HasPolicyEditsMixin, BaseModel):
self.cpu_capacity = cpu[1]
self.mem_capacity = mem[1]
self.version = awx_application_version
- self.save(update_fields=['capacity', 'version', 'modified', 'cpu',
- 'memory', 'cpu_capacity', 'mem_capacity'])
+ self.save(update_fields=['capacity', 'version', 'modified', 'cpu', 'memory', 'cpu_capacity', 'mem_capacity'])
class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
"""A model representing a Queue/Group of AWX Instances."""
+
objects = InstanceGroupManager()
name = models.CharField(max_length=250, unique=True)
@@ -197,11 +192,9 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
editable=False,
default=None,
null=True,
- on_delete=models.CASCADE
- )
- is_container_group = models.BooleanField(
- default=False
+ on_delete=models.CASCADE,
)
+ is_container_group = models.BooleanField(default=False)
credential = models.ForeignKey(
'Credential',
related_name='%(class)ss',
@@ -210,27 +203,19 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
default=None,
on_delete=models.SET_NULL,
)
- pod_spec_override = prevent_search(models.TextField(
- blank=True,
- default='',
- ))
- policy_instance_percentage = models.IntegerField(
- default=0,
- help_text=_("Percentage of Instances to automatically assign to this group")
- )
- policy_instance_minimum = models.IntegerField(
- default=0,
- help_text=_("Static minimum number of Instances to automatically assign to this group")
+ pod_spec_override = prevent_search(
+ models.TextField(
+ blank=True,
+ default='',
+ )
)
+ policy_instance_percentage = models.IntegerField(default=0, help_text=_("Percentage of Instances to automatically assign to this group"))
+ policy_instance_minimum = models.IntegerField(default=0, help_text=_("Static minimum number of Instances to automatically assign to this group"))
policy_instance_list = JSONField(
- default=[],
- blank=True,
- help_text=_("List of exact-match Instances that will always be automatically assigned to this group")
+ default=[], blank=True, help_text=_("List of exact-match Instances that will always be automatically assigned to this group")
)
- POLICY_FIELDS = frozenset((
- 'policy_instance_list', 'policy_instance_minimum', 'policy_instance_percentage', 'controller'
- ))
+ POLICY_FIELDS = frozenset(('policy_instance_list', 'policy_instance_minimum', 'policy_instance_percentage', 'controller'))
def get_absolute_url(self, request=None):
return reverse('api:instance_group_detail', kwargs={'pk': self.pk}, request=request)
@@ -241,8 +226,7 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
@property
def jobs_running(self):
- return UnifiedJob.objects.filter(status__in=('running', 'waiting'),
- instance_group=self).count()
+ return UnifiedJob.objects.filter(status__in=('running', 'waiting'), instance_group=self).count()
@property
def jobs_total(self):
@@ -259,21 +243,20 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
'''
RelatedJobsMixin
'''
+
def _get_related_jobs(self):
return UnifiedJob.objects.filter(instance_group=self)
-
class Meta:
app_label = 'main'
-
@staticmethod
def fit_task_to_most_remaining_capacity_instance(task, instances):
instance_most_capacity = None
for i in instances:
- if i.remaining_capacity >= task.task_impact and \
- (instance_most_capacity is None or
- i.remaining_capacity > instance_most_capacity.remaining_capacity):
+ if i.remaining_capacity >= task.task_impact and (
+ instance_most_capacity is None or i.remaining_capacity > instance_most_capacity.remaining_capacity
+ ):
instance_most_capacity = i
return instance_most_capacity
@@ -289,10 +272,7 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
return largest_instance
def choose_online_controller_node(self):
- return random.choice(list(self.controller
- .instances
- .filter(capacity__gt=0, enabled=True)
- .values_list('hostname', flat=True)))
+ return random.choice(list(self.controller.instances.filter(capacity__gt=0, enabled=True).values_list('hostname', flat=True)))
def set_default_policy_fields(self):
self.policy_instance_list = []
@@ -306,6 +286,7 @@ class TowerScheduleState(SingletonModel):
def schedule_policy_task():
from awx.main.tasks import apply_cluster_membership_policies
+
connection.on_commit(lambda: apply_cluster_membership_policies.apply_async())
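
The lambda above is the usual on_commit pattern: the Celery dispatch is deferred until the surrounding transaction commits, so a worker can never pick up the task before the rows it needs exist. A minimal sketch with a hypothetical task:

    from celery import shared_task
    from django.db import connection, transaction

    @shared_task
    def recompute(pk):  # hypothetical task, not part of AWX
        ...

    def save_and_schedule(obj):
        with transaction.atomic():
            obj.save()
            # Queued here, but only sent to the broker once the commit succeeds.
            connection.on_commit(lambda: recompute.apply_async([obj.pk]))
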
@@ -337,14 +318,8 @@ def on_instance_deleted(sender, instance, using, **kwargs):
class UnifiedJobTemplateInstanceGroupMembership(models.Model):
- unifiedjobtemplate = models.ForeignKey(
- 'UnifiedJobTemplate',
- on_delete=models.CASCADE
- )
- instancegroup = models.ForeignKey(
- 'InstanceGroup',
- on_delete=models.CASCADE
- )
+ unifiedjobtemplate = models.ForeignKey('UnifiedJobTemplate', on_delete=models.CASCADE)
+ instancegroup = models.ForeignKey('InstanceGroup', on_delete=models.CASCADE)
position = models.PositiveIntegerField(
null=True,
default=None,
@@ -354,14 +329,8 @@ class UnifiedJobTemplateInstanceGroupMembership(models.Model):
class OrganizationInstanceGroupMembership(models.Model):
- organization = models.ForeignKey(
- 'Organization',
- on_delete=models.CASCADE
- )
- instancegroup = models.ForeignKey(
- 'InstanceGroup',
- on_delete=models.CASCADE
- )
+ organization = models.ForeignKey('Organization', on_delete=models.CASCADE)
+ instancegroup = models.ForeignKey('InstanceGroup', on_delete=models.CASCADE)
position = models.PositiveIntegerField(
null=True,
default=None,
@@ -371,14 +340,8 @@ class OrganizationInstanceGroupMembership(models.Model):
class InventoryInstanceGroupMembership(models.Model):
- inventory = models.ForeignKey(
- 'Inventory',
- on_delete=models.CASCADE
- )
- instancegroup = models.ForeignKey(
- 'InstanceGroup',
- on_delete=models.CASCADE
- )
+ inventory = models.ForeignKey('Inventory', on_delete=models.CASCADE)
+ instancegroup = models.ForeignKey('InstanceGroup', on_delete=models.CASCADE)
position = models.PositiveIntegerField(
null=True,
default=None,
diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py
index 9000fe41c4..cbec2963ca 100644
--- a/awx/main/models/inventory.py
+++ b/awx/main/models/inventory.py
@@ -34,13 +34,7 @@ from awx.main.fields import (
OrderedManyToManyField,
)
from awx.main.managers import HostManager
-from awx.main.models.base import (
- BaseModel,
- CommonModelNameNotUnique,
- VarsDictProperty,
- CLOUD_INVENTORY_SOURCES,
- prevent_search, accepts_json
-)
+from awx.main.models.base import BaseModel, CommonModelNameNotUnique, VarsDictProperty, CLOUD_INVENTORY_SOURCES, prevent_search, accepts_json
from awx.main.models.events import InventoryUpdateEvent
from awx.main.models.unified_jobs import UnifiedJob, UnifiedJobTemplate
from awx.main.models.mixins import (
@@ -58,16 +52,15 @@ from awx.main.utils import _inventory_updates
from awx.main.utils.safe_yaml import sanitize_jinja
-__all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate',
- 'CustomInventoryScript', 'SmartInventoryMembership']
+__all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate', 'CustomInventoryScript', 'SmartInventoryMembership']
logger = logging.getLogger('awx.main.models.inventory')
class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
- '''
+ """
an inventory source contains lists and hosts.
- '''
+ """
FIELDS_TO_PRESERVE_AT_COPY = ['hosts', 'groups', 'instance_groups']
KIND_CHOICES = [
@@ -88,40 +81,39 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
on_delete=models.SET_NULL,
null=True,
)
- variables = accepts_json(models.TextField(
- blank=True,
- default='',
- help_text=_('Inventory variables in JSON or YAML format.'),
- ))
+ variables = accepts_json(
+ models.TextField(
+ blank=True,
+ default='',
+ help_text=_('Inventory variables in JSON or YAML format.'),
+ )
+ )
has_active_failures = models.BooleanField(
default=False,
editable=False,
- help_text=_('This field is deprecated and will be removed in a future release. '
- 'Flag indicating whether any hosts in this inventory have failed.'),
+ help_text=_('This field is deprecated and will be removed in a future release. ' 'Flag indicating whether any hosts in this inventory have failed.'),
)
total_hosts = models.PositiveIntegerField(
default=0,
editable=False,
- help_text=_('This field is deprecated and will be removed in a future release. '
- 'Total number of hosts in this inventory.'),
+ help_text=_('This field is deprecated and will be removed in a future release. ' 'Total number of hosts in this inventory.'),
)
hosts_with_active_failures = models.PositiveIntegerField(
default=0,
editable=False,
- help_text=_('This field is deprecated and will be removed in a future release. '
- 'Number of hosts in this inventory with active failures.'),
+ help_text=_('This field is deprecated and will be removed in a future release. ' 'Number of hosts in this inventory with active failures.'),
)
total_groups = models.PositiveIntegerField(
default=0,
editable=False,
- help_text=_('This field is deprecated and will be removed in a future release. '
- 'Total number of groups in this inventory.'),
+ help_text=_('This field is deprecated and will be removed in a future release. ' 'Total number of groups in this inventory.'),
)
has_inventory_sources = models.BooleanField(
default=False,
editable=False,
- help_text=_('This field is deprecated and will be removed in a future release. '
- 'Flag indicating whether this inventory has any external inventory sources.'),
+ help_text=_(
+ 'This field is deprecated and will be removed in a future release. ' 'Flag indicating whether this inventory has any external inventory sources.'
+ ),
)
total_inventory_sources = models.PositiveIntegerField(
default=0,
@@ -163,12 +155,14 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
use_role = ImplicitRoleField(
parent_role='adhoc_role',
)
- read_role = ImplicitRoleField(parent_role=[
- 'organization.auditor_role',
- 'update_role',
- 'use_role',
- 'admin_role',
- ])
+ read_role = ImplicitRoleField(
+ parent_role=[
+ 'organization.auditor_role',
+ 'update_role',
+ 'use_role',
+ 'admin_role',
+ ]
+ )
insights_credential = models.ForeignKey(
'Credential',
related_name='insights_inventories',
@@ -184,16 +178,15 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
help_text=_('Flag indicating the inventory is being deleted.'),
)
-
def get_absolute_url(self, request=None):
return reverse('api:inventory_detail', kwargs={'pk': self.pk}, request=request)
variables_dict = VarsDictProperty('variables')
def get_group_hosts_map(self):
- '''
+ """
Return dictionary mapping group_id to set of child host_id's.
- '''
+ """
# FIXME: Cache this mapping?
group_hosts_kw = dict(group__inventory_id=self.pk, host__inventory_id=self.pk)
group_hosts_qs = Group.hosts.through.objects.filter(**group_hosts_kw)
@@ -205,9 +198,9 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
return group_hosts_map
def get_group_parents_map(self):
- '''
+ """
Return dictionary mapping group_id to set of parent group_id's.
- '''
+ """
# FIXME: Cache this mapping?
group_parents_kw = dict(from_group__inventory_id=self.pk, to_group__inventory_id=self.pk)
group_parents_qs = Group.parents.through.objects.filter(**group_parents_kw)
@@ -219,9 +212,9 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
return group_parents_map
def get_group_children_map(self):
- '''
+ """
Return dictionary mapping group_id to set of child group_id's.
- '''
+ """
# FIXME: Cache this mapping?
group_parents_kw = dict(from_group__inventory_id=self.pk, to_group__inventory_id=self.pk)
group_parents_qs = Group.parents.through.objects.filter(**group_parents_kw)
@@ -271,10 +264,9 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
grouped_hosts = set([])
# Build in-memory mapping of groups and their hosts.
- group_hosts_qs = Group.hosts.through.objects.filter(
- group__inventory_id=self.id,
- host__inventory_id=self.id
- ).values_list('group_id', 'host_id', 'host__name')
+ group_hosts_qs = Group.hosts.through.objects.filter(group__inventory_id=self.id, host__inventory_id=self.id).values_list(
+ 'group_id', 'host_id', 'host__name'
+ )
group_hosts_map = {}
for group_id, host_id, host_name in group_hosts_qs:
if host_name not in all_hostnames:
@@ -321,16 +313,15 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
for host in hosts:
data['_meta']['hostvars'][host.name] = host.variables_dict
if towervars:
- tower_dict = dict(remote_tower_enabled=str(host.enabled).lower(),
- remote_tower_id=host.id)
+ tower_dict = dict(remote_tower_enabled=str(host.enabled).lower(), remote_tower_id=host.id)
data['_meta']['hostvars'][host.name].update(tower_dict)
return data
def update_computed_fields(self):
- '''
+ """
Update model fields that are computed from database relationships.
- '''
+ """
logger.debug("Going to update inventory computed fields, pk={0}".format(self.pk))
start_time = time.time()
active_hosts = self.hosts
@@ -363,14 +354,12 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
computed_fields.pop(field)
if computed_fields:
iobj.save(update_fields=computed_fields.keys())
- logger.debug("Finished updating inventory computed fields, pk={0}, in "
- "{1:.3f} seconds".format(self.pk, time.time() - start_time))
+ logger.debug("Finished updating inventory computed fields, pk={0}, in " "{1:.3f} seconds".format(self.pk, time.time() - start_time))
def websocket_emit_status(self, status):
- connection.on_commit(lambda: emit_channel_notification(
- 'inventories-status_changed',
- {'group_name': 'inventories', 'inventory_id': self.id, 'status': status}
- ))
+ connection.on_commit(
+ lambda: emit_channel_notification('inventories-status_changed', {'group_name': 'inventories', 'inventory_id': self.id, 'status': status})
+ )
@property
def root_groups(self):
@@ -388,6 +377,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
def schedule_deletion(self, user_id=None):
from awx.main.tasks import delete_inventory
from awx.main.signals import activity_stream_delete
+
if self.pending_deletion is True:
raise RuntimeError("Inventory is already pending deletion.")
self.pending_deletion = True
@@ -399,16 +389,18 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
def _update_host_smart_inventory_memeberships(self):
if self.kind == 'smart' and settings.AWX_REBUILD_SMART_MEMBERSHIP:
+
def on_commit():
from awx.main.tasks import update_host_smart_inventory_memberships
+
update_host_smart_inventory_memberships.delay()
+
connection.on_commit(on_commit)
def save(self, *args, **kwargs):
self._update_host_smart_inventory_memeberships()
super(Inventory, self).save(*args, **kwargs)
- if (self.kind == 'smart' and 'host_filter' in kwargs.get('update_fields', ['host_filter']) and
- connection.vendor != 'sqlite'):
+ if self.kind == 'smart' and 'host_filter' in kwargs.get('update_fields', ['host_filter']) and connection.vendor != 'sqlite':
# Minimal update of host_count for smart inventory host filter changes
self.update_computed_fields()
@@ -419,18 +411,15 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
'''
RelatedJobsMixin
'''
+
def _get_related_jobs(self):
- return UnifiedJob.objects.non_polymorphic().filter(
- Q(job__inventory=self) |
- Q(inventoryupdate__inventory=self) |
- Q(adhoccommand__inventory=self)
- )
+ return UnifiedJob.objects.non_polymorphic().filter(Q(job__inventory=self) | Q(inventoryupdate__inventory=self) | Q(adhoccommand__inventory=self))
class SmartInventoryMembership(BaseModel):
- '''
+ """
A lookup table for Host membership in Smart Inventory
- '''
+ """
class Meta:
app_label = 'main'
@@ -441,17 +430,15 @@ class SmartInventoryMembership(BaseModel):
class Host(CommonModelNameNotUnique, RelatedJobsMixin):
- '''
+ """
A managed node
- '''
+ """
- FIELDS_TO_PRESERVE_AT_COPY = [
- 'name', 'description', 'groups', 'inventory', 'enabled', 'instance_id', 'variables'
- ]
+ FIELDS_TO_PRESERVE_AT_COPY = ['name', 'description', 'groups', 'inventory', 'enabled', 'instance_id', 'variables']
class Meta:
app_label = 'main'
- unique_together = (("name", "inventory"),) # FIXME: Add ('instance_id', 'inventory') after migration.
+ unique_together = (("name", "inventory"),) # FIXME: Add ('instance_id', 'inventory') after migration.
ordering = ('name',)
inventory = models.ForeignKey(
@@ -474,11 +461,13 @@ class Host(CommonModelNameNotUnique, RelatedJobsMixin):
default='',
help_text=_('The value used by the remote inventory source to uniquely identify the host'),
)
- variables = accepts_json(models.TextField(
- blank=True,
- default='',
- help_text=_('Host variables in JSON or YAML format.'),
- ))
+ variables = accepts_json(
+ models.TextField(
+ blank=True,
+ default='',
+ help_text=_('Host variables in JSON or YAML format.'),
+ )
+ )
last_job = models.ForeignKey(
'Job',
related_name='hosts_as_last_job+',
@@ -530,10 +519,10 @@ class Host(CommonModelNameNotUnique, RelatedJobsMixin):
@property
def all_groups(self):
- '''
+ """
Return all groups of which this host is a member, avoiding infinite
recursion in the case of cyclical group relations.
- '''
+ """
group_parents_map = self.inventory.get_group_parents_map()
group_pks = set(self.groups.values_list('pk', flat=True))
child_pks_to_check = set()
@@ -554,6 +543,7 @@ class Host(CommonModelNameNotUnique, RelatedJobsMixin):
'''
We don't use timestamp, but we may in the future.
'''
+
def update_ansible_facts(self, module, facts, timestamp=None):
if module == "ansible":
self.ansible_facts.update(facts)
@@ -562,10 +552,10 @@ class Host(CommonModelNameNotUnique, RelatedJobsMixin):
self.save()
def get_effective_host_name(self):
- '''
+ """
Return the name of the host that will be used in actual ansible
command run.
- '''
+ """
host_name = self.name
if 'ansible_ssh_host' in self.variables_dict:
host_name = self.variables_dict['ansible_ssh_host']
@@ -575,9 +565,12 @@ class Host(CommonModelNameNotUnique, RelatedJobsMixin):
def _update_host_smart_inventory_memeberships(self):
if settings.AWX_REBUILD_SMART_MEMBERSHIP:
+
def on_commit():
from awx.main.tasks import update_host_smart_inventory_memberships
+
update_host_smart_inventory_memberships.delay()
+
connection.on_commit(on_commit)
def clean_name(self):
@@ -598,19 +591,18 @@ class Host(CommonModelNameNotUnique, RelatedJobsMixin):
'''
RelatedJobsMixin
'''
+
def _get_related_jobs(self):
return self.inventory._get_related_jobs()
class Group(CommonModelNameNotUnique, RelatedJobsMixin):
- '''
+ """
A group containing managed hosts. A group or host may belong to multiple
groups.
- '''
+ """
- FIELDS_TO_PRESERVE_AT_COPY = [
- 'name', 'description', 'inventory', 'children', 'parents', 'hosts', 'variables'
- ]
+ FIELDS_TO_PRESERVE_AT_COPY = ['name', 'description', 'inventory', 'children', 'parents', 'hosts', 'variables']
class Meta:
app_label = 'main'
@@ -629,11 +621,13 @@ class Group(CommonModelNameNotUnique, RelatedJobsMixin):
related_name='children',
blank=True,
)
- variables = accepts_json(models.TextField(
- blank=True,
- default='',
- help_text=_('Group variables in JSON or YAML format.'),
- ))
+ variables = accepts_json(
+ models.TextField(
+ blank=True,
+ default='',
+ help_text=_('Group variables in JSON or YAML format.'),
+ )
+ )
hosts = models.ManyToManyField(
'Host',
related_name='groups',
@@ -656,7 +650,6 @@ class Group(CommonModelNameNotUnique, RelatedJobsMixin):
from awx.main.tasks import update_inventory_computed_fields
from awx.main.signals import disable_activity_stream, activity_stream_delete
-
def mark_actual():
all_group_hosts = Group.hosts.through.objects.select_related("host", "group").filter(group__inventory=self.inventory)
group_hosts = {'groups': {}, 'hosts': {}}
@@ -709,6 +702,7 @@ class Group(CommonModelNameNotUnique, RelatedJobsMixin):
Group.objects.filter(id__in=marked_groups).delete()
Host.objects.filter(id__in=marked_hosts).delete()
update_inventory_computed_fields.delay(self.inventory.id)
+
with ignore_inventory_computed_fields():
with disable_activity_stream():
mark_actual()
@@ -717,10 +711,10 @@ class Group(CommonModelNameNotUnique, RelatedJobsMixin):
variables_dict = VarsDictProperty('variables')
def get_all_parents(self, except_pks=None):
- '''
+ """
Return all parents of this group recursively. The group itself will
be excluded unless there is a cycle leading back to it.
- '''
+ """
group_parents_map = self.inventory.get_group_parents_map()
child_pks_to_check = set([self.pk])
child_pks_checked = set()
@@ -739,10 +733,10 @@ class Group(CommonModelNameNotUnique, RelatedJobsMixin):
return self.get_all_parents()
def get_all_children(self, except_pks=None):
- '''
+ """
Return all children of this group recursively. The group itself will
be excluded unless there is a cycle leading back to it.
- '''
+ """
group_children_map = self.inventory.get_group_children_map()
parent_pks_to_check = set([self.pk])
parent_pks_checked = set()
@@ -761,9 +755,9 @@ class Group(CommonModelNameNotUnique, RelatedJobsMixin):
return self.get_all_children()
def get_all_hosts(self, except_group_pks=None):
- '''
+ """
Return all hosts associated with this group or any of its children.
- '''
+ """
group_children_map = self.inventory.get_group_children_map()
group_hosts_map = self.inventory.get_group_hosts_map()
parent_pks_to_check = set([self.pk])
@@ -786,32 +780,33 @@ class Group(CommonModelNameNotUnique, RelatedJobsMixin):
@property
def job_host_summaries(self):
from awx.main.models.jobs import JobHostSummary
+
return JobHostSummary.objects.filter(host__in=self.all_hosts)
@property
def job_events(self):
from awx.main.models.jobs import JobEvent
+
return JobEvent.objects.filter(host__in=self.all_hosts)
@property
def ad_hoc_commands(self):
from awx.main.models.ad_hoc_commands import AdHocCommand
+
return AdHocCommand.objects.filter(hosts__in=self.all_hosts)
'''
RelatedJobsMixin
'''
+
def _get_related_jobs(self):
- return UnifiedJob.objects.non_polymorphic().filter(
- Q(job__inventory=self.inventory) |
- Q(inventoryupdate__inventory_source__groups=self)
- )
+ return UnifiedJob.objects.non_polymorphic().filter(Q(job__inventory=self.inventory) | Q(inventoryupdate__inventory_source__groups=self))
class InventorySourceOptions(BaseModel):
- '''
+ """
Common fields for InventorySource and InventoryUpdate.
- '''
+ """
injectors = dict()
@@ -865,30 +860,34 @@ class InventorySourceOptions(BaseModel):
enabled_var = models.TextField(
blank=True,
default='',
- help_text=_('Retrieve the enabled state from the given dict of host '
- 'variables. The enabled variable may be specified as "foo.bar", '
- 'in which case the lookup will traverse into nested dicts, '
- 'equivalent to: from_dict.get("foo", {}).get("bar", default)'),
+ help_text=_(
+ 'Retrieve the enabled state from the given dict of host '
+ 'variables. The enabled variable may be specified as "foo.bar", '
+ 'in which case the lookup will traverse into nested dicts, '
+ 'equivalent to: from_dict.get("foo", {}).get("bar", default)'
+ ),
)
enabled_value = models.TextField(
blank=True,
default='',
- help_text=_('Only used when enabled_var is set. Value when the host is '
- 'considered enabled. For example if enabled_var="status.power_state"'
- 'and enabled_value="powered_on" with host variables:'
- '{'
- ' "status": {'
- ' "power_state": "powered_on",'
- ' "created": "2020-08-04T18:13:04+00:00",'
- ' "healthy": true'
- ' },'
- ' "name": "foobar",'
- ' "ip_address": "192.168.2.1"'
- '}'
- 'The host would be marked enabled. If power_state where any '
- 'value other than powered_on then the host would be disabled '
- 'when imported into Tower. If the key is not found then the '
- 'host will be enabled'),
+ help_text=_(
+ 'Only used when enabled_var is set. Value when the host is '
+            'considered enabled. For example if enabled_var="status.power_state" '
+ 'and enabled_value="powered_on" with host variables:'
+ '{'
+ ' "status": {'
+ ' "power_state": "powered_on",'
+ ' "created": "2020-08-04T18:13:04+00:00",'
+ ' "healthy": true'
+ ' },'
+ ' "name": "foobar",'
+ ' "ip_address": "192.168.2.1"'
+ '}'
+            'The host would be marked enabled. If power_state were any '
+            'value other than powered_on then the host would be disabled '
+            'when imported into Tower. If the key is not found then the '
+            'host will be enabled.'
+ ),
)
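
A sketch of the dotted lookup this help text describes ('foo.bar' traverses nested dicts, equivalent to the chained .get() calls shown for enabled_var); the helper is illustrative, not the importer's actual implementation:

    def lookup_enabled(host_vars, enabled_var, default=None):
        value = host_vars
        for key in enabled_var.split('.'):
            if not isinstance(value, dict):
                return default
            value = value.get(key, default)
        return value

    host_vars = {'status': {'power_state': 'powered_on'}, 'name': 'foobar'}
    assert lookup_enabled(host_vars, 'status.power_state') == 'powered_on'
    assert lookup_enabled(host_vars, 'status.missing') is None  # absent key -> default
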
host_filter = models.TextField(
blank=True,
@@ -923,28 +922,20 @@ class InventorySourceOptions(BaseModel):
# the actual inventory source being used (Amazon requires Amazon
# credentials; Rackspace requires Rackspace credentials; etc...)
if source.replace('ec2', 'aws') != cred.kind:
- return _('Cloud-based inventory sources (such as %s) require '
- 'credentials for the matching cloud service.') % source
+ return _('Cloud-based inventory sources (such as %s) require ' 'credentials for the matching cloud service.') % source
# Allow an EC2 source to omit the credential. If Tower is running on
# an EC2 instance with an IAM Role assigned, boto will use credentials
# from the instance metadata instead of those explicitly provided.
elif source in CLOUD_PROVIDERS and source != 'ec2':
return _('Credential is required for a cloud source.')
elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
- return _(
- 'Credentials of type machine, source control, insights and vault are '
- 'disallowed for custom inventory sources.'
- )
+ return _('Credentials of type machine, source control, insights and vault are ' 'disallowed for custom inventory sources.')
elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
- return _(
- 'Credentials of type insights and vault are '
- 'disallowed for scm inventory sources.'
- )
+ return _('Credentials of type insights and vault are ' 'disallowed for scm inventory sources.')
return None
def get_cloud_credential(self):
- """Return the credential which is directly tied to the inventory source type.
- """
+ """Return the credential which is directly tied to the inventory source type."""
credential = None
for cred in self.credentials.all():
if self.source in CLOUD_PROVIDERS:
@@ -978,7 +969,6 @@ class InventorySourceOptions(BaseModel):
if cred is not None:
return cred.pk
-
source_vars_dict = VarsDictProperty('source_vars')
@@ -1005,7 +995,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, CustomVirtualE
on_delete=models.CASCADE,
blank=True,
default=None,
- null=True
+ null=True,
)
scm_last_revision = models.CharField(
max_length=1024,
@@ -1029,9 +1019,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, CustomVirtualE
@classmethod
def _get_unified_job_field_names(cls):
- return set(f.name for f in InventorySourceOptions._meta.fields) | set(
- ['name', 'description', 'organization', 'credentials', 'inventory']
- )
+ return set(f.name for f in InventorySourceOptions._meta.fields) | set(['name', 'description', 'organization', 'credentials', 'inventory'])
def save(self, *args, **kwargs):
# if this is a new object, inherit organization from its inventory
@@ -1059,7 +1047,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, CustomVirtualE
if 'name' not in update_fields:
update_fields.append('name')
# Reset revision if SCM source has changed parameters
- if self.source=='scm' and not is_new_instance:
+ if self.source == 'scm' and not is_new_instance:
before_is = self.__class__.objects.get(pk=self.pk)
if before_is.source_path != self.source_path or before_is.source_project_id != self.source_project_id:
# Reset the scm_revision if file changed to force update
@@ -1074,10 +1062,9 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, CustomVirtualE
if replace_text in self.name:
self.name = self.name.replace(replace_text, str(self.pk))
super(InventorySource, self).save(update_fields=['name'])
- if self.source=='scm' and is_new_instance and self.update_on_project_update:
+ if self.source == 'scm' and is_new_instance and self.update_on_project_update:
# Schedule a new Project update if one is not already queued
- if self.source_project and not self.source_project.project_updates.filter(
- status__in=['new', 'pending', 'waiting']).exists():
+ if self.source_project and not self.source_project.project_updates.filter(status__in=['new', 'pending', 'waiting']).exists():
self.update()
if not getattr(_inventory_updates, 'is_updating', False):
if self.inventory is not None:
@@ -1126,7 +1113,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, CustomVirtualE
name = '{} - {}'.format(self.inventory.name, self.name)
name_field = self._meta.get_field('name')
if len(name) > name_field.max_length:
- name = name[:name_field.max_length]
+ name = name[: name_field.max_length]
kwargs['_eager_fields']['name'] = name
return super(InventorySource, self).create_unified_job(**kwargs)
@@ -1150,39 +1137,41 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, CustomVirtualE
@property
def notification_templates(self):
base_notification_templates = NotificationTemplate.objects
- error_notification_templates = list(base_notification_templates
- .filter(unifiedjobtemplate_notification_templates_for_errors__in=[self]))
- started_notification_templates = list(base_notification_templates
- .filter(unifiedjobtemplate_notification_templates_for_started__in=[self]))
- success_notification_templates = list(base_notification_templates
- .filter(unifiedjobtemplate_notification_templates_for_success__in=[self]))
+ error_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_errors__in=[self]))
+ started_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_started__in=[self]))
+ success_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_success__in=[self]))
if self.inventory.organization is not None:
- error_notification_templates = set(error_notification_templates + list(base_notification_templates
- .filter(organization_notification_templates_for_errors=self.inventory.organization)))
- started_notification_templates = set(started_notification_templates + list(base_notification_templates
- .filter(organization_notification_templates_for_started=self.inventory.organization)))
- success_notification_templates = set(success_notification_templates + list(base_notification_templates
- .filter(organization_notification_templates_for_success=self.inventory.organization)))
- return dict(error=list(error_notification_templates),
- started=list(started_notification_templates),
- success=list(success_notification_templates))
+ error_notification_templates = set(
+ error_notification_templates
+ + list(base_notification_templates.filter(organization_notification_templates_for_errors=self.inventory.organization))
+ )
+ started_notification_templates = set(
+ started_notification_templates
+ + list(base_notification_templates.filter(organization_notification_templates_for_started=self.inventory.organization))
+ )
+ success_notification_templates = set(
+ success_notification_templates
+ + list(base_notification_templates.filter(organization_notification_templates_for_success=self.inventory.organization))
+ )
+ return dict(error=list(error_notification_templates), started=list(started_notification_templates), success=list(success_notification_templates))
def clean_update_on_project_update(self):
- if self.update_on_project_update is True and \
- self.source == 'scm' and \
- InventorySource.objects.filter(
- Q(inventory=self.inventory,
- update_on_project_update=True, source='scm') &
- ~Q(id=self.id)).exists():
+ if (
+ self.update_on_project_update is True
+ and self.source == 'scm'
+ and InventorySource.objects.filter(Q(inventory=self.inventory, update_on_project_update=True, source='scm') & ~Q(id=self.id)).exists()
+ ):
raise ValidationError(_("More than one SCM-based inventory source with update on project update per-inventory not allowed."))
return self.update_on_project_update
def clean_update_on_launch(self):
- if self.update_on_project_update is True and \
- self.source == 'scm' and \
- self.update_on_launch is True:
- raise ValidationError(_("Cannot update SCM-based inventory source on launch if set to update on project update. "
- "Instead, configure the corresponding source project to update on launch."))
+ if self.update_on_project_update is True and self.source == 'scm' and self.update_on_launch is True:
+ raise ValidationError(
+ _(
+ "Cannot update SCM-based inventory source on launch if set to update on project update. "
+ "Instead, configure the corresponding source project to update on launch."
+ )
+ )
return self.update_on_launch
def clean_source_path(self):
@@ -1193,14 +1182,15 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, CustomVirtualE
'''
RelatedJobsMixin
'''
+
def _get_related_jobs(self):
return InventoryUpdate.objects.filter(inventory_source=self)
class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin, TaskManagerInventoryUpdateMixin, CustomVirtualEnvMixin):
- '''
+ """
Internal job for tracking inventory updates from external sources.
- '''
+ """
class Meta:
app_label = 'main'
@@ -1234,7 +1224,7 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
on_delete=models.CASCADE,
blank=True,
default=None,
- null=True
+ null=True,
)
def _get_parent_field_name(self):
@@ -1243,6 +1233,7 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
@classmethod
def _get_task_class(cls):
from awx.main.tasks import RunInventoryUpdate
+
return RunInventoryUpdate
def _global_timeout_setting(self):
@@ -1267,9 +1258,7 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
'''Alias to source_path that combines with project path for SCM file-based sources'''
if self.inventory_source_id is None or self.inventory_source.source_project_id is None:
return self.source_path
- return os.path.join(
- self.inventory_source.source_project.get_project_path(check_if_exists=False),
- self.source_path)
+ return os.path.join(self.inventory_source.source_project.get_project_path(check_if_exists=False), self.source_path)
@property
def event_class(self):
@@ -1292,6 +1281,7 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
'''
JobNotificationMixin
'''
+
def get_notification_templates(self):
return self.inventory_source.notification_templates
@@ -1332,17 +1322,18 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
class CustomInventoryScript(CommonModelNameNotUnique, ResourceMixin):
-
class Meta:
app_label = 'main'
unique_together = [('name', 'organization')]
ordering = ('name',)
- script = prevent_search(models.TextField(
- blank=True,
- default='',
- help_text=_('Inventory script contents'),
- ))
+ script = prevent_search(
+ models.TextField(
+ blank=True,
+ default='',
+ help_text=_('Inventory script contents'),
+ )
+ )
organization = models.ForeignKey(
'Organization',
related_name='custom_inventory_scripts',
@@ -1388,16 +1379,11 @@ class PluginFileInjector(object):
return '{0}.yml'.format(self.plugin_name)
def inventory_contents(self, inventory_update, private_data_dir):
- """Returns a string that is the content for the inventory file for the inventory plugin
- """
- return yaml.safe_dump(
- self.inventory_as_dict(inventory_update, private_data_dir),
- default_flow_style=False,
- width=1000
- )
+ """Returns a string that is the content for the inventory file for the inventory plugin"""
+ return yaml.safe_dump(self.inventory_as_dict(inventory_update, private_data_dir), default_flow_style=False, width=1000)
def inventory_as_dict(self, inventory_update, private_data_dir):
- source_vars = dict(inventory_update.source_vars_dict) # make a copy
+ source_vars = dict(inventory_update.source_vars_dict) # make a copy
'''
None conveys that we should use the user-provided plugin.
Note that a plugin value of '' should still be overridden.
@@ -1414,8 +1400,7 @@ class PluginFileInjector(object):
return env
def _get_shared_env(self, inventory_update, private_data_dir, private_data_files):
- """By default, we will apply the standard managed_by_tower injectors
- """
+ """By default, we will apply the standard managed_by_tower injectors"""
injected_env = {}
credential = inventory_update.get_cloud_credential()
# some sources may have no credential, specifically ec2
@@ -1425,15 +1410,14 @@ class PluginFileInjector(object):
injected_env['INVENTORY_UPDATE_ID'] = str(inventory_update.pk) # so injector knows this is inventory
if self.base_injector == 'managed':
from awx.main.models.credential import injectors as builtin_injectors
+
cred_kind = inventory_update.source.replace('ec2', 'aws')
if cred_kind in dir(builtin_injectors):
getattr(builtin_injectors, cred_kind)(credential, injected_env, private_data_dir)
elif self.base_injector == 'template':
safe_env = injected_env.copy()
args = []
- credential.credential_type.inject_credential(
- credential, injected_env, safe_env, args, private_data_dir
- )
+ credential.credential_type.inject_credential(credential, injected_env, safe_env, args, private_data_dir)
# NOTE: safe_env is handled externally to injector class by build_safe_env static method
# that means that managed_by_tower injectors must only inject detectable env keys
# enforcement of this is accomplished by tests
@@ -1534,9 +1518,7 @@ class openstack(PluginFileInjector):
private_data = {'credentials': {}}
openstack_data = self._get_clouds_dict(inventory_update, credential, private_data_dir)
- private_data['credentials'][credential] = yaml.safe_dump(
- openstack_data, default_flow_style=False, allow_unicode=True
- )
+ private_data['credentials'][credential] = yaml.safe_dump(openstack_data, default_flow_style=False, allow_unicode=True)
return private_data
def get_plugin_env(self, inventory_update, private_data_dir, private_data_files):
@@ -1548,8 +1530,8 @@ class openstack(PluginFileInjector):
class rhv(PluginFileInjector):
- """ovirt uses the custom credential templating, and that is all
- """
+ """ovirt uses the custom credential templating, and that is all"""
+
plugin_name = 'ovirt'
base_injector = 'template'
initial_version = '2.9'
diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py
index 70cdfa363a..807c276930 100644
--- a/awx/main/models/jobs.py
+++ b/awx/main/models/jobs.py
@@ -15,7 +15,8 @@ from urllib.parse import urljoin
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
-#from django.core.cache import cache
+
+# from django.core.cache import cache
from django.utils.encoding import smart_str
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
@@ -27,15 +28,17 @@ from rest_framework.exceptions import ParseError
# AWX
from awx.api.versioning import reverse
from awx.main.models.base import (
- BaseModel, CreatedModifiedModel,
- prevent_search, accepts_json,
- JOB_TYPE_CHOICES, NEW_JOB_TYPE_CHOICES, VERBOSITY_CHOICES,
- VarsDictProperty
+ BaseModel,
+ CreatedModifiedModel,
+ prevent_search,
+ accepts_json,
+ JOB_TYPE_CHOICES,
+ NEW_JOB_TYPE_CHOICES,
+ VERBOSITY_CHOICES,
+ VarsDictProperty,
)
from awx.main.models.events import JobEvent, SystemJobEvent
-from awx.main.models.unified_jobs import (
- UnifiedJobTemplate, UnifiedJob
-)
+from awx.main.models.unified_jobs import UnifiedJobTemplate, UnifiedJob
from awx.main.models.notifications import (
NotificationTemplate,
JobNotificationMixin,
@@ -62,9 +65,9 @@ __all__ = ['JobTemplate', 'JobLaunchConfig', 'Job', 'JobHostSummary', 'SystemJob
class JobOptions(BaseModel):
- '''
+ """
Common options for job templates and jobs.
- '''
+ """
class Meta:
abstract = True
@@ -103,8 +106,7 @@ class JobOptions(BaseModel):
max_length=1024,
default='',
blank=True,
- help_text=_('Branch to use in job run. Project default used if blank. '
- 'Only allowed if project allow_override field is set to true.'),
+ help_text=_('Branch to use in job run. Project default used if blank. ' 'Only allowed if project allow_override field is set to true.'),
)
forks = models.PositiveIntegerField(
blank=True,
@@ -119,10 +121,14 @@ class JobOptions(BaseModel):
blank=True,
default=0,
)
- extra_vars = prevent_search(accepts_json(models.TextField(
- blank=True,
- default='',
- )))
+ extra_vars = prevent_search(
+ accepts_json(
+ models.TextField(
+ blank=True,
+ default='',
+ )
+ )
+ )
job_tags = models.CharField(
max_length=1024,
blank=True,
@@ -157,7 +163,8 @@ class JobOptions(BaseModel):
default=False,
help_text=_(
"If enabled, Tower will act as an Ansible Fact Cache Plugin; persisting "
- "facts at the end of a playbook run to the database and caching facts for use by Ansible."),
+ "facts at the end of a playbook run to the database and caching facts for use by Ansible."
+ ),
)
extra_vars_dict = VarsDictProperty('extra_vars', True)
@@ -191,13 +198,12 @@ class JobOptions(BaseModel):
class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin, WebhookTemplateMixin):
- '''
+ """
A job template is a reusable job definition for applying a project (with
playbook) to an inventory source with a given credential.
- '''
- FIELDS_TO_PRESERVE_AT_COPY = [
- 'labels', 'instance_groups', 'credentials', 'survey_spec'
- ]
+ """
+
+ FIELDS_TO_PRESERVE_AT_COPY = ['labels', 'instance_groups', 'credentials', 'survey_spec']
FIELDS_TO_DISCARD_AT_COPY = ['vault_credential', 'credential']
SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]
@@ -210,11 +216,13 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
choices=NEW_JOB_TYPE_CHOICES,
default='run',
)
- host_config_key = prevent_search(models.CharField(
- max_length=1024,
- blank=True,
- default='',
- ))
+ host_config_key = prevent_search(
+ models.CharField(
+ max_length=1024,
+ blank=True,
+ default='',
+ )
+ )
ask_diff_mode_on_launch = AskForField(
blank=True,
default=False,
@@ -223,11 +231,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
blank=True,
default=False,
)
- ask_tags_on_launch = AskForField(
- blank=True,
- default=False,
- allows_field='job_tags'
- )
+ ask_tags_on_launch = AskForField(blank=True, default=False, allows_field='job_tags')
ask_skip_tags_on_launch = AskForField(
blank=True,
default=False,
@@ -244,26 +248,15 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
blank=True,
default=False,
)
- ask_credential_on_launch = AskForField(
- blank=True,
- default=False,
- allows_field='credentials'
- )
- ask_scm_branch_on_launch = AskForField(
- blank=True,
- default=False,
- allows_field='scm_branch'
- )
+ ask_credential_on_launch = AskForField(blank=True, default=False, allows_field='credentials')
+ ask_scm_branch_on_launch = AskForField(blank=True, default=False, allows_field='scm_branch')
job_slice_count = models.PositiveIntegerField(
blank=True,
default=1,
- help_text=_("The number of jobs to slice into at runtime. "
- "Will cause the Job Template to launch a workflow if value is greater than 1."),
+ help_text=_("The number of jobs to slice into at runtime. " "Will cause the Job Template to launch a workflow if value is greater than 1."),
)
- admin_role = ImplicitRoleField(
- parent_role=['organization.job_template_admin_role']
- )
+ admin_role = ImplicitRoleField(parent_role=['organization.job_template_admin_role'])
execute_role = ImplicitRoleField(
parent_role=['admin_role', 'organization.execute_role'],
)
@@ -271,11 +264,11 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
parent_role=[
'organization.auditor_role',
'inventory.organization.auditor_role', # partial support for old inheritance via inventory
- 'execute_role', 'admin_role'
+ 'execute_role',
+ 'admin_role',
],
)
-
@classmethod
def _get_unified_job_class(cls):
return Job
@@ -283,20 +276,23 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
@classmethod
def _get_unified_job_field_names(cls):
return set(f.name for f in JobOptions._meta.fields) | set(
- ['name', 'description', 'organization', 'survey_passwords', 'labels', 'credentials',
- 'job_slice_number', 'job_slice_count', 'execution_environment']
+ ['name', 'description', 'organization', 'survey_passwords', 'labels', 'credentials', 'job_slice_number', 'job_slice_count', 'execution_environment']
)
@property
def validation_errors(self):
- '''
+ """
Fields needed to start, which cannot be given on launch, invalid state.
- '''
+ """
validation_errors = {}
if self.inventory is None and not self.ask_inventory_on_launch:
- validation_errors['inventory'] = [_("Job Template must provide 'inventory' or allow prompting for it."),]
+ validation_errors['inventory'] = [
+ _("Job Template must provide 'inventory' or allow prompting for it."),
+ ]
if self.project is None:
- validation_errors['project'] = [_("Job Templates must have a project assigned."),]
+ validation_errors['project'] = [
+ _("Job Templates must have a project assigned."),
+ ]
return validation_errors
@property
@@ -309,9 +305,9 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
return self.forks
def create_job(self, **kwargs):
- '''
+ """
Create a new job based on this template.
- '''
+ """
return self.create_unified_job(**kwargs)
def get_effective_slice_ct(self, kwargs):
@@ -349,12 +345,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
if self.pk:
qs = qs.exclude(pk=self.pk)
if qs.exists():
- errors.append(
- '%s with this (%s) combination already exists.' % (
- JobTemplate.__name__,
- ', '.join(set(ut) - {'polymorphic_ctype'})
- )
- )
+ errors.append('%s with this (%s) combination already exists.' % (JobTemplate.__name__, ', '.join(set(ut) - {'polymorphic_ctype'})))
if errors:
raise ValidationError(errors)
@@ -365,6 +356,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
if slice_event:
# A Slice Job Template will generate a WorkflowJob rather than a Job
from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobNode
+
kwargs['_unified_job_class'] = WorkflowJobTemplate._get_unified_job_class()
kwargs['_parent_field_name'] = "job_template"
kwargs.setdefault('_eager_fields', {})
@@ -379,9 +371,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
job = super(JobTemplate, self).create_unified_job(**kwargs)
if slice_event:
for idx in range(slice_ct):
- create_kwargs = dict(workflow_job=job,
- unified_job_template=self,
- ancestor_artifacts=dict(job_slice=idx + 1))
+ create_kwargs = dict(workflow_job=job, unified_job_template=self, ancestor_artifacts=dict(job_slice=idx + 1))
WorkflowJobNode.objects.create(**create_kwargs)
return job
@@ -389,10 +379,10 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
return reverse('api:job_template_detail', kwargs={'pk': self.pk}, request=request)
def can_start_without_user_input(self, callback_extra_vars=None):
- '''
+ """
Return whether job template can be used to start a new job without
requiring any user input.
- '''
+ """
variables_needed = False
if callback_extra_vars:
extra_vars_dict = parse_yaml_or_json(callback_extra_vars)
@@ -411,18 +401,15 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
if getattr(self, ask_field_name):
prompting_needed = True
break
- return (not prompting_needed and
- not self.passwords_needed_to_start and
- not variables_needed)
+ return not prompting_needed and not self.passwords_needed_to_start and not variables_needed
def _accept_or_ignore_job_kwargs(self, **kwargs):
exclude_errors = kwargs.pop('_exclude_errors', [])
prompted_data = {}
rejected_data = {}
accepted_vars, rejected_vars, errors_dict = self.accept_or_ignore_variables(
- kwargs.get('extra_vars', {}),
- _exclude_errors=exclude_errors,
- extra_passwords=kwargs.get('survey_passwords', {}))
+ kwargs.get('extra_vars', {}), _exclude_errors=exclude_errors, extra_passwords=kwargs.get('survey_passwords', {})
+ )
if accepted_vars:
prompted_data['extra_vars'] = accepted_vars
if rejected_vars:
@@ -472,11 +459,8 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
if 'prompts' not in exclude_errors:
errors_dict[field_name] = _('Field is not configured to prompt on launch.')
- if ('prompts' not in exclude_errors and
- (not getattr(self, 'ask_credential_on_launch', False)) and
- self.passwords_needed_to_start):
- errors_dict['passwords_needed_to_start'] = _(
- 'Saved launch configurations cannot provide passwords needed to start.')
+ if 'prompts' not in exclude_errors and (not getattr(self, 'ask_credential_on_launch', False)) and self.passwords_needed_to_start:
+ errors_dict['passwords_needed_to_start'] = _('Saved launch configurations cannot provide passwords needed to start.')
needed = self.resources_needed_to_start
if needed:
@@ -493,8 +477,10 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
@property
def cache_timeout_blocked(self):
if Job.objects.filter(job_template=self, status__in=['pending', 'waiting', 'running']).count() >= getattr(settings, 'SCHEDULE_MAX_JOBS', 10):
- logger.error("Job template %s could not be started because there are more than %s other jobs from that template waiting to run" %
- (self.name, getattr(settings, 'SCHEDULE_MAX_JOBS', 10)))
+ logger.error(
+ "Job template %s could not be started because there are more than %s other jobs from that template waiting to run"
+ % (self.name, getattr(settings, 'SCHEDULE_MAX_JOBS', 10))
+ )
return True
return False
@@ -507,37 +493,40 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
# TODO: Currently there is no org fk on project so this will need to be added once that is
# available after the rbac pr
base_notification_templates = NotificationTemplate.objects
- error_notification_templates = list(base_notification_templates.filter(
- unifiedjobtemplate_notification_templates_for_errors__in=[self, self.project]))
- started_notification_templates = list(base_notification_templates.filter(
- unifiedjobtemplate_notification_templates_for_started__in=[self, self.project]))
- success_notification_templates = list(base_notification_templates.filter(
- unifiedjobtemplate_notification_templates_for_success__in=[self, self.project]))
+ error_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_errors__in=[self, self.project]))
+ started_notification_templates = list(
+ base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_started__in=[self, self.project])
+ )
+ success_notification_templates = list(
+ base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_success__in=[self, self.project])
+ )
# Get Organization NotificationTemplates
if self.organization is not None:
- error_notification_templates = set(error_notification_templates + list(base_notification_templates.filter(
- organization_notification_templates_for_errors=self.organization)))
- started_notification_templates = set(started_notification_templates + list(base_notification_templates.filter(
- organization_notification_templates_for_started=self.organization)))
- success_notification_templates = set(success_notification_templates + list(base_notification_templates.filter(
- organization_notification_templates_for_success=self.organization)))
- return dict(error=list(error_notification_templates),
- started=list(started_notification_templates),
- success=list(success_notification_templates))
+ error_notification_templates = set(
+ error_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_errors=self.organization))
+ )
+ started_notification_templates = set(
+ started_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_started=self.organization))
+ )
+ success_notification_templates = set(
+ success_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_success=self.organization))
+ )
+ return dict(error=list(error_notification_templates), started=list(started_notification_templates), success=list(success_notification_templates))
'''
RelatedJobsMixin
'''
+
def _get_related_jobs(self):
return UnifiedJob.objects.filter(unified_job_template=self)
class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskManagerJobMixin, CustomVirtualEnvMixin, WebhookMixin):
- '''
+ """
A job applies a project (with playbook) to an inventory source with a given
credential. It represents a single invocation of ansible-playbook with the
given parameters.
- '''
+ """
class Meta:
app_label = 'main'
@@ -581,23 +570,21 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
job_slice_number = models.PositiveIntegerField(
blank=True,
default=0,
- help_text=_("If part of a sliced job, the ID of the inventory slice operated on. "
- "If not part of sliced job, parameter is not used."),
+ help_text=_("If part of a sliced job, the ID of the inventory slice operated on. " "If not part of sliced job, parameter is not used."),
)
job_slice_count = models.PositiveIntegerField(
blank=True,
default=1,
- help_text=_("If ran as part of sliced jobs, the total number of slices. "
- "If 1, job is not part of a sliced job."),
+ help_text=_("If ran as part of sliced jobs, the total number of slices. " "If 1, job is not part of a sliced job."),
)
-
def _get_parent_field_name(self):
return 'job_template'
@classmethod
def _get_task_class(cls):
from awx.main.tasks import RunJob
+
return RunJob
@classmethod
@@ -623,7 +610,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
for virtualenv in (
self.job_template.custom_virtualenv if self.job_template else None,
self.project.custom_virtualenv,
- self.organization.custom_virtualenv if self.organization else None
+ self.organization.custom_virtualenv if self.organization else None,
):
if virtualenv:
return virtualenv
@@ -651,10 +638,10 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
return Host.objects.filter(**kwargs)
def retry_qs(self, status):
- '''
+ """
Returns Host queryset that will be used to produce the `limit`
field in a retry on a subset of hosts
- '''
+ """
kwargs = {}
if status == 'all':
pass
@@ -668,9 +655,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
status_field = status
kwargs['job_host_summaries__{}__gt'.format(status_field)] = 0
else:
- raise ParseError(_(
- '{status_value} is not a valid status option.'
- ).format(status_value=status))
+ raise ParseError(_('{status_value} is not a valid status option.').format(status_value=status))
return self._get_hosts(**kwargs)
@property
@@ -736,31 +721,37 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
else:
summaries = self.job_host_summaries.all()
for h in self.job_host_summaries.all():
- all_hosts[h.host_name] = dict(failed=h.failed,
- changed=h.changed,
- dark=h.dark,
- failures=h.failures,
- ok=h.ok,
- processed=h.processed,
- skipped=h.skipped,
- rescued=h.rescued,
- ignored=h.ignored)
- data.update(dict(inventory=self.inventory.name if self.inventory else None,
- project=self.project.name if self.project else None,
- playbook=self.playbook,
- credential=getattr(self.machine_credential, 'name', None),
- limit=self.limit,
- extra_vars=self.display_extra_vars(),
- hosts=all_hosts))
+ all_hosts[h.host_name] = dict(
+ failed=h.failed,
+ changed=h.changed,
+ dark=h.dark,
+ failures=h.failures,
+ ok=h.ok,
+ processed=h.processed,
+ skipped=h.skipped,
+ rescued=h.rescued,
+ ignored=h.ignored,
+ )
+ data.update(
+ dict(
+ inventory=self.inventory.name if self.inventory else None,
+ project=self.project.name if self.project else None,
+ playbook=self.playbook,
+ credential=getattr(self.machine_credential, 'name', None),
+ limit=self.limit,
+ extra_vars=self.display_extra_vars(),
+ hosts=all_hosts,
+ )
+ )
return data
def _resources_sufficient_for_launch(self):
return not (self.inventory_id is None or self.project_id is None)
def display_artifacts(self):
- '''
+ """
Hides artifacts if they are marked as no_log type artifacts.
- '''
+ """
artifacts = self.artifacts
if artifacts.get('_ansible_no_log', False):
return "$hidden due to Ansible no_log flag$"
@@ -811,6 +802,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
'''
JobNotificationMixin
'''
+
def get_notification_templates(self):
if not self.job_template:
return NotificationTemplate.objects.none()
@@ -819,10 +811,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
def get_notification_friendly_name(self):
return "Job"
- def _get_inventory_hosts(
- self,
- only=['name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id']
- ):
+ def _get_inventory_hosts(self, only=['name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id']):
if not self.inventory:
return []
return self.inventory.hosts.only(*only)
@@ -876,44 +865,46 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
ansible_facts_system_id = ansible_facts.get('system_id', None) if isinstance(ansible_facts, dict) else None
if ansible_local_system_id:
print("Setting local {}".format(ansible_local_system_id))
- logger.debug("Insights system_id {} found for host <{}, {}> in"
- " ansible local facts".format(ansible_local_system_id,
- host.inventory.id,
- host.name))
+ logger.debug(
+ "Insights system_id {} found for host <{}, {}> in"
+ " ansible local facts".format(ansible_local_system_id, host.inventory.id, host.name)
+ )
host.insights_system_id = ansible_local_system_id
elif ansible_facts_system_id:
- logger.debug("Insights system_id {} found for host <{}, {}> in"
- " insights facts".format(ansible_local_system_id,
- host.inventory.id,
- host.name))
+ logger.debug(
+ "Insights system_id {} found for host <{}, {}> in"
+ " insights facts".format(ansible_local_system_id, host.inventory.id, host.name)
+ )
host.insights_system_id = ansible_facts_system_id
host.save()
system_tracking_logger.info(
- 'New fact for inventory {} host {}'.format(
- smart_str(host.inventory.name), smart_str(host.name)),
- extra=dict(inventory_id=host.inventory.id, host_name=host.name,
- ansible_facts=host.ansible_facts,
- ansible_facts_modified=host.ansible_facts_modified.isoformat(),
- job_id=self.id))
+ 'New fact for inventory {} host {}'.format(smart_str(host.inventory.name), smart_str(host.name)),
+ extra=dict(
+ inventory_id=host.inventory.id,
+ host_name=host.name,
+ ansible_facts=host.ansible_facts,
+ ansible_facts_modified=host.ansible_facts_modified.isoformat(),
+ job_id=self.id,
+ ),
+ )
else:
# if the file goes missing, ansible removed it (likely via clear_facts)
host.ansible_facts = {}
host.ansible_facts_modified = now()
- system_tracking_logger.info(
- 'Facts cleared for inventory {} host {}'.format(
- smart_str(host.inventory.name), smart_str(host.name)))
+ system_tracking_logger.info('Facts cleared for inventory {} host {}'.format(smart_str(host.inventory.name), smart_str(host.name)))
host.save()
class LaunchTimeConfigBase(BaseModel):
- '''
+ """
Needed as separate class from LaunchTimeConfig because some models
use `extra_data` and some use `extra_vars`. We cannot change the API,
so we force fake it in the model definitions
- model defines extra_vars - use this class
- model needs to use extra data - use LaunchTimeConfig
Use this for models which are SurveyMixins and UnifiedJobs or Templates
- '''
+ """
+
class Meta:
abstract = True
@@ -925,14 +916,11 @@ class LaunchTimeConfigBase(BaseModel):
null=True,
default=None,
on_delete=models.SET_NULL,
- help_text=_('Inventory applied as a prompt, assuming job template prompts for inventory')
+ help_text=_('Inventory applied as a prompt, assuming job template prompts for inventory'),
)
# All standard fields are stored in this dictionary field
# This is a solution to the nullable CharField problem, specific to prompting
- char_prompts = JSONField(
- blank=True,
- default=dict
- )
+ char_prompts = JSONField(blank=True, default=dict)
def prompts_dict(self, display=False):
data = {}
@@ -976,28 +964,25 @@ for field_name in JobTemplate.get_ask_mapping().keys():
class LaunchTimeConfig(LaunchTimeConfigBase):
- '''
+ """
Common model for all objects that save details of a saved launch config
WFJT / WJ nodes, schedules, and job launch configs (not all implemented yet)
- '''
+ """
+
class Meta:
abstract = True
# Special case prompting fields, even more special than the other ones
- extra_data = JSONField(
- blank=True,
- default=dict
+ extra_data = JSONField(blank=True, default=dict)
+ survey_passwords = prevent_search(
+ JSONField(
+ blank=True,
+ default=dict,
+ editable=False,
+ )
)
- survey_passwords = prevent_search(JSONField(
- blank=True,
- default=dict,
- editable=False,
- ))
# Credentials needed for non-unified job / unified JT models
- credentials = models.ManyToManyField(
- 'Credential',
- related_name='%(class)ss'
- )
+ credentials = models.ManyToManyField('Credential', related_name='%(class)ss')
@property
def extra_vars(self):
@@ -1008,9 +993,9 @@ class LaunchTimeConfig(LaunchTimeConfigBase):
self.extra_data = extra_vars
def display_extra_vars(self):
- '''
+ """
Hides fields marked as passwords in survey.
- '''
+ """
if hasattr(self, 'survey_passwords') and self.survey_passwords:
extra_vars = parse_yaml_or_json(self.extra_vars).copy()
for key, value in self.survey_passwords.items():
@@ -1025,11 +1010,12 @@ class LaunchTimeConfig(LaunchTimeConfigBase):
class JobLaunchConfig(LaunchTimeConfig):
- '''
+ """
Historical record of user launch-time overrides for a job
Not exposed in the API
Used for relaunch, scheduling, etc.
- '''
+ """
+
class Meta:
app_label = 'main'
@@ -1041,18 +1027,18 @@ class JobLaunchConfig(LaunchTimeConfig):
)
def has_user_prompts(self, template):
- '''
+ """
Returns True if any fields exist in the launch config that are
not permissions exclusions
(has to exist because of callback relaunch exception)
- '''
+ """
return self._has_user_prompts(template, only_unprompted=False)
def has_unprompted(self, template):
- '''
+ """
returns True if the template has set ask_ fields to False after
launching with those prompts
- '''
+ """
return self._has_user_prompts(template, only_unprompted=True)
def _has_user_prompts(self, template, only_unprompted=True):
@@ -1061,10 +1047,7 @@ class JobLaunchConfig(LaunchTimeConfig):
if template.survey_enabled and (not template.ask_variables_on_launch):
ask_mapping.pop('extra_vars')
provided_vars = set(prompts.get('extra_vars', {}).keys())
- survey_vars = set(
- element.get('variable') for element in
- template.survey_spec.get('spec', {}) if 'variable' in element
- )
+ survey_vars = set(element.get('variable') for element in template.survey_spec.get('spec', {}) if 'variable' in element)
if (provided_vars and not only_unprompted) or (provided_vars - survey_vars):
return True
for field_name, ask_field_name in ask_mapping.items():
@@ -1077,9 +1060,9 @@ class JobLaunchConfig(LaunchTimeConfig):
class JobHostSummary(CreatedModifiedModel):
- '''
+ """
Per-host statistics for each job.
- '''
+ """
class Meta:
app_label = 'main'
@@ -1093,12 +1076,7 @@ class JobHostSummary(CreatedModifiedModel):
on_delete=models.CASCADE,
editable=False,
)
- host = models.ForeignKey('Host',
- related_name='job_host_summaries',
- null=True,
- default=None,
- on_delete=models.SET_NULL,
- editable=False)
+ host = models.ForeignKey('Host', related_name='job_host_summaries', null=True, default=None, on_delete=models.SET_NULL, editable=False)
host_name = models.CharField(
max_length=1024,
@@ -1119,9 +1097,17 @@ class JobHostSummary(CreatedModifiedModel):
def __str__(self):
host = getattr_dne(self, 'host')
hostname = host.name if host else 'N/A'
- return '%s changed=%d dark=%d failures=%d ignored=%d ok=%d processed=%d rescued=%d skipped=%s' % \
- (hostname, self.changed, self.dark, self.failures, self.ignored, self.ok,
- self.processed, self.rescued, self.skipped)
+ return '%s changed=%d dark=%d failures=%d ignored=%d ok=%d processed=%d rescued=%d skipped=%s' % (
+ hostname,
+ self.changed,
+ self.dark,
+ self.failures,
+ self.ignored,
+ self.ok,
+ self.processed,
+ self.rescued,
+ self.skipped,
+ )
def get_absolute_url(self, request=None):
return reverse('api:job_host_summary_detail', kwargs={'pk': self.pk}, request=request)
@@ -1138,15 +1124,15 @@ class JobHostSummary(CreatedModifiedModel):
class SystemJobOptions(BaseModel):
- '''
+ """
Common fields for SystemJobTemplate and SystemJob.
- '''
+ """
SYSTEM_JOB_TYPE = [
('cleanup_jobs', _('Remove jobs older than a certain number of days')),
('cleanup_activitystream', _('Remove activity stream entries older than a certain number of days')),
('cleanup_sessions', _('Removes expired browser sessions from the database')),
- ('cleanup_tokens', _('Removes expired OAuth 2 access tokens and refresh tokens'))
+ ('cleanup_tokens', _('Removes expired OAuth 2 access tokens and refresh tokens')),
]
class Meta:
@@ -1161,7 +1147,6 @@ class SystemJobOptions(BaseModel):
class SystemJobTemplate(UnifiedJobTemplate, SystemJobOptions):
-
class Meta:
app_label = 'main'
@@ -1184,15 +1169,10 @@ class SystemJobTemplate(UnifiedJobTemplate, SystemJobOptions):
def notification_templates(self):
# TODO: Go through RBAC instead of calling all(). Need to account for orphaned NotificationTemplates
base_notification_templates = NotificationTemplate.objects.all()
- error_notification_templates = list(base_notification_templates
- .filter(unifiedjobtemplate_notification_templates_for_errors__in=[self]))
- started_notification_templates = list(base_notification_templates
- .filter(unifiedjobtemplate_notification_templates_for_started__in=[self]))
- success_notification_templates = list(base_notification_templates
- .filter(unifiedjobtemplate_notification_templates_for_success__in=[self]))
- return dict(error=list(error_notification_templates),
- started=list(started_notification_templates),
- success=list(success_notification_templates))
+ error_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_errors__in=[self]))
+ started_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_started__in=[self]))
+ success_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_success__in=[self]))
+ return dict(error=list(error_notification_templates), started=list(started_notification_templates), success=list(success_notification_templates))
def _accept_or_ignore_job_kwargs(self, _exclude_errors=None, **kwargs):
extra_data = kwargs.pop('extra_vars', {})
@@ -1205,19 +1185,18 @@ class SystemJobTemplate(UnifiedJobTemplate, SystemJobOptions):
return (prompted_data, rejected_data, errors)
def _accept_or_ignore_variables(self, data, errors, _exclude_errors=()):
- '''
+ """
Unlike other templates, like project updates and inventory sources,
system job templates can accept a limited number of fields
used as options for the management commands.
- '''
+ """
rejected = {}
allowed_vars = set(['days', 'older_than', 'granularity'])
given_vars = set(data.keys())
unallowed_vars = given_vars - (allowed_vars & given_vars)
errors_list = []
if unallowed_vars:
- errors_list.append(_('Variables {list_of_keys} are not allowed for system jobs.').format(
- list_of_keys=', '.join(unallowed_vars)))
+ errors_list.append(_('Variables {list_of_keys} are not allowed for system jobs.').format(list_of_keys=', '.join(unallowed_vars)))
for key in unallowed_vars:
rejected[key] = data.pop(key)
@@ -1241,7 +1220,6 @@ class SystemJobTemplate(UnifiedJobTemplate, SystemJobOptions):
class SystemJob(UnifiedJob, SystemJobOptions, JobNotificationMixin):
-
class Meta:
app_label = 'main'
ordering = ('id',)
@@ -1255,10 +1233,12 @@ class SystemJob(UnifiedJob, SystemJobOptions, JobNotificationMixin):
on_delete=models.SET_NULL,
)
- extra_vars = prevent_search(models.TextField(
- blank=True,
- default='',
- ))
+ extra_vars = prevent_search(
+ models.TextField(
+ blank=True,
+ default='',
+ )
+ )
extra_vars_dict = VarsDictProperty('extra_vars', True)
@@ -1269,6 +1249,7 @@ class SystemJob(UnifiedJob, SystemJobOptions, JobNotificationMixin):
@classmethod
def _get_task_class(cls):
from awx.main.tasks import RunSystemJob
+
return RunSystemJob
def websocket_emit_data(self):
@@ -1297,6 +1278,7 @@ class SystemJob(UnifiedJob, SystemJobOptions, JobNotificationMixin):
'''
JobNotificationMixin
'''
+
def get_notification_templates(self):
return self.system_job_template.notification_templates
diff --git a/awx/main/models/label.py b/awx/main/models/label.py
index deb83b1b54..2a3d26776d 100644
--- a/awx/main/models/label.py
+++ b/awx/main/models/label.py
@@ -10,13 +10,13 @@ from awx.api.versioning import reverse
from awx.main.models.base import CommonModelNameNotUnique
from awx.main.models.unified_jobs import UnifiedJobTemplate, UnifiedJob
-__all__ = ('Label', )
+__all__ = ('Label',)
class Label(CommonModelNameNotUnique):
- '''
+ """
Generic Tag. Designed for tagging Job Templates, but expandable to other models.
- '''
+ """
class Meta:
app_label = 'main'
@@ -35,19 +35,10 @@ class Label(CommonModelNameNotUnique):
@staticmethod
def get_orphaned_labels():
- return \
- Label.objects.filter(
- organization=None,
- unifiedjobtemplate_labels__isnull=True
- )
+ return Label.objects.filter(organization=None, unifiedjobtemplate_labels__isnull=True)
def is_detached(self):
- return bool(
- Label.objects.filter(
- id=self.id,
- unifiedjob_labels__isnull=True,
- unifiedjobtemplate_labels__isnull=True
- ).count())
+ return bool(Label.objects.filter(id=self.id, unifiedjob_labels__isnull=True, unifiedjobtemplate_labels__isnull=True).count())
def is_candidate_for_detach(self):
c1 = UnifiedJob.objects.filter(labels__in=[self.id]).count()
diff --git a/awx/main/models/mixins.py b/awx/main/models/mixins.py
index 9df38b3fa4..8055502096 100644
--- a/awx/main/models/mixins.py
+++ b/awx/main/models/mixins.py
@@ -9,7 +9,7 @@ import requests
# Django
from django.apps import apps
from django.conf import settings
-from django.contrib.auth.models import User # noqa
+from django.contrib.auth.models import User # noqa
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.db import models
@@ -19,9 +19,7 @@ from django.utils.translation import ugettext_lazy as _
# AWX
from awx.main.models.base import prevent_search
-from awx.main.models.rbac import (
- Role, RoleAncestorEntry, get_roles_on_resource
-)
+from awx.main.models.rbac import Role, RoleAncestorEntry, get_roles_on_resource
from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices, get_licenser, polymorphic
from awx.main.utils.encryption import decrypt_value, get_encryption_key, is_encrypted
from awx.main.utils.polymorphic import build_polymorphic_ctypes_map
@@ -32,19 +30,26 @@ from awx.main.constants import ACTIVE_STATES
logger = logging.getLogger('awx.main.models.mixins')
-__all__ = ['ResourceMixin', 'SurveyJobTemplateMixin', 'SurveyJobMixin',
- 'TaskManagerUnifiedJobMixin', 'TaskManagerJobMixin', 'TaskManagerProjectUpdateMixin',
- 'TaskManagerInventoryUpdateMixin', 'ExecutionEnvironmentMixin', 'CustomVirtualEnvMixin']
+__all__ = [
+ 'ResourceMixin',
+ 'SurveyJobTemplateMixin',
+ 'SurveyJobMixin',
+ 'TaskManagerUnifiedJobMixin',
+ 'TaskManagerJobMixin',
+ 'TaskManagerProjectUpdateMixin',
+ 'TaskManagerInventoryUpdateMixin',
+ 'ExecutionEnvironmentMixin',
+ 'CustomVirtualEnvMixin',
+]
class ResourceMixin(models.Model):
-
class Meta:
abstract = True
@classmethod
def accessible_objects(cls, accessor, role_field):
- '''
+ """
Use instead of `MyModel.objects` when you want to only consider
resources that a user has specific permissions for. For example:
MyModel.accessible_objects(user, 'read_role').filter(name__istartswith='bar');
@@ -52,7 +57,7 @@ class ResourceMixin(models.Model):
specific resource you want to check permissions on, it is more
performant to resolve the resource in question then call
`myresource.get_permissions(user)`.
- '''
+ """
return ResourceMixin._accessible_objects(cls, accessor, role_field)
@classmethod
@@ -67,32 +72,25 @@ class ResourceMixin(models.Model):
ancestor_roles = [accessor]
else:
accessor_type = ContentType.objects.get_for_model(accessor)
- ancestor_roles = Role.objects.filter(content_type__pk=accessor_type.id,
- object_id=accessor.id)
+ ancestor_roles = Role.objects.filter(content_type__pk=accessor_type.id, object_id=accessor.id)
if content_types is None:
- ct_kwarg = dict(content_type_id = ContentType.objects.get_for_model(cls).id)
+ ct_kwarg = dict(content_type_id=ContentType.objects.get_for_model(cls).id)
else:
- ct_kwarg = dict(content_type_id__in = content_types)
-
- return RoleAncestorEntry.objects.filter(
- ancestor__in = ancestor_roles,
- role_field = role_field,
- **ct_kwarg
- ).values_list('object_id').distinct()
+ ct_kwarg = dict(content_type_id__in=content_types)
+ return RoleAncestorEntry.objects.filter(ancestor__in=ancestor_roles, role_field=role_field, **ct_kwarg).values_list('object_id').distinct()
@staticmethod
def _accessible_objects(cls, accessor, role_field):
- return cls.objects.filter(pk__in = ResourceMixin._accessible_pk_qs(cls, accessor, role_field))
-
+ return cls.objects.filter(pk__in=ResourceMixin._accessible_pk_qs(cls, accessor, role_field))
def get_permissions(self, accessor):
- '''
+ """
Returns a string list of the roles an accessor has for a given resource.
An accessor can be either a User, Role, or an arbitrary resource that
contains one or more Roles associated with it.
- '''
+ """
return get_roles_on_resource(self, accessor)
@@ -104,15 +102,13 @@ class SurveyJobTemplateMixin(models.Model):
survey_enabled = models.BooleanField(
default=False,
)
- survey_spec = prevent_search(JSONField(
- blank=True,
- default=dict,
- ))
- ask_variables_on_launch = AskForField(
- blank=True,
- default=False,
- allows_field='extra_vars'
+ survey_spec = prevent_search(
+ JSONField(
+ blank=True,
+ default=dict,
+ )
)
+ ask_variables_on_launch = AskForField(blank=True, default=False, allows_field='extra_vars')
def survey_password_variables(self):
vars = []
@@ -133,7 +129,7 @@ class SurveyJobTemplateMixin(models.Model):
return vars
def _update_unified_job_kwargs(self, create_kwargs, kwargs):
- '''
+ """
Combine extra_vars with variable precedence order:
JT extra_vars -> JT survey defaults -> runtime extra_vars
@@ -143,7 +139,7 @@ class SurveyJobTemplateMixin(models.Model):
:type kwargs: dict
:return: modified create_kwargs.
:rtype: dict
- '''
+ """
# Job Template extra_vars
extra_vars = self.extra_vars_dict
@@ -170,11 +166,7 @@ class SurveyJobTemplateMixin(models.Model):
if default is not None:
decrypted_default = default
- if (
- survey_element['type'] == "password" and
- isinstance(decrypted_default, str) and
- decrypted_default.startswith('$encrypted$')
- ):
+ if survey_element['type'] == "password" and isinstance(decrypted_default, str) and decrypted_default.startswith('$encrypted$'):
decrypted_default = decrypt_value(get_encryption_key('value', pk=None), decrypted_default)
errors = self._survey_element_validation(survey_element, {variable_key: decrypted_default})
if not errors:
@@ -192,12 +184,9 @@ class SurveyJobTemplateMixin(models.Model):
# default (if any) will be validated against instead
errors = []
- if (survey_element['type'] == "password"):
+ if survey_element['type'] == "password":
password_value = data.get(survey_element['variable'])
- if (
- isinstance(password_value, str) and
- password_value == '$encrypted$'
- ):
+ if isinstance(password_value, str) and password_value == '$encrypted$':
if survey_element.get('default') is None and survey_element['required']:
if validate_required:
errors.append("'%s' value missing" % survey_element['variable'])
@@ -209,43 +198,60 @@ class SurveyJobTemplateMixin(models.Model):
elif survey_element['type'] in ["textarea", "text", "password"]:
if survey_element['variable'] in data:
if not isinstance(data[survey_element['variable']], str):
- errors.append("Value %s for '%s' expected to be a string." % (data[survey_element['variable']],
- survey_element['variable']))
+ errors.append("Value %s for '%s' expected to be a string." % (data[survey_element['variable']], survey_element['variable']))
return errors
if 'min' in survey_element and survey_element['min'] not in ["", None] and len(data[survey_element['variable']]) < int(survey_element['min']):
- errors.append("'%s' value %s is too small (length is %s must be at least %s)." %
- (survey_element['variable'], data[survey_element['variable']], len(data[survey_element['variable']]), survey_element['min']))
+ errors.append(
+ "'%s' value %s is too small (length is %s must be at least %s)."
+ % (survey_element['variable'], data[survey_element['variable']], len(data[survey_element['variable']]), survey_element['min'])
+ )
if 'max' in survey_element and survey_element['max'] not in ["", None] and len(data[survey_element['variable']]) > int(survey_element['max']):
- errors.append("'%s' value %s is too large (must be no more than %s)." %
- (survey_element['variable'], data[survey_element['variable']], survey_element['max']))
+ errors.append(
+ "'%s' value %s is too large (must be no more than %s)."
+ % (survey_element['variable'], data[survey_element['variable']], survey_element['max'])
+ )
elif survey_element['type'] == 'integer':
if survey_element['variable'] in data:
if type(data[survey_element['variable']]) != int:
- errors.append("Value %s for '%s' expected to be an integer." % (data[survey_element['variable']],
- survey_element['variable']))
+ errors.append("Value %s for '%s' expected to be an integer." % (data[survey_element['variable']], survey_element['variable']))
return errors
- if 'min' in survey_element and survey_element['min'] not in ["", None] and survey_element['variable'] in data and \
- data[survey_element['variable']] < int(survey_element['min']):
- errors.append("'%s' value %s is too small (must be at least %s)." %
- (survey_element['variable'], data[survey_element['variable']], survey_element['min']))
- if 'max' in survey_element and survey_element['max'] not in ["", None] and survey_element['variable'] in data and \
- data[survey_element['variable']] > int(survey_element['max']):
- errors.append("'%s' value %s is too large (must be no more than %s)." %
- (survey_element['variable'], data[survey_element['variable']], survey_element['max']))
+ if (
+ 'min' in survey_element
+ and survey_element['min'] not in ["", None]
+ and survey_element['variable'] in data
+ and data[survey_element['variable']] < int(survey_element['min'])
+ ):
+ errors.append(
+ "'%s' value %s is too small (must be at least %s)."
+ % (survey_element['variable'], data[survey_element['variable']], survey_element['min'])
+ )
+ if (
+ 'max' in survey_element
+ and survey_element['max'] not in ["", None]
+ and survey_element['variable'] in data
+ and data[survey_element['variable']] > int(survey_element['max'])
+ ):
+ errors.append(
+ "'%s' value %s is too large (must be no more than %s)."
+ % (survey_element['variable'], data[survey_element['variable']], survey_element['max'])
+ )
elif survey_element['type'] == 'float':
if survey_element['variable'] in data:
if type(data[survey_element['variable']]) not in (float, int):
- errors.append("Value %s for '%s' expected to be a numeric type." % (data[survey_element['variable']],
- survey_element['variable']))
+ errors.append("Value %s for '%s' expected to be a numeric type." % (data[survey_element['variable']], survey_element['variable']))
return errors
if 'min' in survey_element and survey_element['min'] not in ["", None] and data[survey_element['variable']] < float(survey_element['min']):
- errors.append("'%s' value %s is too small (must be at least %s)." %
- (survey_element['variable'], data[survey_element['variable']], survey_element['min']))
+ errors.append(
+ "'%s' value %s is too small (must be at least %s)."
+ % (survey_element['variable'], data[survey_element['variable']], survey_element['min'])
+ )
if 'max' in survey_element and survey_element['max'] not in ["", None] and data[survey_element['variable']] > float(survey_element['max']):
- errors.append("'%s' value %s is too large (must be no more than %s)." %
- (survey_element['variable'], data[survey_element['variable']], survey_element['max']))
+ errors.append(
+ "'%s' value %s is too large (must be no more than %s)."
+ % (survey_element['variable'], data[survey_element['variable']], survey_element['max'])
+ )
elif survey_element['type'] == 'multiselect':
if survey_element['variable'] in data:
if type(data[survey_element['variable']]) != list:
@@ -256,21 +262,18 @@ class SurveyJobTemplateMixin(models.Model):
choice_list = [choice for choice in choice_list.splitlines() if choice.strip() != '']
for val in data[survey_element['variable']]:
if val not in choice_list:
- errors.append("Value %s for '%s' expected to be one of %s." % (val, survey_element['variable'],
- choice_list))
+ errors.append("Value %s for '%s' expected to be one of %s." % (val, survey_element['variable'], choice_list))
elif survey_element['type'] == 'multiplechoice':
choice_list = copy(survey_element['choices'])
if isinstance(choice_list, str):
choice_list = [choice for choice in choice_list.splitlines() if choice.strip() != '']
if survey_element['variable'] in data:
if data[survey_element['variable']] not in choice_list:
- errors.append("Value %s for '%s' expected to be one of %s." % (data[survey_element['variable']],
- survey_element['variable'],
- choice_list))
+ errors.append("Value %s for '%s' expected to be one of %s." % (data[survey_element['variable']], survey_element['variable'], choice_list))
return errors
def _accept_or_ignore_variables(self, data, errors=None, _exclude_errors=(), extra_passwords=None):
- survey_is_enabled = (self.survey_enabled and self.survey_spec)
+ survey_is_enabled = self.survey_enabled and self.survey_spec
extra_vars = data.copy()
if errors is None:
errors = {}
@@ -285,12 +288,11 @@ class SurveyJobTemplateMixin(models.Model):
value = data.get(key, None)
validate_required = 'required' not in _exclude_errors
if extra_passwords and key in extra_passwords and is_encrypted(value):
- element_errors = self._survey_element_validation(survey_element, {
- key: decrypt_value(get_encryption_key('value', pk=None), value)
- }, validate_required=validate_required)
- else:
element_errors = self._survey_element_validation(
- survey_element, data, validate_required=validate_required)
+ survey_element, {key: decrypt_value(get_encryption_key('value', pk=None), value)}, validate_required=validate_required
+ )
+ else:
+ element_errors = self._survey_element_validation(survey_element, data, validate_required=validate_required)
if element_errors:
survey_errors += element_errors
@@ -309,7 +311,7 @@ class SurveyJobTemplateMixin(models.Model):
if extra_vars:
# Prune the prompted variables for those identical to template
tmp_extra_vars = self.extra_vars_dict
- for key in (set(tmp_extra_vars.keys()) & set(extra_vars.keys())):
+ for key in set(tmp_extra_vars.keys()) & set(extra_vars.keys()):
if tmp_extra_vars[key] == extra_vars[key]:
extra_vars.pop(key)
@@ -318,18 +320,20 @@ class SurveyJobTemplateMixin(models.Model):
rejected.update(extra_vars)
# ignored variables does not block manual launch
if 'prompts' not in _exclude_errors:
- errors['extra_vars'] = [_('Variables {list_of_keys} are not allowed on launch. Check the Prompt on Launch setting '+
- 'on the {model_name} to include Extra Variables.').format(
- list_of_keys=', '.join([str(key) for key in extra_vars.keys()]),
- model_name=self._meta.verbose_name.title())]
+ errors['extra_vars'] = [
+ _(
+ 'Variables {list_of_keys} are not allowed on launch. Check the Prompt on Launch setting '
+ + 'on the {model_name} to include Extra Variables.'
+ ).format(list_of_keys=', '.join([str(key) for key in extra_vars.keys()]), model_name=self._meta.verbose_name.title())
+ ]
return (accepted, rejected, errors)
@staticmethod
def pivot_spec(spec):
- '''
+ """
Utility method that will return a dictionary keyed off variable names
- '''
+ """
pivoted = {}
for element_data in spec.get('spec', []):
if 'variable' in element_data:
@@ -349,9 +353,9 @@ class SurveyJobTemplateMixin(models.Model):
return errors
def display_survey_spec(self):
- '''
+ """
Hide encrypted default passwords in survey specs
- '''
+ """
survey_spec = deepcopy(self.survey_spec) if self.survey_spec else {}
for field in survey_spec.get('spec', []):
if field.get('type') == 'password':
@@ -364,16 +368,18 @@ class SurveyJobMixin(models.Model):
class Meta:
abstract = True
- survey_passwords = prevent_search(JSONField(
- blank=True,
- default=dict,
- editable=False,
- ))
+ survey_passwords = prevent_search(
+ JSONField(
+ blank=True,
+ default=dict,
+ editable=False,
+ )
+ )
def display_extra_vars(self):
- '''
+ """
Hides fields marked as passwords in survey.
- '''
+ """
if self.survey_passwords:
extra_vars = json.loads(self.extra_vars)
for key, value in self.survey_passwords.items():
@@ -384,9 +390,9 @@ class SurveyJobMixin(models.Model):
return self.extra_vars
def decrypted_extra_vars(self):
- '''
+ """
Decrypts fields marked as passwords in survey.
- '''
+ """
if self.survey_passwords:
extra_vars = json.loads(self.extra_vars)
for key in self.survey_passwords:
@@ -484,19 +490,13 @@ class CustomVirtualEnvMixin(models.Model):
abstract = True
custom_virtualenv = models.CharField(
- blank=True,
- null=True,
- default=None,
- max_length=100,
- help_text=_('Local absolute file path containing a custom Python virtualenv to use')
+ blank=True, null=True, default=None, max_length=100, help_text=_('Local absolute file path containing a custom Python virtualenv to use')
)
def clean_custom_virtualenv(self):
value = self.custom_virtualenv
if value and os.path.join(value, '') not in get_custom_venv_choices():
- raise ValidationError(
- _('{} is not a valid virtualenv in {}').format(value, settings.BASE_VENV_PATH)
- )
+ raise ValidationError(_('{} is not a valid virtualenv in {}').format(value, settings.BASE_VENV_PATH))
if value:
return os.path.join(value, '')
return None
@@ -504,12 +504,13 @@ class CustomVirtualEnvMixin(models.Model):
class RelatedJobsMixin(object):
- '''
+ """
This method is intended to be overridden.
Called by get_active_jobs()
Returns a list of active jobs (i.e. running) associated with the calling
resource (self). Expected to return a QuerySet
- '''
+ """
+
def _get_related_jobs(self):
return self.objects.none()
@@ -519,6 +520,7 @@ class RelatedJobsMixin(object):
'''
Returns [{'id': 1, 'type': 'job'}, {'id': 2, 'type': 'project_update'}, ...]
'''
+
def get_active_jobs(self):
UnifiedJob = apps.get_model('main', 'UnifiedJob')
mapping = build_polymorphic_ctypes_map(UnifiedJob)
@@ -538,24 +540,15 @@ class WebhookTemplateMixin(models.Model):
('gitlab', "GitLab"),
]
- webhook_service = models.CharField(
- max_length=16,
- choices=SERVICES,
- blank=True,
- help_text=_('Service that webhook requests will be accepted from')
- )
- webhook_key = prevent_search(models.CharField(
- max_length=64,
- blank=True,
- help_text=_('Shared secret that the webhook service will use to sign requests')
- ))
+ webhook_service = models.CharField(max_length=16, choices=SERVICES, blank=True, help_text=_('Service that webhook requests will be accepted from'))
+ webhook_key = prevent_search(models.CharField(max_length=64, blank=True, help_text=_('Shared secret that the webhook service will use to sign requests')))
webhook_credential = models.ForeignKey(
'Credential',
blank=True,
null=True,
on_delete=models.SET_NULL,
related_name='%(class)ss',
- help_text=_('Personal Access Token for posting back the status to the service API')
+ help_text=_('Personal Access Token for posting back the status to the service API'),
)
def rotate_webhook_key(self):
@@ -582,25 +575,16 @@ class WebhookMixin(models.Model):
SERVICES = WebhookTemplateMixin.SERVICES
- webhook_service = models.CharField(
- max_length=16,
- choices=SERVICES,
- blank=True,
- help_text=_('Service that webhook requests will be accepted from')
- )
+ webhook_service = models.CharField(max_length=16, choices=SERVICES, blank=True, help_text=_('Service that webhook requests will be accepted from'))
webhook_credential = models.ForeignKey(
'Credential',
blank=True,
null=True,
on_delete=models.SET_NULL,
related_name='%(class)ss',
- help_text=_('Personal Access Token for posting back the status to the service API')
- )
- webhook_guid = models.CharField(
- blank=True,
- max_length=128,
- help_text=_('Unique identifier of the event that triggered this webhook')
+ help_text=_('Personal Access Token for posting back the status to the service API'),
)
+ webhook_guid = models.CharField(blank=True, max_length=128, help_text=_('Unique identifier of the event that triggered this webhook'))
def update_webhook_status(self, status):
if not self.webhook_credential:
@@ -645,10 +629,7 @@ class WebhookMixin(models.Model):
'target_url': self.get_ui_url(),
}
k, v = service_header[self.webhook_service]
- headers = {
- k: v.format(self.webhook_credential.get_input('token')),
- 'Content-Type': 'application/json'
- }
+ headers = {k: v.format(self.webhook_credential.get_input('token')), 'Content-Type': 'application/json'}
response = requests.post(status_api, data=json.dumps(data), headers=headers, timeout=30)
except Exception:
logger.exception("Posting webhook status caused an error.")
@@ -657,8 +638,4 @@ class WebhookMixin(models.Model):
if response.status_code < 400:
logger.debug("Webhook status update sent.")
else:
- logger.error(
- "Posting webhook status failed, code: {}\n"
- "{}\n"
- "Payload sent: {}".format(response.status_code, response.text, json.dumps(data))
- )
+ logger.error("Posting webhook status failed, code: {}\n" "{}\n" "Payload sent: {}".format(response.status_code, response.text, json.dumps(data)))
diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py
index 53860563e0..afb55f7682 100644
--- a/awx/main/models/notifications.py
+++ b/awx/main/models/notifications.py
@@ -38,15 +38,17 @@ __all__ = ['NotificationTemplate', 'Notification']
class NotificationTemplate(CommonModelNameNotUnique):
- NOTIFICATION_TYPES = [('email', _('Email'), CustomEmailBackend),
- ('slack', _('Slack'), SlackBackend),
- ('twilio', _('Twilio'), TwilioBackend),
- ('pagerduty', _('Pagerduty'), PagerDutyBackend),
- ('grafana', _('Grafana'), GrafanaBackend),
- ('webhook', _('Webhook'), WebhookBackend),
- ('mattermost', _('Mattermost'), MattermostBackend),
- ('rocketchat', _('Rocket.Chat'), RocketChatBackend),
- ('irc', _('IRC'), IrcBackend)]
+ NOTIFICATION_TYPES = [
+ ('email', _('Email'), CustomEmailBackend),
+ ('slack', _('Slack'), SlackBackend),
+ ('twilio', _('Twilio'), TwilioBackend),
+ ('pagerduty', _('Pagerduty'), PagerDutyBackend),
+ ('grafana', _('Grafana'), GrafanaBackend),
+ ('webhook', _('Webhook'), WebhookBackend),
+ ('mattermost', _('Mattermost'), MattermostBackend),
+ ('rocketchat', _('Rocket.Chat'), RocketChatBackend),
+ ('irc', _('IRC'), IrcBackend),
+ ]
NOTIFICATION_TYPE_CHOICES = sorted([(x[0], x[1]) for x in NOTIFICATION_TYPES])
CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NOTIFICATION_TYPES])
@@ -64,7 +66,7 @@ class NotificationTemplate(CommonModelNameNotUnique):
)
notification_type = models.CharField(
- max_length = 32,
+ max_length=32,
choices=NOTIFICATION_TYPE_CHOICES,
)
@@ -73,11 +75,7 @@ class NotificationTemplate(CommonModelNameNotUnique):
def default_messages():
return {'started': None, 'success': None, 'error': None, 'workflow_approval': None}
- messages = JSONField(
- null=True,
- blank=True,
- default=default_messages,
- help_text=_('Optional custom messages for notification template.'))
+ messages = JSONField(null=True, blank=True, default=default_messages, help_text=_('Optional custom messages for notification template.'))
def has_message(self, condition):
potential_template = self.messages.get(condition, {})
@@ -114,6 +112,7 @@ class NotificationTemplate(CommonModelNameNotUnique):
for msg_type in ['message', 'body']:
if msg_type not in local_new_event_msgs and local_old_event_msgs.get(msg_type, None):
local_new_event_msgs[msg_type] = local_old_event_msgs[msg_type]
+
if old_messages is not None and new_messages is not None:
for event in ('started', 'success', 'error', 'workflow_approval'):
if not new_messages.get(event, {}) and old_messages.get(event, {}):
@@ -134,9 +133,7 @@ class NotificationTemplate(CommonModelNameNotUnique):
merge_messages(old_messages, new_messages, event)
new_messages.setdefault(event, None)
-
- for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
- self.notification_class.init_parameters):
+ for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password", self.notification_class.init_parameters):
if self.notification_configuration[field].startswith("$encrypted$"):
continue
if new_instance:
@@ -151,8 +148,7 @@ class NotificationTemplate(CommonModelNameNotUnique):
super(NotificationTemplate, self).save(*args, **kwargs)
if new_instance:
update_fields = []
- for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
- self.notification_class.init_parameters):
+ for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password", self.notification_class.init_parameters):
saved_value = getattr(self, '_saved_{}_{}'.format("config", field), '')
self.notification_configuration[field] = saved_value
if 'notification_configuration' not in update_fields:
@@ -164,21 +160,16 @@ class NotificationTemplate(CommonModelNameNotUnique):
return self.notification_configuration[self.notification_class.recipient_parameter]
def generate_notification(self, msg, body):
- notification = Notification(notification_template=self,
- notification_type=self.notification_type,
- recipients=smart_str(self.recipients),
- subject=msg,
- body=body)
+ notification = Notification(
+ notification_template=self, notification_type=self.notification_type, recipients=smart_str(self.recipients), subject=msg, body=body
+ )
notification.save()
return notification
def send(self, subject, body):
- for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
- self.notification_class.init_parameters):
+ for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password", self.notification_class.init_parameters):
if field in self.notification_configuration:
- self.notification_configuration[field] = decrypt_field(self,
- 'notification_configuration',
- subfield=field)
+ self.notification_configuration[field] = decrypt_field(self, 'notification_configuration', subfield=field)
recipients = self.notification_configuration.pop(self.notification_class.recipient_parameter)
if not isinstance(recipients, list):
recipients = [recipients]
@@ -202,9 +193,9 @@ class NotificationTemplate(CommonModelNameNotUnique):
class Notification(CreatedModifiedModel):
- '''
+ """
A notification event emitted when a NotificationTemplate is run
- '''
+ """
NOTIFICATION_STATE_CHOICES = [
('pending', _('Pending')),
@@ -216,12 +207,7 @@ class Notification(CreatedModifiedModel):
app_label = 'main'
ordering = ('pk',)
- notification_template = models.ForeignKey(
- 'NotificationTemplate',
- related_name='notifications',
- on_delete=models.CASCADE,
- editable=False
- )
+ notification_template = models.ForeignKey('NotificationTemplate', related_name='notifications', on_delete=models.CASCADE, editable=False)
status = models.CharField(
max_length=20,
choices=NOTIFICATION_STATE_CHOICES,
@@ -238,7 +224,7 @@ class Notification(CreatedModifiedModel):
editable=False,
)
notification_type = models.CharField(
- max_length = 32,
+ max_length=32,
choices=NotificationTemplate.NOTIFICATION_TYPE_CHOICES,
)
recipients = models.TextField(
@@ -258,112 +244,160 @@ class Notification(CreatedModifiedModel):
class JobNotificationMixin(object):
- STATUS_TO_TEMPLATE_TYPE = {'succeeded': 'success',
- 'running': 'started',
- 'failed': 'error'}
+ STATUS_TO_TEMPLATE_TYPE = {'succeeded': 'success', 'running': 'started', 'failed': 'error'}
# Tree of fields that can be safely referenced in a notification message
- JOB_FIELDS_ALLOWED_LIST = ['id', 'type', 'url', 'created', 'modified', 'name', 'description', 'job_type', 'playbook',
- 'forks', 'limit', 'verbosity', 'job_tags', 'force_handlers', 'skip_tags', 'start_at_task',
- 'timeout', 'use_fact_cache', 'launch_type', 'status', 'failed', 'started', 'finished',
- 'elapsed', 'job_explanation', 'execution_node', 'controller_node', 'allow_simultaneous',
- 'scm_revision', 'diff_mode', 'job_slice_number', 'job_slice_count', 'custom_virtualenv',
- 'approval_status', 'approval_node_name', 'workflow_url', 'scm_branch', 'artifacts',
- {'host_status_counts': ['skipped', 'ok', 'changed', 'failed', 'failures', 'dark'
- 'processed', 'rescued', 'ignored']},
- {'summary_fields': [{'inventory': ['id', 'name', 'description', 'has_active_failures',
- 'total_hosts', 'hosts_with_active_failures', 'total_groups',
- 'has_inventory_sources',
- 'total_inventory_sources', 'inventory_sources_with_failures',
- 'organization_id', 'kind']},
- {'project': ['id', 'name', 'description', 'status', 'scm_type']},
- {'job_template': ['id', 'name', 'description']},
- {'unified_job_template': ['id', 'name', 'description', 'unified_job_type']},
- {'instance_group': ['name', 'id']},
- {'created_by': ['id', 'username', 'first_name', 'last_name']},
- {'schedule': ['id', 'name', 'description', 'next_run']},
- {'labels': ['count', 'results']}]}]
+ JOB_FIELDS_ALLOWED_LIST = [
+ 'id',
+ 'type',
+ 'url',
+ 'created',
+ 'modified',
+ 'name',
+ 'description',
+ 'job_type',
+ 'playbook',
+ 'forks',
+ 'limit',
+ 'verbosity',
+ 'job_tags',
+ 'force_handlers',
+ 'skip_tags',
+ 'start_at_task',
+ 'timeout',
+ 'use_fact_cache',
+ 'launch_type',
+ 'status',
+ 'failed',
+ 'started',
+ 'finished',
+ 'elapsed',
+ 'job_explanation',
+ 'execution_node',
+ 'controller_node',
+ 'allow_simultaneous',
+ 'scm_revision',
+ 'diff_mode',
+ 'job_slice_number',
+ 'job_slice_count',
+ 'custom_virtualenv',
+ 'approval_status',
+ 'approval_node_name',
+ 'workflow_url',
+ 'scm_branch',
+ 'artifacts',
+        {'host_status_counts': ['skipped', 'ok', 'changed', 'failed', 'failures', 'dark', 'processed', 'rescued', 'ignored']},
+ {
+ 'summary_fields': [
+ {
+ 'inventory': [
+ 'id',
+ 'name',
+ 'description',
+ 'has_active_failures',
+ 'total_hosts',
+ 'hosts_with_active_failures',
+ 'total_groups',
+ 'has_inventory_sources',
+ 'total_inventory_sources',
+ 'inventory_sources_with_failures',
+ 'organization_id',
+ 'kind',
+ ]
+ },
+ {'project': ['id', 'name', 'description', 'status', 'scm_type']},
+ {'job_template': ['id', 'name', 'description']},
+ {'unified_job_template': ['id', 'name', 'description', 'unified_job_type']},
+ {'instance_group': ['name', 'id']},
+ {'created_by': ['id', 'username', 'first_name', 'last_name']},
+ {'schedule': ['id', 'name', 'description', 'next_run']},
+ {'labels': ['count', 'results']},
+ ]
+ },
+ ]
@classmethod
def context_stub(cls):
"""Returns a stub context that can be used for validating notification messages.
Context has the same structure as the context that will actually be used to render
a notification message."""
- context = {'job': {'allow_simultaneous': False,
- 'artifacts': {},
- 'controller_node': 'foo_controller',
- 'created': datetime.datetime(2018, 11, 13, 6, 4, 0, 0, tzinfo=datetime.timezone.utc),
- 'custom_virtualenv': 'my_venv',
- 'description': 'Sample job description',
- 'diff_mode': False,
- 'elapsed': 0.403018,
- 'execution_node': 'awx',
- 'failed': False,
- 'finished': False,
- 'force_handlers': False,
- 'forks': 0,
- 'host_status_counts': {'skipped': 1, 'ok': 5, 'changed': 3, 'failures': 0, 'dark': 0, 'failed': False, 'processed': 0, 'rescued': 0},
- 'id': 42,
- 'job_explanation': 'Sample job explanation',
- 'job_slice_count': 1,
- 'job_slice_number': 0,
- 'job_tags': '',
- 'job_type': 'run',
- 'launch_type': 'workflow',
- 'limit': 'bar_limit',
- 'modified': datetime.datetime(2018, 12, 13, 6, 4, 0, 0, tzinfo=datetime.timezone.utc),
- 'name': 'Stub JobTemplate',
- 'playbook': 'ping.yml',
- 'scm_branch': '',
- 'scm_revision': '',
- 'skip_tags': '',
- 'start_at_task': '',
- 'started': '2019-07-29T17:38:14.137461Z',
- 'status': 'running',
- 'summary_fields': {'created_by': {'first_name': '',
- 'id': 1,
- 'last_name': '',
- 'username': 'admin'},
- 'instance_group': {'id': 1, 'name': 'tower'},
- 'inventory': {'description': 'Sample inventory description',
- 'has_active_failures': False,
- 'has_inventory_sources': False,
- 'hosts_with_active_failures': 0,
- 'id': 17,
- 'inventory_sources_with_failures': 0,
- 'kind': '',
- 'name': 'Stub Inventory',
- 'organization_id': 121,
- 'total_groups': 0,
- 'total_hosts': 1,
- 'total_inventory_sources': 0},
- 'job_template': {'description': 'Sample job template description',
- 'id': 39,
- 'name': 'Stub JobTemplate'},
- 'labels': {'count': 0, 'results': []},
- 'project': {'description': 'Sample project description',
- 'id': 38,
- 'name': 'Stub project',
- 'scm_type': 'git',
- 'status': 'successful'},
- 'schedule': {'description': 'Sample schedule',
- 'id': 42,
- 'name': 'Stub schedule',
- 'next_run': datetime.datetime(2038, 1, 1, 0, 0, 0, 0, tzinfo=datetime.timezone.utc)},
- 'unified_job_template': {'description': 'Sample unified job template description',
- 'id': 39,
- 'name': 'Stub Job Template',
- 'unified_job_type': 'job'}},
- 'timeout': 0,
- 'type': 'job',
- 'url': '/api/v2/jobs/13/',
- 'use_fact_cache': False,
- 'verbosity': 0},
- 'job_friendly_name': 'Job',
- 'url': 'https://towerhost/#/jobs/playbook/1010',
- 'approval_status': 'approved',
- 'approval_node_name': 'Approve Me',
- 'workflow_url': 'https://towerhost/#/jobs/workflow/1010',
- 'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
+ context = {
+ 'job': {
+ 'allow_simultaneous': False,
+ 'artifacts': {},
+ 'controller_node': 'foo_controller',
+ 'created': datetime.datetime(2018, 11, 13, 6, 4, 0, 0, tzinfo=datetime.timezone.utc),
+ 'custom_virtualenv': 'my_venv',
+ 'description': 'Sample job description',
+ 'diff_mode': False,
+ 'elapsed': 0.403018,
+ 'execution_node': 'awx',
+ 'failed': False,
+ 'finished': False,
+ 'force_handlers': False,
+ 'forks': 0,
+ 'host_status_counts': {'skipped': 1, 'ok': 5, 'changed': 3, 'failures': 0, 'dark': 0, 'failed': False, 'processed': 0, 'rescued': 0},
+ 'id': 42,
+ 'job_explanation': 'Sample job explanation',
+ 'job_slice_count': 1,
+ 'job_slice_number': 0,
+ 'job_tags': '',
+ 'job_type': 'run',
+ 'launch_type': 'workflow',
+ 'limit': 'bar_limit',
+ 'modified': datetime.datetime(2018, 12, 13, 6, 4, 0, 0, tzinfo=datetime.timezone.utc),
+ 'name': 'Stub JobTemplate',
+ 'playbook': 'ping.yml',
+ 'scm_branch': '',
+ 'scm_revision': '',
+ 'skip_tags': '',
+ 'start_at_task': '',
+ 'started': '2019-07-29T17:38:14.137461Z',
+ 'status': 'running',
+ 'summary_fields': {
+ 'created_by': {'first_name': '', 'id': 1, 'last_name': '', 'username': 'admin'},
+ 'instance_group': {'id': 1, 'name': 'tower'},
+ 'inventory': {
+ 'description': 'Sample inventory description',
+ 'has_active_failures': False,
+ 'has_inventory_sources': False,
+ 'hosts_with_active_failures': 0,
+ 'id': 17,
+ 'inventory_sources_with_failures': 0,
+ 'kind': '',
+ 'name': 'Stub Inventory',
+ 'organization_id': 121,
+ 'total_groups': 0,
+ 'total_hosts': 1,
+ 'total_inventory_sources': 0,
+ },
+ 'job_template': {'description': 'Sample job template description', 'id': 39, 'name': 'Stub JobTemplate'},
+ 'labels': {'count': 0, 'results': []},
+ 'project': {'description': 'Sample project description', 'id': 38, 'name': 'Stub project', 'scm_type': 'git', 'status': 'successful'},
+ 'schedule': {
+ 'description': 'Sample schedule',
+ 'id': 42,
+ 'name': 'Stub schedule',
+ 'next_run': datetime.datetime(2038, 1, 1, 0, 0, 0, 0, tzinfo=datetime.timezone.utc),
+ },
+ 'unified_job_template': {
+ 'description': 'Sample unified job template description',
+ 'id': 39,
+ 'name': 'Stub Job Template',
+ 'unified_job_type': 'job',
+ },
+ },
+ 'timeout': 0,
+ 'type': 'job',
+ 'url': '/api/v2/jobs/13/',
+ 'use_fact_cache': False,
+ 'verbosity': 0,
+ },
+ 'job_friendly_name': 'Job',
+ 'url': 'https://towerhost/#/jobs/playbook/1010',
+ 'approval_status': 'approved',
+ 'approval_node_name': 'Approve Me',
+ 'workflow_url': 'https://towerhost/#/jobs/workflow/1010',
+ 'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
'traceback': '',
'status': 'running',
'started': '2019-08-07T21:46:38.362630+00:00',
@@ -377,7 +411,8 @@ class JobNotificationMixin(object):
'friendly_name': 'Job',
'finished': False,
'credential': 'Stub credential',
- 'created_by': 'admin'}"""}
+ 'created_by': 'admin'}""",
+ }
return context
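
A sketch of how a stub context like this can validate a user-supplied message template, assuming Jinja2 is available (AWX's actual rendering, shown further below, uses a sandboxed environment):

    from jinja2.sandbox import ImmutableSandboxedEnvironment

    env = ImmutableSandboxedEnvironment()
    tmpl = env.from_string("{{ job_friendly_name }} {{ job.id }} is {{ job.status }}")
    # prints: Job 42 is running
    print(tmpl.render({'job': {'id': 42, 'status': 'running'}, 'job_friendly_name': 'Job'}))
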
@@ -403,11 +438,7 @@ class JobNotificationMixin(object):
'job': job_context,
'job_friendly_name': self.get_notification_friendly_name(),
'url': self.get_ui_url(),
- 'job_metadata': json.dumps(
- self.notification_data(),
- ensure_ascii=False,
- indent=4
- )
+ 'job_metadata': json.dumps(self.notification_data(), ensure_ascii=False, indent=4),
}
def build_context(node, fields, allowed_fields):
@@ -425,6 +456,7 @@ class JobNotificationMixin(object):
if safe_field not in fields:
continue
node[safe_field] = fields[safe_field]
+
build_context(context['job'], serialized_job, self.JOB_FIELDS_ALLOWED_LIST)
return context
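
A standalone sketch, not AWX's exact implementation, of how a nested allow-list like JOB_FIELDS_ALLOWED_LIST prunes a serialized job down to safe fields:

    def filter_allowed(fields, allowed):
        out = {}
        for entry in allowed:
            if isinstance(entry, dict):  # nested subtree, e.g. {'summary_fields': [...]}
                for key, sub_allowed in entry.items():
                    if isinstance(fields.get(key), dict):
                        out[key] = filter_allowed(fields[key], sub_allowed)
            elif entry in fields:  # plain field name
                out[entry] = fields[entry]
        return out

    print(filter_allowed({'id': 1, 'secret': 'x'}, ['id']))  # -> {'id': 1}
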
@@ -442,6 +474,7 @@ class JobNotificationMixin(object):
env = sandbox.ImmutableSandboxedEnvironment(undefined=ChainableUndefined)
from awx.api.serializers import UnifiedJobSerializer
+
job_serialization = UnifiedJobSerializer(self).to_representation(self)
context = self.context(job_serialization)
@@ -476,6 +509,7 @@ class JobNotificationMixin(object):
def send_notification_templates(self, status):
from awx.main.tasks import send_notifications # avoid circular import
+
if status not in ['running', 'succeeded', 'failed']:
raise ValueError(_("status must be either running, succeeded or failed"))
try:
@@ -494,7 +528,8 @@ class JobNotificationMixin(object):
# https://stackoverflow.com/a/3431699/10669572
def send_it(local_nt=nt, local_msg=msg, local_body=body):
def _func():
- send_notifications.delay([local_nt.generate_notification(local_msg, local_body).id],
- job_id=self.id)
+ send_notifications.delay([local_nt.generate_notification(local_msg, local_body).id], job_id=self.id)
+
return _func
+
connection.on_commit(send_it())
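
The default arguments in send_it above (local_nt=nt, and so on) guard against Python's late-binding closures, per the Stack Overflow answer linked in the comment; a standalone illustration:

    funcs_late = [lambda: i for i in range(3)]
    funcs_bound = [lambda i=i: i for i in range(3)]
    print([f() for f in funcs_late])   # [2, 2, 2] -- every closure sees the final i
    print([f() for f in funcs_bound])  # [0, 1, 2] -- defaults freeze each i
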
diff --git a/awx/main/models/oauth.py b/awx/main/models/oauth.py
index a71fb4f67f..b9b4b8c217 100644
--- a/awx/main/models/oauth.py
+++ b/awx/main/models/oauth.py
@@ -27,13 +27,12 @@ logger = logging.getLogger('awx.main.models.oauth')
class OAuth2Application(AbstractApplication):
-
class Meta:
app_label = 'main'
verbose_name = _('application')
unique_together = (("name", "organization"),)
ordering = ('organization', 'name')
-
+
CLIENT_CONFIDENTIAL = "confidential"
CLIENT_PUBLIC = "public"
CLIENT_TYPES = (
@@ -65,42 +64,34 @@ class OAuth2Application(AbstractApplication):
null=True,
)
client_secret = OAuth2ClientSecretField(
- max_length=1024,
- blank=True,
- default=generate_client_secret,
+ max_length=1024,
+ blank=True,
+ default=generate_client_secret,
db_index=True,
- help_text=_('Used for more stringent verification of access to an application when creating a token.')
+ help_text=_('Used for more stringent verification of access to an application when creating a token.'),
)
client_type = models.CharField(
- max_length=32,
- choices=CLIENT_TYPES,
- help_text=_('Set to Public or Confidential depending on how secure the client device is.')
- )
- skip_authorization = models.BooleanField(
- default=False,
- help_text=_('Set True to skip authorization step for completely trusted applications.')
+ max_length=32, choices=CLIENT_TYPES, help_text=_('Set to Public or Confidential depending on how secure the client device is.')
)
+ skip_authorization = models.BooleanField(default=False, help_text=_('Set True to skip authorization step for completely trusted applications.'))
authorization_grant_type = models.CharField(
- max_length=32,
- choices=GRANT_TYPES,
- help_text=_('The Grant type the user must use for acquire tokens for this application.')
+        max_length=32, choices=GRANT_TYPES, help_text=_('The Grant type the user must use to acquire tokens for this application.')
)
class OAuth2AccessToken(AbstractAccessToken):
-
class Meta:
app_label = 'main'
verbose_name = _('access token')
ordering = ('id',)
user = models.ForeignKey(
- settings.AUTH_USER_MODEL,
- on_delete=models.CASCADE,
- blank=True,
+ settings.AUTH_USER_MODEL,
+ on_delete=models.CASCADE,
+ blank=True,
null=True,
related_name="%(app_label)s_%(class)s",
- help_text=_('The user representing the token owner')
+ help_text=_('The user representing the token owner'),
)
description = models.TextField(
default='',
@@ -114,12 +105,11 @@ class OAuth2AccessToken(AbstractAccessToken):
scope = models.TextField(
blank=True,
default='write',
- help_text=_('Allowed scopes, further restricts user\'s permissions. Must be a simple space-separated string with allowed scopes [\'read\', \'write\'].')
- )
- modified = models.DateTimeField(
- editable=False,
- auto_now=True
+ help_text=_(
+ 'Allowed scopes, further restricts user\'s permissions. Must be a simple space-separated string with allowed scopes [\'read\', \'write\'].'
+ ),
)
+ modified = models.DateTimeField(editable=False, auto_now=True)
def is_valid(self, scopes=None):
valid = super(OAuth2AccessToken, self).is_valid(scopes)
@@ -129,6 +119,7 @@ class OAuth2AccessToken(AbstractAccessToken):
def _update_last_used():
if OAuth2AccessToken.objects.filter(pk=self.pk).exists():
self.save(update_fields=['last_used'])
+
connection.on_commit(_update_last_used)
return valid
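
A minimal sketch of the on_commit pattern used by _update_last_used, assuming a configured Django project: the callback runs only after the surrounding transaction commits, so rolled-back requests never bump last_used.

    from django.db import transaction

    def mark_used(token_pk):
        def _update():
            # stand-in for self.save(update_fields=['last_used'])
            print(f"token {token_pk} marked as used")
        transaction.on_commit(_update)
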
@@ -136,9 +127,9 @@ class OAuth2AccessToken(AbstractAccessToken):
if self.user and settings.ALLOW_OAUTH2_FOR_EXTERNAL_USERS is False:
external_account = get_external_account(self.user)
if external_account is not None:
- raise oauth2.AccessDeniedError(_(
- 'OAuth2 Tokens cannot be created by users associated with an external authentication provider ({})'
- ).format(external_account))
+ raise oauth2.AccessDeniedError(
+ _('OAuth2 Tokens cannot be created by users associated with an external authentication provider ({})').format(external_account)
+ )
def save(self, *args, **kwargs):
if not self.pk:
diff --git a/awx/main/models/organization.py b/awx/main/models/organization.py
index f0ecfea5c7..6dc92a06ac 100644
--- a/awx/main/models/organization.py
+++ b/awx/main/models/organization.py
@@ -2,7 +2,6 @@
# All Rights Reserved.
-
# Django
from django.conf import settings
from django.db import models
@@ -14,13 +13,8 @@ from django.utils.translation import ugettext_lazy as _
# AWX
from awx.api.versioning import reverse
-from awx.main.fields import (
- AutoOneToOneField, ImplicitRoleField, OrderedManyToManyField
-)
-from awx.main.models.base import (
- BaseModel, CommonModel, CommonModelNameNotUnique, CreatedModifiedModel,
- NotificationFieldsModel
-)
+from awx.main.fields import AutoOneToOneField, ImplicitRoleField, OrderedManyToManyField
+from awx.main.models.base import BaseModel, CommonModel, CommonModelNameNotUnique, CreatedModifiedModel, NotificationFieldsModel
from awx.main.models.rbac import (
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
ROLE_SINGLETON_SYSTEM_AUDITOR,
@@ -32,35 +26,24 @@ __all__ = ['Organization', 'Team', 'Profile', 'UserSessionMembership']
class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin):
- '''
+ """
An organization is the basic unit of multi-tenancy divisions
- '''
+ """
class Meta:
app_label = 'main'
ordering = ('name',)
- instance_groups = OrderedManyToManyField(
- 'InstanceGroup',
- blank=True,
- through='OrganizationInstanceGroupMembership'
- )
+ instance_groups = OrderedManyToManyField('InstanceGroup', blank=True, through='OrganizationInstanceGroupMembership')
galaxy_credentials = OrderedManyToManyField(
- 'Credential',
- blank=True,
- through='OrganizationGalaxyCredentialMembership',
- related_name='%(class)s_galaxy_credentials'
+ 'Credential', blank=True, through='OrganizationGalaxyCredentialMembership', related_name='%(class)s_galaxy_credentials'
)
max_hosts = models.PositiveIntegerField(
blank=True,
default=0,
help_text=_('Maximum number of hosts allowed to be managed by this organization.'),
)
- notification_templates_approvals = models.ManyToManyField(
- "NotificationTemplate",
- blank=True,
- related_name='%(class)s_notification_templates_for_approvals'
- )
+ notification_templates_approvals = models.ManyToManyField("NotificationTemplate", blank=True, related_name='%(class)s_notification_templates_for_approvals')
default_environment = models.ForeignKey(
'ExecutionEnvironment',
null=True,
@@ -101,42 +84,41 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi
auditor_role = ImplicitRoleField(
parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
)
- member_role = ImplicitRoleField(
- parent_role=['admin_role']
- )
+ member_role = ImplicitRoleField(parent_role=['admin_role'])
read_role = ImplicitRoleField(
- parent_role=['member_role', 'auditor_role',
- 'execute_role', 'project_admin_role',
- 'inventory_admin_role', 'workflow_admin_role',
- 'notification_admin_role', 'credential_admin_role',
- 'job_template_admin_role', 'approval_role',
- 'execution_environment_admin_role',],
+ parent_role=[
+ 'member_role',
+ 'auditor_role',
+ 'execute_role',
+ 'project_admin_role',
+ 'inventory_admin_role',
+ 'workflow_admin_role',
+ 'notification_admin_role',
+ 'credential_admin_role',
+ 'job_template_admin_role',
+ 'approval_role',
+ 'execution_environment_admin_role',
+ ],
)
approval_role = ImplicitRoleField(
parent_role='admin_role',
)
-
def get_absolute_url(self, request=None):
return reverse('api:organization_detail', kwargs={'pk': self.pk}, request=request)
'''
RelatedJobsMixin
'''
+
def _get_related_jobs(self):
return UnifiedJob.objects.non_polymorphic().filter(organization=self)
class OrganizationGalaxyCredentialMembership(models.Model):
- organization = models.ForeignKey(
- 'Organization',
- on_delete=models.CASCADE
- )
- credential = models.ForeignKey(
- 'Credential',
- on_delete=models.CASCADE
- )
+ organization = models.ForeignKey('Organization', on_delete=models.CASCADE)
+ credential = models.ForeignKey('Credential', on_delete=models.CASCADE)
position = models.PositiveIntegerField(
null=True,
default=None,
@@ -145,9 +127,9 @@ class OrganizationGalaxyCredentialMembership(models.Model):
class Team(CommonModelNameNotUnique, ResourceMixin):
- '''
+ """
A team is a group of users that work on common projects.
- '''
+ """
class Meta:
app_label = 'main'
@@ -176,20 +158,15 @@ class Team(CommonModelNameNotUnique, ResourceMixin):
class Profile(CreatedModifiedModel):
- '''
+ """
Profile model related to User object. Currently stores LDAP DN for users
loaded from LDAP.
- '''
+ """
class Meta:
app_label = 'main'
- user = AutoOneToOneField(
- 'auth.User',
- related_name='profile',
- editable=False,
- on_delete=models.CASCADE
- )
+ user = AutoOneToOneField('auth.User', related_name='profile', editable=False, on_delete=models.CASCADE)
ldap_dn = models.CharField(
max_length=1024,
default='',
@@ -197,21 +174,17 @@ class Profile(CreatedModifiedModel):
class UserSessionMembership(BaseModel):
- '''
+ """
    A lookup table for API session membership for a given user. Note that
    channels creates a different session for websockets using the same
    underlying model.
- '''
+ """
class Meta:
app_label = 'main'
- user = models.ForeignKey(
- 'auth.User', related_name='+', blank=False, null=False, on_delete=models.CASCADE
- )
- session = models.OneToOneField(
- Session, related_name='+', blank=False, null=False, on_delete=models.CASCADE
- )
+ user = models.ForeignKey('auth.User', related_name='+', blank=False, null=False, on_delete=models.CASCADE)
+ session = models.OneToOneField(Session, related_name='+', blank=False, null=False, on_delete=models.CASCADE)
created = models.DateTimeField(default=None, editable=False)
@staticmethod
@@ -220,16 +193,15 @@ class UserSessionMembership(BaseModel):
return []
if now is None:
now = tz_now()
- query_set = UserSessionMembership.objects\
- .select_related('session')\
- .filter(user_id=user_id)\
- .order_by('-created')
+ query_set = UserSessionMembership.objects.select_related('session').filter(user_id=user_id).order_by('-created')
non_expire_memberships = [x for x in query_set if x.session.expire_date > now]
- return non_expire_memberships[settings.SESSIONS_PER_USER:]
+ return non_expire_memberships[settings.SESSIONS_PER_USER :]
# Add get_absolute_url method to User model if not present.
if not hasattr(User, 'get_absolute_url'):
+
def user_get_absolute_url(user, request=None):
return reverse('api:user_detail', kwargs={'pk': user.pk}, request=request)
+
User.add_to_class('get_absolute_url', user_get_absolute_url)
diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py
index 5a613b6c21..4429409eb6 100644
--- a/awx/main/models/projects.py
+++ b/awx/main/models/projects.py
@@ -29,12 +29,7 @@ from awx.main.models.unified_jobs import (
UnifiedJobTemplate,
)
from awx.main.models.jobs import Job
-from awx.main.models.mixins import (
- ResourceMixin,
- TaskManagerProjectUpdateMixin,
- CustomVirtualEnvMixin,
- RelatedJobsMixin
-)
+from awx.main.models.mixins import ResourceMixin, TaskManagerProjectUpdateMixin, CustomVirtualEnvMixin, RelatedJobsMixin
from awx.main.utils import update_scm_url, polymorphic
from awx.main.utils.ansible import skip_directory, could_be_inventory, could_be_playbook
from awx.main.fields import ImplicitRoleField
@@ -68,9 +63,11 @@ class ProjectOptions(models.Model):
@classmethod
def get_local_path_choices(cls):
if os.path.exists(settings.PROJECTS_ROOT):
- paths = [x for x in os.listdir(settings.PROJECTS_ROOT)
- if (os.path.isdir(os.path.join(settings.PROJECTS_ROOT, x)) and
- not x.startswith('.') and not x.startswith('_'))]
+ paths = [
+ x
+ for x in os.listdir(settings.PROJECTS_ROOT)
+ if (os.path.isdir(os.path.join(settings.PROJECTS_ROOT, x)) and not x.startswith('.') and not x.startswith('_'))
+ ]
qs = Project.objects
used_paths = qs.values_list('local_path', flat=True)
return [x for x in paths if x not in used_paths]
@@ -78,10 +75,7 @@ class ProjectOptions(models.Model):
return []
local_path = models.CharField(
- max_length=1024,
- blank=True,
- help_text=_('Local path (relative to PROJECTS_ROOT) containing '
- 'playbooks and related files for this project.')
+ max_length=1024, blank=True, help_text=_('Local path (relative to PROJECTS_ROOT) containing ' 'playbooks and related files for this project.')
)
scm_type = models.CharField(
@@ -145,8 +139,7 @@ class ProjectOptions(models.Model):
if not self.scm_type:
return ''
try:
- scm_url = update_scm_url(self.scm_type, scm_url,
- check_special_cases=False)
+ scm_url = update_scm_url(self.scm_type, scm_url, check_special_cases=False)
except ValueError as e:
raise ValidationError((e.args or (_('Invalid SCM URL.'),))[0])
scm_url_parts = urlparse.urlsplit(scm_url)
@@ -169,8 +162,7 @@ class ProjectOptions(models.Model):
try:
if self.scm_type == 'insights':
self.scm_url = settings.INSIGHTS_URL_BASE
- scm_url = update_scm_url(self.scm_type, self.scm_url,
- check_special_cases=False)
+ scm_url = update_scm_url(self.scm_type, self.scm_url, check_special_cases=False)
scm_url_parts = urlparse.urlsplit(scm_url)
# Prefer the username/password in the URL, if provided.
scm_username = scm_url_parts.username or cred.get_input('username', default='')
@@ -179,8 +171,7 @@ class ProjectOptions(models.Model):
else:
scm_password = ''
try:
- update_scm_url(self.scm_type, self.scm_url, scm_username,
- scm_password)
+ update_scm_url(self.scm_type, self.scm_url, scm_username, scm_password)
except ValueError as e:
raise ValidationError((e.args or (_('Invalid credential.'),))[0])
except ValueError:
@@ -221,7 +212,6 @@ class ProjectOptions(models.Model):
results.append(smart_text(playbook))
return sorted(results, key=lambda x: smart_str(x).lower())
-
@property
def inventories(self):
results = []
@@ -243,10 +233,10 @@ class ProjectOptions(models.Model):
return sorted(results, key=lambda x: smart_str(x).lower())
def get_lock_file(self):
- '''
+ """
        We want the project path in name only; we don't care whether it exists.
        This method just appends .lock onto the full directory path.
- '''
+ """
proj_path = self.get_project_path(check_if_exists=False)
if not proj_path:
return None
@@ -254,9 +244,9 @@ class ProjectOptions(models.Model):
class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin):
- '''
+ """
A project represents a playbook git repo that can access a set of inventories
- '''
+ """
SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]
FIELDS_TO_PRESERVE_AT_COPY = ['labels', 'instance_groups', 'credentials']
@@ -283,13 +273,11 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
scm_update_cache_timeout = models.PositiveIntegerField(
default=0,
blank=True,
- help_text=_('The number of seconds after the last project update ran that a new '
- 'project update will be launched as a job dependency.'),
+ help_text=_('The number of seconds after the last project update ran that a new ' 'project update will be launched as a job dependency.'),
)
allow_override = models.BooleanField(
default=False,
- help_text=_('Allow changing the SCM branch or revision in a job template '
- 'that uses this project.'),
+ help_text=_('Allow changing the SCM branch or revision in a job template ' 'that uses this project.'),
)
scm_revision = models.CharField(
@@ -317,10 +305,12 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
help_text=_('Suggested list of content that could be Ansible inventory in the project'),
)
- admin_role = ImplicitRoleField(parent_role=[
- 'organization.project_admin_role',
- 'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
- ])
+ admin_role = ImplicitRoleField(
+ parent_role=[
+ 'organization.project_admin_role',
+ 'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
+ ]
+ )
use_role = ImplicitRoleField(
parent_role='admin_role',
@@ -330,12 +320,14 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
parent_role='admin_role',
)
- read_role = ImplicitRoleField(parent_role=[
- 'organization.auditor_role',
- 'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
- 'use_role',
- 'update_role',
- ])
+ read_role = ImplicitRoleField(
+ parent_role=[
+ 'organization.auditor_role',
+ 'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
+ 'use_role',
+ 'update_role',
+ ]
+ )
@classmethod
def _get_unified_job_class(cls):
@@ -343,9 +335,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
@classmethod
def _get_unified_job_field_names(cls):
- return set(f.name for f in ProjectOptions._meta.fields) | set(
- ['name', 'description', 'organization']
- )
+ return set(f.name for f in ProjectOptions._meta.fields) | set(['name', 'description', 'organization'])
def clean_organization(self):
if self.pk:
@@ -370,20 +360,18 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
# Do the actual save.
super(Project, self).save(*args, **kwargs)
if new_instance:
- update_fields=[]
+ update_fields = []
# Generate local_path for SCM after initial save (so we have a PK).
if self.scm_type and not self.local_path.startswith('_'):
update_fields.append('local_path')
if update_fields:
from awx.main.signals import disable_activity_stream
+
with disable_activity_stream():
self.save(update_fields=update_fields)
# If we just created a new project with SCM, start the initial update.
# also update if certain fields have changed
- relevant_change = any(
- pre_save_vals.get(fd_name, None) != self._prior_values_store.get(fd_name, None)
- for fd_name in self.FIELDS_TRIGGER_UPDATE
- )
+ relevant_change = any(pre_save_vals.get(fd_name, None) != self._prior_values_store.get(fd_name, None) for fd_name in self.FIELDS_TRIGGER_UPDATE)
if (relevant_change or new_instance) and (not skip_update) and self.scm_type:
self.update()
@@ -447,26 +435,21 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
@property
def notification_templates(self):
base_notification_templates = NotificationTemplate.objects
- error_notification_templates = list(base_notification_templates
- .filter(unifiedjobtemplate_notification_templates_for_errors=self))
- started_notification_templates = list(base_notification_templates
- .filter(unifiedjobtemplate_notification_templates_for_started=self))
- success_notification_templates = list(base_notification_templates
- .filter(unifiedjobtemplate_notification_templates_for_success=self))
+ error_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_errors=self))
+ started_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_started=self))
+ success_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_success=self))
# Get Organization NotificationTemplates
if self.organization is not None:
- error_notification_templates = set(error_notification_templates +
- list(base_notification_templates
- .filter(organization_notification_templates_for_errors=self.organization)))
- started_notification_templates = set(started_notification_templates +
- list(base_notification_templates
- .filter(organization_notification_templates_for_started=self.organization)))
- success_notification_templates = set(success_notification_templates +
- list(base_notification_templates
- .filter(organization_notification_templates_for_success=self.organization)))
- return dict(error=list(error_notification_templates),
- started=list(started_notification_templates),
- success=list(success_notification_templates))
+ error_notification_templates = set(
+ error_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_errors=self.organization))
+ )
+ started_notification_templates = set(
+ started_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_started=self.organization))
+ )
+ success_notification_templates = set(
+ success_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_success=self.organization))
+ )
+ return dict(error=list(error_notification_templates), started=list(started_notification_templates), success=list(success_notification_templates))
def get_absolute_url(self, request=None):
return reverse('api:project_detail', kwargs={'pk': self.pk}, request=request)
@@ -474,11 +457,9 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
'''
RelatedJobsMixin
'''
+
def _get_related_jobs(self):
- return UnifiedJob.objects.non_polymorphic().filter(
- models.Q(job__project=self) |
- models.Q(projectupdate__project=self)
- )
+ return UnifiedJob.objects.non_polymorphic().filter(models.Q(job__project=self) | models.Q(projectupdate__project=self))
def delete(self, *args, **kwargs):
paths_to_delete = (self.get_project_path(check_if_exists=False), self.get_cache_path())
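
In _get_related_jobs above, the | operator ORs two Q filters into a single query; for example, assuming Django is installed:

    from django.db.models import Q

    combined = Q(job__project_id=1) | Q(projectupdate__project_id=1)
    print(combined)  # (OR: ('job__project_id', 1), ('projectupdate__project_id', 1))
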
@@ -486,14 +467,15 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
for path_to_delete in paths_to_delete:
if self.scm_type and path_to_delete: # non-manual, concrete path
from awx.main.tasks import delete_project_files
+
delete_project_files.delay(path_to_delete)
return r
class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManagerProjectUpdateMixin):
- '''
+ """
Internal job for tracking project updates from SCM.
- '''
+ """
class Meta:
app_label = 'main'
@@ -546,6 +528,7 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage
@classmethod
def _get_task_class(cls):
from awx.main.tasks import RunProjectUpdate
+
return RunProjectUpdate
def _global_timeout_setting(self):
@@ -618,6 +601,7 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage
'''
JobNotificationMixin
'''
+
def get_notification_templates(self):
return self.project.notification_templates
diff --git a/awx/main/models/rbac.py b/awx/main/models/rbac.py
index fe8d622ac6..485f70bd0d 100644
--- a/awx/main/models/rbac.py
+++ b/awx/main/models/rbac.py
@@ -15,7 +15,7 @@ from django.utils.translation import ugettext_lazy as _
# AWX
from awx.api.versioning import reverse
-from django.contrib.auth.models import User # noqa
+from django.contrib.auth.models import User # noqa
__all__ = [
'Role',
@@ -23,13 +23,13 @@ __all__ = [
'get_roles_on_resource',
'ROLE_SINGLETON_SYSTEM_ADMINISTRATOR',
'ROLE_SINGLETON_SYSTEM_AUDITOR',
- 'role_summary_fields_generator'
+ 'role_summary_fields_generator',
]
logger = logging.getLogger('awx.main.models.rbac')
-ROLE_SINGLETON_SYSTEM_ADMINISTRATOR='system_administrator'
-ROLE_SINGLETON_SYSTEM_AUDITOR='system_auditor'
+ROLE_SINGLETON_SYSTEM_ADMINISTRATOR = 'system_administrator'
+ROLE_SINGLETON_SYSTEM_AUDITOR = 'system_auditor'
role_names = {
'system_administrator': _('System Administrator'),
@@ -77,15 +77,16 @@ role_descriptions = {
}
-tls = threading.local() # thread local storage
+tls = threading.local() # thread local storage
def check_singleton(func):
- '''
+ """
    check_singleton is a decorator that checks whether the user passed
    to a `visible_roles` method holds either of our singleton roles (Admin, Auditor)
    and, if so, returns the full list of roles without filtering.
- '''
+ """
+
def wrapper(*args, **kwargs):
sys_admin = Role.singleton(ROLE_SINGLETON_SYSTEM_ADMINISTRATOR)
sys_audit = Role.singleton(ROLE_SINGLETON_SYSTEM_AUDITOR)
@@ -95,12 +96,13 @@ def check_singleton(func):
return args[1]
return Role.objects.all()
return func(*args, **kwargs)
+
return wrapper
@contextlib.contextmanager
def batch_role_ancestor_rebuilding(allow_nesting=False):
- '''
+ """
Batches the role ancestor rebuild work necessary whenever role-role
relations change. This can result in a big speedup when performing
any bulk manipulation.
@@ -108,7 +110,7 @@ def batch_role_ancestor_rebuilding(allow_nesting=False):
WARNING: Calls to anything related to checking access/permissions
while within the context of the batch_role_ancestor_rebuilding will
likely not work.
- '''
+ """
batch_role_rebuilding = getattr(tls, 'batch_role_rebuilding', False)
@@ -131,17 +133,15 @@ def batch_role_ancestor_rebuilding(allow_nesting=False):
class Role(models.Model):
- '''
+ """
Role model
- '''
+ """
class Meta:
app_label = 'main'
verbose_name_plural = _('roles')
db_table = 'main_rbac_roles'
- index_together = [
- ("content_type", "object_id")
- ]
+ index_together = [("content_type", "object_id")]
ordering = ("content_type", "object_id")
role_field = models.TextField(null=False)
@@ -149,11 +149,8 @@ class Role(models.Model):
parents = models.ManyToManyField('Role', related_name='children')
implicit_parents = models.TextField(null=False, default='[]')
ancestors = models.ManyToManyField(
- 'Role',
- through='RoleAncestorEntry',
- through_fields=('descendent', 'ancestor'),
- related_name='descendents'
- ) # auto-generated by `rebuild_role_ancestor_list`
+ 'Role', through='RoleAncestorEntry', through_fields=('descendent', 'ancestor'), related_name='descendents'
+ ) # auto-generated by `rebuild_role_ancestor_list`
members = models.ManyToManyField('auth.User', related_name='roles')
content_type = models.ForeignKey(ContentType, null=True, default=None, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField(null=True, default=None)
@@ -181,8 +178,7 @@ class Role(models.Model):
return self.ancestors.filter(pk=accessor.pk).exists()
else:
accessor_type = ContentType.objects.get_for_model(accessor)
- roles = Role.objects.filter(content_type__pk=accessor_type.id,
- object_id=accessor.id)
+ roles = Role.objects.filter(content_type__pk=accessor_type.id, object_id=accessor.id)
return self.ancestors.filter(pk__in=roles).exists()
@property
@@ -214,12 +210,12 @@ class Role(models.Model):
@staticmethod
def rebuild_role_ancestor_list(additions, removals):
- '''
+ """
Updates our `ancestors` map to accurately reflect all of the ancestors for a role
        You should never need to call this; signal handlers call this
        method automatically when the role hierarchy changes.
- '''
+ """
# The ancestry table
# =================================================
#
@@ -320,8 +316,7 @@ class Role(models.Model):
# to the magic number of 41496, or 40000 for a nice round number
def split_ids_for_sqlite(role_ids):
for i in range(0, len(role_ids), 40000):
- yield role_ids[i:i + 40000]
-
+ yield role_ids[i : i + 40000]
with transaction.atomic():
while len(additions) > 0 or len(removals) > 0:
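
The chunking generator above caps how many ids get spliced into any one statement; the same pattern in isolation:

    def split_ids(ids, size=40000):
        for i in range(0, len(ids), size):
            yield ids[i : i + size]

    print([len(c) for c in split_ids(list(range(90000)))])  # [40000, 40000, 10000]
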
@@ -333,7 +328,8 @@ class Role(models.Model):
if len(removals) > 0:
for ids in split_ids_for_sqlite(removals):
sql_params['ids'] = ','.join(str(x) for x in ids)
- cursor.execute('''
+ cursor.execute(
+ '''
DELETE FROM %(ancestors_table)s
WHERE descendent_id IN (%(ids)s)
AND descendent_id != ancestor_id
@@ -345,7 +341,9 @@ class Role(models.Model):
WHERE parents.from_role_id = %(ancestors_table)s.descendent_id
AND %(ancestors_table)s.ancestor_id = inner_ancestors.ancestor_id
)
- ''' % sql_params)
+ '''
+ % sql_params
+ )
delete_ct += cursor.rowcount
@@ -353,7 +351,8 @@ class Role(models.Model):
if len(additions) > 0:
for ids in split_ids_for_sqlite(additions):
sql_params['ids'] = ','.join(str(x) for x in ids)
- cursor.execute('''
+ cursor.execute(
+ '''
INSERT INTO %(ancestors_table)s (descendent_id, ancestor_id, role_field, content_type_id, object_id)
SELECT from_id, to_id, new_ancestry_list.role_field, new_ancestry_list.content_type_id, new_ancestry_list.object_id FROM (
SELECT roles.id from_id,
@@ -383,7 +382,9 @@ class Role(models.Model):
AND %(ancestors_table)s.ancestor_id = new_ancestry_list.to_id
)
- ''' % sql_params)
+ '''
+ % sql_params
+ )
insert_ct += cursor.rowcount
if insert_ct == 0 and delete_ct == 0:
@@ -405,7 +406,6 @@ class Role(models.Model):
new_removals.update([row[0] for row in cursor.fetchall()])
removals = list(new_removals)
-
@staticmethod
def visible_roles(user):
return Role.filter_visible_roles(user, Role.objects.all())
@@ -413,19 +413,21 @@ class Role(models.Model):
@staticmethod
@check_singleton
def filter_visible_roles(user, roles_qs):
- '''
+ """
Visible roles include all roles that are ancestors of any
roles that the user has access to.
        Case in point - organization auditor_role must see all roles
        in their organization, even though some of those roles descend from
        organization admin_role rather than auditor_role.
- '''
+ """
return roles_qs.filter(
id__in=RoleAncestorEntry.objects.filter(
- descendent__in=RoleAncestorEntry.objects.filter(
- ancestor_id__in=list(user.roles.values_list('id', flat=True))
- ).values_list('descendent', flat=True)
- ).distinct().values_list('ancestor', flat=True)
+ descendent__in=RoleAncestorEntry.objects.filter(ancestor_id__in=list(user.roles.values_list('id', flat=True))).values_list(
+ 'descendent', flat=True
+ )
+ )
+ .distinct()
+ .values_list('ancestor', flat=True)
)
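
A toy, set-based sketch of the visibility rule described in the docstring, with hypothetical role names: visible roles are the ancestors of every role the user can reach.

    descendents_of = {'org_auditor': {'org_auditor', 'team_member'}}
    ancestors_of = {'org_auditor': {'org_auditor'},
                    'team_member': {'team_member', 'org_admin', 'org_auditor'}}

    user_roles = {'org_auditor'}
    reachable = set().union(*(descendents_of[r] for r in user_roles))
    visible = set().union(*(ancestors_of[r] for r in reachable))
    print(visible)  # includes org_admin, which the user does not hold
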
@staticmethod
@@ -441,30 +443,29 @@ class Role(models.Model):
class RoleAncestorEntry(models.Model):
-
class Meta:
app_label = 'main'
verbose_name_plural = _('role_ancestors')
db_table = 'main_rbac_role_ancestors'
index_together = [
- ("ancestor", "content_type_id", "object_id"), # used by get_roles_on_resource
- ("ancestor", "content_type_id", "role_field"), # used by accessible_objects
- ("ancestor", "descendent"), # used by rebuild_role_ancestor_list in the NOT EXISTS clauses.
+ ("ancestor", "content_type_id", "object_id"), # used by get_roles_on_resource
+ ("ancestor", "content_type_id", "role_field"), # used by accessible_objects
+ ("ancestor", "descendent"), # used by rebuild_role_ancestor_list in the NOT EXISTS clauses.
]
- descendent = models.ForeignKey(Role, null=False, on_delete=models.CASCADE, related_name='+')
- ancestor = models.ForeignKey(Role, null=False, on_delete=models.CASCADE, related_name='+')
- role_field = models.TextField(null=False)
+ descendent = models.ForeignKey(Role, null=False, on_delete=models.CASCADE, related_name='+')
+ ancestor = models.ForeignKey(Role, null=False, on_delete=models.CASCADE, related_name='+')
+ role_field = models.TextField(null=False)
content_type_id = models.PositiveIntegerField(null=False)
- object_id = models.PositiveIntegerField(null=False)
+ object_id = models.PositiveIntegerField(null=False)
def get_roles_on_resource(resource, accessor):
- '''
+ """
+    Returns a string list of the roles an accessor has for a given resource.
An accessor can be either a User, Role, or an arbitrary resource that
contains one or more Roles associated with it.
- '''
+ """
if type(accessor) == User:
roles = accessor.roles.all()
@@ -472,16 +473,15 @@ def get_roles_on_resource(resource, accessor):
roles = [accessor]
else:
accessor_type = ContentType.objects.get_for_model(accessor)
- roles = Role.objects.filter(content_type__pk=accessor_type.id,
- object_id=accessor.id)
+ roles = Role.objects.filter(content_type__pk=accessor_type.id, object_id=accessor.id)
return [
- role_field for role_field in
- RoleAncestorEntry.objects.filter(
- ancestor__in=roles,
- content_type_id=ContentType.objects.get_for_model(resource).id,
- object_id=resource.id
- ).values_list('role_field', flat=True).distinct()
+ role_field
+ for role_field in RoleAncestorEntry.objects.filter(
+ ancestor__in=roles, content_type_id=ContentType.objects.get_for_model(resource).id, object_id=resource.id
+ )
+ .values_list('role_field', flat=True)
+ .distinct()
]
diff --git a/awx/main/models/schedules.py b/awx/main/models/schedules.py
index 5b907a4333..d30d44372d 100644
--- a/awx/main/models/schedules.py
+++ b/awx/main/models/schedules.py
@@ -35,7 +35,6 @@ UTC_TIMEZONES = {x: tzutc() for x in dateutil.parser.parserinfo().UTCZONE}
class ScheduleFilterMethods(object):
-
def enabled(self, enabled=True):
return self.filter(enabled=enabled)
@@ -62,7 +61,6 @@ class ScheduleManager(ScheduleFilterMethods, models.Manager):
class Schedule(PrimordialModel, LaunchTimeConfig):
-
class Meta:
app_label = 'main'
ordering = ['-next_run']
@@ -78,32 +76,13 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
name = models.CharField(
max_length=512,
)
- enabled = models.BooleanField(
- default=True,
- help_text=_("Enables processing of this schedule.")
- )
- dtstart = models.DateTimeField(
- null=True,
- default=None,
- editable=False,
- help_text=_("The first occurrence of the schedule occurs on or after this time.")
- )
+ enabled = models.BooleanField(default=True, help_text=_("Enables processing of this schedule."))
+ dtstart = models.DateTimeField(null=True, default=None, editable=False, help_text=_("The first occurrence of the schedule occurs on or after this time."))
dtend = models.DateTimeField(
- null=True,
- default=None,
- editable=False,
- help_text=_("The last occurrence of the schedule occurs before this time, aftewards the schedule expires.")
- )
- rrule = models.CharField(
- max_length=255,
- help_text=_("A value representing the schedules iCal recurrence rule.")
- )
- next_run = models.DateTimeField(
- null=True,
- default=None,
- editable=False,
- help_text=_("The next time that the scheduled action will run.")
+ null=True, default=None, editable=False, help_text=_("The last occurrence of the schedule occurs before this time, aftewards the schedule expires.")
)
+    rrule = models.CharField(max_length=255, help_text=_("A value representing the schedule's iCal recurrence rule."))
+ next_run = models.DateTimeField(null=True, default=None, editable=False, help_text=_("The next time that the scheduled action will run."))
@classmethod
def get_zoneinfo(self):
@@ -113,7 +92,7 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
def timezone(self):
utc = tzutc()
all_zones = Schedule.get_zoneinfo()
- all_zones.sort(key = lambda x: -len(x))
+ all_zones.sort(key=lambda x: -len(x))
for r in Schedule.rrulestr(self.rrule)._rrule:
if r._dtstart:
tzinfo = r._dtstart.tzinfo
@@ -169,17 +148,11 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
# What is the DTSTART timezone for:
# DTSTART;TZID=America/New_York:20200601T120000 RRULE:...;UNTIL=20200601T170000Z
# local_tz = tzfile('/usr/share/zoneinfo/America/New_York')
- local_tz = dateutil.rrule.rrulestr(
- rrule.replace(naive_until, naive_until + 'Z'),
- tzinfos=UTC_TIMEZONES
- )._dtstart.tzinfo
+ local_tz = dateutil.rrule.rrulestr(rrule.replace(naive_until, naive_until + 'Z'), tzinfos=UTC_TIMEZONES)._dtstart.tzinfo
# Make a datetime object with tzinfo=<the DTSTART timezone>
# localized_until = datetime.datetime(2020, 6, 1, 17, 0, tzinfo=tzfile('/usr/share/zoneinfo/America/New_York'))
- localized_until = make_aware(
- datetime.datetime.strptime(re.sub('^UNTIL=', '', naive_until), "%Y%m%dT%H%M%S"),
- local_tz
- )
+ localized_until = make_aware(datetime.datetime.strptime(re.sub('^UNTIL=', '', naive_until), "%Y%m%dT%H%M%S"), local_tz)
# Coerce the datetime to UTC and format it as a string w/ Zulu format
# utc_until = UNTIL=20200601T220000Z
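
The same UNTIL coercion in isolation, assuming python-dateutil is installed (timezone and timestamps are illustrative):

    import datetime
    from dateutil import tz

    local_tz = tz.gettz('America/New_York')
    naive_until = datetime.datetime.strptime('20200101T170000', "%Y%m%dT%H%M%S")
    localized = naive_until.replace(tzinfo=local_tz)
    print(localized.astimezone(tz.UTC).strftime('%Y%m%dT%H%M%SZ'))  # 20200101T220000Z
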
@@ -201,15 +174,9 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
for r in x._rrule:
if r._dtstart and r._dtstart.tzinfo is None:
- raise ValueError(
- 'A valid TZID must be provided (e.g., America/New_York)'
- )
-
- if (
- fast_forward and
- ('MINUTELY' in rrule or 'HOURLY' in rrule) and
- 'COUNT=' not in rrule
- ):
+ raise ValueError('A valid TZID must be provided (e.g., America/New_York)')
+
+ if fast_forward and ('MINUTELY' in rrule or 'HOURLY' in rrule) and 'COUNT=' not in rrule:
try:
first_event = x[0]
# If the first event was over a week ago...
diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py
index e803a29c0c..176896f19b 100644
--- a/awx/main/models/unified_jobs.py
+++ b/awx/main/models/unified_jobs.py
@@ -30,22 +30,24 @@ from rest_framework.exceptions import ParseError
from polymorphic.models import PolymorphicModel
# AWX
-from awx.main.models.base import (
- CommonModelNameNotUnique,
- PasswordFieldsModel,
- NotificationFieldsModel,
- prevent_search
-)
+from awx.main.models.base import CommonModelNameNotUnique, PasswordFieldsModel, NotificationFieldsModel, prevent_search
from awx.main.dispatch import get_local_queuename
from awx.main.dispatch.control import Control as ControlDispatcher
from awx.main.registrar import activity_stream_registrar
from awx.main.models.mixins import ResourceMixin, TaskManagerUnifiedJobMixin, ExecutionEnvironmentMixin
from awx.main.utils import (
- camelcase_to_underscore, get_model_for_type,
- encrypt_dict, decrypt_field, _inventory_updates,
- copy_model_by_class, copy_m2m_relationships,
- get_type_for_model, parse_yaml_or_json, getattr_dne,
- polymorphic, schedule_task_manager
+ camelcase_to_underscore,
+ get_model_for_type,
+ encrypt_dict,
+ decrypt_field,
+ _inventory_updates,
+ copy_model_by_class,
+ copy_m2m_relationships,
+ get_type_for_model,
+ parse_yaml_or_json,
+ getattr_dne,
+ polymorphic,
+ schedule_task_manager,
)
from awx.main.constants import ACTIVE_STATES, CAN_CANCEL
from awx.main.redact import UriCleaner, REPLACE_STR
@@ -60,40 +62,40 @@ logger_job_lifecycle = logging.getLogger('awx.analytics.job_lifecycle')
class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEnvironmentMixin, NotificationFieldsModel):
- '''
+ """
Concrete base class for unified job templates.
- '''
+ """
# status inherits from related jobs. Thus, status must be able to be set to any status that a job status is settable to.
JOB_STATUS_CHOICES = [
- ('new', _('New')), # Job has been created, but not started.
- ('pending', _('Pending')), # Job is pending Task Manager processing (blocked by dependency req, capacity or a concurrent job)
- ('waiting', _('Waiting')), # Job has been assigned to run on a specific node (and is about to run).
- ('running', _('Running')), # Job is currently running.
- ('successful', _('Successful')), # Job completed successfully.
- ('failed', _('Failed')), # Job completed, but with failures.
- ('error', _('Error')), # The job was unable to run.
- ('canceled', _('Canceled')), # The job was canceled before completion.
+ ('new', _('New')), # Job has been created, but not started.
+ ('pending', _('Pending')), # Job is pending Task Manager processing (blocked by dependency req, capacity or a concurrent job)
+ ('waiting', _('Waiting')), # Job has been assigned to run on a specific node (and is about to run).
+ ('running', _('Running')), # Job is currently running.
+ ('successful', _('Successful')), # Job completed successfully.
+ ('failed', _('Failed')), # Job completed, but with failures.
+ ('error', _('Error')), # The job was unable to run.
+ ('canceled', _('Canceled')), # The job was canceled before completion.
]
COMMON_STATUS_CHOICES = JOB_STATUS_CHOICES + [
- ('never updated', _('Never Updated')), # A job has never been run using this template.
+ ('never updated', _('Never Updated')), # A job has never been run using this template.
]
PROJECT_STATUS_CHOICES = COMMON_STATUS_CHOICES + [
- ('ok', _('OK')), # Project is not configured for SCM and path exists.
- ('missing', _('Missing')), # Project path does not exist.
+ ('ok', _('OK')), # Project is not configured for SCM and path exists.
+ ('missing', _('Missing')), # Project path does not exist.
]
INVENTORY_SOURCE_STATUS_CHOICES = COMMON_STATUS_CHOICES + [
- ('none', _('No External Source')), # Inventory source is not configured to update from an external source.
+ ('none', _('No External Source')), # Inventory source is not configured to update from an external source.
]
JOB_TEMPLATE_STATUS_CHOICES = COMMON_STATUS_CHOICES
DEPRECATED_STATUS_CHOICES = [
# No longer used for Project / Inventory Source:
- ('updating', _('Updating')), # Same as running.
+ ('updating', _('Updating')), # Same as running.
]
ALL_STATUS_CHOICES = OrderedDict(PROJECT_STATUS_CHOICES + INVENTORY_SOURCE_STATUS_CHOICES + JOB_TEMPLATE_STATUS_CHOICES + DEPRECATED_STATUS_CHOICES).items()
@@ -103,7 +105,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
ordering = ('name',)
# unique_together here is intentionally commented out. Please make sure sub-classes of this model
# contain at least this uniqueness restriction: SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name')]
- #unique_together = [('polymorphic_ctype', 'name', 'organization')]
+ # unique_together = [('polymorphic_ctype', 'name', 'organization')]
old_pk = models.PositiveIntegerField(
null=True,
@@ -135,16 +137,16 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
default=None,
editable=False,
)
- #on_missed_schedule = models.CharField(
+ # on_missed_schedule = models.CharField(
# max_length=32,
# choices=[],
- #)
+ # )
next_job_run = models.DateTimeField(
null=True,
default=None,
editable=False,
)
- next_schedule = models.ForeignKey( # Schedule entry responsible for next_job_run.
+ next_schedule = models.ForeignKey( # Schedule entry responsible for next_job_run.
'Schedule',
null=True,
default=None,
@@ -170,16 +172,8 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
'Credential',
related_name='%(class)ss',
)
- labels = models.ManyToManyField(
- "Label",
- blank=True,
- related_name='%(class)s_labels'
- )
- instance_groups = OrderedManyToManyField(
- 'InstanceGroup',
- blank=True,
- through='UnifiedJobTemplateInstanceGroupMembership'
- )
+ labels = models.ManyToManyField("Label", blank=True, related_name='%(class)s_labels')
+ instance_groups = OrderedManyToManyField('InstanceGroup', blank=True, through='UnifiedJobTemplateInstanceGroupMembership')
def get_absolute_url(self, request=None):
real_instance = self.get_real_instance()
@@ -198,23 +192,21 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
@classmethod
def _submodels_with_roles(cls):
- ujt_classes = [c for c in cls.__subclasses__()
- if c._meta.model_name not in ['inventorysource', 'systemjobtemplate']]
+ ujt_classes = [c for c in cls.__subclasses__() if c._meta.model_name not in ['inventorysource', 'systemjobtemplate']]
ct_dict = ContentType.objects.get_for_models(*ujt_classes)
return [ct.id for ct in ct_dict.values()]
@classmethod
def accessible_pk_qs(cls, accessor, role_field):
- '''
+ """
A re-implementation of accessible pk queryset for the "normal" unified JTs.
Does not return inventory sources or system JTs, these should
be handled inside of get_queryset where it is utilized.
- '''
+ """
# do not use this if in a subclass
if cls != UnifiedJobTemplate:
return super(UnifiedJobTemplate, cls).accessible_pk_qs(accessor, role_field)
- return ResourceMixin._accessible_pk_qs(
- cls, accessor, role_field, content_types=cls._submodels_with_roles())
+ return ResourceMixin._accessible_pk_qs(cls, accessor, role_field, content_types=cls._submodels_with_roles())
def _perform_unique_checks(self, unique_checks):
# Handle the list of unique fields returned above. Replace with an
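# Illustrative sketch, separate from the diff: the '''-to-""" changes above
# normalize docstring quoting to the PEP 257 style. Assuming default black
# behavior, multi-line docstrings come out as:
def accessible_example():
    """Reformatted docstrings use three double quotes."""
    return None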
@@ -246,19 +238,19 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
exclude = [x for x in exclude if x != 'polymorphic_ctype']
return super(UnifiedJobTemplate, self).validate_unique(exclude)
- @property # Alias for backwards compatibility.
+ @property # Alias for backwards compatibility.
def current_update(self):
return self.current_job
- @property # Alias for backwards compatibility.
+ @property # Alias for backwards compatibility.
def last_update(self):
return self.last_job
- @property # Alias for backwards compatibility.
+ @property # Alias for backwards compatibility.
def last_update_failed(self):
return self.last_job_failed
- @property # Alias for backwards compatibility.
+ @property # Alias for backwards compatibility.
def last_updated(self):
return self.last_job_run
@@ -285,7 +277,6 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
# Do the actual save.
super(UnifiedJobTemplate, self).save(*args, **kwargs)
-
def _get_current_status(self):
# Override in subclasses as needed.
if self.current_job and self.current_job.status:
@@ -305,8 +296,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
def _set_status_and_last_job_run(self, save=True):
status = self._get_current_status()
last_job_run = self._get_last_job_run()
- return self.update_fields(status=status, last_job_run=last_job_run,
- save=save)
+ return self.update_fields(status=status, last_job_run=last_job_run, save=save)
def _can_update(self):
# Override in subclasses as needed.
@@ -324,24 +314,25 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
@classmethod
def _get_unified_job_class(cls):
- '''
+ """
Return subclass of UnifiedJob that is created from this template.
- '''
- raise NotImplementedError # Implement in subclass.
+ """
+ raise NotImplementedError # Implement in subclass.
@property
def notification_templates(self):
- '''
+ """
Return notification_templates relevant to this Unified Job Template
- '''
+ """
# NOTE: Derived classes should implement
from awx.main.models.notifications import NotificationTemplate
+
return NotificationTemplate.objects.none()
def create_unified_job(self, **kwargs):
- '''
+ """
Create a new unified job based on this unified job template.
- '''
+ """
new_job_passwords = kwargs.pop('survey_passwords', {})
eager_fields = kwargs.pop('_eager_fields', None)
@@ -364,9 +355,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
validated_kwargs = kwargs.copy()
if unallowed_fields:
if parent_field_name is None:
- logger.warn('Fields {} are not allowed as overrides to spawn from {}.'.format(
- ', '.join(unallowed_fields), self
- ))
+ logger.warn('Fields {} are not allowed as overrides to spawn from {}.'.format(', '.join(unallowed_fields), self))
for f in unallowed_fields:
validated_kwargs.pop(f)
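# Sketch of the line-length effect in this hunk: wrapped calls are re-joined
# whenever they fit the configured limit. The widths throughout this commit
# imply a limit well above black's default of 88; presumably something like
# "black --line-length 160", though the exact setting is an assumption here.
def collapse_example(unallowed_fields, template):
    # One physical line, since it fits the (assumed) 160-column budget.
    return 'Fields {} are not allowed as overrides to spawn from {}.'.format(', '.join(unallowed_fields), template)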
@@ -393,6 +382,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
kwargs['survey_passwords'] = new_job_passwords # saved in config object for relaunch
from awx.main.signals import disable_activity_stream, activity_stream_create
+
with disable_activity_stream():
# Don't emit the activity stream record here for creation,
# because we haven't attached important M2M relations yet, like
@@ -427,10 +417,10 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
@classmethod
def get_ask_mapping(cls):
- '''
+ """
Creates dictionary that maps the unified job field (keys)
to the field that enables prompting for the field (values)
- '''
+ """
mapping = {}
for field in cls._meta.fields:
if isinstance(field, AskForField):
@@ -442,10 +432,10 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
return cls._get_unified_job_field_names()
def copy_unified_jt(self):
- '''
+ """
Returns saved object, including related fields.
Create a copy of this unified job template.
- '''
+ """
unified_jt_class = self.__class__
fields = self._get_unified_jt_copy_names()
unified_jt = copy_model_by_class(self, unified_jt_class, fields, {})
@@ -458,9 +448,9 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
return unified_jt
def _accept_or_ignore_job_kwargs(self, _exclude_errors=(), **kwargs):
- '''
+ """
Override in subclass if template accepts _any_ prompted params
- '''
+ """
errors = {}
if kwargs:
for field_name in kwargs.keys():
@@ -468,11 +458,11 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
return ({}, kwargs, errors)
def accept_or_ignore_variables(self, data, errors=None, _exclude_errors=(), extra_passwords=None):
- '''
+ """
If subclasses accept any `variables` or `extra_vars`, they should
define _accept_or_ignore_variables to place those variables in the accepted dict,
according to the acceptance rules of the template.
- '''
+ """
if errors is None:
errors = {}
if not isinstance(data, dict):
@@ -486,14 +476,13 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
# resolution order, forced by how metaclass processes fields,
# thus the need for hasattr check
if extra_passwords:
- return self._accept_or_ignore_variables(
- data, errors, _exclude_errors=_exclude_errors, extra_passwords=extra_passwords)
+ return self._accept_or_ignore_variables(data, errors, _exclude_errors=_exclude_errors, extra_passwords=extra_passwords)
else:
return self._accept_or_ignore_variables(data, errors, _exclude_errors=_exclude_errors)
elif data:
errors['extra_vars'] = [
- _('Variables {list_of_keys} provided, but this template cannot accept variables.'.format(
- list_of_keys=', '.join(data.keys())))]
+ _('Variables {list_of_keys} provided, but this template cannot accept variables.'.format(list_of_keys=', '.join(data.keys())))
+ ]
return ({}, data, errors)
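# Toy counterpart to the errors['extra_vars'] change above: once a bracketed
# expression no longer fits on one line, black gives every element its own line
# and appends a trailing comma (the "magic trailing comma").
errors_example = {
    'extra_vars': [
        'Variables a, b provided, but this template cannot accept variables.',
    ],
}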
@@ -510,7 +499,6 @@ class UnifiedJobTypeStringMixin(object):
class UnifiedJobDeprecatedStdout(models.Model):
-
class Meta:
managed = False
db_table = 'main_unifiedjob'
@@ -522,30 +510,30 @@ class UnifiedJobDeprecatedStdout(models.Model):
class StdoutMaxBytesExceeded(Exception):
-
def __init__(self, total, supported):
self.total = total
self.supported = supported
-class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique,
- UnifiedJobTypeStringMixin, TaskManagerUnifiedJobMixin, ExecutionEnvironmentMixin):
- '''
+class UnifiedJob(
+ PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique, UnifiedJobTypeStringMixin, TaskManagerUnifiedJobMixin, ExecutionEnvironmentMixin
+):
+ """
Concrete base class for unified job run by the task engine.
- '''
+ """
STATUS_CHOICES = UnifiedJobTemplate.JOB_STATUS_CHOICES
LAUNCH_TYPE_CHOICES = [
- ('manual', _('Manual')), # Job was started manually by a user.
- ('relaunch', _('Relaunch')), # Job was started via relaunch.
- ('callback', _('Callback')), # Job was started via host callback.
- ('scheduled', _('Scheduled')), # Job was started from a schedule.
- ('dependency', _('Dependency')), # Job was started as a dependency of another job.
- ('workflow', _('Workflow')), # Job was started from a workflow job.
- ('webhook', _('Webhook')), # Job was started from a webhook event.
- ('sync', _('Sync')), # Job was started from a project sync.
- ('scm', _('SCM Update')) # Job was created as an Inventory SCM sync.
+ ('manual', _('Manual')), # Job was started manually by a user.
+ ('relaunch', _('Relaunch')), # Job was started via relaunch.
+ ('callback', _('Callback')), # Job was started via host callback.
+ ('scheduled', _('Scheduled')), # Job was started from a schedule.
+ ('dependency', _('Dependency')), # Job was started as a dependency of another job.
+ ('workflow', _('Workflow')), # Job was started from a workflow job.
+ ('webhook', _('Webhook')), # Job was started from a webhook event.
+ ('sync', _('Sync')), # Job was started from a project sync.
+ ('scm', _('SCM Update')), # Job was created as an Inventory SCM sync.
]
PASSWORD_FIELDS = ('start_args',)
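# Note the last LAUNCH_TYPE_CHOICES entry above: black adds a trailing comma to
# the final element of a multi-line literal, so appending later touches only one
# line. Reduced sketch:
LAUNCH_EXAMPLE = [
    ('manual', 'Manual'),
    ('scm', 'SCM Update'),  # trailing comma added by the formatter
]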
@@ -565,7 +553,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
)
unified_job_template = models.ForeignKey(
'UnifiedJobTemplate',
- null=True, # Some jobs can be run without a template.
+ null=True, # Some jobs can be run without a template.
default=None,
editable=False,
related_name='%(class)s_unified_jobs',
@@ -576,14 +564,8 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
editable=False,
db_index=True, # add an index, this is a commonly queried field
)
- launch_type = models.CharField(
- max_length=20,
- choices=LAUNCH_TYPE_CHOICES,
- default='manual',
- editable=False,
- db_index=True
- )
- schedule = models.ForeignKey( # Which schedule entry was responsible for starting this job.
+ launch_type = models.CharField(max_length=20, choices=LAUNCH_TYPE_CHOICES, default='manual', editable=False, db_index=True)
+ schedule = models.ForeignKey( # Which schedule entry was responsible for starting this job.
'Schedule',
null=True,
default=None,
@@ -635,9 +617,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
help_text=_("The date and time the job was queued for starting."),
)
dependencies_processed = models.BooleanField(
- default=False,
- editable=False,
- help_text=_("If True, the task manager has already processed potential dependencies for this job.")
+ default=False, editable=False, help_text=_("If True, the task manager has already processed potential dependencies for this job.")
)
finished = models.DateTimeField(
null=True,
@@ -659,33 +639,39 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
editable=False,
help_text=_("Elapsed time in seconds that the job ran."),
)
- job_args = prevent_search(models.TextField(
- blank=True,
- default='',
- editable=False,
- ))
+ job_args = prevent_search(
+ models.TextField(
+ blank=True,
+ default='',
+ editable=False,
+ )
+ )
job_cwd = models.CharField(
max_length=1024,
blank=True,
default='',
editable=False,
)
- job_env = prevent_search(JSONField(
- blank=True,
- default=dict,
- editable=False,
- ))
+ job_env = prevent_search(
+ JSONField(
+ blank=True,
+ default=dict,
+ editable=False,
+ )
+ )
job_explanation = models.TextField(
blank=True,
default='',
editable=False,
help_text=_("A status field to indicate the state of the job if it wasn't able to run and capture stdout"),
)
- start_args = prevent_search(models.TextField(
- blank=True,
- default='',
- editable=False,
- ))
+ start_args = prevent_search(
+ models.TextField(
+ blank=True,
+ default='',
+ editable=False,
+ )
+ )
result_traceback = models.TextField(
blank=True,
default='',
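# Sketch of the nested-call layout used for prevent_search(models.TextField(...))
# above: when a wrapper call cannot fit on one line, black indents the inner call
# as its own bracketed block. prevent_search_example is a hypothetical stand-in,
# not the real AWX helper.
def prevent_search_example(field):
    return field


job_args_example = prevent_search_example(
    dict(
        blank=True,
        default='',
        editable=False,
    )
)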
@@ -697,11 +683,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
default='',
editable=False,
)
- labels = models.ManyToManyField(
- "Label",
- blank=True,
- related_name='%(class)s_labels'
- )
+ labels = models.ManyToManyField("Label", blank=True, related_name='%(class)s_labels')
instance_group = models.ForeignKey(
'InstanceGroup',
blank=True,
@@ -752,7 +734,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
@classmethod
def _get_task_class(cls):
- raise NotImplementedError # Implement in subclasses.
+ raise NotImplementedError # Implement in subclasses.
@classmethod
def supports_isolation(cls):
@@ -763,14 +745,14 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
return False
def _get_parent_field_name(self):
- return 'unified_job_template' # Override in subclasses.
+ return 'unified_job_template' # Override in subclasses.
@classmethod
def _get_unified_job_template_class(cls):
- '''
+ """
Return subclass of UnifiedJobTemplate that applies to this unified job.
- '''
- raise NotImplementedError # Implement in subclass.
+ """
+ raise NotImplementedError # Implement in subclass.
def _global_timeout_setting(self):
"Override in child classes, None value indicates this is not configurable"
@@ -900,10 +882,10 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
return result
def copy_unified_job(self, _eager_fields=None, **new_prompts):
- '''
+ """
Returns saved object, including related fields.
Create a copy of this unified job for the purpose of relaunch
- '''
+ """
unified_job_class = self.__class__
unified_jt_class = self._get_unified_job_template_class()
parent_field_name = self._get_parent_field_name()
@@ -927,16 +909,17 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
# Labels copied here
from awx.main.signals import disable_activity_stream
+
with disable_activity_stream():
copy_m2m_relationships(self, unified_job, fields)
return unified_job
def launch_prompts(self):
- '''
+ """
Return dictionary of prompts job was launched with
returns None if unknown
- '''
+ """
JobLaunchConfig = self._meta.get_field('launch_config').related_model
try:
config = self.launch_config
@@ -945,10 +928,10 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
return None
def create_config_from_prompts(self, kwargs, parent=None):
- '''
+ """
Create a launch configuration entry for this job, given prompts
returns None if it can not be created
- '''
+ """
JobLaunchConfig = self._meta.get_field('launch_config').related_model
config = JobLaunchConfig(job=self)
if parent is None:
@@ -1016,9 +999,9 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
@property
def event_processing_finished(self):
- '''
+ """
Returns True / False, whether all events from job have been saved
- '''
+ """
if self.status in ACTIVE_STATES:
return False # tally of events is only available at end of run
try:
@@ -1051,13 +1034,10 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
if not os.path.exists(settings.JOBOUTPUT_ROOT):
os.makedirs(settings.JOBOUTPUT_ROOT)
fd = tempfile.NamedTemporaryFile(
- mode='w',
- prefix='{}-{}-'.format(self.model_to_str(), self.pk),
- suffix='.out',
- dir=settings.JOBOUTPUT_ROOT,
- encoding='utf-8'
+ mode='w', prefix='{}-{}-'.format(self.model_to_str(), self.pk), suffix='.out', dir=settings.JOBOUTPUT_ROOT, encoding='utf-8'
)
from awx.main.tasks import purge_old_stdout_files # circular import
+
purge_old_stdout_files.apply_async()
# Before the addition of event-based stdout, older versions of
@@ -1092,9 +1072,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
# detect the length of all stdout for this UnifiedJob, and
# if it exceeds settings.STDOUT_MAX_BYTES_DISPLAY bytes,
# don't bother actually fetching the data
- total = self.get_event_queryset().aggregate(
- total=models.Sum(models.Func(models.F('stdout'), function='LENGTH'))
- )['total'] or 0
+ total = self.get_event_queryset().aggregate(total=models.Sum(models.Func(models.F('stdout'), function='LENGTH')))['total'] or 0
if total > max_supported:
raise StdoutMaxBytesExceeded(total, max_supported)
@@ -1106,11 +1084,9 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
cursor.copy_expert(
"copy (select stdout from {} where {}={} and stdout != '' order by start_line) to stdout".format(
- self._meta.db_table + 'event',
- self.event_parent_key,
- self.id
+ self._meta.db_table + 'event', self.event_parent_key, self.id
),
- fd
+ fd,
)
if hasattr(fd, 'name'):
@@ -1154,7 +1130,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
end_line = int(end_line)
stdout_lines = self.result_stdout_raw_handle().readlines()
absolute_end = len(stdout_lines)
- for line in stdout_lines[int(start_line):end_line]:
+ for line in stdout_lines[int(start_line) : end_line]:
return_buffer.write(line)
if int(start_line) < 0:
start_actual = len(stdout_lines) + int(start_line)
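# The slice above gained spaces around ":". Black treats the slice colon as a
# binary operator, so a compound bound such as int(start_line) gets symmetric
# spacing; flake8 flags that as E203, which black-formatted projects typically
# silence (an assumption about the lint config, which this diff does not show).
stdout_lines = ['line1\n', 'line2\n', 'line3\n']
start_line = '1'
window = stdout_lines[int(start_line) : 3]  # spacing preserved by black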
@@ -1243,14 +1219,13 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
@property
def task_impact(self):
- raise NotImplementedError # Implement in subclass.
+ raise NotImplementedError # Implement in subclass.
def websocket_emit_data(self):
''' Return extra data that should be included when submitting data to the browser over the websocket connection '''
websocket_data = dict(type=self.job_type_name)
if self.spawned_by_workflow:
- websocket_data.update(dict(workflow_job_id=self.workflow_job_id,
- workflow_node_id=self.workflow_node_id))
+ websocket_data.update(dict(workflow_job_id=self.workflow_job_id, workflow_node_id=self.workflow_node_id))
return websocket_data
def _websocket_emit_status(self, status):
@@ -1282,14 +1257,16 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
connection.on_commit(lambda: self.update_webhook_status(status))
def notification_data(self):
- return dict(id=self.id,
- name=self.name,
- url=self.get_ui_url(),
- created_by=smart_text(self.created_by),
- started=self.started.isoformat() if self.started is not None else None,
- finished=self.finished.isoformat() if self.finished is not None else None,
- status=self.status,
- traceback=self.result_traceback)
+ return dict(
+ id=self.id,
+ name=self.name,
+ url=self.get_ui_url(),
+ created_by=smart_text(self.created_by),
+ started=self.started.isoformat() if self.started is not None else None,
+ finished=self.finished.isoformat() if self.finished is not None else None,
+ status=self.status,
+ traceback=self.result_traceback,
+ )
def pre_start(self, **kwargs):
if not self.can_start:
@@ -1307,9 +1284,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
if missing_credential_inputs:
self.job_explanation = '{} cannot start because Credential {} does not provide one or more required fields ({}).'.format(
- self._meta.verbose_name.title(),
- credential.name,
- ', '.join(sorted(missing_credential_inputs))
+ self._meta.verbose_name.title(), credential.name, ', '.join(sorted(missing_credential_inputs))
)
self.save(update_fields=['job_explanation'])
return (False, None)
@@ -1326,7 +1301,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
opts = dict([(field, start_args.get(field, '')) for field in needed])
if not all(opts.values()):
- missing_fields = ', '.join([k for k,v in opts.items() if not v])
+ missing_fields = ', '.join([k for k, v in opts.items() if not v])
self.job_explanation = u'Missing needed fields: %s.' % missing_fields
self.save(update_fields=['job_explanation'])
return (False, None)
@@ -1367,35 +1342,26 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
# Actually tell the task runner to run this task.
# FIXME: This will deadlock the task runner
- #from awx.main.tasks import notify_task_runner
- #notify_task_runner.delay({'id': self.id, 'metadata': kwargs,
+ # from awx.main.tasks import notify_task_runner
+ # notify_task_runner.delay({'id': self.id, 'metadata': kwargs,
# 'task_type': task_type})
# Done!
return True
-
@property
def actually_running(self):
# returns True if the job is running in the appropriate dispatcher process
running = False
- if all([
- self.status == 'running',
- self.celery_task_id,
- self.execution_node
- ]):
+ if all([self.status == 'running', self.celery_task_id, self.execution_node]):
# If the job is marked as running, but the dispatcher
# doesn't know about it (or the dispatcher doesn't reply),
# then cancel the job
timeout = 5
try:
- running = self.celery_task_id in ControlDispatcher(
- 'dispatcher', self.controller_node or self.execution_node
- ).running(timeout=timeout)
+ running = self.celery_task_id in ControlDispatcher('dispatcher', self.controller_node or self.execution_node).running(timeout=timeout)
except (socket.timeout, RuntimeError):
- logger.error('could not reach dispatcher on {} within {}s'.format(
- self.execution_node, timeout
- ))
+ logger.error('could not reach dispatcher on {} within {}s'.format(self.execution_node, timeout))
running = False
return running
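# Two small rules from this hunk, shown on toy values: block comments must read
# "# ..." (one space after the hash, even for commented-out code), and a short
# multi-line all([...]) is re-joined once it fits the configured width.
status, celery_task_id, execution_node = 'running', 'abc123', 'node-1'
running_example = all([status == 'running', celery_task_id, execution_node])
# from awx.main.tasks import notify_task_runner  (still disabled, now "# "-prefixed)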
@@ -1405,8 +1371,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
def _build_job_explanation(self):
if not self.job_explanation:
- return 'Previous Task Canceled: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % \
- (self.model_to_str(), self.name, self.id)
+ return 'Previous Task Canceled: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % (self.model_to_str(), self.name, self.id)
return None
def cancel(self, job_explanation=None, is_chain=False):
@@ -1434,9 +1399,9 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
@property
def preferred_instance_groups(self):
- '''
+ """
Return Instance/Rampart Groups preferred by this unified job templates
- '''
+ """
if not self.unified_job_template:
return []
template_groups = [x for x in self.unified_job_template.instance_groups.all()]
@@ -1445,16 +1410,17 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
@property
def global_instance_groups(self):
from awx.main.models.ha import InstanceGroup
+
default_instance_group = InstanceGroup.objects.filter(name='tower')
if default_instance_group.exists():
return [default_instance_group.first()]
return []
def awx_meta_vars(self):
- '''
+ """
The result of this method is used as extra_vars of a job launched
by AWX, for purposes of client playbook hooks
- '''
+ """
r = {}
for name in ('awx', 'tower'):
r['{}_job_id'.format(name)] = self.pk
@@ -1507,9 +1473,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
return False
def log_lifecycle(self, state, blocked_by=None):
- extra={'type': self._meta.model_name,
- 'task_id': self.id,
- 'state': state}
+ extra = {'type': self._meta.model_name, 'task_id': self.id, 'state': state}
if self.unified_job_template:
extra["template_name"] = self.unified_job_template.name
if state == "blocked" and blocked_by:
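# End of the unified_jobs changes. One recurring pattern above: the formatter
# leaves a blank line after an import statement even inside a function body,
# which is why deferred imports such as disable_activity_stream now have an
# empty line under them. Toy reproduction:
def deferred_import_example():
    import json

    return json.dumps({'ok': True})  # blank line above inserted by the formatter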
diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py
index d9ac8afcf9..ff4ba37f68 100644
--- a/awx/main/models/workflow.py
+++ b/awx/main/models/workflow.py
@@ -13,7 +13,8 @@ from django.db import connection, models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ObjectDoesNotExist
-#from django import settings as tower_settings
+
+# from django import settings as tower_settings
# Django-CRUM
from crum import get_current_user
@@ -23,17 +24,10 @@ from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError
# AWX
from awx.api.versioning import reverse
-from awx.main.models import (prevent_search, accepts_json, UnifiedJobTemplate,
- UnifiedJob)
-from awx.main.models.notifications import (
- NotificationTemplate,
- JobNotificationMixin
-)
+from awx.main.models import prevent_search, accepts_json, UnifiedJobTemplate, UnifiedJob
+from awx.main.models.notifications import NotificationTemplate, JobNotificationMixin
from awx.main.models.base import CreatedModifiedModel, VarsDictProperty
-from awx.main.models.rbac import (
- ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
- ROLE_SINGLETON_SYSTEM_AUDITOR
-)
+from awx.main.models.rbac import ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR
from awx.main.fields import ImplicitRoleField, AskForField
from awx.main.models.mixins import (
ResourceMixin,
@@ -50,8 +44,15 @@ from awx.main.fields import JSONField
from awx.main.utils import schedule_task_manager
-__all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowJobNode',
- 'WorkflowJobTemplateNode', 'WorkflowApprovalTemplate', 'WorkflowApproval']
+__all__ = [
+ 'WorkflowJobTemplate',
+ 'WorkflowJob',
+ 'WorkflowJobOptions',
+ 'WorkflowJobNode',
+ 'WorkflowJobTemplateNode',
+ 'WorkflowApprovalTemplate',
+ 'WorkflowApproval',
+]
logger = logging.getLogger('awx.main.models.workflow')
@@ -81,9 +82,7 @@ class WorkflowNodeBase(CreatedModifiedModel, LaunchTimeConfig):
related_name='%(class)ss_always',
)
all_parents_must_converge = models.BooleanField(
- default=False,
- help_text=_("If enabled then the node will only run if all of the parent nodes "
- "have met the criteria to reach this node")
+ default=False, help_text=_("If enabled then the node will only run if all of the parent nodes " "have met the criteria to reach this node")
)
unified_job_template = models.ForeignKey(
'UnifiedJobTemplate',
@@ -103,17 +102,24 @@ class WorkflowNodeBase(CreatedModifiedModel, LaunchTimeConfig):
@classmethod
def _get_workflow_job_field_names(cls):
- '''
+ """
Return field names that should be copied from template node to job node.
- '''
- return ['workflow_job', 'unified_job_template',
- 'extra_data', 'survey_passwords',
- 'inventory', 'credentials', 'char_prompts', 'all_parents_must_converge']
+ """
+ return [
+ 'workflow_job',
+ 'unified_job_template',
+ 'extra_data',
+ 'survey_passwords',
+ 'inventory',
+ 'credentials',
+ 'char_prompts',
+ 'all_parents_must_converge',
+ ]
def create_workflow_job_node(self, **kwargs):
- '''
+ """
Create a new workflow job node based on this workflow node.
- '''
+ """
create_kwargs = {}
for field_name in self._get_workflow_job_field_names():
if field_name == 'credentials':
@@ -135,9 +141,18 @@ class WorkflowNodeBase(CreatedModifiedModel, LaunchTimeConfig):
class WorkflowJobTemplateNode(WorkflowNodeBase):
FIELDS_TO_PRESERVE_AT_COPY = [
- 'unified_job_template', 'workflow_job_template', 'success_nodes', 'failure_nodes',
- 'always_nodes', 'credentials', 'inventory', 'extra_data', 'survey_passwords',
- 'char_prompts', 'all_parents_must_converge', 'identifier'
+ 'unified_job_template',
+ 'workflow_job_template',
+ 'success_nodes',
+ 'failure_nodes',
+ 'always_nodes',
+ 'credentials',
+ 'inventory',
+ 'extra_data',
+ 'survey_passwords',
+ 'char_prompts',
+ 'all_parents_must_converge',
+ 'identifier',
]
REENCRYPTION_BLOCKLIST_AT_COPY = ['extra_data', 'survey_passwords']
@@ -150,9 +165,7 @@ class WorkflowJobTemplateNode(WorkflowNodeBase):
max_length=512,
default=uuid4,
blank=False,
- help_text=_(
- 'An identifier for this node that is unique within its workflow. '
- 'It is copied to workflow job nodes corresponding to this node.'),
+ help_text=_('An identifier for this node that is unique within its workflow. ' 'It is copied to workflow job nodes corresponding to this node.'),
)
class Meta:
@@ -166,10 +179,10 @@ class WorkflowJobTemplateNode(WorkflowNodeBase):
return reverse('api:workflow_job_template_node_detail', kwargs={'pk': self.pk}, request=request)
def create_wfjt_node_copy(self, user, workflow_job_template=None):
- '''
+ """
Copy this node to a new WFJT, leaving out related fields the user
is not allowed to access
- '''
+ """
create_kwargs = {}
allowed_creds = []
for field_name in self._get_workflow_job_field_names():
@@ -226,9 +239,11 @@ class WorkflowJobNode(WorkflowNodeBase):
)
do_not_run = models.BooleanField(
default=False,
- help_text=_("Indicates that a job will not be created when True. Workflow runtime "
- "semantics will mark this True if the node is in a path that will "
- "decidedly not be ran. A value of False means the node may not run."),
+ help_text=_(
+ "Indicates that a job will not be created when True. Workflow runtime "
+ "semantics will mark this True if the node is in a path that will "
+ "decidedly not be ran. A value of False means the node may not run."
+ ),
)
identifier = models.CharField(
max_length=512,
@@ -260,12 +275,12 @@ class WorkflowJobNode(WorkflowNodeBase):
return r
def get_job_kwargs(self):
- '''
+ """
In advance of creating a new unified job as part of a workflow,
this method builds the attributes to use
It alters the node by saving its updated version of
ancestor_artifacts, making it available to subsequent nodes.
- '''
+ """
# reject/accept prompted fields
data = {}
ujt_obj = self.unified_job_template
@@ -279,11 +294,11 @@ class WorkflowJobNode(WorkflowNodeBase):
prompts_data['extra_vars'].update(self.workflow_job.extra_vars_dict)
accepted_fields, ignored_fields, errors = ujt_obj._accept_or_ignore_job_kwargs(**prompts_data)
if errors:
- logger.info(_('Bad launch configuration starting template {template_pk} as part of '
- 'workflow {workflow_pk}. Errors:\n{error_text}').format(
- template_pk=ujt_obj.pk,
- workflow_pk=self.pk,
- error_text=errors))
+ logger.info(
+ _('Bad launch configuration starting template {template_pk} as part of ' 'workflow {workflow_pk}. Errors:\n{error_text}').format(
+ template_pk=ujt_obj.pk, workflow_pk=self.pk, error_text=errors
+ )
+ )
data.update(accepted_fields) # missing fields are handled in the scheduler
try:
# config saved on the workflow job itself
@@ -347,13 +362,15 @@ class WorkflowJobOptions(LaunchTimeConfigBase):
class Meta:
abstract = True
- extra_vars = accepts_json(prevent_search(models.TextField(
- blank=True,
- default='',
- )))
- allow_simultaneous = models.BooleanField(
- default=False
+ extra_vars = accepts_json(
+ prevent_search(
+ models.TextField(
+ blank=True,
+ default='',
+ )
+ )
)
+ allow_simultaneous = models.BooleanField(default=False)
extra_vars_dict = VarsDictProperty('extra_vars', True)
@@ -404,9 +421,7 @@ class WorkflowJobOptions(LaunchTimeConfigBase):
class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTemplateMixin, ResourceMixin, RelatedJobsMixin, WebhookTemplateMixin):
SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]
- FIELDS_TO_PRESERVE_AT_COPY = [
- 'labels', 'organization', 'instance_groups', 'workflow_job_template_nodes', 'credentials', 'survey_spec'
- ]
+ FIELDS_TO_PRESERVE_AT_COPY = ['labels', 'organization', 'instance_groups', 'workflow_job_template_nodes', 'credentials', 'survey_spec']
class Meta:
app_label = 'main'
@@ -423,28 +438,30 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
blank=True,
default=False,
)
- notification_templates_approvals = models.ManyToManyField(
- "NotificationTemplate",
- blank=True,
- related_name='%(class)s_notification_templates_for_approvals'
- )
+ notification_templates_approvals = models.ManyToManyField("NotificationTemplate", blank=True, related_name='%(class)s_notification_templates_for_approvals')
- admin_role = ImplicitRoleField(parent_role=[
- 'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
- 'organization.workflow_admin_role'
- ])
- execute_role = ImplicitRoleField(parent_role=[
- 'admin_role',
- 'organization.execute_role',
- ])
- read_role = ImplicitRoleField(parent_role=[
- 'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
- 'organization.auditor_role', 'execute_role', 'admin_role',
- 'approval_role',
- ])
- approval_role = ImplicitRoleField(parent_role=[
- 'organization.approval_role', 'admin_role',
- ])
+ admin_role = ImplicitRoleField(parent_role=['singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, 'organization.workflow_admin_role'])
+ execute_role = ImplicitRoleField(
+ parent_role=[
+ 'admin_role',
+ 'organization.execute_role',
+ ]
+ )
+ read_role = ImplicitRoleField(
+ parent_role=[
+ 'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
+ 'organization.auditor_role',
+ 'execute_role',
+ 'admin_role',
+ 'approval_role',
+ ]
+ )
+ approval_role = ImplicitRoleField(
+ parent_role=[
+ 'organization.approval_role',
+ 'admin_role',
+ ]
+ )
@property
def workflow_nodes(self):
@@ -458,46 +475,50 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
def _get_unified_jt_copy_names(cls):
base_list = super(WorkflowJobTemplate, cls)._get_unified_jt_copy_names()
base_list.remove('labels')
- return (base_list |
- set(['survey_spec', 'survey_enabled', 'ask_variables_on_launch', 'organization']))
+ return base_list | set(['survey_spec', 'survey_enabled', 'ask_variables_on_launch', 'organization'])
def get_absolute_url(self, request=None):
return reverse('api:workflow_job_template_detail', kwargs={'pk': self.pk}, request=request)
@property
def cache_timeout_blocked(self):
- if WorkflowJob.objects.filter(workflow_job_template=self,
- status__in=['pending', 'waiting', 'running']).count() >= getattr(settings, 'SCHEDULE_MAX_JOBS', 10):
- logger.error("Workflow Job template %s could not be started because there are more than %s other jobs from that template waiting to run" %
- (self.name, getattr(settings, 'SCHEDULE_MAX_JOBS', 10)))
+ if WorkflowJob.objects.filter(workflow_job_template=self, status__in=['pending', 'waiting', 'running']).count() >= getattr(
+ settings, 'SCHEDULE_MAX_JOBS', 10
+ ):
+ logger.error(
+ "Workflow Job template %s could not be started because there are more than %s other jobs from that template waiting to run"
+ % (self.name, getattr(settings, 'SCHEDULE_MAX_JOBS', 10))
+ )
return True
return False
@property
def notification_templates(self):
base_notification_templates = NotificationTemplate.objects.all()
- error_notification_templates = list(base_notification_templates
- .filter(unifiedjobtemplate_notification_templates_for_errors__in=[self]))
- started_notification_templates = list(base_notification_templates
- .filter(unifiedjobtemplate_notification_templates_for_started__in=[self]))
- success_notification_templates = list(base_notification_templates
- .filter(unifiedjobtemplate_notification_templates_for_success__in=[self]))
- approval_notification_templates = list(base_notification_templates
- .filter(workflowjobtemplate_notification_templates_for_approvals__in=[self]))
+ error_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_errors__in=[self]))
+ started_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_started__in=[self]))
+ success_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_success__in=[self]))
+ approval_notification_templates = list(base_notification_templates.filter(workflowjobtemplate_notification_templates_for_approvals__in=[self]))
# Get Organization NotificationTemplates
if self.organization is not None:
- error_notification_templates = set(error_notification_templates + list(base_notification_templates.filter(
- organization_notification_templates_for_errors=self.organization)))
- started_notification_templates = set(started_notification_templates + list(base_notification_templates.filter(
- organization_notification_templates_for_started=self.organization)))
- success_notification_templates = set(success_notification_templates + list(base_notification_templates.filter(
- organization_notification_templates_for_success=self.organization)))
- approval_notification_templates = set(approval_notification_templates + list(base_notification_templates.filter(
- organization_notification_templates_for_approvals=self.organization)))
- return dict(error=list(error_notification_templates),
- started=list(started_notification_templates),
- success=list(success_notification_templates),
- approvals=list(approval_notification_templates))
+ error_notification_templates = set(
+ error_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_errors=self.organization))
+ )
+ started_notification_templates = set(
+ started_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_started=self.organization))
+ )
+ success_notification_templates = set(
+ success_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_success=self.organization))
+ )
+ approval_notification_templates = set(
+ approval_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_approvals=self.organization))
+ )
+ return dict(
+ error=list(error_notification_templates),
+ started=list(started_notification_templates),
+ success=list(success_notification_templates),
+ approvals=list(approval_notification_templates),
+ )
def create_unified_job(self, **kwargs):
workflow_job = super(WorkflowJobTemplate, self).create_unified_job(**kwargs)
@@ -516,9 +537,8 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
if field_name == 'extra_vars':
accepted_vars, rejected_vars, vars_errors = self.accept_or_ignore_variables(
- kwargs.get('extra_vars', {}),
- _exclude_errors=exclude_errors,
- extra_passwords=kwargs.get('survey_passwords', {}))
+ kwargs.get('extra_vars', {}), _exclude_errors=exclude_errors, extra_passwords=kwargs.get('survey_passwords', {})
+ )
if accepted_vars:
prompted_data['extra_vars'] = accepted_vars
if rejected_vars:
@@ -550,8 +570,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
return not bool(self.variables_needed_to_start)
def node_templates_missing(self):
- return [node.pk for node in self.workflow_job_template_nodes.filter(
- unified_job_template__isnull=True).all()]
+ return [node.pk for node in self.workflow_job_template_nodes.filter(unified_job_template__isnull=True).all()]
def node_prompts_rejected(self):
node_list = []
@@ -568,6 +587,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
'''
RelatedJobsMixin
'''
+
def _get_related_jobs(self):
return WorkflowJob.objects.filter(workflow_job_template=self)
@@ -592,12 +612,9 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
null=True,
default=None,
on_delete=models.SET_NULL,
- help_text=_("If automatically created for a sliced job run, the job template "
- "the workflow job was created from."),
- )
- is_sliced_job = models.BooleanField(
- default=False
+ help_text=_("If automatically created for a sliced job run, the job template " "the workflow job was created from."),
)
+ is_sliced_job = models.BooleanField(default=False)
@property
def workflow_nodes(self):
@@ -629,8 +646,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
if node.job is None:
node_job_description = 'no job.'
else:
- node_job_description = ('job #{0}, "{1}", which finished with status {2}.'
- .format(node.job.id, node.job.name, node.job.status))
+ node_job_description = 'job #{0}, "{1}", which finished with status {2}.'.format(node.job.id, node.job.name, node.job.status)
str_arr.append("- node #{0} spawns {1}".format(node.id, node_job_description))
result['body'] = '\n'.join(str_arr)
return result
@@ -649,8 +665,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
wj = self.get_workflow_job()
while wj and wj.workflow_job_template_id:
if wj.pk in wj_ids:
- logger.critical('Cycles detected in the workflow jobs graph, '
- 'this is not normal and suggests task manager degeneracy.')
+ logger.critical('Cycles detected in the workflow jobs graph, ' 'this is not normal and suggests task manager degeneracy.')
break
wj_ids.add(wj.pk)
ancestors.append(wj.workflow_job_template)
@@ -676,7 +691,10 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
class WorkflowApprovalTemplate(UnifiedJobTemplate, RelatedJobsMixin):
- FIELDS_TO_PRESERVE_AT_COPY = ['description', 'timeout',]
+ FIELDS_TO_PRESERVE_AT_COPY = [
+ 'description',
+ 'timeout',
+ ]
class Meta:
app_label = 'main'
@@ -705,6 +723,7 @@ class WorkflowApprovalTemplate(UnifiedJobTemplate, RelatedJobsMixin):
'''
RelatedJobsMixin
'''
+
def _get_related_jobs(self):
return UnifiedJob.objects.filter(unified_job_template=self)
@@ -726,10 +745,7 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
default=0,
help_text=_("The amount of time (in seconds) before the approval node expires and fails."),
)
- timed_out = models.BooleanField(
- default=False,
- help_text=_("Shows when an approval node (with a timeout assigned to it) has timed out.")
- )
+ timed_out = models.BooleanField(default=False, help_text=_("Shows when an approval node (with a timeout assigned to it) has timed out."))
approved_or_denied_by = models.ForeignKey(
'auth.User',
related_name='%s(class)s_approved+',
@@ -739,7 +755,6 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
on_delete=models.SET_NULL,
)
-
@classmethod
def _get_unified_job_template_class(cls):
return WorkflowApprovalTemplate
@@ -788,6 +803,7 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
def send_approval_notification(self, approval_status):
from awx.main.tasks import send_notifications # avoid circular import
+
if self.workflow_job_template is None:
return
for nt in self.workflow_job_template.notification_templates["approvals"]:
@@ -800,9 +816,10 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
# https://stackoverflow.com/a/3431699/10669572
def send_it(local_nt=nt, local_subject=notification_subject, local_body=notification_body):
def _func():
- send_notifications.delay([local_nt.generate_notification(local_subject, local_body).id],
- job_id=self.id)
+ send_notifications.delay([local_nt.generate_notification(local_subject, local_body).id], job_id=self.id)
+
return _func
+
connection.on_commit(send_it())
def build_approval_notification_message(self, nt, approval_status):
@@ -841,10 +858,12 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
def context(self, approval_status):
workflow_url = urljoin(settings.TOWER_URL_BASE, '/#/jobs/workflow/{}'.format(self.workflow_job.id))
- return {'approval_status': approval_status,
- 'approval_node_name': self.workflow_approval_template.name,
- 'workflow_url': workflow_url,
- 'job_metadata': json.dumps(self.notification_data(), indent=4)}
+ return {
+ 'approval_status': approval_status,
+ 'approval_node_name': self.workflow_approval_template.name,
+ 'workflow_url': workflow_url,
+ 'job_metadata': json.dumps(self.notification_data(), indent=4),
+ }
@property
def workflow_job_template(self):
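# Worth noting in the workflow.py hunks: black does not merge adjacent string
# literals, so help_text values written as "..." "..." keep their implicit
# concatenation and are merely re-flowed. The runtime value is a single string:
help_text_example = "If enabled then the node will only run if all of the parent nodes " "have met the criteria to reach this node"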
diff --git a/awx/main/notifications/base.py b/awx/main/notifications/base.py
index 66ac369cbc..23e8681211 100644
--- a/awx/main/notifications/base.py
+++ b/awx/main/notifications/base.py
@@ -5,6 +5,5 @@ from django.core.mail.backends.base import BaseEmailBackend
class AWXBaseEmailBackend(BaseEmailBackend):
-
def format_body(self, body):
return body
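# The whole base.py change is one rule: no blank line directly between a class
# statement and its first member. Sketch:
class BlankLineExample:
    def format_body(self, body):  # first statement now follows the class line
        return body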
diff --git a/awx/main/notifications/custom_notification_base.py b/awx/main/notifications/custom_notification_base.py
index f8da2dab52..52d1fea4fb 100644
--- a/awx/main/notifications/custom_notification_base.py
+++ b/awx/main/notifications/custom_notification_base.py
@@ -7,8 +7,9 @@ class CustomNotificationBase(object):
DEFAULT_BODY = "{{ job_friendly_name }} #{{ job.id }} had status {{ job.status }}, view details at {{ url }}\n\n{{ job_metadata }}"
DEFAULT_APPROVAL_RUNNING_MSG = 'The approval node "{{ approval_node_name }}" needs review. This node can be viewed at: {{ workflow_url }}'
- DEFAULT_APPROVAL_RUNNING_BODY = ('The approval node "{{ approval_node_name }}" needs review. '
- 'This approval node can be viewed at: {{ workflow_url }}\n\n{{ job_metadata }}')
+ DEFAULT_APPROVAL_RUNNING_BODY = (
+ 'The approval node "{{ approval_node_name }}" needs review. ' 'This approval node can be viewed at: {{ workflow_url }}\n\n{{ job_metadata }}'
+ )
DEFAULT_APPROVAL_APPROVED_MSG = 'The approval node "{{ approval_node_name }}" was approved. {{ workflow_url }}'
DEFAULT_APPROVAL_APPROVED_BODY = 'The approval node "{{ approval_node_name }}" was approved. {{ workflow_url }}\n\n{{ job_metadata }}'
@@ -19,11 +20,14 @@ class CustomNotificationBase(object):
DEFAULT_APPROVAL_DENIED_MSG = 'The approval node "{{ approval_node_name }}" was denied. {{ workflow_url }}'
DEFAULT_APPROVAL_DENIED_BODY = 'The approval node "{{ approval_node_name }}" was denied. {{ workflow_url }}\n\n{{ job_metadata }}'
-
- default_messages = {"started": {"message": DEFAULT_MSG, "body": None},
- "success": {"message": DEFAULT_MSG, "body": None},
- "error": {"message": DEFAULT_MSG, "body": None},
- "workflow_approval": {"running": {"message": DEFAULT_APPROVAL_RUNNING_MSG, "body": None},
- "approved": {"message": DEFAULT_APPROVAL_APPROVED_MSG, "body": None},
- "timed_out": {"message": DEFAULT_APPROVAL_TIMEOUT_MSG, "body": None},
- "denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": None}}}
+ default_messages = {
+ "started": {"message": DEFAULT_MSG, "body": None},
+ "success": {"message": DEFAULT_MSG, "body": None},
+ "error": {"message": DEFAULT_MSG, "body": None},
+ "workflow_approval": {
+ "running": {"message": DEFAULT_APPROVAL_RUNNING_MSG, "body": None},
+ "approved": {"message": DEFAULT_APPROVAL_APPROVED_MSG, "body": None},
+ "timed_out": {"message": DEFAULT_APPROVAL_TIMEOUT_MSG, "body": None},
+ "denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": None},
+ },
+ }
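# The default_messages rewrite above is black's standard treatment of a nested
# dict that exceeds the width: one key per line at every level, trailing commas
# throughout. Reduced sketch with placeholder values:
DEFAULT_MSG_EXAMPLE = 'status changed'
default_messages_example = {
    "started": {"message": DEFAULT_MSG_EXAMPLE, "body": None},
    "workflow_approval": {
        "running": {"message": DEFAULT_MSG_EXAMPLE, "body": None},
        "denied": {"message": DEFAULT_MSG_EXAMPLE, "body": None},
    },
}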
diff --git a/awx/main/notifications/email_backend.py b/awx/main/notifications/email_backend.py
index 657b18d282..5e37ac5f32 100644
--- a/awx/main/notifications/email_backend.py
+++ b/awx/main/notifications/email_backend.py
@@ -23,25 +23,31 @@ DEFAULT_APPROVAL_DENIED_BODY = CustomNotificationBase.DEFAULT_APPROVAL_DENIED_BO
class CustomEmailBackend(EmailBackend, CustomNotificationBase):
- init_parameters = {"host": {"label": "Host", "type": "string"},
- "port": {"label": "Port", "type": "int"},
- "username": {"label": "Username", "type": "string"},
- "password": {"label": "Password", "type": "password"},
- "use_tls": {"label": "Use TLS", "type": "bool"},
- "use_ssl": {"label": "Use SSL", "type": "bool"},
- "sender": {"label": "Sender Email", "type": "string"},
- "recipients": {"label": "Recipient List", "type": "list"},
- "timeout": {"label": "Timeout", "type": "int", "default": 30}}
+ init_parameters = {
+ "host": {"label": "Host", "type": "string"},
+ "port": {"label": "Port", "type": "int"},
+ "username": {"label": "Username", "type": "string"},
+ "password": {"label": "Password", "type": "password"},
+ "use_tls": {"label": "Use TLS", "type": "bool"},
+ "use_ssl": {"label": "Use SSL", "type": "bool"},
+ "sender": {"label": "Sender Email", "type": "string"},
+ "recipients": {"label": "Recipient List", "type": "list"},
+ "timeout": {"label": "Timeout", "type": "int", "default": 30},
+ }
recipient_parameter = "recipients"
sender_parameter = "sender"
- default_messages = {"started": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
- "success": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
- "error": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
- "workflow_approval": {"running": {"message": DEFAULT_APPROVAL_RUNNING_MSG, "body": DEFAULT_APPROVAL_RUNNING_BODY},
- "approved": {"message": DEFAULT_APPROVAL_APPROVED_MSG, "body": DEFAULT_APPROVAL_APPROVED_BODY},
- "timed_out": {"message": DEFAULT_APPROVAL_TIMEOUT_MSG, "body": DEFAULT_APPROVAL_TIMEOUT_BODY},
- "denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": DEFAULT_APPROVAL_DENIED_BODY}}}
+ default_messages = {
+ "started": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+ "success": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+ "error": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+ "workflow_approval": {
+ "running": {"message": DEFAULT_APPROVAL_RUNNING_MSG, "body": DEFAULT_APPROVAL_RUNNING_BODY},
+ "approved": {"message": DEFAULT_APPROVAL_APPROVED_MSG, "body": DEFAULT_APPROVAL_APPROVED_BODY},
+ "timed_out": {"message": DEFAULT_APPROVAL_TIMEOUT_MSG, "body": DEFAULT_APPROVAL_TIMEOUT_BODY},
+ "denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": DEFAULT_APPROVAL_DENIED_BODY},
+ },
+ }
def format_body(self, body):
# leave body unchanged (expect a string)
diff --git a/awx/main/notifications/grafana_backend.py b/awx/main/notifications/grafana_backend.py
index 8e8b648952..4e9a7a6262 100644
--- a/awx/main/notifications/grafana_backend.py
+++ b/awx/main/notifications/grafana_backend.py
@@ -32,22 +32,26 @@ logger = logging.getLogger('awx.main.notifications.grafana_backend')
class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase):
- init_parameters = {"grafana_url": {"label": "Grafana URL", "type": "string"},
- "grafana_key": {"label": "Grafana API Key", "type": "password"}}
+ init_parameters = {"grafana_url": {"label": "Grafana URL", "type": "string"}, "grafana_key": {"label": "Grafana API Key", "type": "password"}}
recipient_parameter = "grafana_url"
sender_parameter = None
DEFAULT_BODY = "{{ job_metadata }}"
- default_messages = {"started": {"body": DEFAULT_BODY, "message": DEFAULT_MSG},
- "success": {"body": DEFAULT_BODY, "message": DEFAULT_MSG},
- "error": {"body": DEFAULT_BODY, "message": DEFAULT_MSG},
- "workflow_approval": {"running": {"message": DEFAULT_APPROVAL_RUNNING_MSG, "body": DEFAULT_APPROVAL_RUNNING_BODY},
- "approved": {"message": DEFAULT_APPROVAL_APPROVED_MSG,"body": DEFAULT_APPROVAL_APPROVED_BODY},
- "timed_out": {"message": DEFAULT_APPROVAL_TIMEOUT_MSG, "body": DEFAULT_APPROVAL_TIMEOUT_BODY},
- "denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": DEFAULT_APPROVAL_DENIED_BODY}}}
-
- def __init__(self, grafana_key,dashboardId=None, panelId=None, annotation_tags=None, grafana_no_verify_ssl=False, isRegion=True,
- fail_silently=False, **kwargs):
+ default_messages = {
+ "started": {"body": DEFAULT_BODY, "message": DEFAULT_MSG},
+ "success": {"body": DEFAULT_BODY, "message": DEFAULT_MSG},
+ "error": {"body": DEFAULT_BODY, "message": DEFAULT_MSG},
+ "workflow_approval": {
+ "running": {"message": DEFAULT_APPROVAL_RUNNING_MSG, "body": DEFAULT_APPROVAL_RUNNING_BODY},
+ "approved": {"message": DEFAULT_APPROVAL_APPROVED_MSG, "body": DEFAULT_APPROVAL_APPROVED_BODY},
+ "timed_out": {"message": DEFAULT_APPROVAL_TIMEOUT_MSG, "body": DEFAULT_APPROVAL_TIMEOUT_BODY},
+ "denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": DEFAULT_APPROVAL_DENIED_BODY},
+ },
+ }
+
+ def __init__(
+ self, grafana_key, dashboardId=None, panelId=None, annotation_tags=None, grafana_no_verify_ssl=False, isRegion=True, fail_silently=False, **kwargs
+ ):
super(GrafanaBackend, self).__init__(fail_silently=fail_silently)
self.grafana_key = grafana_key
self.dashboardId = dashboardId
@@ -73,14 +77,14 @@ class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase):
grafana_headers = {}
if 'started' in m.body:
try:
- epoch=datetime.datetime.utcfromtimestamp(0)
+ epoch = datetime.datetime.utcfromtimestamp(0)
grafana_data['time'] = grafana_data['timeEnd'] = int((dp.parse(m.body['started']).replace(tzinfo=None) - epoch).total_seconds() * 1000)
if m.body.get('finished'):
grafana_data['timeEnd'] = int((dp.parse(m.body['finished']).replace(tzinfo=None) - epoch).total_seconds() * 1000)
except ValueError:
- logger.error(smart_text(_("Error converting time {} or timeEnd {} to int.").format(m.body['started'],m.body['finished'])))
+ logger.error(smart_text(_("Error converting time {} or timeEnd {} to int.").format(m.body['started'], m.body['finished'])))
if not self.fail_silently:
- raise Exception(smart_text(_("Error converting time {} and/or timeEnd {} to int.").format(m.body['started'],m.body['finished'])))
+ raise Exception(smart_text(_("Error converting time {} and/or timeEnd {} to int.").format(m.body['started'], m.body['finished'])))
grafana_data['isRegion'] = self.isRegion
grafana_data['dashboardId'] = self.dashboardId
grafana_data['panelId'] = self.panelId
@@ -89,10 +93,9 @@ class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase):
grafana_data['text'] = m.subject
grafana_headers['Authorization'] = "Bearer {}".format(self.grafana_key)
grafana_headers['Content-Type'] = "application/json"
- r = requests.post("{}/api/annotations".format(m.recipients()[0]),
- json=grafana_data,
- headers=grafana_headers,
- verify=(not self.grafana_no_verify_ssl))
+ r = requests.post(
+ "{}/api/annotations".format(m.recipients()[0]), json=grafana_data, headers=grafana_headers, verify=(not self.grafana_no_verify_ssl)
+ )
if r.status_code >= 400:
logger.error(smart_text(_("Error sending notification grafana: {}").format(r.status_code)))
if not self.fail_silently:
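# The grafana __init__ above shows black's signature wrapping: parameters move
# to an indented continuation line (split one-per-line only if still too wide),
# with the closing paren on its own line. Reduced sketch with the same shape:
def init_example(
    grafana_key, dashboardId=None, panelId=None, annotation_tags=None, grafana_no_verify_ssl=False, isRegion=True, fail_silently=False, **kwargs
):
    return grafana_key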
diff --git a/awx/main/notifications/irc_backend.py b/awx/main/notifications/irc_backend.py
index b9a056f479..d020de824d 100644
--- a/awx/main/notifications/irc_backend.py
+++ b/awx/main/notifications/irc_backend.py
@@ -18,12 +18,14 @@ logger = logging.getLogger('awx.main.notifications.irc_backend')
class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase):
- init_parameters = {"server": {"label": "IRC Server Address", "type": "string"},
- "port": {"label": "IRC Server Port", "type": "int"},
- "nickname": {"label": "IRC Nick", "type": "string"},
- "password": {"label": "IRC Server Password", "type": "password"},
- "use_ssl": {"label": "SSL Connection", "type": "bool"},
- "targets": {"label": "Destination Channels or Users", "type": "list"}}
+ init_parameters = {
+ "server": {"label": "IRC Server Address", "type": "string"},
+ "port": {"label": "IRC Server Port", "type": "int"},
+ "nickname": {"label": "IRC Nick", "type": "string"},
+ "password": {"label": "IRC Server Password", "type": "password"},
+ "use_ssl": {"label": "SSL Connection", "type": "bool"},
+ "targets": {"label": "Destination Channels or Users", "type": "list"},
+ }
recipient_parameter = "targets"
sender_parameter = None
diff --git a/awx/main/notifications/mattermost_backend.py b/awx/main/notifications/mattermost_backend.py
index 59a1c6f5e1..b9cc513ba7 100644
--- a/awx/main/notifications/mattermost_backend.py
+++ b/awx/main/notifications/mattermost_backend.py
@@ -15,13 +15,13 @@ logger = logging.getLogger('awx.main.notifications.mattermost_backend')
class MattermostBackend(AWXBaseEmailBackend, CustomNotificationBase):
- init_parameters = {"mattermost_url": {"label": "Target URL", "type": "string"},
- "mattermost_no_verify_ssl": {"label": "Verify SSL", "type": "bool"}}
+ init_parameters = {"mattermost_url": {"label": "Target URL", "type": "string"}, "mattermost_no_verify_ssl": {"label": "Verify SSL", "type": "bool"}}
recipient_parameter = "mattermost_url"
sender_parameter = None
- def __init__(self, mattermost_no_verify_ssl=False, mattermost_channel=None, mattermost_username=None,
- mattermost_icon_url=None, fail_silently=False, **kwargs):
+ def __init__(
+ self, mattermost_no_verify_ssl=False, mattermost_channel=None, mattermost_username=None, mattermost_icon_url=None, fail_silently=False, **kwargs
+ ):
super(MattermostBackend, self).__init__(fail_silently=fail_silently)
self.mattermost_channel = mattermost_channel
self.mattermost_username = mattermost_username
@@ -35,16 +35,14 @@ class MattermostBackend(AWXBaseEmailBackend, CustomNotificationBase):
sent_messages = 0
for m in messages:
payload = {}
- for opt, optval in {'mattermost_icon_url':'icon_url',
- 'mattermost_channel': 'channel', 'mattermost_username': 'username'}.items():
+ for opt, optval in {'mattermost_icon_url': 'icon_url', 'mattermost_channel': 'channel', 'mattermost_username': 'username'}.items():
optvalue = getattr(self, opt)
if optvalue is not None:
payload[optval] = optvalue.strip()
payload['text'] = m.subject
- r = requests.post("{}".format(m.recipients()[0]),
- json=payload, verify=(not self.mattermost_no_verify_ssl))
+ r = requests.post("{}".format(m.recipients()[0]), json=payload, verify=(not self.mattermost_no_verify_ssl))
if r.status_code >= 400:
logger.error(smart_text(_("Error sending notification mattermost: {}").format(r.status_code)))
if not self.fail_silently:
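# The requests.post rewrites in this file and the grafana one show the same
# threshold at work: a keyword call collapses to one line when it fits,
# otherwise it is split with the arguments indented. Hypothetical usage sketch
# (requests is assumed to be installed, as it is for these backends):
import requests


def post_example(url, payload, verify_ssl=True):
    return requests.post("{}".format(url), json=payload, verify=verify_ssl)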
diff --git a/awx/main/notifications/pagerduty_backend.py b/awx/main/notifications/pagerduty_backend.py
index 18d2290caf..8cde9e3cfd 100644
--- a/awx/main/notifications/pagerduty_backend.py
+++ b/awx/main/notifications/pagerduty_backend.py
@@ -30,21 +30,27 @@ logger = logging.getLogger('awx.main.notifications.pagerduty_backend')
class PagerDutyBackend(AWXBaseEmailBackend, CustomNotificationBase):
- init_parameters = {"subdomain": {"label": "Pagerduty subdomain", "type": "string"},
- "token": {"label": "API Token", "type": "password"},
- "service_key": {"label": "API Service/Integration Key", "type": "string"},
- "client_name": {"label": "Client Identifier", "type": "string"}}
+ init_parameters = {
+ "subdomain": {"label": "Pagerduty subdomain", "type": "string"},
+ "token": {"label": "API Token", "type": "password"},
+ "service_key": {"label": "API Service/Integration Key", "type": "string"},
+ "client_name": {"label": "Client Identifier", "type": "string"},
+ }
recipient_parameter = "service_key"
sender_parameter = "client_name"
DEFAULT_BODY = "{{ job_metadata }}"
- default_messages = {"started": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
- "success": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
- "error": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
- "workflow_approval": {"running": {"message": DEFAULT_APPROVAL_RUNNING_MSG, "body": DEFAULT_APPROVAL_RUNNING_BODY},
- "approved": {"message": DEFAULT_APPROVAL_APPROVED_MSG,"body": DEFAULT_APPROVAL_APPROVED_BODY},
- "timed_out": {"message": DEFAULT_APPROVAL_TIMEOUT_MSG, "body": DEFAULT_APPROVAL_TIMEOUT_BODY},
- "denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": DEFAULT_APPROVAL_DENIED_BODY}}}
+ default_messages = {
+ "started": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+ "success": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+ "error": {"message": DEFAULT_MSG, "body": DEFAULT_BODY},
+ "workflow_approval": {
+ "running": {"message": DEFAULT_APPROVAL_RUNNING_MSG, "body": DEFAULT_APPROVAL_RUNNING_BODY},
+ "approved": {"message": DEFAULT_APPROVAL_APPROVED_MSG, "body": DEFAULT_APPROVAL_APPROVED_BODY},
+ "timed_out": {"message": DEFAULT_APPROVAL_TIMEOUT_MSG, "body": DEFAULT_APPROVAL_TIMEOUT_BODY},
+ "denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": DEFAULT_APPROVAL_DENIED_BODY},
+ },
+ }
def __init__(self, subdomain, token, fail_silently=False, **kwargs):
super(PagerDutyBackend, self).__init__(fail_silently=fail_silently)
@@ -75,10 +81,7 @@ class PagerDutyBackend(AWXBaseEmailBackend, CustomNotificationBase):
logger.error(smart_text(_("Exception connecting to PagerDuty: {}").format(e)))
for m in messages:
try:
- pager.trigger_incident(m.recipients()[0],
- description=m.subject,
- details=m.body,
- client=m.from_email)
+ pager.trigger_incident(m.recipients()[0], description=m.subject, details=m.body, client=m.from_email)
sent_messages += 1
except Exception as e:
logger.error(smart_text(_("Exception sending messages: {}").format(e)))
diff --git a/awx/main/notifications/rocketchat_backend.py b/awx/main/notifications/rocketchat_backend.py
index df271bf80d..6d331d9e65 100644
--- a/awx/main/notifications/rocketchat_backend.py
+++ b/awx/main/notifications/rocketchat_backend.py
@@ -16,12 +16,10 @@ logger = logging.getLogger('awx.main.notifications.rocketchat_backend')
class RocketChatBackend(AWXBaseEmailBackend, CustomNotificationBase):
- init_parameters = {"rocketchat_url": {"label": "Target URL", "type": "string"},
- "rocketchat_no_verify_ssl": {"label": "Verify SSL", "type": "bool"}}
+ init_parameters = {"rocketchat_url": {"label": "Target URL", "type": "string"}, "rocketchat_no_verify_ssl": {"label": "Verify SSL", "type": "bool"}}
recipient_parameter = "rocketchat_url"
sender_parameter = None
-
def __init__(self, rocketchat_no_verify_ssl=False, rocketchat_username=None, rocketchat_icon_url=None, fail_silently=False, **kwargs):
super(RocketChatBackend, self).__init__(fail_silently=fail_silently)
self.rocketchat_no_verify_ssl = rocketchat_no_verify_ssl
@@ -35,20 +33,16 @@ class RocketChatBackend(AWXBaseEmailBackend, CustomNotificationBase):
sent_messages = 0
for m in messages:
payload = {"text": m.subject}
- for opt, optval in {'rocketchat_icon_url': 'icon_url',
- 'rocketchat_username': 'username'}.items():
+ for opt, optval in {'rocketchat_icon_url': 'icon_url', 'rocketchat_username': 'username'}.items():
optvalue = getattr(self, opt)
if optvalue is not None:
payload[optval] = optvalue.strip()
- r = requests.post("{}".format(m.recipients()[0]),
- data=json.dumps(payload), verify=(not self.rocketchat_no_verify_ssl))
+ r = requests.post("{}".format(m.recipients()[0]), data=json.dumps(payload), verify=(not self.rocketchat_no_verify_ssl))
if r.status_code >= 400:
- logger.error(smart_text(
- _("Error sending notification rocket.chat: {}").format(r.status_code)))
+ logger.error(smart_text(_("Error sending notification rocket.chat: {}").format(r.status_code)))
if not self.fail_silently:
- raise Exception(smart_text(
- _("Error sending notification rocket.chat: {}").format(r.status_code)))
+ raise Exception(smart_text(_("Error sending notification rocket.chat: {}").format(r.status_code)))
sent_messages += 1
return sent_messages
diff --git a/awx/main/notifications/slack_backend.py b/awx/main/notifications/slack_backend.py
index d70debf67c..881abced70 100644
--- a/awx/main/notifications/slack_backend.py
+++ b/awx/main/notifications/slack_backend.py
@@ -16,8 +16,7 @@ WEBSOCKET_TIMEOUT = 30
class SlackBackend(AWXBaseEmailBackend, CustomNotificationBase):
- init_parameters = {"token": {"label": "Token", "type": "password"},
- "channels": {"label": "Destination Channels", "type": "list"}}
+ init_parameters = {"token": {"label": "Token", "type": "password"}, "channels": {"label": "Destination Channels", "type": "list"}}
recipient_parameter = "channels"
sender_parameter = None
@@ -37,18 +36,9 @@ class SlackBackend(AWXBaseEmailBackend, CustomNotificationBase):
if r.startswith('#'):
r = r[1:]
if self.color:
- ret = connection.api_call("chat.postMessage",
- channel=r,
- as_user=True,
- attachments=[{
- "color": self.color,
- "text": m.subject
- }])
+ ret = connection.api_call("chat.postMessage", channel=r, as_user=True, attachments=[{"color": self.color, "text": m.subject}])
else:
- ret = connection.api_call("chat.postMessage",
- channel=r,
- as_user=True,
- text=m.subject)
+ ret = connection.api_call("chat.postMessage", channel=r, as_user=True, text=m.subject)
logger.debug(ret)
if ret['ok']:
sent_messages += 1
diff --git a/awx/main/notifications/twilio_backend.py b/awx/main/notifications/twilio_backend.py
index 38a364e00b..0b730a56b2 100644
--- a/awx/main/notifications/twilio_backend.py
+++ b/awx/main/notifications/twilio_backend.py
@@ -16,10 +16,12 @@ logger = logging.getLogger('awx.main.notifications.twilio_backend')
class TwilioBackend(AWXBaseEmailBackend, CustomNotificationBase):
- init_parameters = {"account_sid": {"label": "Account SID", "type": "string"},
- "account_token": {"label": "Account Token", "type": "password"},
- "from_number": {"label": "Source Phone Number", "type": "string"},
- "to_numbers": {"label": "Destination SMS Numbers", "type": "list"}}
+ init_parameters = {
+ "account_sid": {"label": "Account SID", "type": "string"},
+ "account_token": {"label": "Account Token", "type": "password"},
+ "from_number": {"label": "Source Phone Number", "type": "string"},
+ "to_numbers": {"label": "Destination SMS Numbers", "type": "list"},
+ }
recipient_parameter = "to_numbers"
sender_parameter = "from_number"
@@ -39,10 +41,7 @@ class TwilioBackend(AWXBaseEmailBackend, CustomNotificationBase):
for m in messages:
try:
- connection.messages.create(
- to=m.to,
- from_=m.from_email,
- body=m.subject)
+ connection.messages.create(to=m.to, from_=m.from_email, body=m.subject)
sent_messages += 1
except Exception as e:
logger.error(smart_text(_("Exception sending messages: {}").format(e)))
diff --git a/awx/main/notifications/webhook_backend.py b/awx/main/notifications/webhook_backend.py
index d67fc11a35..b99052f09e 100644
--- a/awx/main/notifications/webhook_backend.py
+++ b/awx/main/notifications/webhook_backend.py
@@ -17,25 +17,29 @@ logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
- init_parameters = {"url": {"label": "Target URL", "type": "string"},
- "http_method": {"label": "HTTP Method", "type": "string", "default": "POST"},
- "disable_ssl_verification": {"label": "Verify SSL", "type": "bool", "default": False},
- "username": {"label": "Username", "type": "string", "default": ""},
- "password": {"label": "Password", "type": "password", "default": ""},
- "headers": {"label": "HTTP Headers", "type": "object"}}
+ init_parameters = {
+ "url": {"label": "Target URL", "type": "string"},
+ "http_method": {"label": "HTTP Method", "type": "string", "default": "POST"},
+ "disable_ssl_verification": {"label": "Verify SSL", "type": "bool", "default": False},
+ "username": {"label": "Username", "type": "string", "default": ""},
+ "password": {"label": "Password", "type": "password", "default": ""},
+ "headers": {"label": "HTTP Headers", "type": "object"},
+ }
recipient_parameter = "url"
sender_parameter = None
DEFAULT_BODY = "{{ job_metadata }}"
- default_messages = {"started": {"body": DEFAULT_BODY},
- "success": {"body": DEFAULT_BODY},
- "error": {"body": DEFAULT_BODY},
- "workflow_approval": {
- "running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. '
- 'This node can be viewed at: {{ workflow_url }}"}'},
- "approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
- "timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
- "denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'}}}
+ default_messages = {
+ "started": {"body": DEFAULT_BODY},
+ "success": {"body": DEFAULT_BODY},
+ "error": {"body": DEFAULT_BODY},
+ "workflow_approval": {
+ "running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. ' 'This node can be viewed at: {{ workflow_url }}"}'},
+ "approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
+ "timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
+ "denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
+ },
+ }
def __init__(self, http_method, headers, disable_ssl_verification=False, fail_silently=False, username=None, password=None, **kwargs):
self.http_method = http_method
@@ -60,18 +64,20 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
self.headers['Content-Type'] = 'application/json'
if 'User-Agent' not in self.headers:
self.headers['User-Agent'] = "Tower {}".format(get_awx_version())
- if self.http_method.lower() not in ['put','post']:
+ if self.http_method.lower() not in ['put', 'post']:
raise ValueError("HTTP method must be either 'POST' or 'PUT'.")
chosen_method = getattr(requests, self.http_method.lower(), None)
for m in messages:
auth = None
if self.username or self.password:
auth = (self.username, self.password)
- r = chosen_method("{}".format(m.recipients()[0]),
- auth=auth,
- data=json.dumps(m.body, ensure_ascii=False).encode('utf-8'),
- headers=self.headers,
- verify=(not self.disable_ssl_verification))
+ r = chosen_method(
+ "{}".format(m.recipients()[0]),
+ auth=auth,
+ data=json.dumps(m.body, ensure_ascii=False).encode('utf-8'),
+ headers=self.headers,
+ verify=(not self.disable_ssl_verification),
+ )
if r.status_code >= 400:
logger.error(smart_text(_("Error sending notification webhook: {}").format(r.status_code)))
if not self.fail_silently:
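
The workflow_approval templates above now carry two adjacent string literals on one line (for example the "needs review." body). That is expected: black joins physical lines but never merges implicitly concatenated literals, since doing so would change tokens rather than layout. A small runnable demonstration:

# Python concatenates adjacent literals at compile time; black leaves
# the two tokens side by side instead of fusing them into one string.
MESSAGE = "first half of a long template " "second half of the template"
assert MESSAGE == "first half of a long template second half of the template"
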
diff --git a/awx/main/queue.py b/awx/main/queue.py
index 762879fd2c..88fc2c8288 100644
--- a/awx/main/queue.py
+++ b/awx/main/queue.py
@@ -17,7 +17,6 @@ __all__ = ['CallbackQueueDispatcher']
# objects that may exist in events emitted by the callback plugin
# see: https://github.com/ansible/ansible/pull/38759
class AnsibleJSONEncoder(json.JSONEncoder):
-
def default(self, o):
if getattr(o, 'yaml_tag', None) == '!vault':
return o.data
@@ -25,7 +24,6 @@ class AnsibleJSONEncoder(json.JSONEncoder):
class CallbackQueueDispatcher(object):
-
def __init__(self):
self.queue = getattr(settings, 'CALLBACK_QUEUE', '')
self.logger = logging.getLogger('awx.main.queue.CallbackQueueDispatcher')
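
The only change in queue.py is the deletion of the blank lines sitting directly under the class headers; as both hunks show, black strips the empty line immediately beneath a class statement. Sketch:

class DispatcherSketch:
    # No blank line survives between the class header and this first
    # statement once black has run.
    def __init__(self):
        self.queue = []
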
diff --git a/awx/main/redact.py b/awx/main/redact.py
index 32899d935e..34dc33a98a 100644
--- a/awx/main/redact.py
+++ b/awx/main/redact.py
@@ -37,24 +37,24 @@ class UriCleaner(object):
# replace the first occurrence of username and the first and second
# occurrence of password
- uri_str = redactedtext[match.start():match.end()]
+ uri_str = redactedtext[match.start() : match.end()]
if username:
uri_str = uri_str.replace(username, UriCleaner.REPLACE_STR, 1)
# 2, just in case the password is $encrypted$
if password:
uri_str = uri_str.replace(password, UriCleaner.REPLACE_STR, 2)
- t = redactedtext[:match.start()] + uri_str
+ t = redactedtext[: match.start()] + uri_str
text_index = len(t)
- if (match.end() < len(redactedtext)):
- t += redactedtext[match.end():]
+ if match.end() < len(redactedtext):
+ t += redactedtext[match.end() :]
redactedtext = t
if text_index >= len(redactedtext):
text_index = len(redactedtext) - 1
except ValueError:
# Invalid URI, redact the whole URI to be safe
- redactedtext = redactedtext[:match.start()] + UriCleaner.REPLACE_STR + redactedtext[match.end():]
+ redactedtext = redactedtext[: match.start()] + UriCleaner.REPLACE_STR + redactedtext[match.end() :]
text_index = match.start() + len(UriCleaner.REPLACE_STR)
return redactedtext
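
The redact.py changes are black's PEP 8 slice rule: when either operand of a slice is a complex expression (a call, arithmetic), the colon is treated as a binary operator and gets a space on each side; simple names and literals keep the tight form. A runnable sketch:

text = "user:secret@example.com"
start, end = 5, 11
tight = text[start:end]  # simple operands: no spaces around the colon
spaced = text[text.find(":") + 1 : text.find("@")]  # complex operands: spaced
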
diff --git a/awx/main/registrar.py b/awx/main/registrar.py
index 6d0ccfe495..07e721a953 100644
--- a/awx/main/registrar.py
+++ b/awx/main/registrar.py
@@ -5,7 +5,6 @@ from django.db.models.signals import pre_save, post_save, pre_delete, m2m_change
class ActivityStreamRegistrar(object):
-
def __init__(self):
self.models = []
@@ -22,8 +21,9 @@ class ActivityStreamRegistrar(object):
for m2mfield in model._meta.many_to_many:
try:
m2m_attr = getattr(model, m2mfield.name)
- m2m_changed.connect(activity_stream_associate, sender=m2m_attr.through,
- dispatch_uid=str(self.__class__) + str(m2m_attr.through) + "_associate")
+ m2m_changed.connect(
+ activity_stream_associate, sender=m2m_attr.through, dispatch_uid=str(self.__class__) + str(m2m_attr.through) + "_associate"
+ )
except AttributeError:
pass
@@ -34,7 +34,6 @@ class ActivityStreamRegistrar(object):
pre_delete.disconnect(dispatch_uid=str(self.__class__) + str(model) + "_delete")
self.models.pop(model)
-
for m2mfield in model._meta.many_to_many:
m2m_attr = getattr(model, m2mfield.name)
m2m_changed.disconnect(dispatch_uid=str(self.__class__) + str(m2m_attr.through) + "_associate")
diff --git a/awx/main/routing.py b/awx/main/routing.py
index 2866d46ed0..6ba58e68c6 100644
--- a/awx/main/routing.py
+++ b/awx/main/routing.py
@@ -31,8 +31,8 @@ websocket_urlpatterns = [
url(r'websocket/broadcast/$', consumers.BroadcastConsumer),
]
-application = AWXProtocolTypeRouter({
- 'websocket': AuthMiddlewareStack(
- URLRouter(websocket_urlpatterns)
- ),
-})
+application = AWXProtocolTypeRouter(
+ {
+ 'websocket': AuthMiddlewareStack(URLRouter(websocket_urlpatterns)),
+ }
+)
diff --git a/awx/main/scheduler/dag_simple.py b/awx/main/scheduler/dag_simple.py
index 5a354edbba..ec21b0a0e0 100644
--- a/awx/main/scheduler/dag_simple.py
+++ b/awx/main/scheduler/dag_simple.py
@@ -85,10 +85,7 @@ class SimpleDAG(object):
color = 'red'
elif obj.do_not_run is True:
color = 'gray'
- doc += "%s [color = %s]\n" % (
- run_status(n['node_object']),
- color
- )
+ doc += "%s [color = %s]\n" % (run_status(n['node_object']), color)
for label, edges in self.node_from_edges_by_label.items():
for from_node, to_nodes in edges.items():
for to_node in to_nodes:
@@ -104,9 +101,9 @@ class SimpleDAG(object):
def add_node(self, obj, metadata=None):
if self.find_ord(obj) is None:
- '''
+ """
Assume node is a root node until a child is added
- '''
+ """
node_index = len(self.nodes)
self.root_nodes.add(node_index)
self.node_obj_to_node_index[obj] = node_index
@@ -129,10 +126,8 @@ class SimpleDAG(object):
elif to_obj_ord is None:
raise LookupError("To object not found {}".format(to_obj))
- self.node_from_edges_by_label.setdefault(label, dict()) \
- .setdefault(from_obj_ord, [])
- self.node_to_edges_by_label.setdefault(label, dict()) \
- .setdefault(to_obj_ord, [])
+ self.node_from_edges_by_label.setdefault(label, dict()).setdefault(from_obj_ord, [])
+ self.node_to_edges_by_label.setdefault(label, dict()).setdefault(to_obj_ord, [])
self.node_from_edges_by_label[label][from_obj_ord].append(to_obj_ord)
self.node_to_edges_by_label[label][to_obj_ord].append(from_obj_ord)
@@ -141,9 +136,7 @@ class SimpleDAG(object):
return self.node_obj_to_node_index.get(obj, None)
def _get_children_by_label(self, node_index, label):
- return [self.nodes[index] for index in
- self.node_from_edges_by_label.get(label, {})
- .get(node_index, [])]
+ return [self.nodes[index] for index in self.node_from_edges_by_label.get(label, {}).get(node_index, [])]
def get_children(self, obj, label=None):
this_ord = self.find_ord(obj)
@@ -157,9 +150,7 @@ class SimpleDAG(object):
return nodes
def _get_parents_by_label(self, node_index, label):
- return [self.nodes[index] for index in
- self.node_to_edges_by_label.get(label, {})
- .get(node_index, [])]
+ return [self.nodes[index] for index in self.node_to_edges_by_label.get(label, {}).get(node_index, [])]
def get_parents(self, obj, label=None):
this_ord = self.find_ord(obj)
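
The '''Assume node is a root node...''' block inside add_node is a bare string expression, not a real docstring, but black normalizes triple-quoted strings to double quotes either way, provided the change forces no extra escaping. The same hunk also shows backslash continuations disappearing once the chained setdefault calls fit on one line. Sketch of the quote rule:

def add_node_sketch(obj):
    """Black rewrites '''-quoted strings, docstring or not, to triple
    double quotes whenever no additional escaping is needed."""
    return obj
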
diff --git a/awx/main/scheduler/dag_workflow.py b/awx/main/scheduler/dag_workflow.py
index 3d26a4da7f..39995f437c 100644
--- a/awx/main/scheduler/dag_workflow.py
+++ b/awx/main/scheduler/dag_workflow.py
@@ -1,4 +1,3 @@
-
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_text
@@ -21,18 +20,14 @@ class WorkflowDAG(SimpleDAG):
def _init_graph(self, workflow_job_or_jt):
if hasattr(workflow_job_or_jt, 'workflow_job_template_nodes'):
vals = ['from_workflowjobtemplatenode_id', 'to_workflowjobtemplatenode_id']
- filters = {
- 'from_workflowjobtemplatenode__workflow_job_template_id': workflow_job_or_jt.id
- }
+ filters = {'from_workflowjobtemplatenode__workflow_job_template_id': workflow_job_or_jt.id}
workflow_nodes = workflow_job_or_jt.workflow_job_template_nodes
success_nodes = WorkflowJobTemplateNode.success_nodes.through.objects.filter(**filters).values_list(*vals)
failure_nodes = WorkflowJobTemplateNode.failure_nodes.through.objects.filter(**filters).values_list(*vals)
always_nodes = WorkflowJobTemplateNode.always_nodes.through.objects.filter(**filters).values_list(*vals)
elif hasattr(workflow_job_or_jt, 'workflow_job_nodes'):
vals = ['from_workflowjobnode_id', 'to_workflowjobnode_id']
- filters = {
- 'from_workflowjobnode__workflow_job_id': workflow_job_or_jt.id
- }
+ filters = {'from_workflowjobnode__workflow_job_id': workflow_job_or_jt.id}
workflow_nodes = workflow_job_or_jt.workflow_job_nodes
success_nodes = WorkflowJobNode.success_nodes.through.objects.filter(**filters).values_list(*vals)
failure_nodes = WorkflowJobNode.failure_nodes.through.objects.filter(**filters).values_list(*vals)
@@ -76,15 +71,16 @@ class WorkflowDAG(SimpleDAG):
obj = node['node_object']
parent_nodes = [p['node_object'] for p in self.get_parents(obj)]
for p in parent_nodes:
- #node has a status
+ # node has a status
if p.job and p.job.status in ["successful", "failed"]:
if p.job and p.job.status == "successful":
status = "success_nodes"
elif p.job and p.job.status == "failed":
status = "failure_nodes"
- #check that the nodes status matches either a pathway of the same status or is an always path.
- if (p not in [node['node_object'] for node in self.get_parents(obj, status)] and
- p not in [node['node_object'] for node in self.get_parents(obj, "always_nodes")]):
+ # check that the node's status matches either a pathway of the same status or an always path.
+ if p not in [node['node_object'] for node in self.get_parents(obj, status)] and p not in [
+ node['node_object'] for node in self.get_parents(obj, "always_nodes")
+ ]:
return False
return True
@@ -101,14 +97,11 @@ class WorkflowDAG(SimpleDAG):
continue
elif obj.job:
if obj.job.status in ['failed', 'error', 'canceled']:
- nodes.extend(self.get_children(obj, 'failure_nodes') +
- self.get_children(obj, 'always_nodes'))
+ nodes.extend(self.get_children(obj, 'failure_nodes') + self.get_children(obj, 'always_nodes'))
elif obj.job.status == 'successful':
- nodes.extend(self.get_children(obj, 'success_nodes') +
- self.get_children(obj, 'always_nodes'))
+ nodes.extend(self.get_children(obj, 'success_nodes') + self.get_children(obj, 'always_nodes'))
elif obj.unified_job_template is None:
- nodes.extend(self.get_children(obj, 'failure_nodes') +
- self.get_children(obj, 'always_nodes'))
+ nodes.extend(self.get_children(obj, 'failure_nodes') + self.get_children(obj, 'always_nodes'))
else:
# This catches root nodes or ANY convergence nodes
if not obj.all_parents_must_converge and self._are_relevant_parents_finished(n):
@@ -157,8 +150,7 @@ class WorkflowDAG(SimpleDAG):
for node in failed_nodes:
obj = node['node_object']
- if (len(self.get_children(obj, 'failure_nodes')) +
- len(self.get_children(obj, 'always_nodes'))) == 0:
+ if (len(self.get_children(obj, 'failure_nodes')) + len(self.get_children(obj, 'always_nodes'))) == 0:
if obj.unified_job_template is None:
res = True
failed_unified_job_template_node_ids.append(str(obj.id))
@@ -167,8 +159,10 @@ class WorkflowDAG(SimpleDAG):
failed_path_nodes_id_status.append((str(obj.id), obj.job.status))
if res is True:
- s = _("No error handling path for workflow job node(s) [{node_status}]. Workflow job "
- "node(s) missing unified job template and error handling path [{no_ufjt}].")
+ s = _(
+ "No error handling path for workflow job node(s) [{node_status}]. Workflow job "
+ "node(s) missing unified job template and error handling path [{no_ufjt}]."
+ )
parms = {
'node_status': '',
'no_ufjt': '',
@@ -190,13 +184,13 @@ class WorkflowDAG(SimpleDAG):
Return a boolean
'''
+
def _are_all_nodes_dnr_decided(self, workflow_nodes):
for n in workflow_nodes:
if n.do_not_run is False and not n.job and n.unified_job_template:
return False
return True
-
r'''
Determine if a node (1) is ready to be marked do_not_run and (2) should
be marked do_not_run.
@@ -206,30 +200,27 @@ class WorkflowDAG(SimpleDAG):
Return a boolean
'''
+
def _should_mark_node_dnr(self, node, parent_nodes):
for p in parent_nodes:
if p.do_not_run is True:
pass
elif p.job:
if p.job.status == 'successful':
- if node in (self.get_children(p, 'success_nodes') +
- self.get_children(p, 'always_nodes')):
+ if node in (self.get_children(p, 'success_nodes') + self.get_children(p, 'always_nodes')):
return False
elif p.job.status in ['failed', 'error', 'canceled']:
- if node in (self.get_children(p, 'failure_nodes') +
- self.get_children(p, 'always_nodes')):
+ if node in (self.get_children(p, 'failure_nodes') + self.get_children(p, 'always_nodes')):
return False
else:
return False
elif not p.do_not_run and p.unified_job_template is None:
- if node in (self.get_children(p, 'failure_nodes') +
- self.get_children(p, 'always_nodes')):
+ if node in (self.get_children(p, 'failure_nodes') + self.get_children(p, 'always_nodes')):
return False
else:
return False
return True
-
r'''
determine if the current node is a convergence node by checking if all the
parents are finished then checking to see if all parents meet the needed
@@ -238,6 +229,7 @@ class WorkflowDAG(SimpleDAG):
Return a list object
'''
+
def mark_dnr_nodes(self):
root_nodes = self.get_root_nodes()
nodes_marked_do_not_run = []
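
Besides re-joining the get_children sums, this file shows black's comment normalization: a space is enforced after the hash (so `#node has a status` became `# node has a status`), and inline comments get exactly two spaces before the hash. One line shows both:

status = "running"  # one space after the hash, two spaces before it inline
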
diff --git a/awx/main/scheduler/kubernetes.py b/awx/main/scheduler/kubernetes.py
index 529a5e5442..a4746838dc 100644
--- a/awx/main/scheduler/kubernetes.py
+++ b/awx/main/scheduler/kubernetes.py
@@ -22,8 +22,7 @@ def deepmerge(a, b):
{'first': {'all_rows': {'fail': 'cat', 'number': '5', 'pass': 'dog'}}}
"""
if isinstance(a, dict) and isinstance(b, dict):
- return dict([(k, deepmerge(a.get(k), b.get(k)))
- for k in set(a.keys()).union(b.keys())])
+ return dict([(k, deepmerge(a.get(k), b.get(k))) for k in set(a.keys()).union(b.keys())])
elif b is None:
return a
else:
@@ -31,7 +30,6 @@ def deepmerge(a, b):
class PodManager(object):
-
def __init__(self, task=None):
self.task = task
@@ -39,16 +37,12 @@ class PodManager(object):
if not self.credential.kubernetes:
raise RuntimeError('Pod deployment cannot occur without a Kubernetes credential')
- self.kube_api.create_namespaced_pod(body=self.pod_definition,
- namespace=self.namespace,
- _request_timeout=settings.AWX_CONTAINER_GROUP_K8S_API_TIMEOUT)
+ self.kube_api.create_namespaced_pod(body=self.pod_definition, namespace=self.namespace, _request_timeout=settings.AWX_CONTAINER_GROUP_K8S_API_TIMEOUT)
num_retries = settings.AWX_CONTAINER_GROUP_POD_LAUNCH_RETRIES
for retry_attempt in range(num_retries - 1):
logger.debug(f"Checking for pod {self.pod_name}. Attempt {retry_attempt + 1} of {num_retries}")
- pod = self.kube_api.read_namespaced_pod(name=self.pod_name,
- namespace=self.namespace,
- _request_timeout=settings.AWX_CONTAINER_GROUP_K8S_API_TIMEOUT)
+ pod = self.kube_api.read_namespaced_pod(name=self.pod_name, namespace=self.namespace, _request_timeout=settings.AWX_CONTAINER_GROUP_K8S_API_TIMEOUT)
if pod.status.phase != 'Pending':
break
else:
@@ -64,16 +58,10 @@ class PodManager(object):
@classmethod
def list_active_jobs(self, instance_group):
- task = collections.namedtuple('Task', 'id instance_group')(
- id='',
- instance_group=instance_group
- )
+ task = collections.namedtuple('Task', 'id instance_group')(id='', instance_group=instance_group)
pm = PodManager(task)
try:
- for pod in pm.kube_api.list_namespaced_pod(
- pm.namespace,
- label_selector='ansible-awx={}'.format(settings.INSTALL_UUID)
- ).to_dict().get('items', []):
+ for pod in pm.kube_api.list_namespaced_pod(pm.namespace, label_selector='ansible-awx={}'.format(settings.INSTALL_UUID)).to_dict().get('items', []):
job = pod['metadata'].get('labels', {}).get('ansible-awx-job-id')
if job:
try:
@@ -84,9 +72,7 @@ class PodManager(object):
logger.exception('Failed to list pods for container group {}'.format(instance_group))
def delete(self):
- return self.kube_api.delete_namespaced_pod(name=self.pod_name,
- namespace=self.namespace,
- _request_timeout=settings.AWX_CONTAINER_GROUP_K8S_API_TIMEOUT)
+ return self.kube_api.delete_namespaced_pod(name=self.pod_name, namespace=self.namespace, _request_timeout=settings.AWX_CONTAINER_GROUP_K8S_API_TIMEOUT)
@property
def namespace(self):
@@ -105,14 +91,10 @@ class PodManager(object):
# this feels a little janky, but it's what k8s' own code does
# internally when it reads kube config files from disk:
# https://github.com/kubernetes-client/python-base/blob/0b208334ef0247aad9afcaae8003954423b61a0d/config/kube_config.py#L643
- loader = config.kube_config.KubeConfigLoader(
- config_dict=self.kube_config
- )
+ loader = config.kube_config.KubeConfigLoader(config_dict=self.kube_config)
cfg = type.__call__(client.Configuration)
loader.load_and_set(cfg)
- return client.CoreV1Api(api_client=client.ApiClient(
- configuration=cfg
- ))
+ return client.CoreV1Api(api_client=client.ApiClient(configuration=cfg))
@property
def pod_name(self):
@@ -123,36 +105,29 @@ class PodManager(object):
default_pod_spec = {
"apiVersion": "v1",
"kind": "Pod",
- "metadata": {
- "namespace": settings.AWX_CONTAINER_GROUP_DEFAULT_NAMESPACE
- },
+ "metadata": {"namespace": settings.AWX_CONTAINER_GROUP_DEFAULT_NAMESPACE},
"spec": {
- "containers": [{
- "image": settings.AWX_CONTAINER_GROUP_DEFAULT_IMAGE,
- "tty": True,
- "stdin": True,
- "imagePullPolicy": "Always",
- "args": [
- 'sleep', 'infinity'
- ]
- }]
- }
+ "containers": [
+ {
+ "image": settings.AWX_CONTAINER_GROUP_DEFAULT_IMAGE,
+ "tty": True,
+ "stdin": True,
+ "imagePullPolicy": "Always",
+ "args": ['sleep', 'infinity'],
+ }
+ ]
+ },
}
pod_spec_override = {}
if self.task and self.task.instance_group.pod_spec_override:
- pod_spec_override = parse_yaml_or_json(
- self.task.instance_group.pod_spec_override)
+ pod_spec_override = parse_yaml_or_json(self.task.instance_group.pod_spec_override)
pod_spec = {**default_pod_spec, **pod_spec_override}
if self.task:
pod_spec['metadata'] = deepmerge(
- pod_spec.get('metadata', {}),
- dict(name=self.pod_name,
- labels={
- 'ansible-awx': settings.INSTALL_UUID,
- 'ansible-awx-job-id': str(self.task.id)
- }))
+ pod_spec.get('metadata', {}), dict(name=self.pod_name, labels={'ansible-awx': settings.INSTALL_UUID, 'ansible-awx-job-id': str(self.task.id)})
+ )
pod_spec['spec']['containers'][0]['name'] = self.pod_name
return pod_spec
@@ -164,39 +139,16 @@ def generate_tmp_kube_config(credential, namespace):
"apiVersion": "v1",
"kind": "Config",
"preferences": {},
- "clusters": [
- {
- "name": host_input,
- "cluster": {
- "server": host_input
- }
- }
- ],
- "users": [
- {
- "name": host_input,
- "user": {
- "token": credential.get_input('bearer_token')
- }
- }
- ],
- "contexts": [
- {
- "name": host_input,
- "context": {
- "cluster": host_input,
- "user": host_input,
- "namespace": namespace
- }
- }
- ],
- "current-context": host_input
+ "clusters": [{"name": host_input, "cluster": {"server": host_input}}],
+ "users": [{"name": host_input, "user": {"token": credential.get_input('bearer_token')}}],
+ "contexts": [{"name": host_input, "context": {"cluster": host_input, "user": host_input, "namespace": namespace}}],
+ "current-context": host_input,
}
if credential.get_input('verify_ssl') and 'ssl_ca_cert' in credential.inputs:
config["clusters"][0]["cluster"]["certificate-authority-data"] = b64encode(
- credential.get_input('ssl_ca_cert').encode() # encode to bytes
- ).decode() # decode the base64 data into a str
+ credential.get_input('ssl_ca_cert').encode() # encode to bytes
+ ).decode() # decode the base64 data into a str
else:
config["clusters"][0]["cluster"]["insecure-skip-tls-verify"] = True
return config
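
Nothing in kubernetes.py changed semantically, which is the point: black only moves whitespace, parentheses, commas, and quotes, and in its default safe mode verifies that the reformatted file parses to an equivalent AST before writing it. That is why the deepmerge one-liner keeps its dict([...]) form rather than becoming a dict comprehension. A layout-only example:

# black would reflow this line but never rewrite dict([...]) into a
# comprehension; that would change the AST, which the safety check forbids.
merged = dict([(k, v) for k, v in [("a", 1), ("b", 2)]])
assert merged == {"a": 1, "b": 2}
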
diff --git a/awx/main/scheduler/task_manager.py b/awx/main/scheduler/task_manager.py
index 8d8df5eee2..0757132cf9 100644
--- a/awx/main/scheduler/task_manager.py
+++ b/awx/main/scheduler/task_manager.py
@@ -31,7 +31,7 @@ from awx.main.models import (
UnifiedJob,
WorkflowApproval,
WorkflowJob,
- WorkflowJobTemplate
+ WorkflowJobTemplate,
)
from awx.main.scheduler.dag_workflow import WorkflowDAG
from awx.main.utils.pglock import advisory_lock
@@ -44,10 +44,9 @@ from awx.main.utils import decrypt_field
logger = logging.getLogger('awx.main.scheduler')
-class TaskManager():
-
+class TaskManager:
def __init__(self):
- '''
+ """
Do NOT put database queries or other potentially expensive operations
in the task manager init. The task manager object is created every time a
job is created or transitions state, and every 30 seconds on each tower node.
@@ -55,7 +54,7 @@ class TaskManager():
The NOOP case is short-circuit logic. If the task manager realizes that another instance
of the task manager is already running, then it short-circuits and decides not to run.
- '''
+ """
self.graph = dict()
# start task limit indicates how many pending jobs can be started on this
# .schedule() run. Starting jobs is expensive, and there is code in place to reap
@@ -67,25 +66,27 @@ class TaskManager():
self.time_delta_job_explanation = timedelta(seconds=30)
def after_lock_init(self):
- '''
+ """
Init AFTER we know this instance of the task manager will run because the lock is acquired.
- '''
+ """
instances = Instance.objects.filter(~Q(hostname=None), enabled=True)
self.real_instances = {i.hostname: i for i in instances}
- instances_partial = [SimpleNamespace(obj=instance,
- remaining_capacity=instance.remaining_capacity,
- capacity=instance.capacity,
- jobs_running=instance.jobs_running,
- hostname=instance.hostname) for instance in instances]
+ instances_partial = [
+ SimpleNamespace(
+ obj=instance,
+ remaining_capacity=instance.remaining_capacity,
+ capacity=instance.capacity,
+ jobs_running=instance.jobs_running,
+ hostname=instance.hostname,
+ )
+ for instance in instances
+ ]
instances_by_hostname = {i.hostname: i for i in instances_partial}
for rampart_group in InstanceGroup.objects.prefetch_related('instances'):
- self.graph[rampart_group.name] = dict(graph=DependencyGraph(),
- capacity_total=rampart_group.capacity,
- consumed_capacity=0,
- instances=[])
+ self.graph[rampart_group.name] = dict(graph=DependencyGraph(), capacity_total=rampart_group.capacity, consumed_capacity=0, instances=[])
for instance in rampart_group.instances.filter(enabled=True).order_by('hostname'):
if instance.hostname in instances_by_hostname:
self.graph[rampart_group.name]['instances'].append(instances_by_hostname[instance.hostname])
@@ -108,21 +109,20 @@ class TaskManager():
def get_tasks(self, status_list=('pending', 'waiting', 'running')):
jobs = [j for j in Job.objects.filter(status__in=status_list).prefetch_related('instance_group')]
- inventory_updates_qs = InventoryUpdate.objects.filter(
- status__in=status_list).exclude(source='file').prefetch_related('inventory_source', 'instance_group')
+ inventory_updates_qs = (
+ InventoryUpdate.objects.filter(status__in=status_list).exclude(source='file').prefetch_related('inventory_source', 'instance_group')
+ )
inventory_updates = [i for i in inventory_updates_qs]
# Notice the job_type='check': we want to prevent implicit project updates from blocking our jobs.
project_updates = [p for p in ProjectUpdate.objects.filter(status__in=status_list, job_type='check').prefetch_related('instance_group')]
system_jobs = [s for s in SystemJob.objects.filter(status__in=status_list).prefetch_related('instance_group')]
ad_hoc_commands = [a for a in AdHocCommand.objects.filter(status__in=status_list).prefetch_related('instance_group')]
workflow_jobs = [w for w in WorkflowJob.objects.filter(status__in=status_list)]
- all_tasks = sorted(jobs + project_updates + inventory_updates + system_jobs + ad_hoc_commands + workflow_jobs,
- key=lambda task: task.created)
+ all_tasks = sorted(jobs + project_updates + inventory_updates + system_jobs + ad_hoc_commands + workflow_jobs, key=lambda task: task.created)
return all_tasks
def get_running_workflow_jobs(self):
- graph_workflow_jobs = [wf for wf in
- WorkflowJob.objects.filter(status='running')]
+ graph_workflow_jobs = [wf for wf in WorkflowJob.objects.filter(status='running')]
return graph_workflow_jobs
def get_inventory_source_tasks(self, all_sorted_tasks):
@@ -156,20 +156,26 @@ class TaskManager():
workflow_ancestors = job.get_ancestor_workflows()
if spawn_node.unified_job_template in set(workflow_ancestors):
can_start = False
- logger.info('Refusing to start recursive workflow-in-workflow id={}, wfjt={}, ancestors={}'.format(
- job.id, spawn_node.unified_job_template.pk, [wa.pk for wa in workflow_ancestors]))
+ logger.info(
+ 'Refusing to start recursive workflow-in-workflow id={}, wfjt={}, ancestors={}'.format(
+ job.id, spawn_node.unified_job_template.pk, [wa.pk for wa in workflow_ancestors]
+ )
+ )
display_list = [spawn_node.unified_job_template] + workflow_ancestors
job.job_explanation = gettext_noop(
- "Workflow Job spawned from workflow could not start because it "
- "would result in recursion (spawn order, most recent first: {})"
+ "Workflow Job spawned from workflow could not start because it " "would result in recursion (spawn order, most recent first: {})"
).format(', '.join(['<{}>'.format(tmp) for tmp in display_list]))
else:
- logger.debug('Starting workflow-in-workflow id={}, wfjt={}, ancestors={}'.format(
- job.id, spawn_node.unified_job_template.pk, [wa.pk for wa in workflow_ancestors]))
+ logger.debug(
+ 'Starting workflow-in-workflow id={}, wfjt={}, ancestors={}'.format(
+ job.id, spawn_node.unified_job_template.pk, [wa.pk for wa in workflow_ancestors]
+ )
+ )
if not job._resources_sufficient_for_launch():
can_start = False
- job.job_explanation = gettext_noop("Job spawned from workflow could not start because it "
- "was missing a related resource such as project or inventory")
+ job.job_explanation = gettext_noop(
+ "Job spawned from workflow could not start because it " "was missing a related resource such as project or inventory"
+ )
if can_start:
if workflow_job.start_args:
start_args = json.loads(decrypt_field(workflow_job, 'start_args'))
@@ -177,15 +183,16 @@ class TaskManager():
start_args = {}
can_start = job.signal_start(**start_args)
if not can_start:
- job.job_explanation = gettext_noop("Job spawned from workflow could not start because it "
- "was not in the right state or required manual credentials")
+ job.job_explanation = gettext_noop(
+ "Job spawned from workflow could not start because it " "was not in the right state or required manual credentials"
+ )
if not can_start:
job.status = 'failed'
job.save(update_fields=['status', 'job_explanation'])
job.websocket_emit_status('failed')
# TODO: should we emit a status on the socket here similar to tasks.py awx_periodic_scheduler() ?
- #emit_websocket_notification('/socket.io/jobs', '', dict(id=))
+ # emit_websocket_notification('/socket.io/jobs', '', dict(id=))
def process_finished_workflow_jobs(self, workflow_jobs):
result = []
@@ -251,8 +258,7 @@ class TaskManager():
try:
controller_node = rampart_group.choose_online_controller_node()
except IndexError:
- logger.debug("No controllers available in group {} to run {}".format(
- rampart_group.name, task.log_format))
+ logger.debug("No controllers available in group {} to run {}".format(rampart_group.name, task.log_format))
return
task.status = 'waiting'
@@ -275,14 +281,12 @@ class TaskManager():
# non-Ansible jobs on isolated instances run on controller
task.instance_group = rampart_group.controller
task.execution_node = random.choice(list(rampart_group.controller.instances.all().values_list('hostname', flat=True)))
- logger.debug('Submitting isolated {} to queue {} on node {}.'.format(
- task.log_format, task.instance_group.name, task.execution_node))
+ logger.debug('Submitting isolated {} to queue {} on node {}.'.format(task.log_format, task.instance_group.name, task.execution_node))
elif controller_node:
task.instance_group = rampart_group
task.execution_node = instance.hostname
task.controller_node = controller_node
- logger.debug('Submitting isolated {} to queue {} controlled by {}.'.format(
- task.log_format, task.execution_node, controller_node))
+ logger.debug('Submitting isolated {} to queue {} controlled by {}.'.format(task.log_format, task.execution_node, controller_node))
elif rampart_group.is_container_group:
# find one real, non-containerized instance with capacity to
# act as the controller for k8s API interaction
@@ -295,9 +299,7 @@ class TaskManager():
break
task.instance_group = rampart_group
if match is None:
- logger.warn(
- 'No available capacity to run containerized <{}>.'.format(task.log_format)
- )
+ logger.warn('No available capacity to run containerized <{}>.'.format(task.log_format))
else:
if task.supports_isolation():
task.controller_node = match.hostname
@@ -306,14 +308,12 @@ class TaskManager():
# so just pick *any* non-isolated, non-containerized host and use it
# as the execution node
task.execution_node = match.hostname
- logger.debug('Submitting containerized {} to queue {}.'.format(
- task.log_format, task.execution_node))
+ logger.debug('Submitting containerized {} to queue {}.'.format(task.log_format, task.execution_node))
else:
task.instance_group = rampart_group
if instance is not None:
task.execution_node = instance.hostname
- logger.debug('Submitting {} to <instance group, instance> <{},{}>.'.format(
- task.log_format, task.instance_group_id, task.execution_node))
+ logger.debug('Submitting {} to <instance group, instance> <{},{}>.'.format(task.log_format, task.instance_group_id, task.execution_node))
with disable_activity_stream():
task.celery_task_id = str(uuid.uuid4())
task.save()
@@ -330,15 +330,8 @@ class TaskManager():
opts,
queue=task.get_queue_name(),
uuid=task.celery_task_id,
- callbacks=[{
- 'task': handle_work_success.name,
- 'kwargs': {'task_actual': task_actual}
- }],
- errbacks=[{
- 'task': handle_work_error.name,
- 'args': [task.celery_task_id],
- 'kwargs': {'subtasks': [task_actual] + dependencies}
- }],
+ callbacks=[{'task': handle_work_success.name, 'kwargs': {'task_actual': task_actual}}],
+ errbacks=[{'task': handle_work_error.name, 'args': [task.celery_task_id], 'kwargs': {'subtasks': [task_actual] + dependencies}}],
)
task.websocket_emit_status(task.status) # adds to on_commit
@@ -350,32 +343,22 @@ class TaskManager():
self.graph[task.instance_group.name]['graph'].add_job(task)
def create_project_update(self, task):
- project_task = Project.objects.get(id=task.project_id).create_project_update(
- _eager_fields=dict(launch_type='dependency'))
+ project_task = Project.objects.get(id=task.project_id).create_project_update(_eager_fields=dict(launch_type='dependency'))
# Project created 1 second behind
project_task.created = task.created - timedelta(seconds=1)
project_task.status = 'pending'
project_task.save()
- logger.debug(
- 'Spawned {} as dependency of {}'.format(
- project_task.log_format, task.log_format
- )
- )
+ logger.debug('Spawned {} as dependency of {}'.format(project_task.log_format, task.log_format))
return project_task
def create_inventory_update(self, task, inventory_source_task):
- inventory_task = InventorySource.objects.get(id=inventory_source_task.id).create_inventory_update(
- _eager_fields=dict(launch_type='dependency'))
+ inventory_task = InventorySource.objects.get(id=inventory_source_task.id).create_inventory_update(_eager_fields=dict(launch_type='dependency'))
inventory_task.created = task.created - timedelta(seconds=2)
inventory_task.status = 'pending'
inventory_task.save()
- logger.debug(
- 'Spawned {} as dependency of {}'.format(
- inventory_task.log_format, task.log_format
- )
- )
+ logger.debug('Spawned {} as dependency of {}'.format(inventory_task.log_format, task.log_format))
# inventory_sources = self.get_inventory_source_tasks([task])
# self.process_inventory_sources(inventory_sources)
return inventory_task
@@ -409,8 +392,7 @@ class TaskManager():
timeout_seconds = timedelta(seconds=latest_inventory_update.inventory_source.update_cache_timeout)
if (latest_inventory_update.finished + timeout_seconds) < now:
return True
- if latest_inventory_update.inventory_source.update_on_launch is True and \
- latest_inventory_update.status in ['failed', 'canceled', 'error']:
+ if latest_inventory_update.inventory_source.update_on_launch is True and latest_inventory_update.status in ['failed', 'canceled', 'error']:
return True
return False
@@ -441,9 +423,11 @@ class TaskManager():
then consider the project update found. This is so we don't enter an infinite loop
of updating the project when cache timeout is 0.
'''
- if latest_project_update.project.scm_update_cache_timeout == 0 and \
- latest_project_update.launch_type == 'dependency' and \
- latest_project_update.created == job.created - timedelta(seconds=1):
+ if (
+ latest_project_update.project.scm_update_cache_timeout == 0
+ and latest_project_update.launch_type == 'dependency'
+ and latest_project_update.created == job.created - timedelta(seconds=1)
+ ):
return False
'''
Normal Cache Timeout Logic
@@ -491,7 +475,7 @@ class TaskManager():
if len(dependencies) > 0:
self.capture_chain_failure_dependencies(task, dependencies)
- UnifiedJob.objects.filter(pk__in = [task.pk for task in undeped_tasks]).update(dependencies_processed=True)
+ UnifiedJob.objects.filter(pk__in=[task.pk for task in undeped_tasks]).update(dependencies_processed=True)
return created_dependencies
def process_pending_tasks(self, pending_tasks):
@@ -506,7 +490,7 @@ class TaskManager():
job_explanation = gettext_noop(f"waiting for {blocked_by._meta.model_name}-{blocked_by.id} to finish")
if task.job_explanation != job_explanation:
if task.created < (tz_now() - self.time_delta_job_explanation):
- task.job_explanation = job_explanation
+ task.job_explanation = job_explanation
tasks_to_update_job_explanation.append(task)
continue
preferred_instance_groups = task.preferred_instance_groups
@@ -529,22 +513,26 @@ class TaskManager():
remaining_capacity = self.get_remaining_capacity(rampart_group.name)
if (
- task.task_impact > 0 and # project updates have a cost of zero
- not rampart_group.is_container_group and
- self.get_remaining_capacity(rampart_group.name) <= 0):
- logger.debug("Skipping group {}, remaining_capacity {} <= 0".format(
- rampart_group.name, remaining_capacity))
+ task.task_impact > 0
+ and not rampart_group.is_container_group # project updates have a cost of zero
+ and self.get_remaining_capacity(rampart_group.name) <= 0
+ ):
+ logger.debug("Skipping group {}, remaining_capacity {} <= 0".format(rampart_group.name, remaining_capacity))
continue
- execution_instance = InstanceGroup.fit_task_to_most_remaining_capacity_instance(task, self.graph[rampart_group.name]['instances']) or \
- InstanceGroup.find_largest_idle_instance(self.graph[rampart_group.name]['instances'])
+ execution_instance = InstanceGroup.fit_task_to_most_remaining_capacity_instance(
+ task, self.graph[rampart_group.name]['instances']
+ ) or InstanceGroup.find_largest_idle_instance(self.graph[rampart_group.name]['instances'])
if execution_instance or rampart_group.is_container_group:
if not rampart_group.is_container_group:
execution_instance.remaining_capacity = max(0, execution_instance.remaining_capacity - task.task_impact)
execution_instance.jobs_running += 1
- logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format(
- task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity))
+ logger.debug(
+ "Starting {} in group {} instance {} (remaining_capacity={})".format(
+ task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity
+ )
+ )
if execution_instance:
execution_instance = self.real_instances[execution_instance.hostname]
@@ -553,8 +541,11 @@ class TaskManager():
found_acceptable_queue = True
break
else:
- logger.debug("No instance available in group {} to run job {} w/ capacity requirement {}".format(
- rampart_group.name, task.log_format, task.task_impact))
+ logger.debug(
+ "No instance available in group {} to run job {} w/ capacity requirement {}".format(
+ rampart_group.name, task.log_format, task.task_impact
+ )
+ )
if not found_acceptable_queue:
task.log_lifecycle("needs_capacity")
job_explanation = gettext_noop("This job is not ready to start because there is not enough available capacity.")
@@ -576,9 +567,9 @@ class TaskManager():
if task.timeout == 0:
continue
if (now - task.created) >= approval_timeout_seconds:
- timeout_message = _(
- "The approval node {name} ({pk}) has expired after {timeout} seconds."
- ).format(name=task.name, pk=task.pk, timeout=task.timeout)
+ timeout_message = _("The approval node {name} ({pk}) has expired after {timeout} seconds.").format(
+ name=task.name, pk=task.pk, timeout=task.timeout
+ )
logger.warn(timeout_message)
task.timed_out = True
task.status = 'failed'
@@ -594,9 +585,7 @@ class TaskManager():
# elsewhere
for j in UnifiedJob.objects.filter(
status__in=['pending', 'waiting', 'running'],
- ).exclude(
- execution_node__in=Instance.objects.values_list('hostname', flat=True)
- ):
+ ).exclude(execution_node__in=Instance.objects.values_list('hostname', flat=True)):
if j.execution_node and not j.is_container_group_task:
logger.error(f'{j.execution_node} is not a registered instance; reaping {j.log_format}')
reap_job(j, 'failed')
@@ -605,13 +594,15 @@ class TaskManager():
self.graph = InstanceGroup.objects.capacity_values(tasks=tasks, graph=self.graph)
def consume_capacity(self, task, instance_group):
- logger.debug('{} consumed {} capacity units from {} with prior total of {}'.format(
- task.log_format, task.task_impact, instance_group,
- self.graph[instance_group]['consumed_capacity']))
+ logger.debug(
+ '{} consumed {} capacity units from {} with prior total of {}'.format(
+ task.log_format, task.task_impact, instance_group, self.graph[instance_group]['consumed_capacity']
+ )
+ )
self.graph[instance_group]['consumed_capacity'] += task.task_impact
def get_remaining_capacity(self, instance_group):
- return (self.graph[instance_group]['capacity_total'] - self.graph[instance_group]['consumed_capacity'])
+ return self.graph[instance_group]['capacity_total'] - self.graph[instance_group]['consumed_capacity']
def process_tasks(self, all_sorted_tasks):
running_tasks = [t for t in all_sorted_tasks if t.status in ['waiting', 'running']]
diff --git a/awx/main/scheduler/tasks.py b/awx/main/scheduler/tasks.py
index 7da6a305a9..b35e542d3a 100644
--- a/awx/main/scheduler/tasks.py
+++ b/awx/main/scheduler/tasks.py
@@ -1,4 +1,3 @@
-
# Python
import logging
diff --git a/awx/main/signals.py b/awx/main/signals.py
index ac7a3d2301..82cd8f43a7 100644
--- a/awx/main/signals.py
+++ b/awx/main/signals.py
@@ -30,11 +30,29 @@ from crum.signals import current_user_getter
# AWX
from awx.main.models import (
- ActivityStream, Group, Host, InstanceGroup, Inventory, InventorySource,
- Job, JobHostSummary, JobTemplate, OAuth2AccessToken, Organization, Project,
- Role, SystemJob, SystemJobTemplate, UnifiedJob, UnifiedJobTemplate, User,
- UserSessionMembership, WorkflowJobTemplateNode, WorkflowApproval,
- WorkflowApprovalTemplate, ROLE_SINGLETON_SYSTEM_ADMINISTRATOR
+ ActivityStream,
+ Group,
+ Host,
+ InstanceGroup,
+ Inventory,
+ InventorySource,
+ Job,
+ JobHostSummary,
+ JobTemplate,
+ OAuth2AccessToken,
+ Organization,
+ Project,
+ Role,
+ SystemJob,
+ SystemJobTemplate,
+ UnifiedJob,
+ UnifiedJobTemplate,
+ User,
+ UserSessionMembership,
+ WorkflowJobTemplateNode,
+ WorkflowApproval,
+ WorkflowApprovalTemplate,
+ ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
)
from awx.main.constants import CENSOR_VALUE
from awx.main.utils import model_instance_diff, model_to_dict, camelcase_to_underscore, get_current_apps
@@ -76,23 +94,19 @@ def emit_update_inventory_on_created_or_deleted(sender, **kwargs):
if getattr(_inventory_updates, 'is_updating', False):
return
instance = kwargs['instance']
- if ('created' in kwargs and kwargs['created']) or \
- kwargs['signal'] == post_delete:
+ if ('created' in kwargs and kwargs['created']) or kwargs['signal'] == post_delete:
pass
else:
return
sender_name = str(sender._meta.verbose_name)
- logger.debug("%s created or deleted, updating inventory computed fields: %r %r",
- sender_name, sender, kwargs)
+ logger.debug("%s created or deleted, updating inventory computed fields: %r %r", sender_name, sender, kwargs)
try:
inventory = instance.inventory
except Inventory.DoesNotExist:
pass
else:
if inventory is not None:
- connection.on_commit(
- lambda: update_inventory_computed_fields.delay(inventory.id)
- )
+ connection.on_commit(lambda: update_inventory_computed_fields.delay(inventory.id))
def rebuild_role_ancestor_list(reverse, model, instance, pk_set, action, **kwargs):
@@ -177,17 +191,16 @@ def cleanup_detached_labels_on_deleted_parent(sender, instance, **kwargs):
def save_related_job_templates(sender, instance, **kwargs):
- '''save_related_job_templates loops through all of the
+ """save_related_job_templates loops through all of the
job templates that use an Inventory that have had their
Organization updated. This triggers the rebuilding of the RBAC hierarchy
and ensures the proper access restrictions.
- '''
+ """
if sender is not Inventory:
raise ValueError('This signal callback is only intended for use with Project or Inventory')
update_fields = kwargs.get('update_fields', None)
- if ((update_fields and not ('organization' in update_fields or 'organization_id' in update_fields)) or
- kwargs.get('created', False)):
+ if (update_fields and not ('organization' in update_fields or 'organization_id' in update_fields)) or kwargs.get('created', False):
return
if instance._prior_values_store.get('organization_id') != instance.organization_id:
@@ -195,9 +208,11 @@ def save_related_job_templates(sender, instance, **kwargs):
for jt in jtq:
parents_added, parents_removed = update_role_parentage_for_instance(jt)
if parents_added or parents_removed:
- logger.info('Permissions on JT {} changed due to inventory {} organization change from {} to {}.'.format(
- jt.pk, instance.pk, instance._prior_values_store.get('organization_id'), instance.organization_id
- ))
+ logger.info(
+ 'Permissions on JT {} changed due to inventory {} organization change from {} to {}.'.format(
+ jt.pk, instance.pk, instance._prior_values_store.get('organization_id'), instance.organization_id
+ )
+ )
def connect_computed_field_signals():
@@ -244,19 +259,17 @@ def migrate_children_from_deleted_group_to_parent_groups(sender, **kwargs):
parents_pks = getattr(instance, '_saved_parents_pks', [])
hosts_pks = getattr(instance, '_saved_hosts_pks', [])
children_pks = getattr(instance, '_saved_children_pks', [])
- is_updating = getattr(_inventory_updates, 'is_updating', False)
+ is_updating = getattr(_inventory_updates, 'is_updating', False)
with ignore_inventory_group_removal():
with ignore_inventory_computed_fields():
if parents_pks:
for parent_group in Group.objects.filter(pk__in=parents_pks):
for child_host in Host.objects.filter(pk__in=hosts_pks):
- logger.debug('adding host %s to parent %s after group deletion',
- child_host, parent_group)
+ logger.debug('adding host %s to parent %s after group deletion', child_host, parent_group)
parent_group.hosts.add(child_host)
for child_group in Group.objects.filter(pk__in=children_pks):
- logger.debug('adding group %s to parent %s after group deletion',
- child_group, parent_group)
+ logger.debug('adding group %s to parent %s after group deletion', child_group, parent_group)
parent_group.children.add(child_group)
inventory_pk = getattr(instance, '_saved_inventory_pk', None)
if inventory_pk and not is_updating:
@@ -296,7 +309,7 @@ def _update_host_last_jhs(host):
@receiver(pre_delete, sender=Job)
def save_host_pks_before_job_delete(sender, **kwargs):
instance = kwargs['instance']
- hosts_qs = Host.objects.filter( last_job__pk=instance.pk)
+ hosts_qs = Host.objects.filter(last_job__pk=instance.pk)
instance._saved_hosts_pks = set(hosts_qs.values_list('pk', flat=True))
@@ -324,9 +337,9 @@ activity_stream_enabled = ActivityStreamEnabled()
@contextlib.contextmanager
def disable_activity_stream():
- '''
+ """
Context manager to disable capturing activity stream changes.
- '''
+ """
try:
previous_value = activity_stream_enabled.enabled
activity_stream_enabled.enabled = False
@@ -355,6 +368,7 @@ def model_serializer_mapping():
from awx.conf.models import Setting
from awx.conf.serializers import SettingSerializer
+
return {
Setting: SettingSerializer,
models.User: serializers.UserActivityStreamSerializer,
@@ -393,14 +407,23 @@ def emit_activity_stream_change(instance):
# could be really noisy
return
from awx.api.serializers import ActivityStreamSerializer
+
actor = None
if instance.actor:
actor = instance.actor.username
summary_fields = ActivityStreamSerializer(instance).get_summary_fields(instance)
- analytics_logger.info('Activity Stream update entry for %s' % str(instance.object1),
- extra=dict(changes=instance.changes, relationship=instance.object_relationship_type,
- actor=actor, operation=instance.operation,
- object1=instance.object1, object2=instance.object2, summary_fields=summary_fields))
+ analytics_logger.info(
+ 'Activity Stream update entry for %s' % str(instance.object1),
+ extra=dict(
+ changes=instance.changes,
+ relationship=instance.object_relationship_type,
+ actor=actor,
+ operation=instance.operation,
+ object1=instance.object1,
+ object2=instance.object2,
+ summary_fields=summary_fields,
+ ),
+ )
def activity_stream_create(sender, instance, created, **kwargs):
@@ -412,21 +435,14 @@ def activity_stream_create(sender, instance, created, **kwargs):
changes = model_to_dict(instance, model_serializer_mapping())
# Special case where Job survey password variables need to be hidden
if type(instance) == Job:
- changes['credentials'] = [
- '{} ({})'.format(c.name, c.id)
- for c in instance.credentials.iterator()
- ]
+ changes['credentials'] = ['{} ({})'.format(c.name, c.id) for c in instance.credentials.iterator()]
changes['labels'] = [label.name for label in instance.labels.iterator()]
if 'extra_vars' in changes:
changes['extra_vars'] = instance.display_extra_vars()
if type(instance) == OAuth2AccessToken:
changes['token'] = CENSOR_VALUE
- activity_entry = get_activity_stream_class()(
- operation='create',
- object1=object1,
- changes=json.dumps(changes),
- actor=get_current_user_or_none())
- #TODO: Weird situation where cascade SETNULL doesn't work
+ activity_entry = get_activity_stream_class()(operation='create', object1=object1, changes=json.dumps(changes), actor=get_current_user_or_none())
+ # TODO: Weird situation where cascade SETNULL doesn't work
# it might actually be a good idea to remove all of these FK references since
# we don't really use them anyway.
if instance._meta.model_name != 'setting': # Is not conf.Setting instance
@@ -435,9 +451,7 @@ def activity_stream_create(sender, instance, created, **kwargs):
else:
activity_entry.setting = conf_to_dict(instance)
activity_entry.save()
- connection.on_commit(
- lambda: emit_activity_stream_change(activity_entry)
- )
+ connection.on_commit(lambda: emit_activity_stream_change(activity_entry))
def activity_stream_update(sender, instance, **kwargs):
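The joined comprehension in this hunk shows the inverse rule: anything that fits within the configured width is collapsed back onto one line, unless a pre-existing trailing comma pins the exploded layout (black's "magic trailing comma"). A sketch:

    import black

    collapsed = black.format_str("xs = [\n    1,\n    2\n]\n", mode=black.Mode())
    pinned = black.format_str("xs = [\n    1,\n    2,\n]\n", mode=black.Mode())
    print(collapsed, end="")  # xs = [1, 2]
    print(pinned, end="")     # kept one element per line because of the trailing comma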
@@ -458,20 +472,14 @@ def activity_stream_update(sender, instance, **kwargs):
if getattr(_type, '_deferred', False):
return
object1 = camelcase_to_underscore(instance.__class__.__name__)
- activity_entry = get_activity_stream_class()(
- operation='update',
- object1=object1,
- changes=json.dumps(changes),
- actor=get_current_user_or_none())
+ activity_entry = get_activity_stream_class()(operation='update', object1=object1, changes=json.dumps(changes), actor=get_current_user_or_none())
if instance._meta.model_name != 'setting': # Is not conf.Setting instance
activity_entry.save()
getattr(activity_entry, object1).add(instance.pk)
else:
activity_entry.setting = conf_to_dict(instance)
activity_entry.save()
- connection.on_commit(
- lambda: emit_activity_stream_change(activity_entry)
- )
+ connection.on_commit(lambda: emit_activity_stream_change(activity_entry))
def activity_stream_delete(sender, instance, **kwargs):
@@ -486,10 +494,7 @@ def activity_stream_delete(sender, instance, **kwargs):
if not kwargs.get('inventory_delete_flag', False):
return
# Add additional data about child hosts / groups that will be deleted
- changes['coalesced_data'] = {
- 'hosts_deleted': instance.hosts.count(),
- 'groups_deleted': instance.groups.count()
- }
+ changes['coalesced_data'] = {'hosts_deleted': instance.hosts.count(), 'groups_deleted': instance.groups.count()}
elif isinstance(instance, (Host, Group)) and instance.inventory.pending_deletion:
return # accounted for by inventory entry, above
_type = type(instance)
@@ -499,15 +504,9 @@ def activity_stream_delete(sender, instance, **kwargs):
object1 = camelcase_to_underscore(instance.__class__.__name__)
if type(instance) == OAuth2AccessToken:
changes['token'] = CENSOR_VALUE
- activity_entry = get_activity_stream_class()(
- operation='delete',
- changes=json.dumps(changes),
- object1=object1,
- actor=get_current_user_or_none())
+ activity_entry = get_activity_stream_class()(operation='delete', changes=json.dumps(changes), object1=object1, actor=get_current_user_or_none())
activity_entry.save()
- connection.on_commit(
- lambda: emit_activity_stream_change(activity_entry)
- )
+ connection.on_commit(lambda: emit_activity_stream_change(activity_entry))
def activity_stream_associate(sender, instance, **kwargs):
@@ -524,7 +523,7 @@ def activity_stream_associate(sender, instance, **kwargs):
_type = type(instance)
if getattr(_type, '_deferred', False):
return
- object1=camelcase_to_underscore(obj1.__class__.__name__)
+ object1 = camelcase_to_underscore(obj1.__class__.__name__)
obj_rel = sender.__module__ + "." + sender.__name__
for entity_acted in kwargs['pk_set']:
@@ -550,17 +549,13 @@ def activity_stream_associate(sender, instance, **kwargs):
if isinstance(obj1, SystemJob) or isinstance(obj2_actual, SystemJob):
continue
activity_entry = get_activity_stream_class()(
- changes=json.dumps(dict(object1=object1,
- object1_pk=obj1.pk,
- object2=object2,
- object2_pk=obj2_id,
- action=action,
- relationship=obj_rel)),
+ changes=json.dumps(dict(object1=object1, object1_pk=obj1.pk, object2=object2, object2_pk=obj2_id, action=action, relationship=obj_rel)),
operation=action,
object1=object1,
object2=object2,
object_relationship_type=obj_rel,
- actor=get_current_user_or_none())
+ actor=get_current_user_or_none(),
+ )
activity_entry.save()
getattr(activity_entry, object1).add(obj1.pk)
getattr(activity_entry, object2).add(obj2_actual.pk)
@@ -569,9 +564,7 @@ def activity_stream_associate(sender, instance, **kwargs):
if 'role' in kwargs:
role = kwargs['role']
if role.content_object is not None:
- obj_rel = '.'.join([role.content_object.__module__,
- role.content_object.__class__.__name__,
- role.role_field])
+ obj_rel = '.'.join([role.content_object.__module__, role.content_object.__class__.__name__, role.role_field])
# If the m2m is from the User side we need to
# set the content_object of the Role for our entry.
@@ -581,18 +574,16 @@ def activity_stream_associate(sender, instance, **kwargs):
activity_entry.role.add(role)
activity_entry.object_relationship_type = obj_rel
activity_entry.save()
- connection.on_commit(
- lambda: emit_activity_stream_change(activity_entry)
- )
+ connection.on_commit(lambda: emit_activity_stream_change(activity_entry))
@receiver(current_user_getter)
def get_current_user_from_drf_request(sender, **kwargs):
- '''
+ """
Provide a signal handler to return the current user from the current
request when using Django REST Framework. Requires that the APIView set
drf_request on the underlying Django Request object.
- '''
+ """
request = get_current_request()
drf_request_user = getattr(request, 'drf_request_user', False)
return (drf_request_user, 0)
@@ -651,10 +642,7 @@ def save_user_session_membership(sender, **kwargs):
Session.objects.filter(session_key__in=[membership.session_id]).delete()
membership.delete()
if len(expired):
- consumers.emit_channel_notification(
- 'control-limit_reached_{}'.format(user_id),
- dict(group_name='control', reason='limit_reached')
- )
+ consumers.emit_channel_notification('control-limit_reached_{}'.format(user_id), dict(group_name='control', reason='limit_reached'))
@receiver(post_save, sender=OAuth2AccessToken)
diff --git a/awx/main/tasks.py b/awx/main/tasks.py
index ca1a84827a..c1989fd8a9 100644
--- a/awx/main/tasks.py
+++ b/awx/main/tasks.py
@@ -60,12 +60,26 @@ from awx.main.access import access_registry
from awx.main.analytics import all_collectors, expensive_collectors
from awx.main.redact import UriCleaner
from awx.main.models import (
- Schedule, TowerScheduleState, Instance, InstanceGroup,
- UnifiedJob, Notification,
- Inventory, InventorySource, SmartInventoryMembership,
- Job, AdHocCommand, ProjectUpdate, InventoryUpdate, SystemJob,
- JobEvent, ProjectUpdateEvent, InventoryUpdateEvent, AdHocCommandEvent, SystemJobEvent,
- build_safe_env
+ Schedule,
+ TowerScheduleState,
+ Instance,
+ InstanceGroup,
+ UnifiedJob,
+ Notification,
+ Inventory,
+ InventorySource,
+ SmartInventoryMembership,
+ Job,
+ AdHocCommand,
+ ProjectUpdate,
+ InventoryUpdate,
+ SystemJob,
+ JobEvent,
+ ProjectUpdateEvent,
+ InventoryUpdateEvent,
+ AdHocCommandEvent,
+ SystemJobEvent,
+ build_safe_env,
)
from awx.main.constants import ACTIVE_STATES
from awx.main.exceptions import AwxTaskError, PostRunError
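The exploded model import above is what black emits once an import list no longer fits on one line: one name per line, closed with a trailing comma. Besides satisfying the width limit, this layout keeps later diffs to one line per added or removed name. An illustrative example (the module and names are stand-ins, not from AWX):

    # With the trailing comma in place, black keeps this block exploded
    # even if enough names are removed for it to fit on one line.
    from collections import (
        ChainMap,
        Counter,
        OrderedDict,
        defaultdict,
        deque,
        namedtuple,
    )

    print(Counter("example"))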
@@ -73,12 +87,16 @@ from awx.main.queue import CallbackQueueDispatcher
from awx.main.isolated import manager as isolated_manager
from awx.main.dispatch.publish import task
from awx.main.dispatch import get_local_queuename, reaper
-from awx.main.utils import (update_scm_url,
- ignore_inventory_computed_fields,
- ignore_inventory_group_removal, extract_ansible_vars, schedule_task_manager,
- get_awx_version,
- deepmerge,
- parse_yaml_or_json)
+from awx.main.utils import (
+ update_scm_url,
+ ignore_inventory_computed_fields,
+ ignore_inventory_group_removal,
+ extract_ansible_vars,
+ schedule_task_manager,
+ get_awx_version,
+ deepmerge,
+ parse_yaml_or_json,
+)
from awx.main.utils.ansible import read_ansible_config
from awx.main.utils.external_logging import reconfigure_rsyslog
from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja
@@ -93,10 +111,20 @@ from awx.conf.license import get_license
from rest_framework.exceptions import PermissionDenied
-__all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate',
- 'RunAdHocCommand', 'handle_work_error', 'handle_work_success', 'apply_cluster_membership_policies',
- 'update_inventory_computed_fields', 'update_host_smart_inventory_memberships',
- 'send_notifications', 'purge_old_stdout_files']
+__all__ = [
+ 'RunJob',
+ 'RunSystemJob',
+ 'RunProjectUpdate',
+ 'RunInventoryUpdate',
+ 'RunAdHocCommand',
+ 'handle_work_error',
+ 'handle_work_success',
+ 'apply_cluster_membership_policies',
+ 'update_inventory_computed_fields',
+ 'update_host_smart_inventory_memberships',
+ 'send_notifications',
+ 'purge_old_stdout_files',
+]
HIDDEN_PASSWORD = '**********'
@@ -110,7 +138,6 @@ logger = logging.getLogger('awx.main.tasks')
class InvalidVirtualenvError(Exception):
-
def __init__(self, message):
self.message = message
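The blank line deleted under class InvalidVirtualenvError reflects black's empty-line rules: blank lines between a class or def opener and the first statement of its body are dropped. A sketch (InvalidConfigError is a made-up name):

    import black

    src = "class InvalidConfigError(Exception):\n\n    def __init__(self, message):\n        self.message = message\n"
    print(black.format_str(src, mode=black.Mode()), end="")
    # The empty line between the class statement and __init__ is removed.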
@@ -157,8 +184,7 @@ def inform_cluster_of_shutdown():
reaper.reap(this_inst)
except Exception:
logger.exception('failed to reap jobs for {}'.format(this_inst.hostname))
- logger.warning('Normal shutdown signal for instance {}, '
- 'removed self from capacity pool.'.format(this_inst.hostname))
+ logger.warning('Normal shutdown signal for instance {}, ' 'removed self from capacity pool.'.format(this_inst.hostname))
except Exception:
logger.exception('Encountered problem with normal shutdown signal.')
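Black joined the two physical lines of the logger.warning(...) call above but left the implicit string concatenation alone, which is why one line now holds two adjacent literals. Adjacent string literals form a single parse-time atom, so the trailing .format() applies to the joined message, not just the second piece. A small demonstration ('node-1' is a placeholder hostname):

    message = 'Normal shutdown signal for instance {}, ' 'removed self from capacity pool.'.format('node-1')
    print(message)
    # Normal shutdown signal for instance node-1, removed self from capacity pool.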
@@ -191,9 +217,7 @@ def apply_cluster_membership_policies():
# Process policy instance list first, these will represent manually managed memberships
instance_hostnames_map = {inst.hostname: inst for inst in all_instances}
for ig in all_groups:
- group_actual = Group(obj=ig, instances=[], prior_instances=[
- instance.pk for instance in ig.instances.all() # obtained in prefetch
- ])
+ group_actual = Group(obj=ig, instances=[], prior_instances=[instance.pk for instance in ig.instances.all()]) # obtained in prefetch
for hostname in ig.policy_instance_list:
if hostname not in instance_hostnames_map:
logger.info("Unknown instance {} in {} policy list".format(hostname, ig.name))
@@ -216,8 +240,7 @@ def apply_cluster_membership_policies():
# Process Instance minimum policies next, since it represents a concrete lower bound to the
# number of instances to make available to instance groups
actual_instances = [Node(obj=i, groups=[]) for i in considered_instances if i.managed_by_policy]
- logger.debug("Total non-isolated instances:{} available for policy: {}".format(
- total_instances, len(actual_instances)))
+ logger.debug("Total non-isolated instances:{} available for policy: {}".format(total_instances, len(actual_instances)))
for g in sorted(actual_groups, key=lambda x: len(x.instances)):
policy_min_added = []
for i in sorted(actual_instances, key=lambda x: len(x.groups)):
@@ -286,10 +309,7 @@ def handle_setting_changes(setting_keys):
logger.debug('cache delete_many(%r)', cache_keys)
cache.delete_many(cache_keys)
- if any([
- setting.startswith('LOG_AGGREGATOR')
- for setting in setting_keys
- ]):
+ if any([setting.startswith('LOG_AGGREGATOR') for setting in setting_keys]):
reconfigure_rsyslog()
@@ -317,11 +337,7 @@ def profile_sql(threshold=1, minutes=1):
cache.delete('awx-profile-sql-threshold')
logger.error('SQL PROFILING DISABLED')
else:
- cache.set(
- 'awx-profile-sql-threshold',
- threshold,
- timeout=minutes * 60
- )
+ cache.set('awx-profile-sql-threshold', threshold, timeout=minutes * 60)
logger.error('SQL QUERIES >={}s ENABLED FOR {} MINUTE(S)'.format(threshold, minutes))
@@ -369,7 +385,7 @@ def gather_analytics():
for tgz in tgzfiles:
analytics.ship(tgz)
except Exception:
- logger.exception('Error gathering and sending analytics for {} to {}.'.format(since,until))
+ logger.exception('Error gathering and sending analytics for {} to {}.'.format(since, until))
return False
finally:
if tgzfiles:
@@ -381,6 +397,7 @@ def gather_analytics():
from awx.conf.models import Setting
from rest_framework.fields import DateTimeField
from awx.main.signals import disable_activity_stream
+
if not settings.INSIGHTS_TRACKING_STATE:
return
if not (settings.AUTOMATION_ANALYTICS_URL and settings.REDHAT_USERNAME and settings.REDHAT_PASSWORD):
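The blank line added after the function-local imports above is also black's doing: an import block is separated from the statements that follow it by at least one blank line, inside functions as well as at module level. A sketch (gather is a made-up function):

    import black

    src = "def gather():\n    from datetime import timedelta\n    limit = timedelta(hours=4)\n    return limit\n"
    print(black.format_str(src, mode=black.Mode()), end="")
    # A blank line appears between the import and the first statement.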
@@ -411,8 +428,8 @@ def gather_analytics():
start = since
until = None
while start < gather_time:
- until = start + timedelta(hours = 4)
- if (until > gather_time):
+ until = start + timedelta(hours=4)
+ if until > gather_time:
until = gather_time
if not _gather_and_ship(incremental_collectors, since=start, until=until):
break
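Two smaller normalizations show up in this hunk: spaces around = are dropped for keyword arguments (timedelta(hours=4)), and redundant parentheses around the if condition are removed. A sketch reproducing both; black only parses the source, so the undefined names are harmless:

    import black

    src = "until = start + timedelta(hours = 4)\nif (until > gather_time):\n    until = gather_time\n"
    print(black.format_str(src, mode=black.Mode()), end="")
    # until = start + timedelta(hours=4)
    # if until > gather_time:
    #     until = gather_time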
@@ -427,9 +444,9 @@ def gather_analytics():
def purge_old_stdout_files():
nowtime = time.time()
for f in os.listdir(settings.JOBOUTPUT_ROOT):
- if os.path.getctime(os.path.join(settings.JOBOUTPUT_ROOT,f)) < nowtime - settings.LOCAL_STDOUT_EXPIRE_TIME:
- os.unlink(os.path.join(settings.JOBOUTPUT_ROOT,f))
- logger.debug("Removing {}".format(os.path.join(settings.JOBOUTPUT_ROOT,f)))
+ if os.path.getctime(os.path.join(settings.JOBOUTPUT_ROOT, f)) < nowtime - settings.LOCAL_STDOUT_EXPIRE_TIME:
+ os.unlink(os.path.join(settings.JOBOUTPUT_ROOT, f))
+ logger.debug("Removing {}".format(os.path.join(settings.JOBOUTPUT_ROOT, f)))
@task(queue=get_local_queuename)
@@ -464,12 +481,11 @@ def cluster_node_heartbeat():
if other_inst.version == "":
continue
if Version(other_inst.version.split('-', 1)[0]) > Version(awx_application_version.split('-', 1)[0]) and not settings.DEBUG:
- logger.error("Host {} reports version {}, but this node {} is at {}, shutting down".format(
- other_inst.hostname,
- other_inst.version,
- this_inst.hostname,
- this_inst.version
- ))
+ logger.error(
+ "Host {} reports version {}, but this node {} is at {}, shutting down".format(
+ other_inst.hostname, other_inst.version, this_inst.hostname, this_inst.version
+ )
+ )
# Shutdown signal will set the capacity to zero to ensure no Jobs get added to this instance.
# The heartbeat task will reset the capacity to the system capacity after upgrade.
stop_local_services(communicate=False)
@@ -490,8 +506,7 @@ def cluster_node_heartbeat():
if other_inst.capacity != 0 and not settings.AWX_AUTO_DEPROVISION_INSTANCES:
other_inst.capacity = 0
other_inst.save(update_fields=['capacity'])
- logger.error("Host {} last checked in at {}, marked as lost.".format(
- other_inst.hostname, other_inst.modified))
+ logger.error("Host {} last checked in at {}, marked as lost.".format(other_inst.hostname, other_inst.modified))
elif settings.AWX_AUTO_DEPROVISION_INSTANCES:
deprovision_hostname = other_inst.hostname
other_inst.delete()
@@ -505,21 +520,17 @@ def cluster_node_heartbeat():
@task(queue=get_local_queuename)
def awx_k8s_reaper():
- from awx.main.scheduler.kubernetes import PodManager # prevent circular import
+ from awx.main.scheduler.kubernetes import PodManager # prevent circular import
+
for group in InstanceGroup.objects.filter(credential__isnull=False).iterator():
if group.is_container_group:
logger.debug("Checking for orphaned k8s pods for {}.".format(group))
- for job in UnifiedJob.objects.filter(
- pk__in=list(PodManager.list_active_jobs(group))
- ).exclude(status__in=ACTIVE_STATES):
+ for job in UnifiedJob.objects.filter(pk__in=list(PodManager.list_active_jobs(group))).exclude(status__in=ACTIVE_STATES):
logger.debug('{} is no longer active, reaping orphaned k8s pod'.format(job.log_format))
try:
PodManager(job).delete()
except Exception:
- logger.exception("Failed to delete orphaned pod {} from {}".format(
- job.log_format, group
- ))
-
+ logger.exception("Failed to delete orphaned pod {} from {}".format(job.log_format, group))
@task(queue=get_local_queuename)
@@ -533,11 +544,7 @@ def awx_isolated_heartbeat():
isolated_instance_qs = Instance.objects.filter(
rampart_groups__controller__instances__hostname=local_hostname,
)
- isolated_instance_qs = isolated_instance_qs.filter(
- last_isolated_check__lt=accept_before
- ) | isolated_instance_qs.filter(
- last_isolated_check=None
- )
+ isolated_instance_qs = isolated_instance_qs.filter(last_isolated_check__lt=accept_before) | isolated_instance_qs.filter(last_isolated_check=None)
# Fast pass of isolated instances, claiming the nodes to update
with transaction.atomic():
for isolated_instance in isolated_instance_qs:
@@ -578,15 +585,14 @@ def awx_periodic_scheduler():
for schedule in schedules:
template = schedule.unified_job_template
- schedule.update_computed_fields() # To update next_run timestamp.
+ schedule.update_computed_fields() # To update next_run timestamp.
if template.cache_timeout_blocked:
logger.warn("Cache timeout is in the future, bypassing schedule for template %s" % str(template.id))
continue
try:
job_kwargs = schedule.get_job_kwargs()
new_unified_job = schedule.unified_job_template.create_unified_job(**job_kwargs)
- logger.debug('Spawned {} from schedule {}-{}.'.format(
- new_unified_job.log_format, schedule.name, schedule.pk))
+ logger.debug('Spawned {} from schedule {}-{}.'.format(new_unified_job.log_format, schedule.name, schedule.pk))
if invalid_license:
new_unified_job.status = 'failed'
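The comment shifts in this hunk come from black's comment spacing rules: an inline comment gets at least two spaces before the # and one space after it. A sketch:

    import black

    src = "schedule.update_computed_fields() # To update next_run timestamp.\n"
    print(black.format_str(src, mode=black.Mode()), end="")
    # schedule.update_computed_fields()  # To update next_run timestamp.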
@@ -600,8 +606,10 @@ def awx_periodic_scheduler():
continue
if not can_start:
new_unified_job.status = 'failed'
- new_unified_job.job_explanation = gettext_noop("Scheduled job could not start because it \
- was not in the right state or required manual credentials")
+ new_unified_job.job_explanation = gettext_noop(
+ "Scheduled job could not start because it \
+ was not in the right state or required manual credentials"
+ )
new_unified_job.save(update_fields=['status', 'job_explanation'])
new_unified_job.websocket_emit_status("failed")
emit_channel_notification('schedules-changed', dict(id=schedule.id, group_name="schedules"))
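The awkward-looking job_explanation literal survives because black never rewrites string contents: the backslash continuation, and the leading whitespace of the continued line that it splices into the string, are kept verbatim; only the call around the literal was wrapped. A demonstration of what such a string actually contains:

    explanation = "Scheduled job could not start because it \
        was not in the right state"
    print(repr(explanation))
    # Note the run of embedded spaces before 'was', copied in by the continuation.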
@@ -647,8 +655,11 @@ def handle_work_error(task_id, *args, **kwargs):
instance.status = 'failed'
instance.failed = True
if not instance.job_explanation:
- instance.job_explanation = 'Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % \
- (first_instance_type, first_instance.name, first_instance.id)
+ instance.job_explanation = 'Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % (
+ first_instance_type,
+ first_instance.name,
+ first_instance.id,
+ )
instance.save()
instance.websocket_emit_status("failed")
@@ -682,10 +693,10 @@ def handle_success_and_failure_notifications(job_id):
@task(queue=get_local_queuename)
def update_inventory_computed_fields(inventory_id):
- '''
+ """
Signal handler and wrapper around inventory.update_computed_fields to
prevent unnecessary recursive calls.
- '''
+ """
i = Inventory.objects.filter(id=inventory_id)
if not i.exists():
logger.error("Update Inventory Computed Fields failed due to missing inventory: " + str(inventory_id))
@@ -710,14 +721,13 @@ def update_smart_memberships_for_inventory(smart_inventory):
if removals:
SmartInventoryMembership.objects.filter(inventory=smart_inventory, host_id__in=removals).delete()
if additions:
- add_for_inventory = [
- SmartInventoryMembership(inventory_id=smart_inventory.id, host_id=host_id)
- for host_id in additions
- ]
+ add_for_inventory = [SmartInventoryMembership(inventory_id=smart_inventory.id, host_id=host_id) for host_id in additions]
SmartInventoryMembership.objects.bulk_create(add_for_inventory, ignore_conflicts=True)
- logger.debug('Smart host membership cached for {}, {} additions, {} removals, {} total count.'.format(
- smart_inventory.pk, len(additions), len(removals), len(new)
- ))
+ logger.debug(
+ 'Smart host membership cached for {}, {} additions, {} removals, {} total count.'.format(
+ smart_inventory.pk, len(additions), len(removals), len(new)
+ )
+ )
return True # changed
return False
@@ -765,21 +775,15 @@ def migrate_legacy_event_data(tblname):
total_rows = _remaining()
while total_rows:
with transaction.atomic():
- cursor.execute(
- f'INSERT INTO {tblname} SELECT * FROM _old_{tblname} ORDER BY id DESC LIMIT {chunk} RETURNING id;'
- )
+ cursor.execute(f'INSERT INTO {tblname} SELECT * FROM _old_{tblname} ORDER BY id DESC LIMIT {chunk} RETURNING id;')
last_insert_pk = cursor.fetchone()
if last_insert_pk is None:
# this means that the SELECT from the old table was
# empty, and there was nothing to insert (so we're done)
break
last_insert_pk = last_insert_pk[0]
- cursor.execute(
- f'DELETE FROM _old_{tblname} WHERE id IN (SELECT id FROM _old_{tblname} ORDER BY id DESC LIMIT {chunk});'
- )
- logger.warn(
- f'migrated int -> bigint rows to {tblname} from _old_{tblname}; # ({last_insert_pk} rows remaining)'
- )
+ cursor.execute(f'DELETE FROM _old_{tblname} WHERE id IN (SELECT id FROM _old_{tblname} ORDER BY id DESC LIMIT {chunk});')
+ logger.warn(f'migrated int -> bigint rows to {tblname} from _old_{tblname}; # ({last_insert_pk} rows remaining)')
if _remaining() is None:
cursor.execute(f'DROP TABLE IF EXISTS _old_{tblname}')
@@ -802,10 +806,7 @@ def delete_inventory(inventory_id, user_id, retries=5):
for host in i.hosts.iterator():
host.job_events_as_primary_host.update(host=None)
i.delete()
- emit_channel_notification(
- 'inventories-status_changed',
- {'group_name': 'inventories', 'inventory_id': inventory_id, 'status': 'deleted'}
- )
+ emit_channel_notification('inventories-status_changed', {'group_name': 'inventories', 'inventory_id': inventory_id, 'status': 'deleted'})
logger.debug('Deleted inventory {} as user {}.'.format(inventory_id, user_id))
except Inventory.DoesNotExist:
logger.exception("Delete Inventory failed due to missing inventory: " + str(inventory_id))
@@ -832,6 +833,7 @@ def with_path_cleanup(f):
except OSError:
logger.exception("Failed to remove tmp file: {}".format(p))
self.cleanup_paths = []
+
return _wrapped
@@ -869,27 +871,20 @@ class BaseTask(object):
return instance
except DatabaseError as e:
# Log out the error to the debug logger.
- logger.debug('Database error updating %s, retrying in 5 '
- 'seconds (retry #%d): %s',
- self.model._meta.object_name, _attempt + 1, e)
+ logger.debug('Database error updating %s, retrying in 5 ' 'seconds (retry #%d): %s', self.model._meta.object_name, _attempt + 1, e)
# Attempt to retry the update, assuming we haven't already
# tried too many times.
if _attempt < 5:
time.sleep(5)
- return self.update_model(
- pk,
- _attempt=_attempt + 1,
- **updates
- )
+ return self.update_model(pk, _attempt=_attempt + 1, **updates)
else:
- logger.error('Failed to update %s after %d retries.',
- self.model._meta.object_name, _attempt)
+ logger.error('Failed to update %s after %d retries.', self.model._meta.object_name, _attempt)
def get_path_to(self, *args):
- '''
+ """
Return absolute path relative to this file.
- '''
+ """
return os.path.abspath(os.path.join(os.path.dirname(__file__), *args))
def build_execution_environment_params(self, instance):
@@ -900,8 +895,7 @@ class BaseTask(object):
from awx.main.signals import disable_activity_stream
with disable_activity_stream():
- self.instance = instance = self.update_model(
- instance.pk, execution_environment=instance.resolve_execution_environment())
+ self.instance = instance = self.update_model(instance.pk, execution_environment=instance.resolve_execution_environment())
image = instance.execution_environment.image
params = {
@@ -921,15 +915,15 @@ class BaseTask(object):
return params
def build_private_data(self, instance, private_data_dir):
- '''
+ """
Return SSH private key data (only if stored in DB as ssh_key_data).
Return structure is a dict of the form:
- '''
+ """
def build_private_data_dir(self, instance):
- '''
+ """
Create a temporary directory for job-related files.
- '''
+ """
pdd_wrapper_path = tempfile.mkdtemp(
prefix=f'pdd_wrapper_{instance.pk}_',
dir=settings.AWX_PROOT_BASE_PATH
@@ -949,7 +943,7 @@ class BaseTask(object):
return path
def build_private_data_files(self, instance, private_data_dir):
- '''
+ """
Creates temporary files containing the private data.
Returns a dictionary i.e.,
@@ -965,7 +959,7 @@ class BaseTask(object):
...
}
}
- '''
+ """
private_data = self.build_private_data(instance, private_data_dir)
private_data_files = {'credentials': {}}
if private_data is not None:
@@ -1007,9 +1001,9 @@ class BaseTask(object):
return private_data_files
def build_passwords(self, instance, runtime_passwords):
- '''
+ """
Build a dictionary of passwords for responding to prompts.
- '''
+ """
return {
'yes': 'yes',
'no': 'no',
@@ -1017,9 +1011,9 @@ class BaseTask(object):
}
def build_extra_vars_file(self, instance, private_data_dir):
- '''
+ """
Build ansible yaml file filled with extra vars to be passed via -e@file.yml
- '''
+ """
def build_params_resource_profiling(self, instance, private_data_dir):
resource_profiling_params = {}
@@ -1034,15 +1028,20 @@ class BaseTask(object):
# FIXME: develop some better means of referencing paths inside containers
container_results_dir = os.path.join('/runner', 'artifacts/playbook_profiling')
- logger.debug('Collected the following resource profiling intervals: cpu: {} mem: {} pid: {}'
- .format(cpu_poll_interval, mem_poll_interval, pid_poll_interval))
+ logger.debug(
+ 'Collected the following resource profiling intervals: cpu: {} mem: {} pid: {}'.format(cpu_poll_interval, mem_poll_interval, pid_poll_interval)
+ )
- resource_profiling_params.update({'resource_profiling': True,
- 'resource_profiling_base_cgroup': 'ansible-runner',
- 'resource_profiling_cpu_poll_interval': cpu_poll_interval,
- 'resource_profiling_memory_poll_interval': mem_poll_interval,
- 'resource_profiling_pid_poll_interval': pid_poll_interval,
- 'resource_profiling_results_dir': container_results_dir})
+ resource_profiling_params.update(
+ {
+ 'resource_profiling': True,
+ 'resource_profiling_base_cgroup': 'ansible-runner',
+ 'resource_profiling_cpu_poll_interval': cpu_poll_interval,
+ 'resource_profiling_memory_poll_interval': mem_poll_interval,
+ 'resource_profiling_pid_poll_interval': pid_poll_interval,
+ 'resource_profiling_results_dir': container_results_dir,
+ }
+ )
return resource_profiling_params
@@ -1073,9 +1072,9 @@ class BaseTask(object):
env['PATH'] = os.path.join(settings.AWX_VENV_PATH, "bin")
def build_env(self, instance, private_data_dir, isolated, private_data_files=None):
- '''
+ """
Build environment dictionary for ansible-playbook.
- '''
+ """
env = {}
# Add ANSIBLE_* settings to the subprocess environment.
for attr in dir(settings):
@@ -1090,15 +1089,15 @@ class BaseTask(object):
return env
def should_use_resource_profiling(self, job):
- '''
+ """
Return whether this task should use resource profiling
- '''
+ """
return False
def should_use_proot(self, instance):
- '''
+ """
Return whether this task should use proot.
- '''
+ """
return False
def build_inventory(self, instance, private_data_dir):
@@ -1109,10 +1108,7 @@ class BaseTask(object):
script_data = instance.inventory.get_script_data(**script_params)
# maintain a list of host_name --> host_id
# so we can associate emitted events to Host objects
- self.host_map = {
- hostname: hv.pop('remote_tower_id', '')
- for hostname, hv in script_data.get('_meta', {}).get('hostvars', {}).items()
- }
+ self.host_map = {hostname: hv.pop('remote_tower_id', '') for hostname, hv in script_data.get('_meta', {}).get('hostvars', {}).items()}
json_data = json.dumps(script_data)
path = os.path.join(private_data_dir, 'inventory')
os.makedirs(path, mode=0o700)
@@ -1159,11 +1155,11 @@ class BaseTask(object):
return job_timeout
def get_password_prompts(self, passwords={}):
- '''
+ """
Return a dictionary where keys are strings or regular expressions for
prompts, and values are password lookup keys (keys that are returned
from build_passwords).
- '''
+ """
return OrderedDict()
def create_expect_passwords_data_struct(self, password_prompts, passwords):
@@ -1173,21 +1169,21 @@ class BaseTask(object):
return expect_passwords
def pre_run_hook(self, instance, private_data_dir):
- '''
+ """
Hook for any steps to run before the job/task starts
- '''
+ """
instance.log_lifecycle("pre_run")
def post_run_hook(self, instance, status):
- '''
+ """
Hook for any steps to run before job/task is marked as complete.
- '''
+ """
instance.log_lifecycle("post_run")
def final_run_hook(self, instance, status, private_data_dir, fact_modification_times, isolated_manager_instance=None):
- '''
+ """
Hook for any steps to run after job/task is marked as complete.
- '''
+ """
instance.log_lifecycle("finalize_run")
job_profiling_dir = os.path.join(private_data_dir, 'artifacts/playbook_profiling')
awx_profiling_dir = '/var/log/tower/playbook_profiling/'
@@ -1222,10 +1218,10 @@ class BaseTask(object):
#
# Proceed with caution!
#
- '''
+ """
Ansible runner puts a parent_uuid on each event, no matter what the type.
AWX only saves the parent_uuid if the event is for a Job.
- '''
+ """
# cache end_line locally for RunInventoryUpdate tasks
# which generate job events from two 'streams':
# ansible-inventory and the awx.main.commands.inventory_import
@@ -1285,9 +1281,9 @@ class BaseTask(object):
return False
def cancel_callback(self):
- '''
+ """
Ansible runner callback to tell the job when/if it is canceled
- '''
+ """
unified_job_id = self.instance.pk
self.instance = self.update_model(unified_job_id)
if not self.instance:
@@ -1301,9 +1297,9 @@ class BaseTask(object):
return False
def finished_callback(self, runner_obj):
- '''
+ """
Ansible runner callback triggered on finished run
- '''
+ """
event_data = {
'event': 'EOF',
'final_counter': self.event_ct,
@@ -1313,9 +1309,9 @@ class BaseTask(object):
self.dispatcher.dispatch(event_data)
def status_handler(self, status_data, runner_config):
- '''
+ """
Ansible runner callback triggered on status transition
- '''
+ """
if status_data['status'] == 'starting':
job_env = dict(runner_config.env)
'''
@@ -1324,33 +1320,27 @@ class BaseTask(object):
for k, v in self.safe_env.items():
if k in job_env:
job_env[k] = v
- self.instance = self.update_model(self.instance.pk, job_args=json.dumps(runner_config.command),
- job_cwd=runner_config.cwd, job_env=job_env)
+ self.instance = self.update_model(self.instance.pk, job_args=json.dumps(runner_config.command), job_cwd=runner_config.cwd, job_env=job_env)
def check_handler(self, config):
- '''
+ """
IsolatedManager callback triggered by the repeated checks of the isolated node
- '''
+ """
job_env = build_safe_env(config['env'])
for k, v in self.safe_cred_env.items():
if k in job_env:
job_env[k] = v
- self.instance = self.update_model(self.instance.pk,
- job_args=json.dumps(config['command']),
- job_cwd=config['cwd'],
- job_env=job_env)
-
+ self.instance = self.update_model(self.instance.pk, job_args=json.dumps(config['command']), job_cwd=config['cwd'], job_env=job_env)
@with_path_cleanup
def run(self, pk, **kwargs):
- '''
+ """
Run the job/task and capture its output.
- '''
+ """
self.instance = self.model.objects.get(pk=pk)
# self.instance because of the update_model pattern and when it's used in callback handlers
- self.instance = self.update_model(pk, status='running',
- start_args='') # blank field to remove encrypted passwords
+ self.instance = self.update_model(pk, status='running', start_args='') # blank field to remove encrypted passwords
self.instance.websocket_emit_status("running")
status, rc = 'error', None
extra_update_fields = {}
@@ -1405,19 +1395,15 @@ class BaseTask(object):
passwords = self.build_passwords(self.instance, kwargs)
self.build_extra_vars_file(self.instance, private_data_dir)
args = self.build_args(self.instance, private_data_dir, passwords)
- resource_profiling_params = self.build_params_resource_profiling(self.instance,
- private_data_dir)
- env = self.build_env(self.instance, private_data_dir, isolated,
- private_data_files=private_data_files)
+ resource_profiling_params = self.build_params_resource_profiling(self.instance, private_data_dir)
+ env = self.build_env(self.instance, private_data_dir, isolated, private_data_files=private_data_files)
self.safe_env = build_safe_env(env)
credentials = self.build_credentials_list(self.instance)
for credential in credentials:
if credential:
- credential.credential_type.inject_credential(
- credential, env, self.safe_cred_env, args, private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, self.safe_cred_env, args, private_data_dir)
self.safe_env.update(self.safe_cred_env)
@@ -1466,11 +1452,9 @@ class BaseTask(object):
self.instance.log_lifecycle("running_playbook")
if isinstance(self.instance, SystemJob):
cwd = self.build_cwd(self.instance, private_data_dir)
- res = ansible_runner.interface.run(project_dir=cwd,
- event_handler=self.event_handler,
- finished_callback=self.finished_callback,
- status_handler=self.status_handler,
- **params)
+ res = ansible_runner.interface.run(
+ project_dir=cwd, event_handler=self.event_handler, finished_callback=self.finished_callback, status_handler=self.status_handler, **params
+ )
else:
receptor_job = AWXReceptorJob(self, params)
res = receptor_job.run()
@@ -1510,9 +1494,7 @@ class BaseTask(object):
logger.exception('{} Post run hook errored.'.format(self.instance.log_format))
self.instance = self.update_model(pk)
- self.instance = self.update_model(pk, status=status,
- emitted_events=self.event_ct,
- **extra_update_fields)
+ self.instance = self.update_model(pk, status=status, emitted_events=self.event_ct, **extra_update_fields)
try:
self.final_run_hook(self.instance, status, private_data_dir, fact_modification_times, isolated_manager_instance=isolated_manager_instance)
@@ -1527,22 +1509,18 @@ class BaseTask(object):
raise AwxTaskError.TaskError(self.instance, rc)
-
-
-
-
@task(queue=get_local_queuename)
class RunJob(BaseTask):
- '''
+ """
Run a job using ansible-playbook.
- '''
+ """
model = Job
event_model = JobEvent
event_data_key = 'job_id'
def build_private_data(self, job, private_data_dir):
- '''
+ """
Returns a dict of the form
{
'credentials': {
@@ -1556,7 +1534,7 @@ class RunJob(BaseTask):
...
}
}
- '''
+ """
private_data = {'credentials': {}}
for credential in job.credentials.prefetch_related('input_sources__source_credential').all():
# If we were sent SSH credentials, decrypt them and send them
@@ -1569,10 +1547,10 @@ class RunJob(BaseTask):
return private_data
def build_passwords(self, job, runtime_passwords):
- '''
+ """
Build a dictionary of passwords for SSH private key, SSH user, sudo/su
and ansible-vault.
- '''
+ """
passwords = super(RunJob, self).build_passwords(job, runtime_passwords)
cred = job.machine_credential
if cred:
@@ -1587,11 +1565,7 @@ class RunJob(BaseTask):
if vault_id:
field = 'vault_password.{}'.format(vault_id)
if field in passwords:
- raise RuntimeError(
- 'multiple vault credentials were specified with --vault-id {}@prompt'.format(
- vault_id
- )
- )
+ raise RuntimeError('multiple vault credentials were specified with --vault-id {}@prompt'.format(vault_id))
value = runtime_passwords.get(field, cred.get_input('vault_password', default=''))
if value not in ('', 'ASK'):
passwords[field] = value
@@ -1609,12 +1583,10 @@ class RunJob(BaseTask):
return passwords
def build_env(self, job, private_data_dir, isolated=False, private_data_files=None):
- '''
+ """
Build environment dictionary for ansible-playbook.
- '''
- env = super(RunJob, self).build_env(job, private_data_dir,
- isolated=isolated,
- private_data_files=private_data_files)
+ """
+ env = super(RunJob, self).build_env(job, private_data_dir, isolated=isolated, private_data_files=private_data_files)
if private_data_files is None:
private_data_files = {}
# Set environment variables needed for inventory and job event
@@ -1642,10 +1614,7 @@ class RunJob(BaseTask):
cred_files = private_data_files.get('credentials', {})
for cloud_cred in job.cloud_credentials:
if cloud_cred and cloud_cred.credential_type.namespace == 'openstack':
- env['OS_CLIENT_CONFIG_FILE'] = os.path.join(
- '/runner',
- os.path.basename(cred_files.get(cloud_cred, ''))
- )
+ env['OS_CLIENT_CONFIG_FILE'] = os.path.join('/runner', os.path.basename(cred_files.get(cloud_cred, '')))
for network_cred in job.network_credentials:
env['ANSIBLE_NET_USERNAME'] = network_cred.get_input('username', default='')
@@ -1662,7 +1631,8 @@ class RunJob(BaseTask):
path_vars = (
('ANSIBLE_COLLECTIONS_PATHS', 'collections_paths', 'requirements_collections', '~/.ansible/collections:/usr/share/ansible/collections'),
- ('ANSIBLE_ROLES_PATH', 'roles_path', 'requirements_roles', '~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles'))
+ ('ANSIBLE_ROLES_PATH', 'roles_path', 'requirements_roles', '~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles'),
+ )
config_values = read_ansible_config(job.project.get_project_path(), list(map(lambda x: x[1], path_vars)))
@@ -1683,10 +1653,10 @@ class RunJob(BaseTask):
return env
def build_args(self, job, private_data_dir, passwords):
- '''
+ """
Build command line argument list for running ansible-playbook,
optionally using ssh-agent for public/private key authentication.
- '''
+ """
creds = job.machine_credential
ssh_username, become_username, become_method = '', '', ''
@@ -1797,15 +1767,15 @@ class RunJob(BaseTask):
return d
def should_use_resource_profiling(self, job):
- '''
+ """
Return whether this task should use resource profiling
- '''
+ """
return settings.AWX_RESOURCE_PROFILING_ENABLED
def should_use_proot(self, job):
- '''
+ """
Return whether this task should use proot.
- '''
+ """
if job.is_container_group_task:
return False
return getattr(settings, 'AWX_PROOT_ENABLED', False)
@@ -1820,9 +1790,11 @@ class RunJob(BaseTask):
if instance.use_fact_cache and os.path.exists(insights_dir):
logger.info('not parent of others')
params.setdefault('container_volume_mounts', [])
- params['container_volume_mounts'].extend([
- f"{insights_dir}:{insights_dir}:Z",
- ])
+ params['container_volume_mounts'].extend(
+ [
+ f"{insights_dir}:{insights_dir}:Z",
+ ]
+ )
return params
@@ -1837,9 +1809,7 @@ class RunJob(BaseTask):
self.update_model(job.pk, status='failed', job_explanation=error)
raise RuntimeError(error)
elif job.project.status in ('error', 'failed'):
- msg = _(
- 'The project revision for this job template is unknown due to a failed update.'
- )
+ msg = _('The project revision for this job template is unknown due to a failed update.')
job = self.update_model(job.pk, status='failed', job_explanation=msg)
raise RuntimeError(msg)
@@ -1849,7 +1819,7 @@ class RunJob(BaseTask):
source_update_tag = 'update_{}'.format(job.project.scm_type)
branch_override = bool(job.scm_branch and job.scm_branch != job.project.scm_branch)
if not job.project.scm_type:
- pass # manual projects are not synced, user has responsibility for that
+ pass # manual projects are not synced, user has responsibility for that
elif not os.path.exists(project_path):
logger.debug('Performing fresh clone of {} on this instance.'.format(job.project))
sync_needs.append(source_update_tag)
@@ -1884,9 +1854,9 @@ class RunJob(BaseTask):
job_type='run',
job_tags=','.join(sync_needs),
status='running',
- instance_group = pu_ig,
+ instance_group=pu_ig,
execution_node=pu_en,
- celery_task_id=job.celery_task_id
+ celery_task_id=job.celery_task_id,
)
if branch_override:
sync_metafields['scm_branch'] = job.scm_branch
@@ -1907,9 +1877,14 @@ class RunJob(BaseTask):
except Exception:
local_project_sync.refresh_from_db()
if local_project_sync.status != 'canceled':
- job = self.update_model(job.pk, status='failed',
- job_explanation=('Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' %
- ('project_update', local_project_sync.name, local_project_sync.id)))
+ job = self.update_model(
+ job.pk,
+ status='failed',
+ job_explanation=(
+ 'Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}'
+ % ('project_update', local_project_sync.name, local_project_sync.id)
+ ),
+ )
raise
job.refresh_from_db()
if job.cancel_flag:
@@ -1977,7 +1952,7 @@ class RunProjectUpdate(BaseTask):
self.playbook_new_revision = returned_facts['scm_version']
def build_private_data(self, project_update, private_data_dir):
- '''
+ """
Return SSH private key data needed for this project update.
Returns a dict of the form
@@ -1988,7 +1963,7 @@ class RunProjectUpdate(BaseTask):
<awx.main.models.Credential>: <credential_decrypted_ssh_key_data>
}
}
- '''
+ """
private_data = {'credentials': {}}
if project_update.credential:
credential = project_update.credential
@@ -1997,10 +1972,10 @@ class RunProjectUpdate(BaseTask):
return private_data
def build_passwords(self, project_update, runtime_passwords):
- '''
+ """
Build a dictionary of passwords for SSH private key unlock and SCM
username/password.
- '''
+ """
passwords = super(RunProjectUpdate, self).build_passwords(project_update, runtime_passwords)
if project_update.credential:
passwords['scm_key_unlock'] = project_update.credential.get_input('ssh_key_unlock', default='')
@@ -2009,16 +1984,14 @@ class RunProjectUpdate(BaseTask):
return passwords
def build_env(self, project_update, private_data_dir, isolated=False, private_data_files=None):
- '''
+ """
Build environment dictionary for ansible-playbook.
- '''
- env = super(RunProjectUpdate, self).build_env(project_update, private_data_dir,
- isolated=isolated,
- private_data_files=private_data_files)
+ """
+ env = super(RunProjectUpdate, self).build_env(project_update, private_data_dir, isolated=isolated, private_data_files=private_data_files)
env['ANSIBLE_RETRY_FILES_ENABLED'] = str(False)
env['ANSIBLE_ASK_PASS'] = str(False)
env['ANSIBLE_BECOME_ASK_PASS'] = str(False)
- env['DISPLAY'] = '' # Prevent stupid password popup when running tests.
+ env['DISPLAY'] = '' # Prevent stupid password popup when running tests.
# give ansible a hint about the intended tmpdir to work around issues
# like https://github.com/ansible/ansible/issues/30064
env['TMP'] = settings.AWX_PROOT_BASE_PATH
@@ -2029,9 +2002,7 @@ class RunProjectUpdate(BaseTask):
# build out env vars for Galaxy credentials (in order)
galaxy_server_list = []
if project_update.project.organization:
- for i, cred in enumerate(
- project_update.project.organization.galaxy_credentials.all()
- ):
+ for i, cred in enumerate(project_update.project.organization.galaxy_credentials.all()):
env[f'ANSIBLE_GALAXY_SERVER_SERVER{i}_URL'] = cred.get_input('url')
auth_url = cred.get_input('auth_url', default=None)
token = cred.get_input('token', default=None)
@@ -2047,10 +2018,10 @@ class RunProjectUpdate(BaseTask):
return env
def _build_scm_url_extra_vars(self, project_update):
- '''
+ """
Helper method to build SCM url and extra vars with parameters needed
for authentication.
- '''
+ """
extra_vars = {}
if project_update.credential:
scm_username = project_update.credential.get_input('username', default='')
@@ -2059,8 +2030,7 @@ class RunProjectUpdate(BaseTask):
scm_username = ''
scm_password = ''
scm_type = project_update.scm_type
- scm_url = update_scm_url(scm_type, project_update.scm_url,
- check_special_cases=False)
+ scm_url = update_scm_url(scm_type, project_update.scm_url, check_special_cases=False)
scm_url_parts = urlparse.urlsplit(scm_url)
# Prefer the username/password in the URL, if provided.
scm_username = scm_url_parts.username or scm_username
@@ -2077,8 +2047,7 @@ class RunProjectUpdate(BaseTask):
elif scm_type in ('insights', 'archive'):
extra_vars['scm_username'] = scm_username
extra_vars['scm_password'] = scm_password
- scm_url = update_scm_url(scm_type, scm_url, scm_username,
- scm_password, scp_format=True)
+ scm_url = update_scm_url(scm_type, scm_url, scm_username, scm_password, scp_format=True)
else:
scm_url = update_scm_url(scm_type, scm_url, scp_format=True)
@@ -2092,10 +2061,10 @@ class RunProjectUpdate(BaseTask):
return 'localhost,'
def build_args(self, project_update, private_data_dir, passwords):
- '''
+ """
Build command line argument list for running ansible-playbook,
optionally using ssh-agent for public/private key authentication.
- '''
+ """
args = []
if getattr(settings, 'PROJECT_UPDATE_VVV', False):
args.append('-vvv')
@@ -2117,31 +2086,25 @@ class RunProjectUpdate(BaseTask):
elif not scm_branch:
scm_branch = 'HEAD'
- galaxy_creds_are_defined = (
- project_update.project.organization and
- project_update.project.organization.galaxy_credentials.exists()
+ galaxy_creds_are_defined = project_update.project.organization and project_update.project.organization.galaxy_credentials.exists()
+ if not galaxy_creds_are_defined and (settings.AWX_ROLES_ENABLED or settings.AWX_COLLECTIONS_ENABLED):
+ logger.warning('Galaxy role/collection syncing is enabled, but no ' f'credentials are configured for {project_update.project.organization}.')
+
+ extra_vars.update(
+ {
+ 'projects_root': settings.PROJECTS_ROOT.rstrip('/'),
+ 'local_path': os.path.basename(project_update.project.local_path),
+ 'project_path': project_update.get_project_path(check_if_exists=False), # deprecated
+ 'insights_url': settings.INSIGHTS_URL_BASE,
+ 'awx_license_type': get_license().get('license_type', 'UNLICENSED'),
+ 'awx_version': get_awx_version(),
+ 'scm_url': scm_url,
+ 'scm_branch': scm_branch,
+ 'scm_clean': project_update.scm_clean,
+ 'roles_enabled': galaxy_creds_are_defined and settings.AWX_ROLES_ENABLED,
+ 'collections_enabled': galaxy_creds_are_defined and settings.AWX_COLLECTIONS_ENABLED,
+ }
)
- if not galaxy_creds_are_defined and (
- settings.AWX_ROLES_ENABLED or settings.AWX_COLLECTIONS_ENABLED
- ):
- logger.warning(
- 'Galaxy role/collection syncing is enabled, but no '
- f'credentials are configured for {project_update.project.organization}.'
- )
-
- extra_vars.update({
- 'projects_root': settings.PROJECTS_ROOT.rstrip('/'),
- 'local_path': os.path.basename(project_update.project.local_path),
- 'project_path': project_update.get_project_path(check_if_exists=False), # deprecated
- 'insights_url': settings.INSIGHTS_URL_BASE,
- 'awx_license_type': get_license().get('license_type', 'UNLICENSED'),
- 'awx_version': get_awx_version(),
- 'scm_url': scm_url,
- 'scm_branch': scm_branch,
- 'scm_clean': project_update.scm_clean,
- 'roles_enabled': galaxy_creds_are_defined and settings.AWX_ROLES_ENABLED,
- 'collections_enabled': galaxy_creds_are_defined and settings.AWX_COLLECTIONS_ENABLED,
- })
# apply custom refspec from user for PR refs and the like
if project_update.scm_refspec:
extra_vars['scm_refspec'] = project_update.scm_refspec
@@ -2151,10 +2114,7 @@ class RunProjectUpdate(BaseTask):
if project_update.scm_type == 'archive':
# for raw archive, prevent error moving files between volumes
- extra_vars['ansible_remote_tmp'] = os.path.join(
- project_update.get_project_path(check_if_exists=False),
- '.ansible_awx', 'tmp'
- )
+ extra_vars['ansible_remote_tmp'] = os.path.join(project_update.get_project_path(check_if_exists=False), '.ansible_awx', 'tmp')
self._write_extra_vars_file(private_data_dir, extra_vars)
@@ -2168,7 +2128,7 @@ class RunProjectUpdate(BaseTask):
d = super(RunProjectUpdate, self).get_password_prompts(passwords)
d[r'Username for.*:\s*?$'] = 'scm_username'
d[r'Password for.*:\s*?$'] = 'scm_password'
- d['Password:\s*?$'] = 'scm_password' # noqa
+ d['Password:\s*?$'] = 'scm_password' # noqa
d[r'\S+?@\S+?\'s\s+?password:\s*?$'] = 'scm_password'
d[r'Enter passphrase for .*:\s*?$'] = 'scm_key_unlock'
d[r'Bad passphrase, try again for .*:\s*?$'] = ''
@@ -2183,15 +2143,12 @@ class RunProjectUpdate(BaseTask):
if not inv_src.update_on_project_update:
continue
if inv_src.scm_last_revision == scm_revision:
- logger.debug('Skipping SCM inventory update for `{}` because '
- 'project has not changed.'.format(inv_src.name))
+ logger.debug('Skipping SCM inventory update for `{}` because ' 'project has not changed.'.format(inv_src.name))
continue
logger.debug('Local dependent inventory update for `{}`.'.format(inv_src.name))
with transaction.atomic():
- if InventoryUpdate.objects.filter(inventory_source=inv_src,
- status__in=ACTIVE_STATES).exists():
- logger.debug('Skipping SCM inventory update for `{}` because '
- 'another update is already active.'.format(inv_src.name))
+ if InventoryUpdate.objects.filter(inventory_source=inv_src, status__in=ACTIVE_STATES).exists():
+ logger.debug('Skipping SCM inventory update for `{}` because ' 'another update is already active.'.format(inv_src.name))
continue
local_inv_update = inv_src.create_inventory_update(
_eager_fields=dict(
@@ -2200,13 +2157,13 @@ class RunProjectUpdate(BaseTask):
instance_group=project_update.instance_group,
execution_node=project_update.execution_node,
source_project_update=project_update,
- celery_task_id=project_update.celery_task_id))
+ celery_task_id=project_update.celery_task_id,
+ )
+ )
try:
inv_update_class().run(local_inv_update.id)
except Exception:
- logger.exception('{} Unhandled exception updating dependent SCM inventory sources.'.format(
- project_update.log_format
- ))
+ logger.exception('{} Unhandled exception updating dependent SCM inventory sources.'.format(project_update.log_format))
try:
project_update.refresh_from_db()
@@ -2241,6 +2198,7 @@ class RunProjectUpdate(BaseTask):
'''
Note: We don't support blocking=False
'''
+
def acquire_lock(self, instance, blocking=True):
lock_path = instance.get_lock_file()
if lock_path is None:
@@ -2275,9 +2233,7 @@ class RunProjectUpdate(BaseTask):
waiting_time = time.time() - start_time
if waiting_time > 1.0:
- logger.info(
- '{} spent {} waiting to acquire lock for local source tree '
- 'for path {}.'.format(instance.log_format, waiting_time, lock_path))
+ logger.info('{} spent {} waiting to acquire lock for local source tree ' 'for path {}.'.format(instance.log_format, waiting_time, lock_path))
def pre_run_hook(self, instance, private_data_dir):
super(RunProjectUpdate, self).pre_run_hook(instance, private_data_dir)
@@ -2318,7 +2274,7 @@ class RunProjectUpdate(BaseTask):
old_path = os.path.join(cache_dir, entry)
if entry not in (keep_value, 'stage'):
# invalidate, then delete
- new_path = os.path.join(cache_dir,'.~~delete~~' + entry)
+ new_path = os.path.join(cache_dir, '.~~delete~~' + entry)
try:
os.rename(old_path, new_path)
shutil.rmtree(new_path)
@@ -2350,8 +2306,11 @@ class RunProjectUpdate(BaseTask):
# git clone must take file:// syntax for source repo or else options like depth will be ignored
source_as_uri = Path(project_path).as_uri()
git.Repo.clone_from(
- source_as_uri, destination_folder, branch=source_branch,
- depth=1, single_branch=True, # shallow, do not copy full history
+ source_as_uri,
+ destination_folder,
+ branch=source_branch,
+ depth=1,
+ single_branch=True, # shallow, do not copy full history
)
# submodules copied in loop because shallow copies from local HEADs are ideal
# and no git clone submodule options are compatible with minimum requirements
@@ -2420,7 +2379,10 @@ class RunProjectUpdate(BaseTask):
finally:
self.release_lock(instance)
p = instance.project
- if instance.job_type == 'check' and status not in ('failed', 'canceled',):
+ if instance.job_type == 'check' and status not in (
+ 'failed',
+ 'canceled',
+ ):
if self.playbook_new_revision:
p.scm_revision = self.playbook_new_revision
else:
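The tall ('failed', 'canceled',) condition above is the magic trailing comma at work again: a pre-existing trailing comma tells black to keep the collection exploded even though it would easily fit on one line. A sketch (string_normalization=False is assumed to match this commit's setup, since quote styles are untouched throughout the diff):

    import black

    mode = black.Mode(string_normalization=False)
    print(black.format_str("if status not in ('failed', 'canceled',):\n    pass\n", mode=mode), end="")
    print(black.format_str("if status not in ('failed', 'canceled'):\n    pass\n", mode=mode), end="")
    # The first call keeps the tuple exploded; the second stays on one line.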
@@ -2437,9 +2399,9 @@ class RunProjectUpdate(BaseTask):
self._update_dependent_inventories(instance, dependent_inventory_sources)
def should_use_proot(self, project_update):
- '''
+ """
Return whether this task should use proot.
- '''
+ """
return getattr(settings, 'AWX_PROOT_ENABLED', False)
def build_execution_environment_params(self, instance):
@@ -2450,10 +2412,12 @@ class RunProjectUpdate(BaseTask):
project_path = instance.get_project_path(check_if_exists=False)
cache_path = instance.get_cache_path()
params.setdefault('container_volume_mounts', [])
- params['container_volume_mounts'].extend([
- f"{project_path}:{project_path}:Z",
- f"{cache_path}:{cache_path}:Z",
- ])
+ params['container_volume_mounts'].extend(
+ [
+ f"{project_path}:{project_path}:Z",
+ f"{cache_path}:{cache_path}:Z",
+ ]
+ )
return params
@@ -2490,9 +2454,7 @@ class RunInventoryUpdate(BaseTask):
are accomplished by the inventory source injectors (in this method)
or custom credential type injectors (in main run method).
"""
- env = super(RunInventoryUpdate, self).build_env(
- inventory_update, private_data_dir, isolated,
- private_data_files=private_data_files)
+ env = super(RunInventoryUpdate, self).build_env(inventory_update, private_data_dir, isolated, private_data_files=private_data_files)
if private_data_files is None:
private_data_files = {}
@@ -2624,11 +2586,11 @@ class RunInventoryUpdate(BaseTask):
return rel_path
def build_cwd(self, inventory_update, private_data_dir):
- '''
+ """
There is one case where the inventory "source" is in a different
location from the private data:
- SCM, where source needs to live in the project folder
- '''
+ """
src = inventory_update.source
container_dir = '/runner' # TODO: make container paths elegant
if src == 'scm' and inventory_update.source_project_update:
@@ -2647,8 +2609,9 @@ class RunInventoryUpdate(BaseTask):
source_project = None
if inventory_update.inventory_source:
source_project = inventory_update.inventory_source.source_project
- if (inventory_update.source=='scm' and inventory_update.launch_type!='scm' and
- source_project and source_project.scm_type): # never ever update manual projects
+ if (
+ inventory_update.source == 'scm' and inventory_update.launch_type != 'scm' and source_project and source_project.scm_type
+ ): # never ever update manual projects
# Check if the content cache exists, so that we do not unnecessarily re-download roles
sync_needs = ['update_{}'.format(source_project.scm_type)]
@@ -2664,8 +2627,10 @@ class RunInventoryUpdate(BaseTask):
job_tags=','.join(sync_needs),
status='running',
execution_node=inventory_update.execution_node,
- instance_group = inventory_update.instance_group,
- celery_task_id=inventory_update.celery_task_id))
+ instance_group=inventory_update.instance_group,
+ celery_task_id=inventory_update.celery_task_id,
+ )
+ )
# associate the inventory update before calling run() so that a
# cancel() call on the inventory update can cancel the project update
local_project_sync.scm_inventory_updates.add(inventory_update)
@@ -2679,9 +2644,13 @@ class RunInventoryUpdate(BaseTask):
inventory_update.inventory_source.save(update_fields=['scm_last_revision'])
except Exception:
inventory_update = self.update_model(
- inventory_update.pk, status='failed',
- job_explanation=('Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' %
- ('project_update', local_project_sync.name, local_project_sync.id)))
+ inventory_update.pk,
+ status='failed',
+ job_explanation=(
+ 'Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}'
+ % ('project_update', local_project_sync.name, local_project_sync.id)
+ ),
+ )
raise
elif inventory_update.source == 'scm' and inventory_update.launch_type == 'scm' and source_project:
# This follows update, not sync, so make copy here
@@ -2690,7 +2659,7 @@ class RunInventoryUpdate(BaseTask):
def post_run_hook(self, inventory_update, status):
super(RunInventoryUpdate, self).post_run_hook(inventory_update, status)
if status != 'successful':
- return # nothing to save, step out of the way to allow error reporting
+ return # nothing to save, step out of the way to allow error reporting
private_data_dir = inventory_update.job_env['AWX_PRIVATE_DATA_DIR']
expected_output = os.path.join(private_data_dir, 'artifacts', 'output.json')
@@ -2726,11 +2695,13 @@ class RunInventoryUpdate(BaseTask):
options['verbosity'] = inventory_update.verbosity
handler = SpecialInventoryHandler(
- self.event_handler, self.cancel_callback,
+ self.event_handler,
+ self.cancel_callback,
verbosity=inventory_update.verbosity,
job_timeout=self.get_instance_timeout(self.instance),
start_time=inventory_update.started,
- counter=self.event_ct, initial_line=self.end_line
+ counter=self.event_ct,
+ initial_line=self.end_line,
)
inv_logger = logging.getLogger('awx.main.commands.inventory_import')
formatter = inv_logger.handlers[0].formatter
@@ -2739,6 +2710,7 @@ class RunInventoryUpdate(BaseTask):
inv_logger.handlers[0] = handler
from awx.main.management.commands.inventory_import import Command as InventoryImportCommand
+
cmd = InventoryImportCommand()
try:
# save the inventory data to database.
@@ -2751,25 +2723,22 @@ class RunInventoryUpdate(BaseTask):
logger.exception('Error saving {} content, rolling back changes'.format(inventory_update.log_format))
raise
except Exception:
- logger.exception('Exception saving {} content, rolling back changes.'.format(
- inventory_update.log_format))
- raise PostRunError(
- 'Error occurred while saving inventory data, see traceback or server logs',
- status='error', tb=traceback.format_exc())
+ logger.exception('Exception saving {} content, rolling back changes.'.format(inventory_update.log_format))
+ raise PostRunError('Error occurred while saving inventory data, see traceback or server logs', status='error', tb=traceback.format_exc())
@task(queue=get_local_queuename)
class RunAdHocCommand(BaseTask):
- '''
+ """
Run an ad hoc command using ansible.
- '''
+ """
model = AdHocCommand
event_model = AdHocCommandEvent
event_data_key = 'ad_hoc_command_id'
def build_private_data(self, ad_hoc_command, private_data_dir):
- '''
+ """
Return SSH private key data needed for this ad hoc command (only if
stored in DB as ssh_key_data).
@@ -2786,7 +2755,7 @@ class RunAdHocCommand(BaseTask):
...
}
}
- '''
+ """
# If we were sent SSH credentials, decrypt them and send them
# back (they will be written to a temporary file).
creds = ad_hoc_command.credential
@@ -2798,10 +2767,10 @@ class RunAdHocCommand(BaseTask):
return private_data
def build_passwords(self, ad_hoc_command, runtime_passwords):
- '''
+ """
Build a dictionary of passwords for SSH private key, SSH user and
sudo/su.
- '''
+ """
passwords = super(RunAdHocCommand, self).build_passwords(ad_hoc_command, runtime_passwords)
cred = ad_hoc_command.credential
if cred:
@@ -2812,12 +2781,10 @@ class RunAdHocCommand(BaseTask):
return passwords
def build_env(self, ad_hoc_command, private_data_dir, isolated=False, private_data_files=None):
- '''
+ """
Build environment dictionary for ansible.
- '''
- env = super(RunAdHocCommand, self).build_env(ad_hoc_command, private_data_dir,
- isolated=isolated,
- private_data_files=private_data_files)
+ """
+ env = super(RunAdHocCommand, self).build_env(ad_hoc_command, private_data_dir, isolated=isolated, private_data_files=private_data_files)
# Set environment variables needed for inventory and ad hoc event
# callbacks to work.
env['AD_HOC_COMMAND_ID'] = str(ad_hoc_command.pk)
@@ -2837,10 +2804,10 @@ class RunAdHocCommand(BaseTask):
return env
def build_args(self, ad_hoc_command, private_data_dir, passwords):
- '''
+ """
Build command line argument list for running ansible, optionally using
ssh-agent for public/private key authentication.
- '''
+ """
creds = ad_hoc_command.credential
ssh_username, become_username, become_method = '', '', ''
if creds:
@@ -2884,9 +2851,7 @@ class RunAdHocCommand(BaseTask):
if ad_hoc_command.extra_vars_dict:
redacted_extra_vars, removed_vars = extract_ansible_vars(ad_hoc_command.extra_vars_dict)
if removed_vars:
- raise ValueError(_(
- "{} are prohibited from use in ad hoc commands."
- ).format(", ".join(removed_vars)))
+ raise ValueError(_("{} are prohibited from use in ad hoc commands.").format(", ".join(removed_vars)))
extra_vars.update(ad_hoc_command.extra_vars_dict)
if ad_hoc_command.limit:
@@ -2902,9 +2867,7 @@ class RunAdHocCommand(BaseTask):
if ad_hoc_command.extra_vars_dict:
redacted_extra_vars, removed_vars = extract_ansible_vars(ad_hoc_command.extra_vars_dict)
if removed_vars:
- raise ValueError(_(
- "{} are prohibited from use in ad hoc commands."
- ).format(", ".join(removed_vars)))
+ raise ValueError(_("{} are prohibited from use in ad hoc commands.").format(", ".join(removed_vars)))
extra_vars.update(ad_hoc_command.extra_vars_dict)
self._write_extra_vars_file(private_data_dir, extra_vars)
@@ -2936,9 +2899,9 @@ class RunAdHocCommand(BaseTask):
return d
def should_use_proot(self, ad_hoc_command):
- '''
+ """
Return whether this task should use proot.
- '''
+ """
if ad_hoc_command.is_container_group_task:
return False
return getattr(settings, 'AWX_PROOT_ENABLED', False)
@@ -2973,9 +2936,9 @@ class RunSystemJob(BaseTask):
if 'dry_run' in json_vars and json_vars['dry_run']:
args.extend(['--dry-run'])
if system_job.job_type == 'cleanup_jobs':
- args.extend(['--jobs', '--project-updates', '--inventory-updates',
- '--management-jobs', '--ad-hoc-commands', '--workflow-jobs',
- '--notifications'])
+ args.extend(
+ ['--jobs', '--project-updates', '--inventory-updates', '--management-jobs', '--ad-hoc-commands', '--workflow-jobs', '--notifications']
+ )
except Exception:
logger.exception("{} Failed to parse system job".format(system_job.log_format))
return args
@@ -2990,9 +2953,7 @@ class RunSystemJob(BaseTask):
return path
def build_env(self, instance, private_data_dir, isolated=False, private_data_files=None):
- base_env = super(RunSystemJob, self).build_env(
- instance, private_data_dir, isolated=isolated,
- private_data_files=private_data_files)
+ base_env = super(RunSystemJob, self).build_env(instance, private_data_dir, isolated=isolated, private_data_files=private_data_files)
# TODO: this is able to run by turning off isolation
# the goal is to run it a container instead
env = dict(os.environ.items())
@@ -3022,18 +2983,13 @@ def _reconstruct_relationships(copy_mapping):
setattr(new_obj, field_name, related_obj)
elif field.many_to_many:
for related_obj in getattr(old_obj, field_name).all():
- logger.debug('Deep copy: Adding {} to {}({}).{} relationship'.format(
- related_obj, new_obj, model, field_name
- ))
+ logger.debug('Deep copy: Adding {} to {}({}).{} relationship'.format(related_obj, new_obj, model, field_name))
getattr(new_obj, field_name).add(copy_mapping.get(related_obj, related_obj))
new_obj.save()
@task(queue=get_local_queuename)
-def deep_copy_model_obj(
- model_module, model_name, obj_pk, new_obj_pk,
- user_pk, uuid, permission_check_func=None
-):
+def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, uuid, permission_check_func=None):
sub_obj_list = cache.get(uuid)
if sub_obj_list is None:
logger.error('Deep copy {} from {} to {} failed unexpectedly.'.format(model_name, obj_pk, new_obj_pk))
@@ -3042,6 +2998,7 @@ def deep_copy_model_obj(
logger.debug('Deep copy {} from {} to {}.'.format(model_name, obj_pk, new_obj_pk))
from awx.api.generics import CopyAPIView
from awx.main.signals import disable_activity_stream
+
model = getattr(importlib.import_module(model_module), model_name, None)
if model is None:
return
@@ -3055,22 +3012,17 @@ def deep_copy_model_obj(
with transaction.atomic(), ignore_inventory_computed_fields(), disable_activity_stream():
copy_mapping = {}
for sub_obj_setup in sub_obj_list:
- sub_model = getattr(importlib.import_module(sub_obj_setup[0]),
- sub_obj_setup[1], None)
+ sub_model = getattr(importlib.import_module(sub_obj_setup[0]), sub_obj_setup[1], None)
if sub_model is None:
continue
try:
sub_obj = sub_model.objects.get(pk=sub_obj_setup[2])
except ObjectDoesNotExist:
continue
- copy_mapping.update(CopyAPIView.copy_model_obj(
- obj, new_obj, sub_model, sub_obj, creater
- ))
+ copy_mapping.update(CopyAPIView.copy_model_obj(obj, new_obj, sub_model, sub_obj, creater))
_reconstruct_relationships(copy_mapping)
if permission_check_func:
- permission_check_func = getattr(getattr(
- importlib.import_module(permission_check_func[0]), permission_check_func[1]
- ), permission_check_func[2])
+ permission_check_func = getattr(getattr(importlib.import_module(permission_check_func[0]), permission_check_func[1]), permission_check_func[2])
permission_check_func(creater, copy_mapping.values())
if isinstance(new_obj, Inventory):
update_inventory_computed_fields.delay(new_obj.id)
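As an aside, the dynamic attribute resolution used for permission_check_func above, a (module, class, method) tuple resolved at runtime via importlib, reduces to this standalone sketch; the spec tuple here uses stdlib names purely for illustration:

    import importlib

    # ('module path', 'class name', 'method name'), mirroring the tuple shape above
    spec = ('collections', 'OrderedDict', 'fromkeys')
    check = getattr(getattr(importlib.import_module(spec[0]), spec[1]), spec[2])
    assert list(check(['a', 'b'])) == ['a', 'b']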
@@ -3107,17 +3059,13 @@ class AWXReceptorJob:
# submit our work, passing
# in the right side of our socketpair for reading.
- result = receptor_ctl.submit_work(worktype=self.work_type,
- payload=sockout.makefile('rb'),
- params=self.receptor_params)
+ result = receptor_ctl.submit_work(worktype=self.work_type, payload=sockout.makefile('rb'), params=self.receptor_params)
self.unit_id = result['unitid']
sockin.close()
sockout.close()
- resultsock, resultfile = receptor_ctl.get_work_results(self.unit_id,
- return_socket=True,
- return_sockfile=True)
+ resultsock, resultfile = receptor_ctl.get_work_results(self.unit_id, return_socket=True, return_sockfile=True)
# Both "processor" and "cancel_watcher" are spawned in separate threads.
# We wait for the first one to return. If cancel_watcher returns first,
# we yank the socket out from underneath the processor, which will cause it
@@ -3128,8 +3076,7 @@ class AWXReceptorJob:
processor_future = executor.submit(self.processor, resultfile)
cancel_watcher_future = executor.submit(self.cancel_watcher, processor_future)
futures = [processor_future, cancel_watcher_future]
- first_future = concurrent.futures.wait(futures,
- return_when=concurrent.futures.FIRST_COMPLETED)
+ first_future = concurrent.futures.wait(futures, return_when=concurrent.futures.FIRST_COMPLETED)
res = list(first_future.done)[0].result()
if res.status == 'canceled':
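The thread coordination above, wait on both futures and act on whichever completes first, is a standard concurrent.futures pattern; a self-contained sketch, where slow_task and watcher are illustrative stand-ins for the processor and cancel watcher:

    import concurrent.futures
    import time

    def slow_task():
        # stands in for the receptor result processor
        time.sleep(0.5)
        return 'processed'

    def watcher(other):
        # stands in for cancel_watcher; polls until the other future finishes
        while not other.done():
            time.sleep(0.05)
        return 'watcher done'

    with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
        processor_future = executor.submit(slow_task)
        watcher_future = executor.submit(watcher, processor_future)
        futures = [processor_future, watcher_future]
        first = concurrent.futures.wait(futures, return_when=concurrent.futures.FIRST_COMPLETED)
        # act on whichever future finished first, as in the hunk above
        result = list(first.done)[0].result()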
@@ -3157,21 +3104,21 @@ class AWXReceptorJob:
if not settings.IS_K8S and self.work_type == 'local':
self.runner_params['only_transmit_kwargs'] = True
- ansible_runner.interface.run(streamer='transmit',
- _output=_socket.makefile('wb'),
- **self.runner_params)
+ ansible_runner.interface.run(streamer='transmit', _output=_socket.makefile('wb'), **self.runner_params)
# Socket must be shutdown here, or the reader will hang forever.
_socket.shutdown(socket.SHUT_WR)
def processor(self, resultfile):
- return ansible_runner.interface.run(streamer='process',
- quiet=True,
- _input=resultfile,
- event_handler=self.task.event_handler,
- finished_callback=self.task.finished_callback,
- status_handler=self.task.status_handler,
- **self.runner_params)
+ return ansible_runner.interface.run(
+ streamer='process',
+ quiet=True,
+ _input=resultfile,
+ event_handler=self.task.event_handler,
+ finished_callback=self.task.finished_callback,
+ status_handler=self.task.status_handler,
+ **self.runner_params,
+ )
@property
def receptor_params(self):
@@ -3187,14 +3134,10 @@ class AWXReceptorJob:
receptor_params["secret_kube_config"] = kubeconfig_yaml
else:
private_data_dir = self.runner_params['private_data_dir']
- receptor_params = {
- "params": f"--private-data-dir={private_data_dir}"
- }
+ receptor_params = {"params": f"--private-data-dir={private_data_dir}"}
return receptor_params
-
-
@property
def work_type(self):
if self.task.instance.is_container_group_task:
@@ -3222,32 +3165,20 @@ class AWXReceptorJob:
default_pod_spec = {
"apiVersion": "v1",
"kind": "Pod",
- "metadata": {
- "namespace": settings.AWX_CONTAINER_GROUP_DEFAULT_NAMESPACE
- },
- "spec": {
- "containers": [{
- "image": settings.AWX_CONTAINER_GROUP_DEFAULT_IMAGE,
- "name": 'worker',
- "args": ['ansible-runner', 'worker']
- }]
- }
+ "metadata": {"namespace": settings.AWX_CONTAINER_GROUP_DEFAULT_NAMESPACE},
+ "spec": {"containers": [{"image": settings.AWX_CONTAINER_GROUP_DEFAULT_IMAGE, "name": 'worker', "args": ['ansible-runner', 'worker']}]},
}
pod_spec_override = {}
if self.task and self.task.instance.instance_group.pod_spec_override:
- pod_spec_override = parse_yaml_or_json(
- self.task.instance.instance_group.pod_spec_override)
+ pod_spec_override = parse_yaml_or_json(self.task.instance.instance_group.pod_spec_override)
pod_spec = {**default_pod_spec, **pod_spec_override}
if self.task:
pod_spec['metadata'] = deepmerge(
pod_spec.get('metadata', {}),
- dict(name=self.pod_name,
- labels={
- 'ansible-awx': settings.INSTALL_UUID,
- 'ansible-awx-job-id': str(self.task.instance.id)
- }))
+ dict(name=self.pod_name, labels={'ansible-awx': settings.INSTALL_UUID, 'ansible-awx-job-id': str(self.task.instance.id)}),
+ )
return pod_spec
@@ -3270,39 +3201,16 @@ class AWXReceptorJob:
"apiVersion": "v1",
"kind": "Config",
"preferences": {},
- "clusters": [
- {
- "name": host_input,
- "cluster": {
- "server": host_input
- }
- }
- ],
- "users": [
- {
- "name": host_input,
- "user": {
- "token": self.credential.get_input('bearer_token')
- }
- }
- ],
- "contexts": [
- {
- "name": host_input,
- "context": {
- "cluster": host_input,
- "user": host_input,
- "namespace": self.namespace
- }
- }
- ],
- "current-context": host_input
+ "clusters": [{"name": host_input, "cluster": {"server": host_input}}],
+ "users": [{"name": host_input, "user": {"token": self.credential.get_input('bearer_token')}}],
+ "contexts": [{"name": host_input, "context": {"cluster": host_input, "user": host_input, "namespace": self.namespace}}],
+ "current-context": host_input,
}
if self.credential.get_input('verify_ssl') and 'ssl_ca_cert' in self.credential.inputs:
config["clusters"][0]["cluster"]["certificate-authority-data"] = b64encode(
- self.credential.get_input('ssl_ca_cert').encode() # encode to bytes
- ).decode() # decode the base64 data into a str
+ self.credential.get_input('ssl_ca_cert').encode() # encode to bytes
+ ).decode() # decode the base64 data into a str
else:
config["clusters"][0]["cluster"]["insecure-skip-tls-verify"] = True
return config
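For reference, the kubeconfig shape built above in a standalone sketch; the host, token, and CA data are hypothetical values, and the b64encode round-trip mirrors the certificate-authority-data handling in the hunk:

    from base64 import b64encode

    host = 'https://k8s.example.com'  # hypothetical cluster endpoint
    token = 'example-bearer-token'  # hypothetical credential input
    ca_cert = '-----BEGIN CERTIFICATE-----\n...'  # hypothetical PEM data

    config = {
        "apiVersion": "v1",
        "kind": "Config",
        "preferences": {},
        "clusters": [{"name": host, "cluster": {"server": host}}],
        "users": [{"name": host, "user": {"token": token}}],
        "contexts": [{"name": host, "context": {"cluster": host, "user": host, "namespace": "default"}}],
        "current-context": host,
    }
    # certificate-authority-data carries base64-encoded PEM, decoded back to str
    config["clusters"][0]["cluster"]["certificate-authority-data"] = b64encode(ca_cert.encode()).decode()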
diff --git a/awx/main/templatetags/swagger.py b/awx/main/templatetags/swagger.py
index 62e61abdff..d9f9ceb4dc 100644
--- a/awx/main/templatetags/swagger.py
+++ b/awx/main/templatetags/swagger.py
@@ -3,8 +3,10 @@ from django import template
register = template.Library()
-CONSONANT_SOUND = re.compile(r'''one(![ir])''', re.IGNORECASE|re.VERBOSE) # noqa
-VOWEL_SOUND = re.compile(r'''[aeio]|u([aeiou]|[^n][^aeiou]|ni[^dmnl]|nil[^l])|h(ier|onest|onou?r|ors\b|our(!i))|[fhlmnrsx]\b''', re.IGNORECASE|re.VERBOSE) # noqa
+CONSONANT_SOUND = re.compile(r'''one(![ir])''', re.IGNORECASE | re.VERBOSE) # noqa
+VOWEL_SOUND = re.compile(
+ r'''[aeio]|u([aeiou]|[^n][^aeiou]|ni[^dmnl]|nil[^l])|h(ier|onest|onou?r|ors\b|our(!i))|[fhlmnrsx]\b''', re.IGNORECASE | re.VERBOSE
+) # noqa
@register.filter
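These two patterns implement the usual sound-based a/an heuristic; a minimal sketch of a filter applying them (anora is a hypothetical name, not the file's actual filter body):

    import re

    CONSONANT_SOUND = re.compile(r'''one(![ir])''', re.IGNORECASE | re.VERBOSE)
    VOWEL_SOUND = re.compile(
        r'''[aeio]|u([aeiou]|[^n][^aeiou]|ni[^dmnl]|nil[^l])|h(ier|onest|onou?r|ors\b|our(!i))|[fhlmnrsx]\b''', re.IGNORECASE | re.VERBOSE
    )

    def anora(text):
        # choose the article by initial sound rather than spelling
        return 'an' if not CONSONANT_SOUND.match(text) and VOWEL_SOUND.match(text) else 'a'

    assert anora('apple') == 'an'
    assert anora('user') == 'a'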
diff --git a/awx/main/tests/URI.py b/awx/main/tests/URI.py
index 54fd1d8b06..c5fe95cee2 100644
--- a/awx/main/tests/URI.py
+++ b/awx/main/tests/URI.py
@@ -5,10 +5,10 @@
class URI(object):
DEFAULTS = {
- 'scheme' : 'http',
- 'username' : 'MYUSERNAME',
- 'password' : 'MYPASSWORD',
- 'host' : 'host.com',
+ 'scheme': 'http',
+ 'username': 'MYUSERNAME',
+ 'password': 'MYPASSWORD',
+ 'host': 'host.com',
}
def __init__(self, description='N/A', scheme=DEFAULTS['scheme'], username=DEFAULTS['username'], password=DEFAULTS['password'], host=DEFAULTS['host']):
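The pattern in this class, a class-level DEFAULTS dict consumed as default argument values, works because the class body is still in scope when the def line is evaluated; a standalone sketch with a hypothetical Endpoint class:

    class Endpoint:
        DEFAULTS = {'scheme': 'http', 'host': 'host.com'}

        def __init__(self, scheme=DEFAULTS['scheme'], host=DEFAULTS['host']):
            self.scheme = scheme
            self.host = host

    e = Endpoint(scheme='https')
    assert e.host == 'host.com'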
diff --git a/awx/main/tests/conftest.py b/awx/main/tests/conftest.py
index 409a4504ba..15474505c2 100644
--- a/awx/main/tests/conftest.py
+++ b/awx/main/tests/conftest.py
@@ -1,4 +1,3 @@
-
# Python
import pytest
from unittest import mock
@@ -19,18 +18,18 @@ from django.core.cache import cache
def pytest_addoption(parser):
- parser.addoption(
- "--genschema", action="store_true", default=False, help="execute schema validator"
- )
+ parser.addoption("--genschema", action="store_true", default=False, help="execute schema validator")
def pytest_configure(config):
import sys
+
sys._called_from_test = True
def pytest_unconfigure(config):
import sys
+
del sys._called_from_test
@@ -41,12 +40,12 @@ def mock_access():
try:
mock_instance = mock.MagicMock(__name__='foobar')
MockAccess = mock.MagicMock(return_value=mock_instance)
- the_patch = mock.patch.dict('awx.main.access.access_registry',
- {TowerClass: MockAccess}, clear=False)
+ the_patch = mock.patch.dict('awx.main.access.access_registry', {TowerClass: MockAccess}, clear=False)
the_patch.__enter__()
yield mock_instance
finally:
the_patch.__exit__()
+
return access_given_class
@@ -89,11 +88,18 @@ def default_instance_group(instance_factory, instance_group_factory):
def job_template_with_survey_passwords_factory(job_template_factory):
def rf(persisted):
"Returns job with linked JT survey with password survey questions"
- objects = job_template_factory('jt', organization='org1', survey=[
- {'variable': 'submitter_email', 'type': 'text', 'default': 'foobar@redhat.com'},
- {'variable': 'secret_key', 'default': '6kQngg3h8lgiSTvIEb21', 'type': 'password'},
- {'variable': 'SSN', 'type': 'password'}], persisted=persisted)
+ objects = job_template_factory(
+ 'jt',
+ organization='org1',
+ survey=[
+ {'variable': 'submitter_email', 'type': 'text', 'default': 'foobar@redhat.com'},
+ {'variable': 'secret_key', 'default': '6kQngg3h8lgiSTvIEb21', 'type': 'password'},
+ {'variable': 'SSN', 'type': 'password'},
+ ],
+ persisted=persisted,
+ )
return objects.job_template
+
return rf
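The factory-as-fixture pattern above, a fixture that returns an inner function so each test can parameterize object creation, in minimal form; user_factory and its dict payload are illustrative, where the real fixtures build model objects:

    import pytest

    @pytest.fixture
    def user_factory():
        def rf(name, is_admin=False):
            # the real factories create (optionally persisted) model objects
            return {'name': name, 'is_admin': is_admin}

        return rf

    def test_admin_user(user_factory):
        user = user_factory('alice', is_admin=True)
        assert user['is_admin']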
diff --git a/awx/main/tests/data/insights.py b/awx/main/tests/data/insights.py
index 8ddb0eba88..f51f9e18f3 100644
--- a/awx/main/tests/data/insights.py
+++ b/awx/main/tests/data/insights.py
@@ -1,5 +1,5 @@
import json
-import os
+import os
dir_path = os.path.dirname(os.path.realpath(__file__))
diff --git a/awx/main/tests/docs/test_swagger_generation.py b/awx/main/tests/docs/test_swagger_generation.py
index 5def85b3d3..e1257cf889 100644
--- a/awx/main/tests/docs/test_swagger_generation.py
+++ b/awx/main/tests/docs/test_swagger_generation.py
@@ -23,7 +23,7 @@ class i18nEncoder(DjangoJSONEncoder):
@pytest.mark.django_db
-class TestSwaggerGeneration():
+class TestSwaggerGeneration:
"""
This class is used to generate a Swagger/OpenAPI document for the awx
API. A _prepare fixture generates a JSON blob containing OpenAPI data,
@@ -37,6 +37,7 @@ class TestSwaggerGeneration():
To customize the `info.description` in the generated OpenAPI document,
modify the text in `awx.api.templates.swagger.description.md`
"""
+
JSON = {}
@pytest.fixture(autouse=True, scope='function')
@@ -57,10 +58,7 @@ class TestSwaggerGeneration():
deprecated_paths = data.pop('deprecated_paths', [])
for path, node in data['paths'].items():
# change {version} in paths to the actual default API version (e.g., v2)
- revised_paths[path.replace(
- '{version}',
- settings.REST_FRAMEWORK['DEFAULT_VERSION']
- )] = node
+ revised_paths[path.replace('{version}', settings.REST_FRAMEWORK['DEFAULT_VERSION'])] = node
for method in node:
if path in deprecated_paths:
node[method]['deprecated'] = True
@@ -81,7 +79,6 @@ class TestSwaggerGeneration():
JSON = self.__class__.JSON
JSON['info']['version'] = release
-
if not request.config.getoption('--genschema'):
JSON['modified'] = datetime.datetime.utcnow().isoformat()
@@ -96,23 +93,20 @@ class TestSwaggerGeneration():
assert 250 < len(paths) < 350
assert list(paths['/api/'].keys()) == ['get']
assert list(paths['/api/v2/'].keys()) == ['get']
- assert list(sorted(
- paths['/api/v2/credentials/'].keys()
- )) == ['get', 'post']
- assert list(sorted(
- paths['/api/v2/credentials/{id}/'].keys()
- )) == ['delete', 'get', 'patch', 'put']
+ assert list(sorted(paths['/api/v2/credentials/'].keys())) == ['get', 'post']
+ assert list(sorted(paths['/api/v2/credentials/{id}/'].keys())) == ['delete', 'get', 'patch', 'put']
assert list(paths['/api/v2/settings/'].keys()) == ['get']
- assert list(paths['/api/v2/settings/{category_slug}/'].keys()) == [
- 'get', 'put', 'patch', 'delete'
- ]
-
- @pytest.mark.parametrize('path', [
- '/api/',
- '/api/v2/',
- '/api/v2/ping/',
- '/api/v2/config/',
- ])
+ assert list(paths['/api/v2/settings/{category_slug}/'].keys()) == ['get', 'put', 'patch', 'delete']
+
+ @pytest.mark.parametrize(
+ 'path',
+ [
+ '/api/',
+ '/api/v2/',
+ '/api/v2/ping/',
+ '/api/v2/config/',
+ ],
+ )
def test_basic_paths(self, path, get, admin):
# hit a couple important endpoints so we always have example data
get(path, user=admin, expect=200)
@@ -143,11 +137,13 @@ class TestSwaggerGeneration():
if request.config.getoption("--genschema"):
pytest.skip("In schema generator skipping swagger generator", allow_module_level=True)
else:
- node[method].setdefault('parameters', []).append({
- 'name': 'data',
- 'in': 'body',
- 'schema': {'example': request_data},
- })
+ node[method].setdefault('parameters', []).append(
+ {
+ 'name': 'data',
+ 'in': 'body',
+ 'schema': {'example': request_data},
+ }
+ )
# Build response examples
if resp:
@@ -155,9 +151,7 @@ class TestSwaggerGeneration():
continue
if content_type == 'application/json':
resp = json.loads(resp)
- node[method]['responses'].setdefault(status_code, {}).setdefault(
- 'examples', {}
- )[content_type] = resp
+ node[method]['responses'].setdefault(status_code, {}).setdefault('examples', {})[content_type] = resp
@classmethod
def teardown_class(cls):
@@ -165,19 +159,7 @@ class TestSwaggerGeneration():
data = json.dumps(cls.JSON, cls=i18nEncoder, indent=2, sort_keys=True)
# replace ISO dates w/ the same value so we don't generate
# needless diffs
- data = re.sub(
- r'[0-9]{4}-[0-9]{2}-[0-9]{2}(T|\s)[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]+(Z|\+[0-9]{2}:[0-9]{2})?',
- r'2018-02-01T08:00:00.000000Z',
- data
- )
- data = re.sub(
- r'''(\s+"client_id": ")([a-zA-Z0-9]{40})("\,\s*)''',
- r'\1xxxx\3',
- data
- )
- data = re.sub(
- r'"action_node": "[^"]+"',
- '"action_node": "awx"',
- data
- )
+ data = re.sub(r'[0-9]{4}-[0-9]{2}-[0-9]{2}(T|\s)[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]+(Z|\+[0-9]{2}:[0-9]{2})?', r'2018-02-01T08:00:00.000000Z', data)
+ data = re.sub(r'''(\s+"client_id": ")([a-zA-Z0-9]{40})("\,\s*)''', r'\1xxxx\3', data)
+ data = re.sub(r'"action_node": "[^"]+"', '"action_node": "awx"', data)
f.write(data)
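The scrubbing idea in that teardown, pin every run-varying value so the committed document only diffs on meaningful changes, in a self-contained sketch with simplified stand-in patterns:

    import re

    data = '{"modified": "2021-03-19T17:44:51.000000Z", "client_id": "abc123"}'
    # pin timestamps to a fixed value
    data = re.sub(r'[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]+Z?', '2018-02-01T08:00:00.000000Z', data)
    # mask generated identifiers
    data = re.sub(r'("client_id": ")[a-zA-Z0-9]+(")', r'\1xxxx\2', data)
    assert '2018-02-01' in data and 'xxxx' in data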
diff --git a/awx/main/tests/factories/exc.py b/awx/main/tests/factories/exc.py
index aa51de5bd3..a9313a8dc8 100644
--- a/awx/main/tests/factories/exc.py
+++ b/awx/main/tests/factories/exc.py
@@ -2,4 +2,3 @@ class NotUnique(Exception):
def __init__(self, name, objects):
msg = '{} is not a unique key, found {}={}'.format(name, name, objects[name])
super(Exception, self).__init__(msg)
-
diff --git a/awx/main/tests/factories/fixtures.py b/awx/main/tests/factories/fixtures.py
index 2f8cbe6934..9cbdfcd288 100644
--- a/awx/main/tests/factories/fixtures.py
+++ b/awx/main/tests/factories/fixtures.py
@@ -32,12 +32,12 @@ def mk_instance(persisted=True, hostname='instance.example.org'):
if not persisted:
raise RuntimeError('creating an Instance requires persisted=True')
from django.conf import settings
+
return Instance.objects.get_or_create(uuid=settings.SYSTEM_UUID, hostname=hostname)[0]
def mk_instance_group(name='tower', instance=None, minimum=0, percentage=0):
- ig, status = InstanceGroup.objects.get_or_create(name=name, policy_instance_minimum=minimum,
- policy_instance_percentage=percentage)
+ ig, status = InstanceGroup.objects.get_or_create(name=name, policy_instance_minimum=minimum, policy_instance_percentage=percentage)
if instance is not None:
if type(instance) == list:
for i in instance:
@@ -90,8 +90,7 @@ def mk_user(name, is_superuser=False, organization=None, team=None, persisted=Tr
def mk_project(name, organization=None, description=None, persisted=True):
description = description or '{}-description'.format(name)
- project = Project(name=name, description=description,
- playbook_files=['helloworld.yml', 'alt-helloworld.yml'])
+ project = Project(name=name, description=description, playbook_files=['helloworld.yml', 'alt-helloworld.yml'])
if organization is not None:
project.organization = organization
if persisted:
@@ -105,10 +104,7 @@ def mk_credential(name, credential_type='ssh', persisted=True):
type_.save()
else:
type_ = CredentialType.defaults[credential_type]()
- cred = Credential(
- credential_type=type_,
- name=name
- )
+ cred = Credential(credential_type=type_, name=name)
if persisted:
cred.save()
return cred
@@ -135,9 +131,7 @@ def mk_inventory(name, organization=None, persisted=True):
return inv
-def mk_job(job_type='run', status='new', job_template=None, inventory=None,
- credential=None, project=None, extra_vars={},
- persisted=True):
+def mk_job(job_type='run', status='new', job_template=None, inventory=None, credential=None, project=None, extra_vars={}, persisted=True):
job = Job(job_type=job_type, status=status, extra_vars=json.dumps(extra_vars))
job.job_template = job_template
@@ -150,16 +144,24 @@ def mk_job(job_type='run', status='new', job_template=None, inventory=None,
return job
-def mk_job_template(name, job_type='run',
- organization=None, inventory=None,
- credential=None, network_credential=None,
- cloud_credential=None, persisted=True, extra_vars='',
- project=None, spec=None, webhook_service=''):
+def mk_job_template(
+ name,
+ job_type='run',
+ organization=None,
+ inventory=None,
+ credential=None,
+ network_credential=None,
+ cloud_credential=None,
+ persisted=True,
+ extra_vars='',
+ project=None,
+ spec=None,
+ webhook_service='',
+):
if extra_vars:
extra_vars = json.dumps(extra_vars)
- jt = JobTemplate(name=name, job_type=job_type, extra_vars=extra_vars,
- webhook_service=webhook_service, playbook='helloworld.yml')
+ jt = JobTemplate(name=name, job_type=job_type, extra_vars=extra_vars, webhook_service=webhook_service, playbook='helloworld.yml')
jt.inventory = inventory
if jt.inventory is None:
@@ -189,8 +191,7 @@ def mk_job_template(name, job_type='run',
return jt
-def mk_workflow_job(status='new', workflow_job_template=None, extra_vars={},
- persisted=True):
+def mk_workflow_job(status='new', workflow_job_template=None, extra_vars={}, persisted=True):
job = WorkflowJob(status=status, extra_vars=json.dumps(extra_vars))
job.workflow_job_template = workflow_job_template
@@ -200,13 +201,11 @@ def mk_workflow_job(status='new', workflow_job_template=None, extra_vars={},
return job
-def mk_workflow_job_template(name, extra_vars='', spec=None, organization=None, persisted=True,
- webhook_service=''):
+def mk_workflow_job_template(name, extra_vars='', spec=None, organization=None, persisted=True, webhook_service=''):
if extra_vars:
extra_vars = json.dumps(extra_vars)
- wfjt = WorkflowJobTemplate(name=name, extra_vars=extra_vars, organization=organization,
- webhook_service=webhook_service)
+ wfjt = WorkflowJobTemplate(name=name, extra_vars=extra_vars, organization=organization, webhook_service=webhook_service)
wfjt.survey_spec = spec
if wfjt.survey_spec:
@@ -217,35 +216,30 @@ def mk_workflow_job_template(name, extra_vars='', spec=None, organization=None,
return wfjt
-def mk_workflow_job_template_node(workflow_job_template=None,
- unified_job_template=None,
- success_nodes=None,
- failure_nodes=None,
- always_nodes=None,
- persisted=True):
- workflow_node = WorkflowJobTemplateNode(workflow_job_template=workflow_job_template,
- unified_job_template=unified_job_template,
- success_nodes=success_nodes,
- failure_nodes=failure_nodes,
- always_nodes=always_nodes)
+def mk_workflow_job_template_node(
+ workflow_job_template=None, unified_job_template=None, success_nodes=None, failure_nodes=None, always_nodes=None, persisted=True
+):
+ workflow_node = WorkflowJobTemplateNode(
+ workflow_job_template=workflow_job_template,
+ unified_job_template=unified_job_template,
+ success_nodes=success_nodes,
+ failure_nodes=failure_nodes,
+ always_nodes=always_nodes,
+ )
if persisted:
workflow_node.save()
return workflow_node
-def mk_workflow_job_node(unified_job_template=None,
- success_nodes=None,
- failure_nodes=None,
- always_nodes=None,
- workflow_job=None,
- job=None,
- persisted=True):
- workflow_node = WorkflowJobNode(unified_job_template=unified_job_template,
- success_nodes=success_nodes,
- failure_nodes=failure_nodes,
- always_nodes=always_nodes,
- workflow_job=workflow_job,
- job=job)
+def mk_workflow_job_node(unified_job_template=None, success_nodes=None, failure_nodes=None, always_nodes=None, workflow_job=None, job=None, persisted=True):
+ workflow_node = WorkflowJobNode(
+ unified_job_template=unified_job_template,
+ success_nodes=success_nodes,
+ failure_nodes=failure_nodes,
+ always_nodes=always_nodes,
+ workflow_job=workflow_job,
+ job=job,
+ )
if persisted:
workflow_node.save()
return workflow_node
diff --git a/awx/main/tests/factories/objects.py b/awx/main/tests/factories/objects.py
index 8246a71908..46f41e72b9 100644
--- a/awx/main/tests/factories/objects.py
+++ b/awx/main/tests/factories/objects.py
@@ -4,11 +4,11 @@ from .exc import NotUnique
def generate_objects(artifacts, kwargs):
- '''generate_objects takes a list of artifacts that are supported by
+ """generate_objects takes a list of artifacts that are supported by
a create function and compares it to the kwargs passed in to the create
function. If a kwarg is found that is not in the artifacts list a RuntimeError
is raised.
- '''
+ """
for k in kwargs.keys():
if k not in artifacts:
raise RuntimeError('{} is not a valid argument'.format(k))
@@ -16,13 +16,13 @@ def generate_objects(artifacts, kwargs):
def generate_role_objects(objects):
- '''generate_role_objects assembles a dictionary of all possible objects by name.
+ """generate_role_objects assembles a dictionary of all possible objects by name.
It will raise an exception if any of the objects share a name due to the fact that
it is to be used with apply_roles, which expects unique object names.
roles share a common name e.g. admin_role, member_role. This ensures that the
roles short hand used for mapping Roles and Users in apply_roles will function as desired.
- '''
+ """
combined_objects = {}
for o in objects:
if type(o) is dict:
@@ -41,20 +41,20 @@ def generate_role_objects(objects):
class _Mapped(object):
- '''_Mapped is a helper class that replaces spaces and dashes
+ """_Mapped is a helper class that replaces spaces and dashes
in the name of an object and assigns the object as an attribute
input: {'my org': Organization}
output: instance.my_org = Organization
- '''
+ """
+
def __init__(self, d):
self.d = d
- for k,v in d.items():
+ for k, v in d.items():
k = k.replace(' ', '_')
k = k.replace('-', '_')
- setattr(self, k.replace(' ','_'), v)
+ setattr(self, k.replace(' ', '_'), v)
def all(self):
return self.d.values()
-
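For illustration, the same helper in isolation, with plain strings standing in for model objects:

    class Mapped:
        """Replaces spaces and dashes in keys and exposes values as attributes."""

        def __init__(self, d):
            self.d = d
            for k, v in d.items():
                setattr(self, k.replace(' ', '_').replace('-', '_'), v)

        def all(self):
            return self.d.values()

    m = Mapped({'my org': 'Organization', 'team-one': 'Team'})
    assert m.my_org == 'Organization'
    assert m.team_one == 'Team'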
diff --git a/awx/main/tests/factories/tower.py b/awx/main/tests/factories/tower.py
index 87a7b436eb..6cf632f40b 100644
--- a/awx/main/tests/factories/tower.py
+++ b/awx/main/tests/factories/tower.py
@@ -37,7 +37,7 @@ from .fixtures import (
def apply_roles(roles, objects, persisted):
- '''apply_roles evaluates a list of Role relationships represented as strings.
+ """apply_roles evaluates a list of Role relationships represented as strings.
The format of this string is 'role:[user|role]'. When a user is provided, they will be
made a member of the role on the LHS. When a role is provided that role will be added to
the children of the role on the LHS.
@@ -59,7 +59,7 @@ def apply_roles(roles, objects, persisted):
---------------
roles = ['org1.admin_role:team1.admin_role']
objects = {'org1': Organization, 'user1': User} # Exception, no team1 entry
- '''
+ """
if roles is None:
return None
@@ -92,7 +92,7 @@ def apply_roles(roles, objects, persisted):
def generate_users(organization, teams, superuser, persisted, **kwargs):
- '''generate_users evaluates a mixed list of User objects and strings.
+ """generate_users evaluates a mixed list of User objects and strings.
If a string is encountered a user with that username is created and added to the lookup dict.
If a User object is encountered the User.username is used as a key for the lookup dict.
@@ -100,7 +100,7 @@ def generate_users(organization, teams, superuser, persisted, **kwargs):
If a string in that format is encountered, an attempt to look up the team by the key team_name from the teams
argument is made; a KeyError will be thrown if the team does not exist in the dict. The teams argument should
be a dict of {Team.name:Team}
- '''
+ """
users = {}
key = 'superusers' if superuser else 'users'
if key in kwargs and kwargs.get(key) is not None:
@@ -118,10 +118,10 @@ def generate_users(organization, teams, superuser, persisted, **kwargs):
def generate_teams(organization, persisted, **kwargs):
- '''generate_teams evaluates a mixed list of Team objects and strings.
+ """generate_teams evaluates a mixed list of Team objects and strings.
If a string is encountered a team with that string name is created and added to the lookup dict.
If a Team object is encountered the Team.name is used as a key for the lookup dict.
- '''
+ """
teams = {}
if 'teams' in kwargs and kwargs.get('teams') is not None:
for t in kwargs['teams']:
@@ -141,10 +141,10 @@ def create_instance_group(name, instances=None, minimum=0, percentage=0):
def create_survey_spec(variables=None, default_type='integer', required=True, min=None, max=None):
- '''
+ """
Returns a valid survey spec for a job template, based on the input
argument specifying variable name(s)
- '''
+ """
if isinstance(variables, list):
vars_list = variables
else:
@@ -198,13 +198,21 @@ def create_survey_spec(variables=None, default_type='integer', required=True, mi
def create_job_template(name, roles=None, persisted=True, webhook_service='', **kwargs):
- Objects = generate_objects(["job_template", "jobs",
- "organization",
- "inventory",
- "project",
- "credential", "cloud_credential", "network_credential",
- "job_type",
- "survey",], kwargs)
+ Objects = generate_objects(
+ [
+ "job_template",
+ "jobs",
+ "organization",
+ "inventory",
+ "project",
+ "credential",
+ "cloud_credential",
+ "network_credential",
+ "job_type",
+ "survey",
+ ],
+ kwargs,
+ )
org = None
proj = None
@@ -252,10 +260,19 @@ def create_job_template(name, roles=None, persisted=True, webhook_service='', **
else:
spec = None
- jt = mk_job_template(name, project=proj, inventory=inv, credential=cred,
- network_credential=net_cred, cloud_credential=cloud_cred,
- job_type=job_type, spec=spec, extra_vars=extra_vars,
- persisted=persisted, webhook_service=webhook_service)
+ jt = mk_job_template(
+ name,
+ project=proj,
+ inventory=inv,
+ credential=cred,
+ network_credential=net_cred,
+ cloud_credential=cloud_cred,
+ job_type=job_type,
+ spec=spec,
+ extra_vars=extra_vars,
+ persisted=persisted,
+ webhook_service=webhook_service,
+ )
if 'jobs' in kwargs:
for i in kwargs['jobs']:
@@ -267,31 +284,41 @@ def create_job_template(name, roles=None, persisted=True, webhook_service='', **
if spec is not None:
for question in spec['spec']:
job_extra_vars[question['variable']] = question['default']
- jobs[i] = mk_job(job_template=jt, project=proj, inventory=inv, credential=cred,
- extra_vars=job_extra_vars,
- job_type=job_type, persisted=persisted)
+ jobs[i] = mk_job(
+ job_template=jt, project=proj, inventory=inv, credential=cred, extra_vars=job_extra_vars, job_type=job_type, persisted=persisted
+ )
role_objects = generate_role_objects([org, proj, inv, cred])
apply_roles(roles, role_objects, persisted)
- return Objects(job_template=jt,
- jobs=jobs,
- project=proj,
- inventory=inv,
- credential=cred, cloud_credential=cloud_cred, network_credential=net_cred,
- job_type=job_type,
- organization=org,
- survey=spec,)
+ return Objects(
+ job_template=jt,
+ jobs=jobs,
+ project=proj,
+ inventory=inv,
+ credential=cred,
+ cloud_credential=cloud_cred,
+ network_credential=net_cred,
+ job_type=job_type,
+ organization=org,
+ survey=spec,
+ )
def create_organization(name, roles=None, persisted=True, **kwargs):
- Objects = generate_objects(["organization",
- "teams", "users",
- "superusers",
- "projects",
- "labels",
- "notification_templates",
- "inventories",], kwargs)
+ Objects = generate_objects(
+ [
+ "organization",
+ "teams",
+ "users",
+ "superusers",
+ "projects",
+ "labels",
+ "notification_templates",
+ "inventories",
+ ],
+ kwargs,
+ )
projects = {}
inventories = {}
@@ -334,22 +361,29 @@ def create_organization(name, roles=None, persisted=True, **kwargs):
role_objects = generate_role_objects([org, superusers, users, teams, projects, labels, notification_templates])
apply_roles(roles, role_objects, persisted)
- return Objects(organization=org,
- superusers=_Mapped(superusers),
- users=_Mapped(users),
- teams=_Mapped(teams),
- projects=_Mapped(projects),
- labels=_Mapped(labels),
- notification_templates=_Mapped(notification_templates),
- inventories=_Mapped(inventories))
+ return Objects(
+ organization=org,
+ superusers=_Mapped(superusers),
+ users=_Mapped(users),
+ teams=_Mapped(teams),
+ projects=_Mapped(projects),
+ labels=_Mapped(labels),
+ notification_templates=_Mapped(notification_templates),
+ inventories=_Mapped(inventories),
+ )
def create_notification_template(name, roles=None, persisted=True, **kwargs):
- Objects = generate_objects(["notification_template",
- "organization",
- "users",
- "superusers",
- "teams",], kwargs)
+ Objects = generate_objects(
+ [
+ "notification_template",
+ "organization",
+ "users",
+ "superusers",
+ "teams",
+ ],
+ kwargs,
+ )
organization = None
@@ -365,16 +399,10 @@ def create_notification_template(name, roles=None, persisted=True, **kwargs):
role_objects = generate_role_objects([organization, notification_template])
apply_roles(roles, role_objects, persisted)
- return Objects(notification_template=notification_template,
- organization=organization,
- users=_Mapped(users),
- superusers=_Mapped(superusers),
- teams=teams)
+ return Objects(notification_template=notification_template, organization=organization, users=_Mapped(users), superusers=_Mapped(superusers), teams=teams)
-def generate_workflow_job_template_nodes(workflow_job_template,
- persisted,
- **kwargs):
+def generate_workflow_job_template_nodes(workflow_job_template, persisted, **kwargs):
workflow_job_template_nodes = kwargs.get('workflow_job_template_nodes', [])
if len(workflow_job_template_nodes) > 0 and not persisted:
@@ -383,9 +411,7 @@ def generate_workflow_job_template_nodes(workflow_job_template,
new_nodes = []
for i, node in enumerate(workflow_job_template_nodes):
- new_node = WorkflowJobTemplateNode(workflow_job_template=workflow_job_template,
- unified_job_template=node['unified_job_template'],
- id=i)
+ new_node = WorkflowJobTemplateNode(workflow_job_template=workflow_job_template, unified_job_template=node['unified_job_template'], id=i)
if persisted:
new_node.save()
new_nodes.append(new_node)
@@ -401,30 +427,26 @@ def generate_workflow_job_template_nodes(workflow_job_template,
# TODO: Implement survey and jobs
def create_workflow_job_template(name, organization=None, persisted=True, webhook_service='', **kwargs):
- Objects = generate_objects(["workflow_job_template",
- "workflow_job_template_nodes",
- "survey",], kwargs)
+ Objects = generate_objects(
+ [
+ "workflow_job_template",
+ "workflow_job_template_nodes",
+ "survey",
+ ],
+ kwargs,
+ )
spec = None
- #jobs = None
+ # jobs = None
extra_vars = kwargs.get('extra_vars', '')
if 'survey' in kwargs:
spec = create_survey_spec(kwargs['survey'])
- wfjt = mk_workflow_job_template(name,
- organization=organization,
- spec=spec,
- extra_vars=extra_vars,
- persisted=persisted,
- webhook_service=webhook_service)
-
-
+ wfjt = mk_workflow_job_template(name, organization=organization, spec=spec, extra_vars=extra_vars, persisted=persisted, webhook_service=webhook_service)
- workflow_jt_nodes = generate_workflow_job_template_nodes(wfjt,
- persisted,
- workflow_job_template_nodes=kwargs.get('workflow_job_template_nodes', []))
+ workflow_jt_nodes = generate_workflow_job_template_nodes(wfjt, persisted, workflow_job_template_nodes=kwargs.get('workflow_job_template_nodes', []))
'''
if 'jobs' in kwargs:
@@ -435,7 +457,9 @@ def create_workflow_job_template(name, organization=None, persisted=True, webhoo
# TODO: Create the job
raise RuntimeError("Currently, only already created jobs are supported")
'''
- return Objects(workflow_job_template=wfjt,
- #jobs=jobs,
- workflow_job_template_nodes=workflow_jt_nodes,
- survey=spec,)
+ return Objects(
+ workflow_job_template=wfjt,
+ # jobs=jobs,
+ workflow_job_template_nodes=workflow_jt_nodes,
+ survey=spec,
+ )
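The 'role:[user|role]' grammar that apply_roles consumes (documented in its docstring above) parses along these lines; a hedged standalone sketch, with print standing in for the real membership and child-role assignment:

    roles = ['org1.admin_role:user1', 'org1.admin_role:team1.admin_role']
    for role_str in roles:
        lhs, rhs = role_str.split(':')
        obj_name, role_name = lhs.split('.')
        if '.' in rhs:
            # RHS is itself a role: add it to the children of the LHS role
            member_obj, member_role = rhs.split('.')
            print(f'add child role {member_obj}.{member_role} to {obj_name}.{role_name}')
        else:
            # RHS is a user: make them a member of the LHS role
            print(f'add user {rhs} as member of {obj_name}.{role_name}')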
diff --git a/awx/main/tests/functional/__init__.py b/awx/main/tests/functional/__init__.py
index 15c2991281..6b9ac00c34 100644
--- a/awx/main/tests/functional/__init__.py
+++ b/awx/main/tests/functional/__init__.py
@@ -12,27 +12,24 @@ def app_post_migration(sender, app_config, **kwargs):
# so we've got to make sure the deprecated
# `main_unifiedjob.result_stdout_text` column actually exists
cur = connection.cursor()
- cols = cur.execute(
- 'SELECT sql FROM sqlite_master WHERE tbl_name="main_unifiedjob";'
- ).fetchone()[0]
+ cols = cur.execute('SELECT sql FROM sqlite_master WHERE tbl_name="main_unifiedjob";').fetchone()[0]
if 'result_stdout_text' not in cols:
- cur.execute(
- 'ALTER TABLE main_unifiedjob ADD COLUMN result_stdout_text TEXT'
- )
+ cur.execute('ALTER TABLE main_unifiedjob ADD COLUMN result_stdout_text TEXT')
if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3':
post_migrate.connect(app_post_migration, sender=apps.get_app_config('main'))
-
@contextlib.contextmanager
def immediate_on_commit():
"""
Context manager executing transaction.on_commit() hooks immediately as
if the connection was in auto-commit mode.
"""
+
def on_commit(func):
func()
+
with mock.patch('django.db.connection.on_commit', side_effect=on_commit) as patch:
yield patch
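A hedged usage sketch for this context manager (requires a configured Django test environment; the test name is hypothetical): hooks registered via transaction.on_commit fire immediately instead of waiting for a commit that never happens inside a test transaction:

    from django.db import transaction
    from awx.main.tests.functional import immediate_on_commit

    def test_on_commit_hook_runs():
        results = []
        with immediate_on_commit():
            transaction.on_commit(lambda: results.append('fired'))
        # the hook ran synchronously via the patched connection
        assert results == ['fired']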
diff --git a/awx/main/tests/functional/analytics/test_collectors.py b/awx/main/tests/functional/analytics/test_collectors.py
index 1d643588d1..0fed6e9c15 100644
--- a/awx/main/tests/functional/analytics/test_collectors.py
+++ b/awx/main/tests/functional/analytics/test_collectors.py
@@ -73,26 +73,20 @@ def sqlite_copy_expert(request):
@pytest.mark.django_db
-def test_copy_tables_unified_job_query(
- sqlite_copy_expert, project, inventory, job_template
-):
+def test_copy_tables_unified_job_query(sqlite_copy_expert, project, inventory, job_template):
"""
Ensure that various unified job types are in the output of the query.
"""
time_start = now() - timedelta(hours=9)
- inv_src = InventorySource.objects.create(
- name="inventory_update1", inventory=inventory, source="gce"
- )
+ inv_src = InventorySource.objects.create(name="inventory_update1", inventory=inventory, source="gce")
- project_update_name = ProjectUpdate.objects.create(
- project=project, name="project_update1"
- ).name
+ project_update_name = ProjectUpdate.objects.create(project=project, name="project_update1").name
inventory_update_name = inv_src.create_unified_job().name
job_name = job_template.create_unified_job().name
with tempfile.TemporaryDirectory() as tmpdir:
- collectors.unified_jobs_table(time_start, tmpdir, until = now() + timedelta(seconds=1))
+ collectors.unified_jobs_table(time_start, tmpdir, until=now() + timedelta(seconds=1))
with open(os.path.join(tmpdir, "unified_jobs_table.csv")) as f:
lines = "".join([line for line in f])
@@ -117,10 +111,7 @@ def workflow_job(states=["new", "new", "new", "new", "new"]):
"""
wfj = WorkflowJob.objects.create()
jt = JobTemplate.objects.create(name="test-jt")
- nodes = [
- WorkflowJobNode.objects.create(workflow_job=wfj, unified_job_template=jt)
- for i in range(0, 6)
- ]
+ nodes = [WorkflowJobNode.objects.create(workflow_job=wfj, unified_job_template=jt) for i in range(0, 6)]
for node, state in zip(nodes, states):
if state:
node.job = jt.create_job()
@@ -140,7 +131,7 @@ def test_copy_tables_workflow_job_node_query(sqlite_copy_expert, workflow_job):
time_start = now() - timedelta(hours=9)
with tempfile.TemporaryDirectory() as tmpdir:
- collectors.workflow_job_node_table(time_start, tmpdir, until = now() + timedelta(seconds=1))
+ collectors.workflow_job_node_table(time_start, tmpdir, until=now() + timedelta(seconds=1))
with open(os.path.join(tmpdir, "workflow_job_node_table.csv")) as f:
reader = csv.reader(f)
# Pop the headers
@@ -149,19 +140,11 @@ def test_copy_tables_workflow_job_node_query(sqlite_copy_expert, workflow_job):
ids = [int(line[0]) for line in lines]
- assert ids == list(
- workflow_job.workflow_nodes.all().values_list("id", flat=True)
- )
+ assert ids == list(workflow_job.workflow_nodes.all().values_list("id", flat=True))
- for index, relationship in zip(
- [7, 8, 9], ["success_nodes", "failure_nodes", "always_nodes"]
- ):
+ for index, relationship in zip([7, 8, 9], ["success_nodes", "failure_nodes", "always_nodes"]):
for i, l in enumerate(lines):
- related_nodes = (
- [int(e) for e in l[index].split(",")] if l[index] else []
- )
+ related_nodes = [int(e) for e in l[index].split(",")] if l[index] else []
assert related_nodes == list(
- getattr(workflow_job.workflow_nodes.all()[i], relationship)
- .all()
- .values_list("id", flat=True)
+ getattr(workflow_job.workflow_nodes.all()[i], relationship).all().values_list("id", flat=True)
), f"(right side) workflow_nodes.all()[{i}].{relationship}.all()"
diff --git a/awx/main/tests/functional/analytics/test_core.py b/awx/main/tests/functional/analytics/test_core.py
index f3cc1fcd4b..dbb819a87f 100644
--- a/awx/main/tests/functional/analytics/test_core.py
+++ b/awx/main/tests/functional/analytics/test_core.py
@@ -22,7 +22,7 @@ def bad_json(since, **kwargs):
@register('throws_error', '1.0')
def throws_error(since, **kwargs):
raise ValueError()
-
+
def _valid_license():
pass
@@ -38,7 +38,7 @@ def mock_valid_license():
@pytest.mark.django_db
def test_gather(mock_valid_license):
settings.INSIGHTS_TRACKING_STATE = True
-
+
tgzfiles = gather(module=importlib.import_module(__name__))
files = {}
with tarfile.open(tgzfiles[0], "r:gz") as archive:
@@ -57,4 +57,3 @@ def test_gather(mock_valid_license):
os.remove(tgz)
except Exception:
pass
-
diff --git a/awx/main/tests/functional/analytics/test_counts.py b/awx/main/tests/functional/analytics/test_counts.py
index 877f21bada..a4ce6a3a38 100644
--- a/awx/main/tests/functional/analytics/test_counts.py
+++ b/awx/main/tests/functional/analytics/test_counts.py
@@ -32,9 +32,7 @@ def test_empty():
@pytest.mark.django_db
-def test_database_counts(
- organization_factory, job_template_factory, workflow_job_template_factory
-):
+def test_database_counts(organization_factory, job_template_factory, workflow_job_template_factory):
objs = organization_factory("org", superusers=["admin"])
jt = job_template_factory(
"test",
diff --git a/awx/main/tests/functional/analytics/test_metrics.py b/awx/main/tests/functional/analytics/test_metrics.py
index 3853f083b7..94076d1362 100644
--- a/awx/main/tests/functional/analytics/test_metrics.py
+++ b/awx/main/tests/functional/analytics/test_metrics.py
@@ -7,50 +7,43 @@ from awx.api.versioning import reverse
from awx.main.models.rbac import Role
EXPECTED_VALUES = {
- 'awx_system_info':1.0,
- 'awx_organizations_total':1.0,
- 'awx_users_total':1.0,
- 'awx_teams_total':1.0,
- 'awx_inventories_total':1.0,
- 'awx_projects_total':1.0,
- 'awx_job_templates_total':1.0,
- 'awx_workflow_job_templates_total':1.0,
- 'awx_hosts_total':1.0,
- 'awx_hosts_total':1.0,
- 'awx_schedules_total':1.0,
- 'awx_inventory_scripts_total':1.0,
- 'awx_sessions_total':0.0,
- 'awx_sessions_total':0.0,
- 'awx_sessions_total':0.0,
- 'awx_custom_virtualenvs_total':0.0,
- 'awx_running_jobs_total':0.0,
- 'awx_instance_capacity':100.0,
- 'awx_instance_consumed_capacity':0.0,
- 'awx_instance_remaining_capacity':100.0,
- 'awx_instance_cpu':0.0,
- 'awx_instance_memory':0.0,
- 'awx_instance_info':1.0,
- 'awx_license_instance_total':0,
- 'awx_license_instance_free':0,
- 'awx_pending_jobs_total':0,
+ 'awx_system_info': 1.0,
+ 'awx_organizations_total': 1.0,
+ 'awx_users_total': 1.0,
+ 'awx_teams_total': 1.0,
+ 'awx_inventories_total': 1.0,
+ 'awx_projects_total': 1.0,
+ 'awx_job_templates_total': 1.0,
+ 'awx_workflow_job_templates_total': 1.0,
+ 'awx_hosts_total': 1.0,
+ 'awx_hosts_total': 1.0,
+ 'awx_schedules_total': 1.0,
+ 'awx_inventory_scripts_total': 1.0,
+ 'awx_sessions_total': 0.0,
+ 'awx_sessions_total': 0.0,
+ 'awx_sessions_total': 0.0,
+ 'awx_custom_virtualenvs_total': 0.0,
+ 'awx_running_jobs_total': 0.0,
+ 'awx_instance_capacity': 100.0,
+ 'awx_instance_consumed_capacity': 0.0,
+ 'awx_instance_remaining_capacity': 100.0,
+ 'awx_instance_cpu': 0.0,
+ 'awx_instance_memory': 0.0,
+ 'awx_instance_info': 1.0,
+ 'awx_license_instance_total': 0,
+ 'awx_license_instance_free': 0,
+ 'awx_pending_jobs_total': 0,
}
@pytest.mark.django_db
def test_metrics_counts(organization_factory, job_template_factory, workflow_job_template_factory):
objs = organization_factory('org', superusers=['admin'])
- jt = job_template_factory(
- 'test', organization=objs.organization,
- inventory='test_inv', project='test_project',
- credential='test_cred'
- )
+ jt = job_template_factory('test', organization=objs.organization, inventory='test_inv', project='test_project', credential='test_cred')
workflow_job_template_factory('test')
models.Team(organization=objs.organization).save()
models.Host(inventory=jt.inventory).save()
- models.Schedule(
- rrule='DTSTART;TZID=America/New_York:20300504T150000',
- unified_job_template=jt.job_template
- ).save()
+ models.Schedule(rrule='DTSTART;TZID=America/New_York:20300504T150000', unified_job_template=jt.job_template).save()
models.CustomInventoryScript(organization=objs.organization).save()
output = metrics()
@@ -63,7 +56,7 @@ def test_metrics_counts(organization_factory, job_template_factory, workflow_job
assert EXPECTED_VALUES[name] == value
-@pytest.mark.django_db
+@pytest.mark.django_db
def test_metrics_permissions(get, admin, org_admin, alice, bob, organization):
assert get(reverse('api:metrics_view'), user=admin).status_code == 200
assert get(reverse('api:metrics_view'), user=org_admin).status_code == 403
@@ -71,18 +64,16 @@ def test_metrics_permissions(get, admin, org_admin, alice, bob, organization):
assert get(reverse('api:metrics_view'), user=bob).status_code == 403
organization.auditor_role.members.add(bob)
assert get(reverse('api:metrics_view'), user=bob).status_code == 403
-
+
Role.singleton('system_auditor').members.add(bob)
bob.is_system_auditor = True
assert get(reverse('api:metrics_view'), user=bob).status_code == 200
-@pytest.mark.django_db
+@pytest.mark.django_db
def test_metrics_http_methods(get, post, patch, put, options, admin):
assert get(reverse('api:metrics_view'), user=admin).status_code == 200
assert put(reverse('api:metrics_view'), user=admin).status_code == 405
assert patch(reverse('api:metrics_view'), user=admin).status_code == 405
assert post(reverse('api:metrics_view'), user=admin).status_code == 405
assert options(reverse('api:metrics_view'), user=admin).status_code == 200
-
-
diff --git a/awx/main/tests/functional/api/test_activity_streams.py b/awx/main/tests/functional/api/test_activity_streams.py
index c002932a07..961fd02f80 100644
--- a/awx/main/tests/functional/api/test_activity_streams.py
+++ b/awx/main/tests/functional/api/test_activity_streams.py
@@ -6,7 +6,6 @@ from awx.main.access import ActivityStreamAccess
from awx.conf.models import Setting
-
@pytest.fixture
def activity_stream_entry(organization, org_admin):
return ActivityStream.objects.filter(organization__pk=organization.pk, user=org_admin, operation='associate').first()
@@ -92,9 +91,21 @@ def test_stream_access_cant_change(activity_stream_entry, organization, org_admi
@pytest.mark.django_db
@pytest.mark.activity_stream_access
def test_stream_queryset_hides_shows_items(
- activity_stream_entry, organization, user, org_admin,
- project, org_credential, inventory, label, deploy_jobtemplate,
- notification_template, group, host, team, settings):
+ activity_stream_entry,
+ organization,
+ user,
+ org_admin,
+ project,
+ org_credential,
+ inventory,
+ label,
+ deploy_jobtemplate,
+ notification_template,
+ group,
+ host,
+ team,
+ settings,
+):
settings.ACTIVITY_STREAM_ENABLED = True
# this user is not in any organizations and should not see any resource activity
no_access_user = user('no-access-user', False)
@@ -126,18 +137,14 @@ def test_stream_queryset_hides_shows_items(
@pytest.mark.django_db
def test_stream_user_direct_role_updates(get, post, organization_factory):
- objects = organization_factory('test_org',
- superusers=['admin'],
- users=['test'],
- inventories=['inv1'])
+ objects = organization_factory('test_org', superusers=['admin'], users=['test'], inventories=['inv1'])
url = reverse('api:user_roles_list', kwargs={'pk': objects.users.test.pk})
post(url, dict(id=objects.inventories.inv1.read_role.pk), objects.superusers.admin)
activity_stream = ActivityStream.objects.filter(
- inventory__pk=objects.inventories.inv1.pk,
- user__pk=objects.users.test.pk,
- role__pk=objects.inventories.inv1.read_role.pk).first()
+ inventory__pk=objects.inventories.inv1.pk, user__pk=objects.users.test.pk, role__pk=objects.inventories.inv1.read_role.pk
+ ).first()
url = reverse('api:activity_stream_detail', kwargs={'pk': activity_stream.pk})
response = get(url, objects.users.test)
diff --git a/awx/main/tests/functional/api/test_adhoc.py b/awx/main/tests/functional/api/test_adhoc.py
index a081a36cce..983e45029c 100644
--- a/awx/main/tests/functional/api/test_adhoc.py
+++ b/awx/main/tests/functional/api/test_adhoc.py
@@ -1,4 +1,4 @@
-from unittest import mock # noqa
+from unittest import mock # noqa
import pytest
from awx.api.versioning import reverse
@@ -43,6 +43,7 @@ def post_adhoc(post, inventory, machine_credential):
del data[k]
return post(url, data, user, expect=expect)
+
return f
@@ -89,7 +90,7 @@ def test_user_post_ad_hoc_command_list(alice, post_adhoc, inventory, machine_cre
@pytest.mark.django_db
def test_user_post_ad_hoc_command_list_xfail(alice, post_adhoc, inventory, machine_credential):
- inventory.read_role.members.add(alice) # just read access? no dice.
+ inventory.read_role.members.add(alice) # just read access? no dice.
machine_credential.use_role.members.add(alice)
post_adhoc(reverse('api:ad_hoc_command_list'), {}, alice, expect=403)
diff --git a/awx/main/tests/functional/api/test_auth.py b/awx/main/tests/functional/api/test_auth.py
index dfc92e67f5..d9ac588de3 100644
--- a/awx/main/tests/functional/api/test_auth.py
+++ b/awx/main/tests/functional/api/test_auth.py
@@ -13,7 +13,7 @@ from awx.api.versioning import drf_reverse
def test_invalid_login():
anon = auth.get_user(Client())
url = drf_reverse('api:login')
-
+
factory = APIRequestFactory()
data = {'userame': 'invalid', 'password': 'invalid'}
diff --git a/awx/main/tests/functional/api/test_create_attach_views.py b/awx/main/tests/functional/api/test_create_attach_views.py
index d6d8415916..b22ec08912 100644
--- a/awx/main/tests/functional/api/test_create_attach_views.py
+++ b/awx/main/tests/functional/api/test_create_attach_views.py
@@ -10,11 +10,8 @@ def test_user_role_view_access(rando, inventory, mocker, post):
data = {"id": role_pk}
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
- post(url=reverse('api:user_roles_list', kwargs={'pk': rando.pk}),
- data=data, user=rando, expect=403)
- mock_access.can_attach.assert_called_once_with(
- inventory.admin_role, rando, 'members', data,
- skip_sub_obj_read_check=False)
+ post(url=reverse('api:user_roles_list', kwargs={'pk': rando.pk}), data=data, user=rando, expect=403)
+ mock_access.can_attach.assert_called_once_with(inventory.admin_role, rando, 'members', data, skip_sub_obj_read_check=False)
@pytest.mark.django_db
@@ -25,11 +22,8 @@ def test_team_role_view_access(rando, team, inventory, mocker, post):
data = {"id": role_pk}
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
- post(url=reverse('api:team_roles_list', kwargs={'pk': team.pk}),
- data=data, user=rando, expect=403)
- mock_access.can_attach.assert_called_once_with(
- inventory.admin_role, team, 'member_role.parents', data,
- skip_sub_obj_read_check=False)
+ post(url=reverse('api:team_roles_list', kwargs={'pk': team.pk}), data=data, user=rando, expect=403)
+ mock_access.can_attach.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)
@pytest.mark.django_db
@@ -40,11 +34,8 @@ def test_role_team_view_access(rando, team, inventory, mocker, post):
data = {"id": team.pk}
mock_access = mocker.MagicMock(return_value=False, __name__='mocked')
with mocker.patch('awx.main.access.RoleAccess.can_attach', mock_access):
- post(url=reverse('api:role_teams_list', kwargs={'pk': role_pk}),
- data=data, user=rando, expect=403)
- mock_access.assert_called_once_with(
- inventory.admin_role, team, 'member_role.parents', data,
- skip_sub_obj_read_check=False)
+ post(url=reverse('api:role_teams_list', kwargs={'pk': role_pk}), data=data, user=rando, expect=403)
+ mock_access.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)
@pytest.mark.django_db
@@ -54,8 +45,7 @@ def test_org_associate_with_junk_data(rando, admin_user, organization, post):
will turn off if the action is an association
"""
user_data = {'is_system_auditor': True, 'id': rando.pk}
- post(url=reverse('api:organization_users_list', kwargs={'pk': organization.pk}),
- data=user_data, expect=204, user=admin_user)
+ post(url=reverse('api:organization_users_list', kwargs={'pk': organization.pk}), data=user_data, expect=204, user=admin_user)
# assure user is now an org member
assert rando in organization.member_role
# assure that this did not also make them a system auditor
diff --git a/awx/main/tests/functional/api/test_credential.py b/awx/main/tests/functional/api/test_credential.py
index e8e7b4b271..bf6e908c52 100644
--- a/awx/main/tests/functional/api/test_credential.py
+++ b/awx/main/tests/functional/api/test_credential.py
@@ -1,13 +1,11 @@
import re
-from unittest import mock # noqa
+from unittest import mock # noqa
import pytest
from django.utils.encoding import smart_str
-from awx.main.models import (AdHocCommand, Credential, CredentialType, Job, JobTemplate,
- Inventory, InventorySource, Project,
- WorkflowJobNode)
+from awx.main.models import AdHocCommand, Credential, CredentialType, Job, JobTemplate, Inventory, InventorySource, Project, WorkflowJobNode
from awx.main.utils import decrypt_field
from awx.api.versioning import reverse
@@ -49,12 +47,7 @@ def test_create_user_credential_via_credentials_list(post, get, alice, credentia
@pytest.mark.django_db
def test_credential_validation_error_with_bad_user(post, admin, credentialtype_ssh):
- params = {
- 'credential_type': 1,
- 'inputs': {'username': 'someusername'},
- 'user': 'asdf',
- 'name': 'Some name'
- }
+ params = {'credential_type': 1, 'inputs': {'username': 'someusername'}, 'user': 'asdf', 'name': 'Some name'}
response = post(reverse('api:credential_list'), params, admin)
assert response.status_code == 400
assert response.data['user'][0] == 'Incorrect type. Expected pk value, received str.'
@@ -84,10 +77,7 @@ def test_credential_validation_error_with_multiple_owner_fields(post, admin, ali
}
response = post(reverse('api:credential_list'), params, admin)
assert response.status_code == 400
- assert response.data['detail'][0] == (
- "Only one of 'user', 'team', or 'organization' should be provided, "
- "received organization, team, user fields."
- )
+ assert response.data['detail'][0] == ("Only one of 'user', 'team', or 'organization' should be provided, " "received organization, team, user fields.")
@pytest.mark.django_db
@@ -98,11 +88,7 @@ def test_create_user_credential_via_user_credentials_list(post, get, alice, cred
'user': alice.id,
'name': 'Some name',
}
- response = post(
- reverse('api:user_credentials_list', kwargs={'pk': alice.pk}),
- params,
- alice
- )
+ response = post(reverse('api:user_credentials_list', kwargs={'pk': alice.pk}), params, alice)
assert response.status_code == 201
response = get(reverse('api:user_credentials_list', kwargs={'pk': alice.pk}), alice)
@@ -132,11 +118,7 @@ def test_create_user_credential_via_user_credentials_list_xfail(post, alice, bob
'user': bob.id,
'name': 'Some name',
}
- response = post(
- reverse('api:user_credentials_list', kwargs={'pk': bob.pk}),
- params,
- alice
- )
+ response = post(reverse('api:user_credentials_list', kwargs={'pk': bob.pk}), params, alice)
assert response.status_code == 403
@@ -156,10 +138,7 @@ def test_create_team_credential(post, get, team, organization, org_admin, team_m
response = post(reverse('api:credential_list'), params, org_admin)
assert response.status_code == 201
- response = get(
- reverse('api:team_credentials_list', kwargs={'pk': team.pk}),
- team_member
- )
+ response = get(reverse('api:team_credentials_list', kwargs={'pk': team.pk}), team_member)
assert response.status_code == 200
assert response.data['count'] == 1
@@ -175,17 +154,10 @@ def test_create_team_credential_via_team_credentials_list(post, get, team, org_a
'team': team.id,
'name': 'Some name',
}
- response = post(
- reverse('api:team_credentials_list', kwargs={'pk': team.pk}),
- params,
- org_admin
- )
+ response = post(reverse('api:team_credentials_list', kwargs={'pk': team.pk}), params, org_admin)
assert response.status_code == 201
- response = get(
- reverse('api:team_credentials_list', kwargs={'pk': team.pk}),
- team_member
- )
+ response = get(reverse('api:team_credentials_list', kwargs={'pk': team.pk}), team_member)
assert response.status_code == 200
assert response.data['count'] == 1
@@ -226,9 +198,7 @@ def test_create_team_credential_by_team_member_xfail(post, team, organization, a
def test_grant_org_credential_to_org_user_through_role_users(post, credential, organization, org_admin, org_member):
credential.organization = organization
credential.save()
- response = post(reverse('api:role_users_list', kwargs={'pk': credential.use_role.id}), {
- 'id': org_member.id
- }, org_admin)
+ response = post(reverse('api:role_users_list', kwargs={'pk': credential.use_role.id}), {'id': org_member.id}, org_admin)
assert response.status_code == 204
@@ -236,9 +206,7 @@ def test_grant_org_credential_to_org_user_through_role_users(post, credential, o
def test_grant_org_credential_to_org_user_through_user_roles(post, credential, organization, org_admin, org_member):
credential.organization = organization
credential.save()
- response = post(reverse('api:user_roles_list', kwargs={'pk': org_member.id}), {
- 'id': credential.use_role.id
- }, org_admin)
+ response = post(reverse('api:user_roles_list', kwargs={'pk': org_member.id}), {'id': credential.use_role.id}, org_admin)
assert response.status_code == 204
@@ -246,9 +214,7 @@ def test_grant_org_credential_to_org_user_through_user_roles(post, credential, o
def test_grant_org_credential_to_non_org_user_through_role_users(post, credential, organization, org_admin, alice):
credential.organization = organization
credential.save()
- response = post(reverse('api:role_users_list', kwargs={'pk': credential.use_role.id}), {
- 'id': alice.id
- }, org_admin)
+ response = post(reverse('api:role_users_list', kwargs={'pk': credential.use_role.id}), {'id': alice.id}, org_admin)
assert response.status_code == 400
@@ -256,9 +222,7 @@ def test_grant_org_credential_to_non_org_user_through_role_users(post, credentia
def test_grant_org_credential_to_non_org_user_through_user_roles(post, credential, organization, org_admin, alice):
credential.organization = organization
credential.save()
- response = post(reverse('api:user_roles_list', kwargs={'pk': alice.id}), {
- 'id': credential.use_role.id
- }, org_admin)
+ response = post(reverse('api:user_roles_list', kwargs={'pk': alice.id}), {'id': credential.use_role.id}, org_admin)
assert response.status_code == 400
@@ -266,9 +230,7 @@ def test_grant_org_credential_to_non_org_user_through_user_roles(post, credentia
def test_grant_private_credential_to_user_through_role_users(post, credential, alice, bob):
# normal users can't do this
credential.admin_role.members.add(alice)
- response = post(reverse('api:role_users_list', kwargs={'pk': credential.use_role.id}), {
- 'id': bob.id
- }, alice)
+ response = post(reverse('api:role_users_list', kwargs={'pk': credential.use_role.id}), {'id': bob.id}, alice)
assert response.status_code == 400
@@ -276,18 +238,14 @@ def test_grant_private_credential_to_user_through_role_users(post, credential, a
def test_grant_private_credential_to_org_user_through_role_users(post, credential, org_admin, org_member):
# org admins can't either
credential.admin_role.members.add(org_admin)
- response = post(reverse('api:role_users_list', kwargs={'pk': credential.use_role.id}), {
- 'id': org_member.id
- }, org_admin)
+ response = post(reverse('api:role_users_list', kwargs={'pk': credential.use_role.id}), {'id': org_member.id}, org_admin)
assert response.status_code == 400
@pytest.mark.django_db
def test_sa_grant_private_credential_to_user_through_role_users(post, credential, admin, bob):
# but system admins can
- response = post(reverse('api:role_users_list', kwargs={'pk': credential.use_role.id}), {
- 'id': bob.id
- }, admin)
+ response = post(reverse('api:role_users_list', kwargs={'pk': credential.use_role.id}), {'id': bob.id}, admin)
assert response.status_code == 204
@@ -295,9 +253,7 @@ def test_sa_grant_private_credential_to_user_through_role_users(post, credential
def test_grant_private_credential_to_user_through_user_roles(post, credential, alice, bob):
# normal users can't do this
credential.admin_role.members.add(alice)
- response = post(reverse('api:user_roles_list', kwargs={'pk': bob.id}), {
- 'id': credential.use_role.id
- }, alice)
+ response = post(reverse('api:user_roles_list', kwargs={'pk': bob.id}), {'id': credential.use_role.id}, alice)
assert response.status_code == 400
@@ -305,18 +261,14 @@ def test_grant_private_credential_to_user_through_user_roles(post, credential, a
def test_grant_private_credential_to_org_user_through_user_roles(post, credential, org_admin, org_member):
# org admins can't either
credential.admin_role.members.add(org_admin)
- response = post(reverse('api:user_roles_list', kwargs={'pk': org_member.id}), {
- 'id': credential.use_role.id
- }, org_admin)
+ response = post(reverse('api:user_roles_list', kwargs={'pk': org_member.id}), {'id': credential.use_role.id}, org_admin)
assert response.status_code == 400
@pytest.mark.django_db
def test_sa_grant_private_credential_to_user_through_user_roles(post, credential, admin, bob):
# but system admins can
- response = post(reverse('api:user_roles_list', kwargs={'pk': bob.id}), {
- 'id': credential.use_role.id
- }, admin)
+ response = post(reverse('api:user_roles_list', kwargs={'pk': bob.id}), {'id': credential.use_role.id}, admin)
assert response.status_code == 204
@@ -325,9 +277,7 @@ def test_grant_org_credential_to_team_through_role_teams(post, credential, organ
assert org_auditor not in credential.read_role
credential.organization = organization
credential.save()
- response = post(reverse('api:role_teams_list', kwargs={'pk': credential.use_role.id}), {
- 'id': team.id
- }, org_admin)
+ response = post(reverse('api:role_teams_list', kwargs={'pk': credential.use_role.id}), {'id': team.id}, org_admin)
assert response.status_code == 204
assert org_auditor in credential.read_role
@@ -337,9 +287,7 @@ def test_grant_org_credential_to_team_through_team_roles(post, credential, organ
assert org_auditor not in credential.read_role
credential.organization = organization
credential.save()
- response = post(reverse('api:team_roles_list', kwargs={'pk': team.id}), {
- 'id': credential.use_role.id
- }, org_admin)
+ response = post(reverse('api:team_roles_list', kwargs={'pk': team.id}), {'id': credential.use_role.id}, org_admin)
assert response.status_code == 204
assert org_auditor in credential.read_role
@@ -347,18 +295,14 @@ def test_grant_org_credential_to_team_through_team_roles(post, credential, organ
@pytest.mark.django_db
def test_sa_grant_private_credential_to_team_through_role_teams(post, credential, admin, team):
# not even a system admin can grant a private cred to a team though
- response = post(reverse('api:role_teams_list', kwargs={'pk': credential.use_role.id}), {
- 'id': team.id
- }, admin)
+ response = post(reverse('api:role_teams_list', kwargs={'pk': credential.use_role.id}), {'id': team.id}, admin)
assert response.status_code == 400
@pytest.mark.django_db
def test_sa_grant_private_credential_to_team_through_team_roles(post, credential, admin, team):
# not even a system admin can grant a private cred to a team though
- response = post(reverse('api:role_teams_list', kwargs={'pk': team.id}), {
- 'id': credential.use_role.id
- }, admin)
+ response = post(reverse('api:role_teams_list', kwargs={'pk': team.id}), {'id': credential.use_role.id}, admin)
assert response.status_code == 400
@@ -375,11 +319,7 @@ def test_create_org_credential_as_not_admin(post, organization, org_member, cred
'name': 'Some name',
'organization': organization.id,
}
- response = post(
- reverse('api:credential_list'),
- params,
- org_member
- )
+ response = post(reverse('api:credential_list'), params, org_member)
assert response.status_code == 403
@@ -403,16 +343,9 @@ def test_credential_detail(post, get, organization, org_admin, credentialtype_ss
'name': 'Some name',
'organization': organization.id,
}
- response = post(
- reverse('api:credential_list'),
- params,
- org_admin
- )
+ response = post(reverse('api:credential_list'), params, org_admin)
assert response.status_code == 201
- response = get(
- reverse('api:credential_detail', kwargs={'pk': response.data['id']}),
- org_admin
- )
+ response = get(reverse('api:credential_detail', kwargs={'pk': response.data['id']}), org_admin)
assert response.status_code == 200
summary_fields = response.data['summary_fields']
assert 'organization' in summary_fields
@@ -428,38 +361,22 @@ def test_list_created_org_credentials(post, get, organization, org_admin, org_me
'name': 'Some name',
'organization': organization.id,
}
- response = post(
- reverse('api:credential_list'),
- params,
- org_admin
- )
+ response = post(reverse('api:credential_list'), params, org_admin)
assert response.status_code == 201
- response = get(
- reverse('api:credential_list'),
- org_admin
- )
+ response = get(reverse('api:credential_list'), org_admin)
assert response.status_code == 200
assert response.data['count'] == 1
- response = get(
- reverse('api:credential_list'),
- org_member
- )
+ response = get(reverse('api:credential_list'), org_member)
assert response.status_code == 200
assert response.data['count'] == 0
- response = get(
- reverse('api:organization_credential_list', kwargs={'pk': organization.pk}),
- org_admin
- )
+ response = get(reverse('api:organization_credential_list', kwargs={'pk': organization.pk}), org_admin)
assert response.status_code == 200
assert response.data['count'] == 1
- response = get(
- reverse('api:organization_credential_list', kwargs={'pk': organization.pk}),
- org_member
- )
+ response = get(reverse('api:organization_credential_list', kwargs={'pk': organization.pk}), org_member)
assert response.status_code == 200
assert response.data['count'] == 0
@@ -468,22 +385,9 @@ def test_list_created_org_credentials(post, get, organization, org_admin, org_me
@pytest.mark.django_db
def test_list_cannot_order_by_encrypted_field(post, get, organization, org_admin, credentialtype_ssh, order_by):
for i, password in enumerate(('abc', 'def', 'xyz')):
- response = post(
- reverse('api:credential_list'),
- {
- 'organization': organization.id,
- 'name': 'C%d' % i,
- 'password': password
- },
- org_admin
- )
+ response = post(reverse('api:credential_list'), {'organization': organization.id, 'name': 'C%d' % i, 'password': password}, org_admin)
- response = get(
- reverse('api:credential_list'),
- org_admin,
- QUERY_STRING='order_by=%s' % order_by,
- status=400
- )
+ response = get(reverse('api:credential_list'), org_admin, QUERY_STRING='order_by=%s' % order_by, status=400)
assert response.status_code == 400
@@ -493,9 +397,7 @@ def test_inputs_cannot_contain_extra_fields(get, post, organization, admin, cred
'name': 'Best credential ever',
'organization': organization.id,
'credential_type': credentialtype_ssh.pk,
- 'inputs': {
- 'invalid_field': 'foo'
- },
+ 'inputs': {'invalid_field': 'foo'},
}
response = post(reverse('api:credential_list'), params, admin)
assert response.status_code == 400
@@ -511,10 +413,7 @@ def test_falsey_field_data(get, post, organization, admin, field_value):
'name': 'Best credential ever',
'credential_type': net.pk,
'organization': organization.id,
- 'inputs': {
- 'username': 'joe-user',
- 'authorize': field_value
- }
+ 'inputs': {'username': 'joe-user', 'authorize': field_value},
}
response = post(reverse('api:credential_list'), params, admin)
assert response.status_code == 201
@@ -525,18 +424,16 @@ def test_falsey_field_data(get, post, organization, admin, field_value):
@pytest.mark.django_db
-@pytest.mark.parametrize('kind, extraneous', [
- ['net', 'authorize_password'],
-])
+@pytest.mark.parametrize(
+ 'kind, extraneous',
+ [
+ ['net', 'authorize_password'],
+ ],
+)
def test_field_dependencies(get, post, organization, admin, kind, extraneous):
_type = CredentialType.defaults[kind]()
_type.save()
- params = {
- 'name': 'Best credential ever',
- 'credential_type': _type.pk,
- 'organization': organization.id,
- 'inputs': {extraneous: 'not needed'}
- }
+ params = {'name': 'Best credential ever', 'credential_type': _type.pk, 'organization': organization.id, 'inputs': {extraneous: 'not needed'}}
response = post(reverse('api:credential_list'), params, admin)
assert response.status_code == 400
assert re.search('cannot be set unless .+ is set.', smart_str(response.content))
@@ -557,7 +454,7 @@ def test_scm_create_ok(post, organization, admin):
'password': 'some_password',
'ssh_key_data': EXAMPLE_ENCRYPTED_PRIVATE_KEY,
'ssh_key_unlock': 'some_key_unlock',
- }
+ },
}
scm = CredentialType.defaults['scm']()
scm.save()
@@ -580,7 +477,7 @@ def test_ssh_create_ok(post, organization, admin):
'name': 'Best credential ever',
'inputs': {
'password': 'secret',
- }
+ },
}
ssh = CredentialType.defaults['ssh']()
ssh.save()
@@ -604,7 +501,7 @@ def test_vault_create_ok(post, organization, admin):
'name': 'Best credential ever',
'inputs': {
'vault_password': 'some_password',
- }
+ },
}
vault = CredentialType.defaults['vault']()
vault.save()
@@ -622,14 +519,7 @@ def test_vault_password_required(post, organization, admin):
vault = CredentialType.defaults['vault']()
vault.save()
response = post(
- reverse('api:credential_list'),
- {
- 'credential_type': vault.pk,
- 'organization': organization.id,
- 'name': 'Best credential ever',
- 'inputs': {}
- },
- admin
+ reverse('api:credential_list'), {'credential_type': vault.pk, 'organization': organization.id, 'name': 'Best credential ever', 'inputs': {}}, admin
)
assert response.status_code == 201
assert Credential.objects.count() == 1
@@ -657,7 +547,7 @@ def test_net_create_ok(post, organization, admin):
'ssh_key_unlock': 'some_key_unlock',
'authorize': True,
'authorize_password': 'some_authorize_password',
- }
+ },
}
net = CredentialType.defaults['net']()
net.save()
@@ -687,7 +577,7 @@ def test_gce_create_ok(post, organization, admin):
'username': 'some_username',
'project': 'some_project',
'ssh_key_data': EXAMPLE_PRIVATE_KEY,
- }
+ },
}
gce = CredentialType.defaults['gce']()
gce.save()
@@ -716,8 +606,8 @@ def test_azure_rm_create_ok(post, organization, admin):
'password': 'some_password',
'client': 'some_client',
'secret': 'some_secret',
- 'tenant': 'some_tenant'
- }
+ 'tenant': 'some_tenant',
+ },
}
azure_rm = CredentialType.defaults['azure_rm']()
azure_rm.save()
@@ -747,7 +637,7 @@ def test_satellite6_create_ok(post, organization, admin):
'host': 'some_host',
'username': 'some_username',
'password': 'some_password',
- }
+ },
}
sat6 = CredentialType.defaults['satellite6']()
sat6.save()
@@ -770,11 +660,7 @@ def test_aws_create_ok(post, organization, admin):
params = {
'credential_type': 1,
'name': 'Best credential ever',
- 'inputs': {
- 'username': 'some_username',
- 'password': 'some_password',
- 'security_token': 'abc123'
- }
+ 'inputs': {'username': 'some_username', 'password': 'some_password', 'security_token': 'abc123'},
}
aws = CredentialType.defaults['aws']()
aws.save()
@@ -791,11 +677,7 @@ def test_aws_create_ok(post, organization, admin):
@pytest.mark.django_db
def test_aws_create_fail_required_fields(post, organization, admin):
- params = {
- 'credential_type': 1,
- 'name': 'Best credential ever',
- 'inputs': {}
- }
+ params = {'credential_type': 1, 'name': 'Best credential ever', 'inputs': {}}
aws = CredentialType.defaults['aws']()
aws.save()
params['organization'] = organization.id
@@ -816,15 +698,7 @@ def test_aws_create_fail_required_fields(post, organization, admin):
#
@pytest.mark.django_db
def test_vmware_create_ok(post, organization, admin):
- params = {
- 'credential_type': 1,
- 'name': 'Best credential ever',
- 'inputs': {
- 'host': 'some_host',
- 'username': 'some_username',
- 'password': 'some_password'
- }
- }
+ params = {'credential_type': 1, 'name': 'Best credential ever', 'inputs': {'host': 'some_host', 'username': 'some_username', 'password': 'some_password'}}
vmware = CredentialType.defaults['vmware']()
vmware.save()
params['organization'] = organization.id
@@ -840,11 +714,7 @@ def test_vmware_create_ok(post, organization, admin):
@pytest.mark.django_db
def test_vmware_create_fail_required_fields(post, organization, admin):
- params = {
- 'credential_type': 1,
- 'name': 'Best credential ever',
- 'inputs': {}
- }
+ params = {'credential_type': 1, 'name': 'Best credential ever', 'inputs': {}}
vmware = CredentialType.defaults['vmware']()
vmware.save()
params['organization'] = organization.id
@@ -872,7 +742,7 @@ def test_openstack_create_ok(post, organization, admin):
'password': 'some_password',
'project': 'some_project',
'host': 'some_host',
- }
+ },
}
openstack = CredentialType.defaults['openstack']()
openstack.save()
@@ -884,11 +754,14 @@ def test_openstack_create_ok(post, organization, admin):
@pytest.mark.django_db
-@pytest.mark.parametrize('verify_ssl, expected', [
- [None, True],
- [True, True],
- [False, False],
-])
+@pytest.mark.parametrize(
+ 'verify_ssl, expected',
+ [
+ [None, True],
+ [True, True],
+ [False, False],
+ ],
+)
def test_openstack_verify_ssl(get, post, organization, admin, verify_ssl, expected):
openstack = CredentialType.defaults['openstack']()
openstack.save()
@@ -900,12 +773,7 @@ def test_openstack_verify_ssl(get, post, organization, admin, verify_ssl, expect
}
if verify_ssl is not None:
inputs['verify_ssl'] = verify_ssl
- params = {
- 'credential_type': openstack.id,
- 'inputs': inputs,
- 'name': 'Best credential ever',
- 'organization': organization.id
- }
+ params = {'credential_type': openstack.id, 'inputs': inputs, 'name': 'Best credential ever', 'organization': organization.id}
response = post(reverse('api:credential_list'), params, admin)
assert response.status_code == 201
@@ -943,25 +811,15 @@ def test_field_removal(put, organization, admin, credentialtype_ssh):
'inputs': {
'username': 'joe',
'password': '',
- }
+ },
}
cred = Credential(
- credential_type=credentialtype_ssh,
- name='Best credential ever',
- organization=organization,
- inputs={
- 'username': u'jim',
- 'password': u'secret'
- }
+ credential_type=credentialtype_ssh, name='Best credential ever', organization=organization, inputs={'username': u'jim', 'password': u'secret'}
)
cred.save()
params['organization'] = organization.id
- response = put(
- reverse('api:credential_detail', kwargs={'pk': cred.pk}),
- params,
- admin
- )
+ response = put(reverse('api:credential_detail', kwargs={'pk': cred.pk}), params, admin)
assert response.status_code == 200
cred = Credential.objects.all()[:1].get()
@@ -970,25 +828,21 @@ def test_field_removal(put, organization, admin, credentialtype_ssh):
@pytest.mark.django_db
-@pytest.mark.parametrize('relation, related_obj', [
- ['ad_hoc_commands', AdHocCommand()],
- ['insights_inventories', Inventory()],
- ['unifiedjobs', Job()],
- ['unifiedjobtemplates', JobTemplate()],
- ['unifiedjobtemplates', InventorySource(source='ec2')],
- ['projects', Project()],
- ['workflowjobnodes', WorkflowJobNode()],
-])
-def test_credential_type_mutability(patch, organization, admin, credentialtype_ssh,
- credentialtype_aws, relation, related_obj):
+@pytest.mark.parametrize(
+ 'relation, related_obj',
+ [
+ ['ad_hoc_commands', AdHocCommand()],
+ ['insights_inventories', Inventory()],
+ ['unifiedjobs', Job()],
+ ['unifiedjobtemplates', JobTemplate()],
+ ['unifiedjobtemplates', InventorySource(source='ec2')],
+ ['projects', Project()],
+ ['workflowjobnodes', WorkflowJobNode()],
+ ],
+)
+def test_credential_type_mutability(patch, organization, admin, credentialtype_ssh, credentialtype_aws, relation, related_obj):
cred = Credential(
- credential_type=credentialtype_ssh,
- name='Best credential ever',
- organization=organization,
- inputs={
- 'username': u'jim',
- 'password': u'pass'
- }
+ credential_type=credentialtype_ssh, name='Best credential ever', organization=organization, inputs={'username': u'jim', 'password': u'pass'}
)
cred.save()
@@ -998,27 +852,16 @@ def test_credential_type_mutability(patch, organization, admin, credentialtype_s
def _change_credential_type():
return patch(
reverse('api:credential_detail', kwargs={'pk': cred.pk}),
- {
- 'credential_type': credentialtype_aws.pk,
- 'inputs': {
- 'username': u'jim',
- 'password': u'pass'
- }
- },
- admin
+ {'credential_type': credentialtype_aws.pk, 'inputs': {'username': u'jim', 'password': u'pass'}},
+ admin,
)
response = _change_credential_type()
assert response.status_code == 400
- expected = ['You cannot change the credential type of the credential, '
- 'as it may break the functionality of the resources using it.']
+ expected = ['You cannot change the credential type of the credential, ' 'as it may break the functionality of the resources using it.']
assert response.data['credential_type'] == expected
- response = patch(
- reverse('api:credential_detail', kwargs={'pk': cred.pk}),
- {'name': 'Worst credential ever'},
- admin
- )
+ response = patch(reverse('api:credential_detail', kwargs={'pk': cred.pk}), {'name': 'Worst credential ever'}, admin)
assert response.status_code == 200
assert Credential.objects.get(pk=cred.pk).name == 'Worst credential ever'
@@ -1028,15 +871,14 @@ def test_credential_type_mutability(patch, organization, admin, credentialtype_s
@pytest.mark.django_db
-def test_vault_credential_type_mutability(patch, organization, admin, credentialtype_ssh,
- credentialtype_vault):
+def test_vault_credential_type_mutability(patch, organization, admin, credentialtype_ssh, credentialtype_vault):
cred = Credential(
credential_type=credentialtype_vault,
name='Best credential ever',
organization=organization,
inputs={
'vault_password': u'some-vault',
- }
+ },
)
cred.save()
@@ -1047,27 +889,16 @@ def test_vault_credential_type_mutability(patch, organization, admin, credential
def _change_credential_type():
return patch(
reverse('api:credential_detail', kwargs={'pk': cred.pk}),
- {
- 'credential_type': credentialtype_ssh.pk,
- 'inputs': {
- 'username': u'jim',
- 'password': u'pass'
- }
- },
- admin
+ {'credential_type': credentialtype_ssh.pk, 'inputs': {'username': u'jim', 'password': u'pass'}},
+ admin,
)
response = _change_credential_type()
assert response.status_code == 400
- expected = ['You cannot change the credential type of the credential, '
- 'as it may break the functionality of the resources using it.']
+ expected = ['You cannot change the credential type of the credential, ' 'as it may break the functionality of the resources using it.']
assert response.data['credential_type'] == expected
- response = patch(
- reverse('api:credential_detail', kwargs={'pk': cred.pk}),
- {'name': 'Worst credential ever'},
- admin
- )
+ response = patch(reverse('api:credential_detail', kwargs={'pk': cred.pk}), {'name': 'Worst credential ever'}, admin)
assert response.status_code == 200
assert Credential.objects.get(pk=cred.pk).name == 'Worst credential ever'
@@ -1077,16 +908,9 @@ def test_vault_credential_type_mutability(patch, organization, admin, credential
@pytest.mark.django_db
-def test_cloud_credential_type_mutability(patch, organization, admin, credentialtype_ssh,
- credentialtype_aws):
+def test_cloud_credential_type_mutability(patch, organization, admin, credentialtype_ssh, credentialtype_aws):
cred = Credential(
- credential_type=credentialtype_aws,
- name='Best credential ever',
- organization=organization,
- inputs={
- 'username': u'jim',
- 'password': u'pass'
- }
+ credential_type=credentialtype_aws, name='Best credential ever', organization=organization, inputs={'username': u'jim', 'password': u'pass'}
)
cred.save()
@@ -1097,27 +921,16 @@ def test_cloud_credential_type_mutability(patch, organization, admin, credential
def _change_credential_type():
return patch(
reverse('api:credential_detail', kwargs={'pk': cred.pk}),
- {
- 'credential_type': credentialtype_ssh.pk,
- 'inputs': {
- 'username': u'jim',
- 'password': u'pass'
- }
- },
- admin
+ {'credential_type': credentialtype_ssh.pk, 'inputs': {'username': u'jim', 'password': u'pass'}},
+ admin,
)
response = _change_credential_type()
assert response.status_code == 400
- expected = ['You cannot change the credential type of the credential, '
- 'as it may break the functionality of the resources using it.']
+ expected = ['You cannot change the credential type of the credential, ' 'as it may break the functionality of the resources using it.']
assert response.data['credential_type'] == expected
- response = patch(
- reverse('api:credential_detail', kwargs={'pk': cred.pk}),
- {'name': 'Worst credential ever'},
- admin
- )
+ response = patch(reverse('api:credential_detail', kwargs={'pk': cred.pk}), {'name': 'Worst credential ever'}, admin)
assert response.status_code == 200
assert Credential.objects.get(pk=cred.pk).name == 'Worst credential ever'
@@ -1150,26 +963,18 @@ def test_ssh_unlock_needed(put, organization, admin, credentialtype_ssh):
'inputs': {
'username': 'joe',
'ssh_key_data': '$encrypted$',
- }
+ },
}
cred = Credential(
credential_type=credentialtype_ssh,
name='Best credential ever',
organization=organization,
- inputs={
- 'username': u'joe',
- 'ssh_key_data': EXAMPLE_ENCRYPTED_PRIVATE_KEY,
- 'ssh_key_unlock': 'unlock'
- }
+ inputs={'username': u'joe', 'ssh_key_data': EXAMPLE_ENCRYPTED_PRIVATE_KEY, 'ssh_key_unlock': 'unlock'},
)
cred.save()
params['organization'] = organization.id
- response = put(
- reverse('api:credential_detail', kwargs={'pk': cred.pk}),
- params,
- admin
- )
+ response = put(reverse('api:credential_detail', kwargs={'pk': cred.pk}), params, admin)
assert response.status_code == 400
assert response.data['inputs']['ssh_key_unlock'] == ['must be set when SSH key is encrypted.']
@@ -1183,7 +988,7 @@ def test_ssh_unlock_not_needed(put, organization, admin, credentialtype_ssh):
'username': 'joe',
'ssh_key_data': '$encrypted$',
'ssh_key_unlock': 'superfluous-key-unlock',
- }
+ },
}
cred = Credential(
credential_type=credentialtype_ssh,
@@ -1192,16 +997,12 @@ def test_ssh_unlock_not_needed(put, organization, admin, credentialtype_ssh):
inputs={
'username': u'joe',
'ssh_key_data': EXAMPLE_PRIVATE_KEY,
- }
+ },
)
cred.save()
params['organization'] = organization.id
- response = put(
- reverse('api:credential_detail', kwargs={'pk': cred.pk}),
- params,
- admin
- )
+ response = put(reverse('api:credential_detail', kwargs={'pk': cred.pk}), params, admin)
assert response.status_code == 400
assert response.data['inputs']['ssh_key_unlock'] == ['should not be set when SSH key is not encrypted.']
@@ -1215,26 +1016,18 @@ def test_ssh_unlock_with_prior_value(put, organization, admin, credentialtype_ss
'username': 'joe',
'ssh_key_data': '$encrypted$',
'ssh_key_unlock': 'new-unlock',
- }
+ },
}
cred = Credential(
credential_type=credentialtype_ssh,
name='Best credential ever',
organization=organization,
- inputs={
- 'username': u'joe',
- 'ssh_key_data': EXAMPLE_ENCRYPTED_PRIVATE_KEY,
- 'ssh_key_unlock': 'old-unlock'
- }
+ inputs={'username': u'joe', 'ssh_key_data': EXAMPLE_ENCRYPTED_PRIVATE_KEY, 'ssh_key_unlock': 'old-unlock'},
)
cred.save()
params['organization'] = organization.id
- response = put(
- reverse('api:credential_detail', kwargs={'pk': cred.pk}),
- params,
- admin
- )
+ response = put(reverse('api:credential_detail', kwargs={'pk': cred.pk}), params, admin)
assert response.status_code == 200
cred = Credential.objects.all()[:1].get()
@@ -1250,7 +1043,7 @@ def test_ssh_bad_key_unlock_not_checked(put, organization, admin, credentialtype
'username': 'oscar',
'ssh_key_data': 'invalid-key',
'ssh_key_unlock': 'unchecked-unlock',
- }
+ },
}
cred = Credential(
credential_type=credentialtype_ssh,
@@ -1260,16 +1053,12 @@ def test_ssh_bad_key_unlock_not_checked(put, organization, admin, credentialtype
'username': u'oscar',
'ssh_key_data': 'invalid-key',
'ssh_key_unlock': 'unchecked-unlock',
- }
+ },
)
cred.save()
params['organization'] = organization.id
- response = put(
- reverse('api:credential_detail', kwargs={'pk': cred.pk}),
- params,
- admin
- )
+ response = put(reverse('api:credential_detail', kwargs={'pk': cred.pk}), params, admin)
assert response.status_code == 400
assert response.data['inputs']['ssh_key_data'] == ['Invalid certificate or key: invalid-key...']
assert 'ssh_key_unlock' not in response.data['inputs']
@@ -1279,6 +1068,7 @@ def test_ssh_bad_key_unlock_not_checked(put, organization, admin, credentialtype
# test secret encryption/decryption
#
+
@pytest.mark.django_db
def test_secret_encryption_on_create(get, post, organization, admin, credentialtype_ssh):
params = {
@@ -1316,17 +1106,13 @@ def test_secret_encryption_on_update(get, post, patch, organization, admin, cred
'credential_type': 1,
'inputs': {
'username': 'joe',
- }
+ },
},
- admin
+ admin,
)
assert response.status_code == 201
- response = patch(
- reverse('api:credential_detail', kwargs={'pk': 1}),
- params,
- admin
- )
+ response = patch(reverse('api:credential_detail', kwargs={'pk': 1}), params, admin)
assert response.status_code == 200
response = get(reverse('api:credential_list'), admin)
@@ -1350,22 +1136,12 @@ def test_secret_encryption_previous_value(patch, organization, admin, credential
}
}
cred = Credential(
- credential_type=credentialtype_ssh,
- name='Best credential ever',
- organization=organization,
- inputs={
- 'username': u'jim',
- 'password': u'secret'
- }
+ credential_type=credentialtype_ssh, name='Best credential ever', organization=organization, inputs={'username': u'jim', 'password': u'secret'}
)
cred.save()
assert decrypt_field(cred, 'password') == 'secret'
- response = patch(
- reverse('api:credential_detail', kwargs={'pk': cred.pk}),
- params,
- admin
- )
+ response = patch(reverse('api:credential_detail', kwargs={'pk': cred.pk}), params, admin)
assert response.status_code == 200
cred = Credential.objects.all()[:1].get()
@@ -1377,26 +1153,10 @@ def test_secret_encryption_previous_value(patch, organization, admin, credential
@pytest.mark.django_db
def test_custom_credential_type_create(get, post, organization, admin):
credential_type = CredentialType(
- kind='cloud',
- name='MyCloud',
- inputs = {
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'string',
- 'secret': True
- }]
- }
+ kind='cloud', name='MyCloud', inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string', 'secret': True}]}
)
credential_type.save()
- params = {
- 'name': 'Best credential ever',
- 'organization': organization.pk,
- 'credential_type': credential_type.pk,
- 'inputs': {
- 'api_token': 'secret'
- }
- }
+ params = {'name': 'Best credential ever', 'organization': organization.pk, 'credential_type': credential_type.pk, 'inputs': {'api_token': 'secret'}}
response = post(reverse('api:credential_list'), params, admin)
assert response.status_code == 201
@@ -1424,35 +1184,24 @@ def test_create_credential_missing_user_team_org_xfail(post, admin, credentialty
assert response.status_code == 400
-@pytest.mark.parametrize('url, status, msg', [
- ('foo.com', 400, 'Invalid URL: Missing url scheme (http, https, etc.)'),
- ('https://[dead:beef', 400, 'Invalid IPv6 URL'),
- ('http:domain:8080', 400, 'Invalid URL: http:domain:8080'),
- ('http:/domain:8080', 400, 'Invalid URL: http:/domain:8080'),
- ('http://foo.com', 201, None)
-])
+@pytest.mark.parametrize(
+ 'url, status, msg',
+ [
+ ('foo.com', 400, 'Invalid URL: Missing url scheme (http, https, etc.)'),
+ ('https://[dead:beef', 400, 'Invalid IPv6 URL'),
+ ('http:domain:8080', 400, 'Invalid URL: http:domain:8080'),
+ ('http:/domain:8080', 400, 'Invalid URL: http:/domain:8080'),
+ ('http://foo.com', 201, None),
+ ],
+)
@pytest.mark.django_db
def test_create_credential_with_invalid_url_xfail(post, organization, admin, url, status, msg):
credential_type = CredentialType(
- kind='test',
- name='MyTestCredentialType',
- inputs = {
- 'fields': [{
- 'id': 'server_url',
- 'label': 'Server Url',
- 'type': 'string',
- 'format': 'url'
- }]
- }
+ kind='test', name='MyTestCredentialType', inputs={'fields': [{'id': 'server_url', 'label': 'Server Url', 'type': 'string', 'format': 'url'}]}
)
credential_type.save()
- params = {
- 'name': 'Second Best Credential Ever',
- 'organization': organization.pk,
- 'credential_type': credential_type.pk,
- 'inputs': {'server_url': url}
- }
+ params = {'name': 'Second Best Credential Ever', 'organization': organization.pk, 'credential_type': credential_type.pk, 'inputs': {'server_url': url}}
endpoint = reverse('api:credential_list')
response = post(endpoint, params, admin)
assert response.status_code == status
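Another pattern that dominates test_credential.py above is black's "magic trailing comma": a collection that fits on one line and has no trailing comma is collapsed, while a trailing comma forces one element per line, which is why the @pytest.mark.parametrize tables were exploded even when they would have fit. A runnable sketch, assuming black 20.8b0 or newer:

    import black

    mode = black.FileMode()
    collapsed = black.format_str("xs = [1, 2, 3]\n", mode=mode)
    exploded = black.format_str("xs = [1, 2, 3,]\n", mode=mode)
    # no trailing comma and it fits: black keeps the list on one line
    assert collapsed == "xs = [1, 2, 3]\n"
    # a trailing comma tells black to keep one element per line
    assert exploded == "xs = [\n    1,\n    2,\n    3,\n]\n"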
diff --git a/awx/main/tests/functional/api/test_credential_input_sources.py b/awx/main/tests/functional/api/test_credential_input_sources.py
index 194244c4cb..d13710e0ab 100644
--- a/awx/main/tests/functional/api/test_credential_input_sources.py
+++ b/awx/main/tests/functional/api/test_credential_input_sources.py
@@ -13,7 +13,7 @@ def test_associate_credential_input_source(get, post, delete, admin, vault_crede
'target_credential': vault_credential.pk,
'source_credential': external_credential.pk,
'input_field_name': 'vault_password',
- 'metadata': {'key': 'some_example_key'}
+ 'metadata': {'key': 'some_example_key'},
}
response = post(list_url, params, admin)
assert response.status_code == 201
@@ -29,13 +29,7 @@ def test_associate_credential_input_source(get, post, delete, admin, vault_crede
assert input_source.metadata == {'key': 'some_example_key'}
# detach
- response = delete(
- reverse(
- 'api:credential_input_source_detail',
- kwargs={'pk': detail.data['id']}
- ),
- admin
- )
+ response = delete(reverse('api:credential_input_source_detail', kwargs={'pk': detail.data['id']}), admin)
assert response.status_code == 204
response = get(list_url, admin)
@@ -45,12 +39,15 @@ def test_associate_credential_input_source(get, post, delete, admin, vault_crede
@pytest.mark.django_db
-@pytest.mark.parametrize('metadata', [
- {}, # key is required
- {'key': None}, # must be a string
- {'key': 123}, # must be a string
- {'extraneous': 'foo'}, # invalid parameter
-])
+@pytest.mark.parametrize(
+ 'metadata',
+ [
+ {},  # key is required
+ {'key': None},  # must be a string
+ {'key': 123},  # must be a string
+ {'extraneous': 'foo'},  # invalid parameter
+ ],
+)
def test_associate_credential_input_source_with_invalid_metadata(get, post, admin, vault_credential, external_credential, metadata):
list_url = reverse('api:credential_input_source_list')
@@ -73,9 +70,16 @@ def test_create_from_list(get, post, admin, vault_credential, external_credentia
'input_field_name': 'vault_password',
'metadata': {'key': 'some_example_key'},
}
- assert post(reverse(
- 'api:credential_input_source_list',
- ), params, admin).status_code == 201
+ assert (
+ post(
+ reverse(
+ 'api:credential_input_source_list',
+ ),
+ params,
+ admin,
+ ).status_code
+ == 201
+ )
assert CredentialInputSource.objects.count() == 1
@@ -131,10 +135,7 @@ def test_input_source_rbac_associate(get, post, delete, alice, vault_credential,
assert get(response.data['url'], alice).status_code == 403
# alice can't admin the target (so she can't remove the input source)
- delete_url = reverse(
- 'api:credential_input_source_detail',
- kwargs={'pk': detail.data['id']}
- )
+ delete_url = reverse('api:credential_input_source_detail', kwargs={'pk': detail.data['id']})
response = delete(delete_url, alice)
assert response.status_code == 403
@@ -145,13 +146,8 @@ def test_input_source_rbac_associate(get, post, delete, alice, vault_credential,
@pytest.mark.django_db
-def test_input_source_detail_rbac(get, post, patch, delete, admin, alice,
- vault_credential, external_credential,
- other_external_credential):
- sublist_url = reverse(
- 'api:credential_input_source_sublist',
- kwargs={'pk': vault_credential.pk}
- )
+def test_input_source_detail_rbac(get, post, patch, delete, admin, alice, vault_credential, external_credential, other_external_credential):
+ sublist_url = reverse('api:credential_input_source_sublist', kwargs={'pk': vault_credential.pk})
params = {
'source_credential': external_credential.pk,
'input_field_name': 'vault_password',
@@ -201,9 +197,7 @@ def test_input_source_detail_rbac(get, post, patch, delete, admin, alice,
@pytest.mark.django_db
-def test_input_source_create_rbac(get, post, patch, delete, alice,
- vault_credential, external_credential,
- other_external_credential):
+def test_input_source_create_rbac(get, post, patch, delete, alice, vault_credential, external_credential, other_external_credential):
list_url = reverse('api:credential_input_source_list')
params = {
'target_credential': vault_credential.pk,
@@ -229,9 +223,7 @@ def test_input_source_create_rbac(get, post, patch, delete, alice,
@pytest.mark.django_db
-def test_input_source_rbac_swap_target_credential(get, post, put, patch, admin, alice,
- machine_credential, vault_credential,
- external_credential):
+def test_input_source_rbac_swap_target_credential(get, post, put, patch, admin, alice, machine_credential, vault_credential, external_credential):
# If you change the target credential for an input source,
# you have to have admin role on the *original* credential (so you can
# remove the relationship) *and* on the *new* credential (so you can apply the
@@ -253,28 +245,18 @@ def test_input_source_rbac_swap_target_credential(get, post, put, patch, admin,
external_credential.admin_role.members.add(alice)
# alice can't change target cred because she can't admin either one
- assert patch(url, {
- 'target_credential': machine_credential.pk,
- 'input_field_name': 'password'
- }, alice).status_code == 403
+ assert patch(url, {'target_credential': machine_credential.pk, 'input_field_name': 'password'}, alice).status_code == 403
# alice still can't change target cred because she can't admin *the new one*
vault_credential.admin_role.members.add(alice)
- assert patch(url, {
- 'target_credential': machine_credential.pk,
- 'input_field_name': 'password'
- }, alice).status_code == 403
+ assert patch(url, {'target_credential': machine_credential.pk, 'input_field_name': 'password'}, alice).status_code == 403
machine_credential.admin_role.members.add(alice)
- assert patch(url, {
- 'target_credential': machine_credential.pk,
- 'input_field_name': 'password'
- }, alice).status_code == 200
+ assert patch(url, {'target_credential': machine_credential.pk, 'input_field_name': 'password'}, alice).status_code == 200
@pytest.mark.django_db
-def test_input_source_rbac_change_metadata(get, post, put, patch, admin, alice,
- machine_credential, external_credential):
+def test_input_source_rbac_change_metadata(get, post, put, patch, admin, alice, machine_credential, external_credential):
# To change an input source, a user must have admin permissions on the
# target credential and use permissions on the source credential.
list_url = reverse('api:credential_input_source_list')
@@ -291,31 +273,21 @@ def test_input_source_rbac_change_metadata(get, post, put, patch, admin, alice,
# alice can't change input source metadata because she isn't an admin of the
# target credential and doesn't have use permission on the source credential
- assert patch(url, {
- 'metadata': {'key': 'some_other_key'}
- }, alice).status_code == 403
+ assert patch(url, {'metadata': {'key': 'some_other_key'}}, alice).status_code == 403
# alice still can't change input source metadata because she doesn't have
# use permission on the source credential.
machine_credential.admin_role.members.add(alice)
- assert patch(url, {
- 'metadata': {'key': 'some_other_key'}
- }, alice).status_code == 403
+ assert patch(url, {'metadata': {'key': 'some_other_key'}}, alice).status_code == 403
external_credential.use_role.members.add(alice)
- assert patch(url, {
- 'metadata': {'key': 'some_other_key'}
- }, alice).status_code == 200
+ assert patch(url, {'metadata': {'key': 'some_other_key'}}, alice).status_code == 200
@pytest.mark.django_db
def test_create_credential_input_source_with_non_external_source_returns_400(post, admin, credential, vault_credential):
list_url = reverse('api:credential_input_source_list')
- params = {
- 'target_credential': vault_credential.pk,
- 'source_credential': credential.pk,
- 'input_field_name': 'vault_password'
- }
+ params = {'target_credential': vault_credential.pk, 'source_credential': credential.pk, 'input_field_name': 'vault_password'}
response = post(list_url, params, admin)
assert response.status_code == 400
assert response.data['source_credential'] == ['Source must be an external credential']
@@ -328,7 +300,7 @@ def test_create_credential_input_source_with_undefined_input_returns_400(post, a
'target_credential': vault_credential.pk,
'source_credential': external_credential.pk,
'input_field_name': 'not_defined_for_credential_type',
- 'metadata': {'key': 'some_key'}
+ 'metadata': {'key': 'some_key'},
}
response = post(list_url, params, admin)
assert response.status_code == 400
@@ -338,14 +310,9 @@ def test_create_credential_input_source_with_undefined_input_returns_400(post, a
@pytest.mark.django_db
def test_create_credential_input_source_with_already_used_input_returns_400(post, admin, vault_credential, external_credential, other_external_credential):
list_url = reverse('api:credential_input_source_list')
- all_params = [{
- 'target_credential': vault_credential.pk,
- 'source_credential': external_credential.pk,
- 'input_field_name': 'vault_password'
- }, {
- 'target_credential': vault_credential.pk,
- 'source_credential': other_external_credential.pk,
- 'input_field_name': 'vault_password'
- }]
+ all_params = [
+ {'target_credential': vault_credential.pk, 'source_credential': external_credential.pk, 'input_field_name': 'vault_password'},
+ {'target_credential': vault_credential.pk, 'source_credential': other_external_credential.pk, 'input_field_name': 'vault_password'},
+ ]
all_responses = [post(list_url, params, admin) for params in all_params]
assert all_responses.pop().status_code == 400
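The joined assertion messages in these files (for example the credential-type error strings in the test_credential.py hunks above) lean on Python's compile-time concatenation of adjacent string literals; black re-joins the wrapped lines but deliberately leaves the two literals unmerged. A small sketch of the language rule itself:

    # adjacent string literals concatenate at compile time; no '+' is involved
    expected = 'Source must be an external credential'
    joined = 'Source must be an ' 'external credential'
    assert joined == expected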
diff --git a/awx/main/tests/functional/api/test_credential_type.py b/awx/main/tests/functional/api/test_credential_type.py
index bf7aa4ceff..bdc9630a97 100644
--- a/awx/main/tests/functional/api/test_credential_type.py
+++ b/awx/main/tests/functional/api/test_credential_type.py
@@ -13,10 +13,13 @@ def test_list_as_unauthorized_xfail(get):
@pytest.mark.django_db
-@pytest.mark.parametrize('method, valid', [
- ('GET', sorted(dict(CredentialType.KIND_CHOICES).keys())),
- ('POST', ['cloud', 'net']),
-])
+@pytest.mark.parametrize(
+ 'method, valid',
+ [
+ ('GET', sorted(dict(CredentialType.KIND_CHOICES).keys())),
+ ('POST', ['cloud', 'net']),
+ ],
+)
def test_options_valid_kinds(method, valid, options, admin):
response = options(reverse('api:credential_type_list'), admin)
choices = sorted(dict(response.data['actions'][method]['kind']['choices']).keys())
@@ -52,9 +55,12 @@ def test_list_as_admin(get, admin):
@pytest.mark.django_db
def test_create_as_unauthorized_xfail(get, post):
- response = post(reverse('api:credential_type_list'), {
- 'name': 'Custom Credential Type',
- })
+ response = post(
+ reverse('api:credential_type_list'),
+ {
+ 'name': 'Custom Credential Type',
+ },
+ )
assert response.status_code == 401
@@ -97,15 +103,8 @@ def test_update_credential_type_in_use_xfail(patch, delete, admin):
@pytest.mark.django_db
def test_update_credential_type_unvalidated_inputs(post, patch, admin):
- simple_inputs = {'fields': [
- {'id': 'api_token', 'label': 'fooo'}
- ]}
- response = post(
- url=reverse('api:credential_type_list'),
- data={'name': 'foo', 'kind': 'cloud', 'inputs': simple_inputs},
- user=admin,
- expect=201
- )
+ simple_inputs = {'fields': [{'id': 'api_token', 'label': 'fooo'}]}
+ response = post(url=reverse('api:credential_type_list'), data={'name': 'foo', 'kind': 'cloud', 'inputs': simple_inputs}, user=admin, expect=201)
# validation adds the type field to the input
_type = CredentialType.objects.get(pk=response.data['id'])
Credential(credential_type=_type, name='My Custom Cred').save()
@@ -141,21 +140,20 @@ def test_delete_as_unauthorized_xfail(delete):
@pytest.mark.django_db
def test_create_as_normal_user_xfail(get, post, alice):
- response = post(reverse('api:credential_type_list'), {
- 'name': 'Custom Credential Type',
- }, alice)
+ response = post(
+ reverse('api:credential_type_list'),
+ {
+ 'name': 'Custom Credential Type',
+ },
+ alice,
+ )
assert response.status_code == 403
assert get(reverse('api:credential_type_list'), alice).data['count'] == 0
@pytest.mark.django_db
def test_create_as_admin(get, post, admin):
- response = post(reverse('api:credential_type_list'), {
- 'kind': 'cloud',
- 'name': 'Custom Credential Type',
- 'inputs': {},
- 'injectors': {}
- }, admin)
+ response = post(reverse('api:credential_type_list'), {'kind': 'cloud', 'name': 'Custom Credential Type', 'inputs': {}, 'injectors': {}}, admin)
assert response.status_code == 201
response = get(reverse('api:credential_type_list'), admin)
@@ -168,13 +166,9 @@ def test_create_as_admin(get, post, admin):
@pytest.mark.django_db
def test_create_managed_by_tower_readonly(get, post, admin):
- response = post(reverse('api:credential_type_list'), {
- 'kind': 'cloud',
- 'name': 'Custom Credential Type',
- 'inputs': {},
- 'injectors': {},
- 'managed_by_tower': True
- }, admin)
+ response = post(
+ reverse('api:credential_type_list'), {'kind': 'cloud', 'name': 'Custom Credential Type', 'inputs': {}, 'injectors': {}, 'managed_by_tower': True}, admin
+ )
assert response.status_code == 201
response = get(reverse('api:credential_type_list'), admin)
@@ -184,12 +178,16 @@ def test_create_managed_by_tower_readonly(get, post, admin):
@pytest.mark.django_db
def test_create_dependencies_not_supported(get, post, admin):
- response = post(reverse('api:credential_type_list'), {
- 'kind': 'cloud',
- 'name': 'Custom Credential Type',
- 'inputs': {'dependencies': {'foo': ['bar']}},
- 'injectors': {},
- }, admin)
+ response = post(
+ reverse('api:credential_type_list'),
+ {
+ 'kind': 'cloud',
+ 'name': 'Custom Credential Type',
+ 'inputs': {'dependencies': {'foo': ['bar']}},
+ 'injectors': {},
+ },
+ admin,
+ )
assert response.status_code == 400
assert response.data['inputs'] == ["'dependencies' is not supported for custom credentials."]
@@ -200,19 +198,16 @@ def test_create_dependencies_not_supported(get, post, admin):
@pytest.mark.django_db
@pytest.mark.parametrize('kind', ['cloud', 'net'])
def test_create_valid_kind(kind, get, post, admin):
- response = post(reverse('api:credential_type_list'), {
- 'kind': kind,
- 'name': 'My Custom Type',
- 'inputs': {
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'string',
- 'secret': True
- }]
+ response = post(
+ reverse('api:credential_type_list'),
+ {
+ 'kind': kind,
+ 'name': 'My Custom Type',
+ 'inputs': {'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string', 'secret': True}]},
+ 'injectors': {},
},
- 'injectors': {}
- }, admin)
+ admin,
+ )
assert response.status_code == 201
response = get(reverse('api:credential_type_list'), admin)
@@ -222,19 +217,16 @@ def test_create_valid_kind(kind, get, post, admin):
@pytest.mark.django_db
@pytest.mark.parametrize('kind', ['ssh', 'vault', 'scm', 'insights', 'kubernetes', 'galaxy'])
def test_create_invalid_kind(kind, get, post, admin):
- response = post(reverse('api:credential_type_list'), {
- 'kind': kind,
- 'name': 'My Custom Type',
- 'inputs': {
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'string',
- 'secret': True
- }]
+ response = post(
+ reverse('api:credential_type_list'),
+ {
+ 'kind': kind,
+ 'name': 'My Custom Type',
+ 'inputs': {'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string', 'secret': True}]},
+ 'injectors': {},
},
- 'injectors': {}
- }, admin)
+ admin,
+ )
assert response.status_code == 400
response = get(reverse('api:credential_type_list'), admin)
@@ -243,19 +235,16 @@ def test_create_invalid_kind(kind, get, post, admin):
@pytest.mark.django_db
def test_create_with_valid_inputs(get, post, admin):
- response = post(reverse('api:credential_type_list'), {
- 'kind': 'cloud',
- 'name': 'MyCloud',
- 'inputs': {
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'string',
- 'secret': True
- }]
+ response = post(
+ reverse('api:credential_type_list'),
+ {
+ 'kind': 'cloud',
+ 'name': 'MyCloud',
+ 'inputs': {'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string', 'secret': True}]},
+ 'injectors': {},
},
- 'injectors': {}
- }, admin)
+ admin,
+ )
assert response.status_code == 201
response = get(reverse('api:credential_type_list'), admin)
@@ -270,20 +259,19 @@ def test_create_with_valid_inputs(get, post, admin):
@pytest.mark.django_db
def test_create_with_required_inputs(get, post, admin):
- response = post(reverse('api:credential_type_list'), {
- 'kind': 'cloud',
- 'name': 'MyCloud',
- 'inputs': {
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'string',
- 'secret': True
- }],
- 'required': ['api_token'],
+ response = post(
+ reverse('api:credential_type_list'),
+ {
+ 'kind': 'cloud',
+ 'name': 'MyCloud',
+ 'inputs': {
+ 'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string', 'secret': True}],
+ 'required': ['api_token'],
+ },
+ 'injectors': {},
},
- 'injectors': {}
- }, admin)
+ admin,
+ )
assert response.status_code == 201
response = get(reverse('api:credential_type_list'), admin)
@@ -293,140 +281,149 @@ def test_create_with_required_inputs(get, post, admin):
@pytest.mark.django_db
-@pytest.mark.parametrize('default, status_code', [
- ['some default string', 201],
- [None, 400],
- [True, 400],
- [False, 400],
-])
+@pytest.mark.parametrize(
+ 'default, status_code',
+ [
+ ['some default string', 201],
+ [None, 400],
+ [True, 400],
+ [False, 400],
+ ],
+)
@pytest.mark.parametrize('secret', [True, False])
def test_create_with_default_string(get, post, admin, default, status_code, secret):
- response = post(reverse('api:credential_type_list'), {
- 'kind': 'cloud',
- 'name': 'MyCloud',
- 'inputs': {
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'string',
- 'secret': secret,
- 'default': default,
- }],
- 'required': ['api_token'],
+ response = post(
+ reverse('api:credential_type_list'),
+ {
+ 'kind': 'cloud',
+ 'name': 'MyCloud',
+ 'inputs': {
+ 'fields': [
+ {
+ 'id': 'api_token',
+ 'label': 'API Token',
+ 'type': 'string',
+ 'secret': secret,
+ 'default': default,
+ }
+ ],
+ 'required': ['api_token'],
+ },
+ 'injectors': {},
},
- 'injectors': {}
- }, admin)
+ admin,
+ )
assert response.status_code == status_code
if status_code == 201:
- cred = Credential(
- credential_type=CredentialType.objects.get(pk=response.data['id']),
- name='My Custom Cred'
- )
+ cred = Credential(credential_type=CredentialType.objects.get(pk=response.data['id']), name='My Custom Cred')
assert cred.get_input('api_token') == default
elif status_code == 400:
assert "{} is not a string".format(default) in json.dumps(response.data)
@pytest.mark.django_db
-@pytest.mark.parametrize('default, status_code', [
- ['some default string', 400],
- [None, 400],
- [True, 201],
- [False, 201],
-])
+@pytest.mark.parametrize(
+ 'default, status_code',
+ [
+ ['some default string', 400],
+ [None, 400],
+ [True, 201],
+ [False, 201],
+ ],
+)
def test_create_with_default_bool(get, post, admin, default, status_code):
- response = post(reverse('api:credential_type_list'), {
- 'kind': 'cloud',
- 'name': 'MyCloud',
- 'inputs': {
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'boolean',
- 'default': default,
- }],
- 'required': ['api_token'],
+ response = post(
+ reverse('api:credential_type_list'),
+ {
+ 'kind': 'cloud',
+ 'name': 'MyCloud',
+ 'inputs': {
+ 'fields': [
+ {
+ 'id': 'api_token',
+ 'label': 'API Token',
+ 'type': 'boolean',
+ 'default': default,
+ }
+ ],
+ 'required': ['api_token'],
+ },
+ 'injectors': {},
},
- 'injectors': {}
- }, admin)
+ admin,
+ )
assert response.status_code == status_code
if status_code == 201:
- cred = Credential(
- credential_type=CredentialType.objects.get(pk=response.data['id']),
- name='My Custom Cred'
- )
+ cred = Credential(credential_type=CredentialType.objects.get(pk=response.data['id']), name='My Custom Cred')
assert cred.get_input('api_token') == default
elif status_code == 400:
assert "{} is not a boolean".format(default) in json.dumps(response.data)
@pytest.mark.django_db
-@pytest.mark.parametrize('inputs', [
- True,
- 100,
- [1, 2, 3, 4],
- 'malformed',
- {'feelds': {}},
- {'fields': [123, 234, 345]},
- {'fields': [{'id':'one', 'label':'One'}, 234]},
- {'feelds': {}, 'fields': [{'id':'one', 'label':'One'}, 234]}
-])
+@pytest.mark.parametrize(
+ 'inputs',
+ [
+ True,
+ 100,
+ [1, 2, 3, 4],
+ 'malformed',
+ {'feelds': {}},
+ {'fields': [123, 234, 345]},
+ {'fields': [{'id': 'one', 'label': 'One'}, 234]},
+ {'feelds': {}, 'fields': [{'id': 'one', 'label': 'One'}, 234]},
+ ],
+)
def test_create_with_invalid_inputs_xfail(post, admin, inputs):
- response = post(reverse('api:credential_type_list'), {
- 'kind': 'cloud',
- 'name': 'MyCloud',
- 'inputs': inputs,
- 'injectors': {}
- }, admin)
+ response = post(reverse('api:credential_type_list'), {'kind': 'cloud', 'name': 'MyCloud', 'inputs': inputs, 'injectors': {}}, admin)
assert response.status_code == 400
@pytest.mark.django_db
-@pytest.mark.parametrize('injectors', [
- True,
- 100,
- [1, 2, 3, 4],
- 'malformed',
- {'mal': 'formed'},
- {'env': {'ENV_VAR': 123}, 'mal': 'formed'},
- {'env': True},
- {'env': [1, 2, 3]},
- {'file': True},
- {'file': [1, 2, 3]},
- {'extra_vars': True},
- {'extra_vars': [1, 2, 3]},
-])
+@pytest.mark.parametrize(
+ 'injectors',
+ [
+ True,
+ 100,
+ [1, 2, 3, 4],
+ 'malformed',
+ {'mal': 'formed'},
+ {'env': {'ENV_VAR': 123}, 'mal': 'formed'},
+ {'env': True},
+ {'env': [1, 2, 3]},
+ {'file': True},
+ {'file': [1, 2, 3]},
+ {'extra_vars': True},
+ {'extra_vars': [1, 2, 3]},
+ ],
+)
def test_create_with_invalid_injectors_xfail(post, admin, injectors):
- response = post(reverse('api:credential_type_list'), {
- 'kind': 'cloud',
- 'name': 'MyCloud',
- 'inputs': {},
- 'injectors': injectors,
- }, admin)
+ response = post(
+ reverse('api:credential_type_list'),
+ {
+ 'kind': 'cloud',
+ 'name': 'MyCloud',
+ 'inputs': {},
+ 'injectors': injectors,
+ },
+ admin,
+ )
assert response.status_code == 400
@pytest.mark.django_db
def test_ask_at_runtime_xfail(get, post, admin):
# ask_at_runtime is only supported by the built-in SSH and Vault types
- response = post(reverse('api:credential_type_list'), {
- 'kind': 'cloud',
- 'name': 'MyCloud',
- 'inputs': {
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'string',
- 'secret': True,
- 'ask_at_runtime': True
- }]
+ response = post(
+ reverse('api:credential_type_list'),
+ {
+ 'kind': 'cloud',
+ 'name': 'MyCloud',
+ 'inputs': {'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string', 'secret': True, 'ask_at_runtime': True}]},
+ 'injectors': {'env': {'ANSIBLE_MY_CLOUD_TOKEN': '{{api_token}}'}},
},
- 'injectors': {
- 'env': {
- 'ANSIBLE_MY_CLOUD_TOKEN': '{{api_token}}'
- }
- }
- }, admin)
+ admin,
+ )
assert response.status_code == 400
response = get(reverse('api:credential_type_list'), admin)
@@ -435,50 +432,37 @@ def test_ask_at_runtime_xfail(get, post, admin):
@pytest.mark.django_db
def test_create_with_valid_injectors(get, post, admin):
- response = post(reverse('api:credential_type_list'), {
- 'kind': 'cloud',
- 'name': 'MyCloud',
- 'inputs': {
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'string',
- 'secret': True
- }]
+ response = post(
+ reverse('api:credential_type_list'),
+ {
+ 'kind': 'cloud',
+ 'name': 'MyCloud',
+ 'inputs': {'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string', 'secret': True}]},
+ 'injectors': {'env': {'AWX_MY_CLOUD_TOKEN': '{{api_token}}'}},
},
- 'injectors': {
- 'env': {
- 'AWX_MY_CLOUD_TOKEN': '{{api_token}}'
- }
- }
- }, admin, expect=201)
+ admin,
+ expect=201,
+ )
response = get(reverse('api:credential_type_list'), admin)
assert response.data['count'] == 1
injectors = response.data['results'][0]['injectors']
assert len(injectors) == 1
- assert injectors['env'] == {
- 'AWX_MY_CLOUD_TOKEN': '{{api_token}}'
- }
+ assert injectors['env'] == {'AWX_MY_CLOUD_TOKEN': '{{api_token}}'}
@pytest.mark.django_db
def test_create_with_undefined_template_variable_xfail(post, admin):
- response = post(reverse('api:credential_type_list'), {
- 'kind': 'cloud',
- 'name': 'MyCloud',
- 'inputs': {
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'string',
- 'secret': True
- }]
+ response = post(
+ reverse('api:credential_type_list'),
+ {
+ 'kind': 'cloud',
+ 'name': 'MyCloud',
+ 'inputs': {'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string', 'secret': True}]},
+ 'injectors': {'env': {'AWX_MY_CLOUD_TOKEN': '{{api_tolkien}}'}},
},
- 'injectors': {
- 'env': {'AWX_MY_CLOUD_TOKEN': '{{api_tolkien}}'}
- }
- }, admin)
+ admin,
+ )
assert response.status_code == 400
assert "'api_tolkien' is undefined" in json.dumps(response.data)
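The credential-type hunks above all pin the same contract: every '{{variable}}' referenced by an injector must name a field declared under 'inputs'. A minimal sketch of that validation idea with Jinja2's StrictUndefined, which produces the exact error message asserted above (illustrative, not AWX's implementation):

from jinja2 import Environment, StrictUndefined
from jinja2.exceptions import UndefinedError


def check_injector_template(template, input_ids):
    # Render the injector template with a dummy value for every declared
    # input field; StrictUndefined raises for anything left undefined.
    env = Environment(undefined=StrictUndefined)
    try:
        env.from_string(template).render({field: 'x' for field in input_ids})
    except UndefinedError as exc:
        return str(exc)  # e.g. "'api_tolkien' is undefined"
    return None


assert check_injector_template('{{api_token}}', ['api_token']) is None
assert "'api_tolkien' is undefined" in check_injector_template('{{api_tolkien}}', ['api_token'])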
diff --git a/awx/main/tests/functional/api/test_deprecated_credential_assignment.py b/awx/main/tests/functional/api/test_deprecated_credential_assignment.py
index 38fea20c4d..e8b4c2070e 100644
--- a/awx/main/tests/functional/api/test_deprecated_credential_assignment.py
+++ b/awx/main/tests/functional/api/test_deprecated_credential_assignment.py
@@ -10,8 +10,8 @@ from awx.api.versioning import reverse
def ec2_source(inventory, project):
with mock.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.update'):
return inventory.inventory_sources.create(
- name='some_source', update_on_project_update=True, source='ec2',
- source_project=project, scm_last_revision=project.scm_revision)
+ name='some_source', update_on_project_update=True, source='ec2', source_project=project, scm_last_revision=project.scm_revision
+ )
@pytest.fixture
@@ -26,10 +26,7 @@ def job_template(job_template, project, inventory):
@pytest.mark.django_db
def test_prevent_multiple_machine_creds(get, post, job_template, admin, machine_credential):
- url = reverse(
- 'api:job_template_credentials_list',
- kwargs={'pk': job_template.pk}
- )
+ url = reverse('api:job_template_credentials_list', kwargs={'pk': job_template.pk})
def _new_cred(name):
return {
@@ -38,7 +35,7 @@ def test_prevent_multiple_machine_creds(get, post, job_template, admin, machine_
'inputs': {
'username': 'bob',
'password': 'secret',
- }
+ },
}
post(url, _new_cred('First Cred'), admin, expect=201)
@@ -59,7 +56,7 @@ def test_invalid_credential_type_at_launch(get, post, job_template, admin, kind)
inputs={
'username': 'bob',
'password': 'secret',
- }
+ },
)
cred.save()
url = reverse('api:job_template_launch', kwargs={'pk': job_template.pk})
@@ -71,8 +68,7 @@ def test_invalid_credential_type_at_launch(get, post, job_template, admin, kind)
@pytest.mark.django_db
def test_prevent_multiple_machine_creds_at_launch(get, post, job_template, admin, machine_credential):
- other_cred = Credential(credential_type=machine_credential.credential_type, name="Second",
- inputs={'username': 'bob'})
+ other_cred = Credential(credential_type=machine_credential.credential_type, name="Second", inputs={'username': 'bob'})
other_cred.save()
creds = [machine_credential.pk, other_cred.pk]
url = reverse('api:job_template_launch', kwargs={'pk': job_template.pk})
@@ -94,8 +90,7 @@ def test_ssh_password_prompted_at_launch(get, post, job_template, admin, machine
def test_prompted_credential_replaced_on_launch(get, post, job_template, admin, machine_credential):
    # If a JT has a credential that needs a password, but the launch POST
    # specifies a credential that does not require any passwords, the prompted credential is replaced
- cred2 = Credential(name='second-cred', inputs=machine_credential.inputs,
- credential_type=machine_credential.credential_type)
+ cred2 = Credential(name='second-cred', inputs=machine_credential.inputs, credential_type=machine_credential.credential_type)
cred2.inputs['password'] = 'ASK'
cred2.save()
job_template.credentials.add(cred2)
@@ -114,10 +109,7 @@ def test_ssh_credential_with_password_at_launch(get, post, job_template, admin,
assert resp.data['passwords_needed_to_start'] == ['ssh_password']
with mock.patch.object(Job, 'signal_start') as signal_start:
- resp = post(url, {
- 'credentials': [machine_credential.pk],
- 'ssh_password': 'testing123'
- }, admin, expect=201)
+ resp = post(url, {'credentials': [machine_credential.pk], 'ssh_password': 'testing123'}, admin, expect=201)
signal_start.assert_called_with(ssh_password='testing123')
@@ -141,10 +133,7 @@ def test_vault_credential_with_password_at_launch(get, post, job_template, admin
assert resp.data['passwords_needed_to_start'] == ['vault_password']
with mock.patch.object(Job, 'signal_start') as signal_start:
- resp = post(url, {
- 'credentials': [vault_credential.pk],
- 'vault_password': 'testing123'
- }, admin, expect=201)
+ resp = post(url, {'credentials': [vault_credential.pk], 'vault_password': 'testing123'}, admin, expect=201)
signal_start.assert_called_with(vault_password='testing123')
@@ -153,36 +142,24 @@ def test_deprecated_credential_activity_stream(patch, admin_user, machine_creden
job_template.credentials.add(machine_credential)
starting_entries = job_template.activitystream_set.count()
# no-op patch
- patch(
- job_template.get_absolute_url(),
- admin_user,
- data={'credential': machine_credential.pk},
- expect=200
- )
+ patch(job_template.get_absolute_url(), admin_user, data={'credential': machine_credential.pk}, expect=200)
# no-op should not produce activity stream entries
assert starting_entries == job_template.activitystream_set.count()
@pytest.mark.django_db
def test_multi_vault_preserved_on_put(get, put, admin_user, job_template, vault_credential):
- '''
+ """
A PUT request will necessarily specify deprecated fields, but if the deprecated
field is a singleton while the `credentials` relation has many, that makes
it very easy to drop those credentials not specified in the PUT data
- '''
+ """
vault2 = Credential.objects.create(
- name='second-vault',
- credential_type=vault_credential.credential_type,
- inputs={'vault_password': 'foo', 'vault_id': 'foo'}
+ name='second-vault', credential_type=vault_credential.credential_type, inputs={'vault_password': 'foo', 'vault_id': 'foo'}
)
job_template.credentials.add(vault_credential, vault2)
assert job_template.credentials.count() == 2 # sanity check
r = get(job_template.get_absolute_url(), admin_user, expect=200)
# should be a no-op PUT request
- put(
- job_template.get_absolute_url(),
- admin_user,
- data=r.data,
- expect=200
- )
+ put(job_template.get_absolute_url(), admin_user, data=r.data, expect=200)
assert job_template.credentials.count() == 2
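test_multi_vault_preserved_on_put, rewrapped above, documents a subtle contract: a read-modify-write PUT echoes back the deprecated singleton 'credential' field, and a careless serializer would let that singleton clobber the many-to-many 'credentials' relation. The same round trip can be sketched against a live API with the requests library (base URL, pk, and token below are placeholders):

import requests


def noop_put_roundtrip(base_url, pk, token):
    # GET the job template and PUT the identical payload straight back;
    # callers can then check that relations such as 'credentials' survived.
    headers = {'Authorization': 'Bearer {}'.format(token)}
    url = '{}/api/v2/job_templates/{}/'.format(base_url, pk)
    data = requests.get(url, headers=headers).json()
    resp = requests.put(url, json=data, headers=headers)
    resp.raise_for_status()
    return resp.json()


# e.g. noop_put_roundtrip('https://awx.example.org', 42, '<oauth2-token>')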
diff --git a/awx/main/tests/functional/api/test_events.py b/awx/main/tests/functional/api/test_events.py
index 036183e93d..45f498e2f3 100644
--- a/awx/main/tests/functional/api/test_events.py
+++ b/awx/main/tests/functional/api/test_events.py
@@ -5,17 +5,18 @@ from awx.main.models import AdHocCommand, AdHocCommandEvent, JobEvent
@pytest.mark.django_db
-@pytest.mark.parametrize('truncate, expected', [
- (True, False),
- (False, True),
-])
+@pytest.mark.parametrize(
+ 'truncate, expected',
+ [
+ (True, False),
+ (False, True),
+ ],
+)
def test_job_events_sublist_truncation(get, organization_factory, job_template_factory, truncate, expected):
objs = organization_factory("org", superusers=['admin'])
- jt = job_template_factory("jt", organization=objs.organization,
- inventory='test_inv', project='test_proj').job_template
+ jt = job_template_factory("jt", organization=objs.organization, inventory='test_inv', project='test_proj').job_template
job = jt.create_unified_job()
- JobEvent.create_from_data(job_id=job.pk, uuid='abc123', event='runner_on_start',
- stdout='a' * 1025).save()
+ JobEvent.create_from_data(job_id=job.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025).save()
url = reverse('api:job_job_events_list', kwargs={'pk': job.pk})
if not truncate:
@@ -26,16 +27,18 @@ def test_job_events_sublist_truncation(get, organization_factory, job_template_f
@pytest.mark.django_db
-@pytest.mark.parametrize('truncate, expected', [
- (True, False),
- (False, True),
-])
+@pytest.mark.parametrize(
+ 'truncate, expected',
+ [
+ (True, False),
+ (False, True),
+ ],
+)
def test_ad_hoc_events_sublist_truncation(get, organization_factory, job_template_factory, truncate, expected):
objs = organization_factory("org", superusers=['admin'])
adhoc = AdHocCommand()
adhoc.save()
- AdHocCommandEvent.create_from_data(ad_hoc_command_id=adhoc.pk, uuid='abc123', event='runner_on_start',
- stdout='a' * 1025).save()
+ AdHocCommandEvent.create_from_data(ad_hoc_command_id=adhoc.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025).save()
url = reverse('api:ad_hoc_command_ad_hoc_command_events_list', kwargs={'pk': adhoc.pk})
if not truncate:
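Both event tests above write a 1025-character stdout and parametrize on whether the sublist view truncates it. A sketch of the rule they pin down follows; the 1024-character threshold matches the test data, while the exact cutoff and marker are assumptions here:

def truncate_event_stdout(stdout, limit=1024):
    # Sublist event views cap stdout unless truncation is disabled on the
    # request; detail views return the full text.
    if len(stdout) <= limit:
        return stdout
    return stdout[:limit] + '...'


event_stdout = 'a' * 1025
assert truncate_event_stdout(event_stdout) != event_stdout
assert truncate_event_stdout('a' * 1024) == 'a' * 1024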
diff --git a/awx/main/tests/functional/api/test_generic.py b/awx/main/tests/functional/api/test_generic.py
index 2933cb09e6..608a15c2f8 100644
--- a/awx/main/tests/functional/api/test_generic.py
+++ b/awx/main/tests/functional/api/test_generic.py
@@ -6,13 +6,7 @@ from awx.api.versioning import reverse
@pytest.mark.django_db
def test_proxy_ip_allowed(get, patch, admin):
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
- patch(url, user=admin, data={
- 'REMOTE_HOST_HEADERS': [
- 'HTTP_X_FROM_THE_LOAD_BALANCER',
- 'REMOTE_ADDR',
- 'REMOTE_HOST'
- ]
- })
+ patch(url, user=admin, data={'REMOTE_HOST_HEADERS': ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']})
class HeaderTrackingMiddleware(object):
environ = {}
@@ -26,39 +20,28 @@ def test_proxy_ip_allowed(get, patch, admin):
# By default, `PROXY_IP_ALLOWED_LIST` is disabled, so custom `REMOTE_HOST_HEADERS`
# should just pass through
middleware = HeaderTrackingMiddleware()
- get(url, user=admin, middleware=middleware,
- HTTP_X_FROM_THE_LOAD_BALANCER='some-actual-ip')
+ get(url, user=admin, middleware=middleware, HTTP_X_FROM_THE_LOAD_BALANCER='some-actual-ip')
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
# If `PROXY_IP_ALLOWED_LIST` is restricted to 10.0.1.100 and we make a request
# from 8.9.10.11, the custom `HTTP_X_FROM_THE_LOAD_BALANCER` header should
# be stripped
- patch(url, user=admin, data={
- 'PROXY_IP_ALLOWED_LIST': ['10.0.1.100']
- })
+ patch(url, user=admin, data={'PROXY_IP_ALLOWED_LIST': ['10.0.1.100']})
middleware = HeaderTrackingMiddleware()
- get(url, user=admin, middleware=middleware, REMOTE_ADDR='8.9.10.11',
- HTTP_X_FROM_THE_LOAD_BALANCER='some-actual-ip')
+ get(url, user=admin, middleware=middleware, REMOTE_ADDR='8.9.10.11', HTTP_X_FROM_THE_LOAD_BALANCER='some-actual-ip')
assert 'HTTP_X_FROM_THE_LOAD_BALANCER' not in middleware.environ
# If 8.9.10.11 is added to `PROXY_IP_ALLOWED_LIST` the
# `HTTP_X_FROM_THE_LOAD_BALANCER` header should be passed through again
- patch(url, user=admin, data={
- 'PROXY_IP_ALLOWED_LIST': ['10.0.1.100', '8.9.10.11']
- })
+ patch(url, user=admin, data={'PROXY_IP_ALLOWED_LIST': ['10.0.1.100', '8.9.10.11']})
middleware = HeaderTrackingMiddleware()
- get(url, user=admin, middleware=middleware, REMOTE_ADDR='8.9.10.11',
- HTTP_X_FROM_THE_LOAD_BALANCER='some-actual-ip')
+ get(url, user=admin, middleware=middleware, REMOTE_ADDR='8.9.10.11', HTTP_X_FROM_THE_LOAD_BALANCER='some-actual-ip')
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
    # The allowed list also accepts proxy hostnames in addition to IP addresses
- patch(url, user=admin, data={
- 'PROXY_IP_ALLOWED_LIST': ['my.proxy.example.org']
- })
+ patch(url, user=admin, data={'PROXY_IP_ALLOWED_LIST': ['my.proxy.example.org']})
middleware = HeaderTrackingMiddleware()
- get(url, user=admin, middleware=middleware, REMOTE_ADDR='8.9.10.11',
- REMOTE_HOST='my.proxy.example.org',
- HTTP_X_FROM_THE_LOAD_BALANCER='some-actual-ip')
+ get(url, user=admin, middleware=middleware, REMOTE_ADDR='8.9.10.11', REMOTE_HOST='my.proxy.example.org', HTTP_X_FROM_THE_LOAD_BALANCER='some-actual-ip')
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
@@ -67,38 +50,23 @@ class TestDeleteViews:
def test_sublist_delete_permission_check(self, inventory_source, host, rando, delete):
inventory_source.hosts.add(host)
inventory_source.inventory.read_role.members.add(rando)
- delete(
- reverse(
- 'api:inventory_source_hosts_list',
- kwargs={'pk': inventory_source.pk}
- ), user=rando, expect=403
- )
+ delete(reverse('api:inventory_source_hosts_list', kwargs={'pk': inventory_source.pk}), user=rando, expect=403)
def test_sublist_delete_functionality(self, inventory_source, host, rando, delete):
inventory_source.hosts.add(host)
inventory_source.inventory.admin_role.members.add(rando)
- delete(
- reverse(
- 'api:inventory_source_hosts_list',
- kwargs={'pk': inventory_source.pk}
- ), user=rando, expect=204
- )
+ delete(reverse('api:inventory_source_hosts_list', kwargs={'pk': inventory_source.pk}), user=rando, expect=204)
assert inventory_source.hosts.count() == 0
def test_destroy_permission_check(self, job_factory, system_auditor, delete):
job = job_factory()
- resp = delete(
- job.get_absolute_url(), user=system_auditor
- )
+ resp = delete(job.get_absolute_url(), user=system_auditor)
assert resp.status_code == 403
@pytest.mark.django_db
def test_filterable_fields(options, instance, admin_user):
- r = options(
- url=instance.get_absolute_url(),
- user=admin_user
- )
+ r = options(url=instance.get_absolute_url(), user=admin_user)
filterable_info = r.data['actions']['GET']['created']
non_filterable_info = r.data['actions']['GET']['percent_capacity_remaining']
@@ -112,11 +80,7 @@ def test_filterable_fields(options, instance, admin_user):
@pytest.mark.django_db
def test_handle_content_type(post, admin):
''' Tower should return 415 when wrong content type is in HTTP requests '''
- post(reverse('api:project_list'),
- {'name': 't', 'organization': None},
- admin,
- content_type='text/html',
- expect=415)
+ post(reverse('api:project_list'), {'name': 't', 'organization': None}, admin, content_type='text/html', expect=415)
@pytest.mark.django_db
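test_proxy_ip_allowed, reflowed above, is the clearest statement of the allowlist behavior: once PROXY_IP_ALLOWED_LIST is set, custom REMOTE_HOST_HEADERS are stripped unless the request arrives from an allowed proxy, matched by either REMOTE_ADDR or REMOTE_HOST. A minimal sketch of that check (illustrative, not the actual middleware):

def strip_untrusted_proxy_headers(environ, allowed_proxies, custom_headers):
    # Drop custom remote-host headers unless the request came from a proxy
    # on the allowlist, matched by IP address or by hostname.
    origin = {environ.get('REMOTE_ADDR'), environ.get('REMOTE_HOST')}
    if allowed_proxies and not origin & set(allowed_proxies):
        for header in custom_headers:
            environ.pop(header, None)
    return environ


env = {'REMOTE_ADDR': '8.9.10.11', 'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip'}
strip_untrusted_proxy_headers(env, ['10.0.1.100'], ['HTTP_X_FROM_THE_LOAD_BALANCER'])
assert 'HTTP_X_FROM_THE_LOAD_BALANCER' not in env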
diff --git a/awx/main/tests/functional/api/test_host_insights.py b/awx/main/tests/functional/api/test_host_insights.py
index 348ca02952..e5b3d8a783 100644
--- a/awx/main/tests/functional/api/test_host_insights.py
+++ b/awx/main/tests/functional/api/test_host_insights.py
@@ -38,9 +38,7 @@ class TestHostInsights:
url = reverse('api:host_insights', kwargs={'pk': host.pk})
response = get(url, user('admin', True))
- assert response.data['error'] == (
- 'Could not translate Insights system ID 123e4567-e89b-12d3-a456-426655440000'
- ' into an Insights platform ID.')
+    assert response.data['error'] == 'Could not translate Insights system ID 123e4567-e89b-12d3-a456-426655440000 into an Insights platform ID.'
assert response.status_code == 404
def test_insights_no_credential(self, get, hosts, user, mocker):
@@ -56,11 +54,24 @@ class TestHostInsights:
assert response.data['error'] == 'The Insights Credential for "test-inv" was not found.'
assert response.status_code == 404
- @pytest.mark.parametrize("status_code, exception, error, message", [
- (502, requests.exceptions.SSLError, 'SSLError while trying to connect to https://myexample.com/whocares/me/', None,),
- (504, requests.exceptions.Timeout, 'Request to https://myexample.com/whocares/me/ timed out.', None,),
- (502, requests.exceptions.RequestException, 'booo!', 'Unknown exception booo! while trying to GET https://myexample.com/whocares/me/'),
- ])
+ @pytest.mark.parametrize(
+ "status_code, exception, error, message",
+ [
+ (
+ 502,
+ requests.exceptions.SSLError,
+ 'SSLError while trying to connect to https://myexample.com/whocares/me/',
+ None,
+ ),
+ (
+ 504,
+ requests.exceptions.Timeout,
+ 'Request to https://myexample.com/whocares/me/ timed out.',
+ None,
+ ),
+ (502, requests.exceptions.RequestException, 'booo!', 'Unknown exception booo! while trying to GET https://myexample.com/whocares/me/'),
+ ],
+ )
def test_insights_exception(self, get, hosts, insights_credential, user, mocker, status_code, exception, error, message):
mocker.patch.object(requests.Session, 'get', side_effect=exception(error))
@@ -89,8 +100,7 @@ class TestHostInsights:
url = reverse('api:host_insights', kwargs={'pk': host.pk})
response = get(url, user('admin', True))
- assert response.data['error'] == (
- "Unauthorized access. Please check your Insights Credential username and password.")
+    assert response.data['error'] == "Unauthorized access. Please check your Insights Credential username and password."
assert response.status_code == 502
def test_insights_bad_status(self, get, hosts, insights_credential, user, mocker):
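The exploded parametrize table in test_insights_exception doubles as a small spec for how Insights connection failures map onto API responses: timeouts become 504, while SSLError and any other RequestException become 502. A sketch of that mapping as inferred from the table; the isinstance order matters because Timeout subclasses RequestException:

import requests


def insights_error_status(exc):
    # Check Timeout before the RequestException catch-all, since Timeout is
    # itself a RequestException subclass.
    if isinstance(exc, requests.exceptions.Timeout):
        return 504
    if isinstance(exc, requests.exceptions.RequestException):
        return 502  # covers SSLError and unknown request failures
    raise exc


assert insights_error_status(requests.exceptions.Timeout()) == 504
assert insights_error_status(requests.exceptions.SSLError()) == 502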
diff --git a/awx/main/tests/functional/api/test_instance_group.py b/awx/main/tests/functional/api/test_instance_group.py
index 22ecf1a2f6..967775dd74 100644
--- a/awx/main/tests/functional/api/test_instance_group.py
+++ b/awx/main/tests/functional/api/test_instance_group.py
@@ -62,6 +62,7 @@ def create_job_factory(job_factory, instance_group):
j.instance_group = instance_group
j.save()
return j
+
return fn
@@ -73,6 +74,7 @@ def create_project_update_factory(instance_group, project):
pu.instance_group = instance_group
pu.save()
return pu
+
return fn
@@ -195,8 +197,7 @@ def test_prevent_isolated_instance_removal_from_isolated_instance_group(post, ad
@pytest.mark.django_db
-def test_prevent_non_isolated_instance_added_to_isolated_instance_group(
- post, admin, non_iso_instance, isolated_instance_group):
+def test_prevent_non_isolated_instance_added_to_isolated_instance_group(post, admin, non_iso_instance, isolated_instance_group):
url = reverse("api:instance_group_instance_list", kwargs={'pk': isolated_instance_group.pk})
assert False is non_iso_instance.is_isolated()
@@ -205,8 +206,7 @@ def test_prevent_non_isolated_instance_added_to_isolated_instance_group(
@pytest.mark.django_db
-def test_prevent_non_isolated_instance_added_to_isolated_instance_group_via_policy_list(
- patch, admin, non_iso_instance, isolated_instance_group):
+def test_prevent_non_isolated_instance_added_to_isolated_instance_group_via_policy_list(patch, admin, non_iso_instance, isolated_instance_group):
url = reverse("api:instance_group_detail", kwargs={'pk': isolated_instance_group.pk})
assert False is non_iso_instance.is_isolated()
@@ -216,9 +216,7 @@ def test_prevent_non_isolated_instance_added_to_isolated_instance_group_via_poli
@pytest.mark.django_db
-@pytest.mark.parametrize(
- 'source_model', ['job_template', 'inventory', 'organization'], indirect=True
-)
+@pytest.mark.parametrize('source_model', ['job_template', 'inventory', 'organization'], indirect=True)
def test_instance_group_order_persistence(get, post, admin, source_model):
# create several instance groups in random order
total = 5
@@ -226,10 +224,7 @@ def test_instance_group_order_persistence(get, post, admin, source_model):
random.shuffle(pks)
instances = [InstanceGroup.objects.create(name='iso-%d' % i) for i in pks]
view_name = camelcase_to_underscore(source_model.__class__.__name__)
- url = reverse(
- 'api:{}_instance_groups_list'.format(view_name),
- kwargs={'pk': source_model.pk}
- )
+ url = reverse('api:{}_instance_groups_list'.format(view_name), kwargs={'pk': source_model.pk})
# associate them all
for instance in instances:
@@ -258,22 +253,22 @@ def test_instance_group_update_fields(patch, instance, instance_group, admin, co
ig_url = reverse("api:instance_group_detail", kwargs={'pk': instance_group.pk})
assert not instance_group.is_container_group
assert not containerized_instance_group.is_isolated
- resp = patch(ig_url, {'policy_instance_percentage':15}, admin, expect=200)
+ resp = patch(ig_url, {'policy_instance_percentage': 15}, admin, expect=200)
assert 15 == resp.data['policy_instance_percentage']
- resp = patch(ig_url, {'policy_instance_minimum':15}, admin, expect=200)
+ resp = patch(ig_url, {'policy_instance_minimum': 15}, admin, expect=200)
assert 15 == resp.data['policy_instance_minimum']
- resp = patch(ig_url, {'policy_instance_list':[instance.hostname]}, admin)
+ resp = patch(ig_url, {'policy_instance_list': [instance.hostname]}, admin)
assert [instance.hostname] == resp.data['policy_instance_list']
# containerized instance group
cg_url = reverse("api:instance_group_detail", kwargs={'pk': containerized_instance_group.pk})
assert containerized_instance_group.is_container_group
assert not containerized_instance_group.is_isolated
- resp = patch(cg_url, {'policy_instance_percentage':15}, admin, expect=400)
+ resp = patch(cg_url, {'policy_instance_percentage': 15}, admin, expect=400)
assert ["Containerized instances may not be managed via the API"] == resp.data['policy_instance_percentage']
- resp = patch(cg_url, {'policy_instance_minimum':15}, admin, expect=400)
+ resp = patch(cg_url, {'policy_instance_minimum': 15}, admin, expect=400)
assert ["Containerized instances may not be managed via the API"] == resp.data['policy_instance_minimum']
- resp = patch(cg_url, {'policy_instance_list':[instance.hostname]}, admin)
+ resp = patch(cg_url, {'policy_instance_list': [instance.hostname]}, admin)
assert ["Containerized instances may not be managed via the API"] == resp.data['policy_instance_list']
diff --git a/awx/main/tests/functional/api/test_inventory.py b/awx/main/tests/functional/api/test_inventory.py
index 5bad1b6f30..1ed1c633d1 100644
--- a/awx/main/tests/functional/api/test_inventory.py
+++ b/awx/main/tests/functional/api/test_inventory.py
@@ -14,8 +14,8 @@ from awx.main.models import InventorySource, Inventory, ActivityStream
def scm_inventory(inventory, project):
with mock.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.update'):
inventory.inventory_sources.create(
- name='foobar', update_on_project_update=True, source='scm',
- source_project=project, scm_last_revision=project.scm_revision)
+ name='foobar', update_on_project_update=True, source='scm', source_project=project, scm_last_revision=project.scm_revision
+ )
return inventory
@@ -23,11 +23,10 @@ def scm_inventory(inventory, project):
def factory_scm_inventory(inventory, project):
def fn(**kwargs):
with mock.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.update'):
- return inventory.inventory_sources.create(source_project=project,
- overwrite_vars=True,
- source='scm',
- scm_last_revision=project.scm_revision,
- **kwargs)
+ return inventory.inventory_sources.create(
+ source_project=project, overwrite_vars=True, source='scm', scm_last_revision=project.scm_revision, **kwargs
+ )
+
return fn
@@ -69,15 +68,15 @@ def test_inventory_host_name_unique(scm_inventory, post, admin_user):
'name': 'barfoo',
'inventory_id': scm_inventory.id,
},
- admin_user,
- expect=400
+ admin_user,
+ expect=400,
)
assert resp.status_code == 400
assert "A Group with that name already exists." in json.dumps(resp.data)
-@pytest.mark.django_db
+@pytest.mark.django_db
def test_inventory_group_name_unique(scm_inventory, post, admin_user):
inv_src = scm_inventory.inventory_sources.first()
inv_src.hosts.create(name='barfoo', inventory=scm_inventory)
@@ -87,24 +86,22 @@ def test_inventory_group_name_unique(scm_inventory, post, admin_user):
'name': 'barfoo',
'inventory_id': scm_inventory.id,
},
- admin_user,
- expect=400
+ admin_user,
+ expect=400,
)
assert resp.status_code == 400
assert "A Host with that name already exists." in json.dumps(resp.data)
-@pytest.mark.parametrize("role_field,expected_status_code", [
- (None, 403),
- ('admin_role', 200),
- ('update_role', 403),
- ('adhoc_role', 403),
- ('use_role', 403)
-])
+@pytest.mark.parametrize("role_field,expected_status_code", [(None, 403), ('admin_role', 200), ('update_role', 403), ('adhoc_role', 403), ('use_role', 403)])
@pytest.mark.django_db
def test_edit_inventory(put, inventory, alice, role_field, expected_status_code):
- data = { 'organization': inventory.organization.id, 'name': 'New name', 'description': 'Hello world', }
+ data = {
+ 'organization': inventory.organization.id,
+ 'name': 'New name',
+ 'description': 'Hello world',
+ }
if role_field:
getattr(inventory, role_field).members.add(alice)
put(reverse('api:inventory_detail', kwargs={'pk': inventory.id}), data, alice, expect=expected_status_code)
@@ -151,83 +148,58 @@ def test_async_inventory_deletion_deletes_related_jt(delete, get, job_template,
@pytest.mark.django_db
def test_list_cannot_order_by_unsearchable_field(get, organization, alice, order_by):
for i, script in enumerate(('#!/bin/a', '#!/bin/b', '#!/bin/c')):
- custom_script = organization.custom_inventory_scripts.create(
- name="I%d" % i,
- script=script
- )
+ custom_script = organization.custom_inventory_scripts.create(name="I%d" % i, script=script)
custom_script.admin_role.members.add(alice)
- get(reverse('api:inventory_script_list'), alice,
- QUERY_STRING='order_by=%s' % order_by, expect=403)
+ get(reverse('api:inventory_script_list'), alice, QUERY_STRING='order_by=%s' % order_by, expect=403)
-@pytest.mark.parametrize("role_field,expected_status_code", [
- (None, 403),
- ('admin_role', 201),
- ('update_role', 403),
- ('adhoc_role', 403),
- ('use_role', 403)
-])
+@pytest.mark.parametrize("role_field,expected_status_code", [(None, 403), ('admin_role', 201), ('update_role', 403), ('adhoc_role', 403), ('use_role', 403)])
@pytest.mark.django_db
def test_create_inventory_group(post, inventory, alice, role_field, expected_status_code):
- data = { 'name': 'New name', 'description': 'Hello world', }
+ data = {
+ 'name': 'New name',
+ 'description': 'Hello world',
+ }
if role_field:
getattr(inventory, role_field).members.add(alice)
post(reverse('api:inventory_groups_list', kwargs={'pk': inventory.id}), data, alice, expect=expected_status_code)
-@pytest.mark.parametrize("role_field,expected_status_code", [
- (None, 403),
- ('admin_role', 201),
- ('update_role', 403),
- ('adhoc_role', 403),
- ('use_role', 403)
-])
+@pytest.mark.parametrize("role_field,expected_status_code", [(None, 403), ('admin_role', 201), ('update_role', 403), ('adhoc_role', 403), ('use_role', 403)])
@pytest.mark.django_db
def test_create_inventory_group_child(post, group, alice, role_field, expected_status_code):
- data = { 'name': 'New name', 'description': 'Hello world', }
+ data = {
+ 'name': 'New name',
+ 'description': 'Hello world',
+ }
if role_field:
getattr(group.inventory, role_field).members.add(alice)
post(reverse('api:group_children_list', kwargs={'pk': group.id}), data, alice, expect=expected_status_code)
-@pytest.mark.parametrize("role_field,expected_status_code", [
- (None, 403),
- ('admin_role', 200),
- ('update_role', 403),
- ('adhoc_role', 403),
- ('use_role', 403)
-])
+@pytest.mark.parametrize("role_field,expected_status_code", [(None, 403), ('admin_role', 200), ('update_role', 403), ('adhoc_role', 403), ('use_role', 403)])
@pytest.mark.django_db
def test_edit_inventory_group(put, group, alice, role_field, expected_status_code):
- data = { 'name': 'New name', 'description': 'Hello world', }
+ data = {
+ 'name': 'New name',
+ 'description': 'Hello world',
+ }
if role_field:
getattr(group.inventory, role_field).members.add(alice)
put(reverse('api:group_detail', kwargs={'pk': group.id}), data, alice, expect=expected_status_code)
-@pytest.mark.parametrize("role_field,expected_status_code", [
- (None, 403),
- ('admin_role', 201),
- ('update_role', 403),
- ('adhoc_role', 403),
- ('use_role', 403)
-])
+@pytest.mark.parametrize("role_field,expected_status_code", [(None, 403), ('admin_role', 201), ('update_role', 403), ('adhoc_role', 403), ('use_role', 403)])
@pytest.mark.django_db
def test_create_inventory_inventory_source(post, inventory, alice, role_field, expected_status_code):
- data = { 'source': 'ec2', 'name': 'ec2-inv-source'}
+ data = {'source': 'ec2', 'name': 'ec2-inv-source'}
if role_field:
getattr(inventory, role_field).members.add(alice)
post(reverse('api:inventory_inventory_sources_list', kwargs={'pk': inventory.id}), data, alice, expect=expected_status_code)
-@pytest.mark.parametrize("role_field,expected_status_code", [
- (None, 403),
- ('admin_role', 204),
- ('update_role', 403),
- ('adhoc_role', 403),
- ('use_role', 403)
-])
+@pytest.mark.parametrize("role_field,expected_status_code", [(None, 403), ('admin_role', 204), ('update_role', 403), ('adhoc_role', 403), ('use_role', 403)])
@pytest.mark.django_db
def test_delete_inventory_group(delete, group, alice, role_field, expected_status_code):
if role_field:
@@ -237,11 +209,8 @@ def test_delete_inventory_group(delete, group, alice, role_field, expected_statu
@pytest.mark.django_db
def test_create_inventory_smartgroup(post, get, inventory, admin_user, organization):
- data = { 'name': 'Group 1', 'description': 'Test Group'}
- smart_inventory = Inventory(name='smart',
- kind='smart',
- organization=organization,
- host_filter='inventory_sources__source=ec2')
+ data = {'name': 'Group 1', 'description': 'Test Group'}
+ smart_inventory = Inventory(name='smart', kind='smart', organization=organization, host_filter='inventory_sources__source=ec2')
smart_inventory.save()
post(reverse('api:inventory_groups_list', kwargs={'pk': smart_inventory.id}), data, admin_user)
resp = get(reverse('api:inventory_groups_list', kwargs={'pk': smart_inventory.id}), admin_user)
@@ -253,11 +222,8 @@ def test_create_inventory_smartgroup(post, get, inventory, admin_user, organizat
@pytest.mark.django_db
def test_create_inventory_smart_inventory_sources(post, get, inventory, admin_user, organization):
- data = { 'name': 'Inventory Source 1', 'description': 'Test Inventory Source'}
- smart_inventory = Inventory(name='smart',
- kind='smart',
- organization=organization,
- host_filter='inventory_sources__source=ec2')
+ data = {'name': 'Inventory Source 1', 'description': 'Test Inventory Source'}
+ smart_inventory = Inventory(name='smart', kind='smart', organization=organization, host_filter='inventory_sources__source=ec2')
smart_inventory.save()
post(reverse('api:inventory_inventory_sources_list', kwargs={'pk': smart_inventory.id}), data, admin_user)
resp = get(reverse('api:inventory_inventory_sources_list', kwargs={'pk': smart_inventory.id}), admin_user)
@@ -284,12 +250,13 @@ def test_urlencode_host_filter(post, admin_user, organization):
post(
reverse('api:inventory_list'),
data={
- 'name': 'smart inventory', 'kind': 'smart',
+ 'name': 'smart inventory',
+ 'kind': 'smart',
'organization': organization.pk,
- 'host_filter': 'ansible_facts__ansible_distribution_version=%227.4%22'
+ 'host_filter': 'ansible_facts__ansible_distribution_version=%227.4%22',
},
user=admin_user,
- expect=201
+ expect=201,
)
    # Assert that the saved host_filter has the %22 sequences decoded into literal double quotes
si = Inventory.objects.get(name='smart inventory')
@@ -300,13 +267,9 @@ def test_urlencode_host_filter(post, admin_user, organization):
def test_host_filter_unicode(post, admin_user, organization):
post(
reverse('api:inventory_list'),
- data={
- 'name': 'smart inventory', 'kind': 'smart',
- 'organization': organization.pk,
- 'host_filter': u'ansible_facts__ansible_distribution=レッドハット'
- },
+ data={'name': 'smart inventory', 'kind': 'smart', 'organization': organization.pk, 'host_filter': u'ansible_facts__ansible_distribution=レッドハット'},
user=admin_user,
- expect=201
+ expect=201,
)
si = Inventory.objects.get(name='smart inventory')
assert si.host_filter == u'ansible_facts__ansible_distribution=レッドハット'
@@ -318,12 +281,13 @@ def test_host_filter_invalid_ansible_facts_lookup(post, admin_user, organization
resp = post(
reverse('api:inventory_list'),
data={
- 'name': 'smart inventory', 'kind': 'smart',
+ 'name': 'smart inventory',
+ 'kind': 'smart',
'organization': organization.pk,
- 'host_filter': u'ansible_facts__ansible_distribution__{}=cent'.format(lookup)
+ 'host_filter': u'ansible_facts__ansible_distribution__{}=cent'.format(lookup),
},
user=admin_user,
- expect=400
+ expect=400,
)
assert 'ansible_facts does not support searching with __{}'.format(lookup) in json.dumps(resp.data)
@@ -333,35 +297,36 @@ def test_host_filter_ansible_facts_exact(post, admin_user, organization):
post(
reverse('api:inventory_list'),
data={
- 'name': 'smart inventory', 'kind': 'smart',
+ 'name': 'smart inventory',
+ 'kind': 'smart',
'organization': organization.pk,
- 'host_filter': 'ansible_facts__ansible_distribution__exact="CentOS"'
+ 'host_filter': 'ansible_facts__ansible_distribution__exact="CentOS"',
},
user=admin_user,
- expect=201
+ expect=201,
)
-@pytest.mark.parametrize("role_field,expected_status_code", [
- (None, 403),
- ('admin_role', 201),
- ('update_role', 403),
- ('adhoc_role', 403),
- ('use_role', 403)
-])
+@pytest.mark.parametrize("role_field,expected_status_code", [(None, 403), ('admin_role', 201), ('update_role', 403), ('adhoc_role', 403), ('use_role', 403)])
@pytest.mark.django_db
def test_create_inventory_host(post, inventory, alice, role_field, expected_status_code):
- data = { 'name': 'New name', 'description': 'Hello world', }
+ data = {
+ 'name': 'New name',
+ 'description': 'Hello world',
+ }
if role_field:
getattr(inventory, role_field).members.add(alice)
post(reverse('api:inventory_hosts_list', kwargs={'pk': inventory.id}), data, alice, expect=expected_status_code)
-@pytest.mark.parametrize("hosts,expected_status_code", [
- (1, 201),
- (2, 201),
- (3, 201),
-])
+@pytest.mark.parametrize(
+ "hosts,expected_status_code",
+ [
+ (1, 201),
+ (2, 201),
+ (3, 201),
+ ],
+)
@pytest.mark.django_db
def test_create_inventory_host_with_limits(post, admin_user, inventory, hosts, expected_status_code):
# The per-Organization host limits functionality should be a no-op on AWX.
@@ -371,35 +336,28 @@ def test_create_inventory_host_with_limits(post, admin_user, inventory, hosts, e
inventory.hosts.create(name="Existing host %i" % i)
data = {'name': 'New name', 'description': 'Hello world'}
- post(reverse('api:inventory_hosts_list', kwargs={'pk': inventory.id}),
- data, admin_user, expect=expected_status_code)
+ post(reverse('api:inventory_hosts_list', kwargs={'pk': inventory.id}), data, admin_user, expect=expected_status_code)
-@pytest.mark.parametrize("role_field,expected_status_code", [
- (None, 403),
- ('admin_role', 201),
- ('update_role', 403),
- ('adhoc_role', 403),
- ('use_role', 403)
-])
+@pytest.mark.parametrize("role_field,expected_status_code", [(None, 403), ('admin_role', 201), ('update_role', 403), ('adhoc_role', 403), ('use_role', 403)])
@pytest.mark.django_db
def test_create_inventory_group_host(post, group, alice, role_field, expected_status_code):
- data = { 'name': 'New name', 'description': 'Hello world', }
+ data = {
+ 'name': 'New name',
+ 'description': 'Hello world',
+ }
if role_field:
getattr(group.inventory, role_field).members.add(alice)
post(reverse('api:group_hosts_list', kwargs={'pk': group.id}), data, alice, expect=expected_status_code)
-@pytest.mark.parametrize("role_field,expected_status_code", [
- (None, 403),
- ('admin_role', 200),
- ('update_role', 403),
- ('adhoc_role', 403),
- ('use_role', 403)
-])
+@pytest.mark.parametrize("role_field,expected_status_code", [(None, 403), ('admin_role', 200), ('update_role', 403), ('adhoc_role', 403), ('use_role', 403)])
@pytest.mark.django_db
def test_edit_inventory_host(put, host, alice, role_field, expected_status_code):
- data = { 'name': 'New name', 'description': 'Hello world', }
+ data = {
+ 'name': 'New name',
+ 'description': 'Hello world',
+ }
if role_field:
getattr(host.inventory, role_field).members.add(alice)
put(reverse('api:host_detail', kwargs={'pk': host.id}), data, alice, expect=expected_status_code)
@@ -417,13 +375,7 @@ def test_edit_inventory_host_with_limits(put, host, admin_user):
put(reverse('api:host_detail', kwargs={'pk': host.id}), data, admin_user, expect=200)
-@pytest.mark.parametrize("role_field,expected_status_code", [
- (None, 403),
- ('admin_role', 204),
- ('update_role', 403),
- ('adhoc_role', 403),
- ('use_role', 403)
-])
+@pytest.mark.parametrize("role_field,expected_status_code", [(None, 403), ('admin_role', 204), ('update_role', 403), ('adhoc_role', 403), ('use_role', 403)])
@pytest.mark.django_db
def test_delete_inventory_host(delete, host, alice, role_field, expected_status_code):
if role_field:
@@ -432,16 +384,12 @@ def test_delete_inventory_host(delete, host, alice, role_field, expected_status_
# See companion test in tests/functional/test_rbac_inventory.py::test_inventory_source_update
-@pytest.mark.parametrize("start_access,expected_status_code", [
- (True, 202),
- (False, 403)
-])
+@pytest.mark.parametrize("start_access,expected_status_code", [(True, 202), (False, 403)])
@pytest.mark.django_db
def test_inventory_update_access_called(post, inventory_source, alice, mock_access, start_access, expected_status_code):
with mock_access(InventorySource) as mock_instance:
mock_instance.can_start = mock.MagicMock(return_value=start_access)
- post(reverse('api:inventory_source_update_view', kwargs={'pk': inventory_source.id}),
- {}, alice, expect=expected_status_code)
+ post(reverse('api:inventory_source_update_view', kwargs={'pk': inventory_source.id}), {}, alice, expect=expected_status_code)
mock_instance.can_start.assert_called_once_with(inventory_source)
@@ -449,20 +397,18 @@ def test_inventory_update_access_called(post, inventory_source, alice, mock_acce
def test_inventory_source_vars_prohibition(post, inventory, admin_user):
with mock.patch('awx.api.serializers.settings') as mock_settings:
mock_settings.INV_ENV_VARIABLE_BLOCKED = ('FOOBAR',)
- r = post(reverse('api:inventory_source_list'),
- {'name': 'new inv src', 'source_vars': '{\"FOOBAR\": \"val\"}', 'inventory': inventory.pk},
- admin_user, expect=400)
+ r = post(
+ reverse('api:inventory_source_list'),
+ {'name': 'new inv src', 'source_vars': '{\"FOOBAR\": \"val\"}', 'inventory': inventory.pk},
+ admin_user,
+ expect=400,
+ )
assert 'prohibited environment variable' in r.data['source_vars'][0]
assert 'FOOBAR' in r.data['source_vars'][0]
@pytest.mark.django_db
-@pytest.mark.parametrize('role,expect', [
- ('admin_role', 200),
- ('use_role', 403),
- ('adhoc_role', 403),
- ('read_role', 403)
-])
+@pytest.mark.parametrize('role,expect', [('admin_role', 200), ('use_role', 403), ('adhoc_role', 403), ('read_role', 403)])
def test_action_view_permissions(patch, put, get, inventory, rando, role, expect):
getattr(inventory, role).members.add(rando)
url = reverse('api:inventory_variable_data', kwargs={'pk': inventory.pk})
@@ -476,38 +422,25 @@ def test_action_view_permissions(patch, put, get, inventory, rando, role, expect
class TestInventorySourceCredential:
def test_need_cloud_credential(self, inventory, admin_user, post):
"""Test that a cloud-based source requires credential"""
- r = post(
- url=reverse('api:inventory_source_list'),
- data={'inventory': inventory.pk, 'name': 'foo', 'source': 'openstack'},
- expect=400,
- user=admin_user
- )
+ r = post(url=reverse('api:inventory_source_list'), data={'inventory': inventory.pk, 'name': 'foo', 'source': 'openstack'}, expect=400, user=admin_user)
assert 'Credential is required for a cloud source' in r.data['credential'][0]
def test_ec2_no_credential(self, inventory, admin_user, post):
"""Test that an ec2 inventory source can be added with no credential"""
- post(
- url=reverse('api:inventory_source_list'),
- data={'inventory': inventory.pk, 'name': 'fobar', 'source': 'ec2'},
- expect=201,
- user=admin_user
- )
+ post(url=reverse('api:inventory_source_list'), data={'inventory': inventory.pk, 'name': 'fobar', 'source': 'ec2'}, expect=201, user=admin_user)
def test_validating_credential_type(self, organization, inventory, admin_user, post):
"""Test that cloud sources must use their respective credential type"""
from awx.main.models.credential import Credential, CredentialType
+
openstack = CredentialType.defaults['openstack']()
openstack.save()
- os_cred = Credential.objects.create(
- credential_type=openstack, name='bar', organization=organization)
+ os_cred = Credential.objects.create(credential_type=openstack, name='bar', organization=organization)
r = post(
url=reverse('api:inventory_source_list'),
- data={
- 'inventory': inventory.pk, 'name': 'fobar', 'source': 'ec2',
- 'credential': os_cred.pk
- },
+ data={'inventory': inventory.pk, 'name': 'fobar', 'source': 'ec2', 'credential': os_cred.pk},
expect=400,
- user=admin_user
+ user=admin_user,
)
assert 'Cloud-based inventory sources (such as ec2)' in r.data['credential'][0]
assert 'require credentials for the matching cloud service' in r.data['credential'][0]
@@ -519,32 +452,24 @@ class TestInventorySourceCredential:
r = post(
url=reverse('api:inventory_source_list'),
data={
- 'inventory': inventory.pk, 'name': 'fobar', 'source': 'scm',
- 'source_project': project.pk, 'source_path': '',
+ 'inventory': inventory.pk,
+ 'name': 'fobar',
+ 'source': 'scm',
+ 'source_project': project.pk,
+ 'source_path': '',
'credential': vault_credential.pk,
'source_vars': 'plugin: a.b.c',
},
expect=400,
- user=admin_user
+ user=admin_user,
)
assert 'Credentials of type insights and vault' in r.data['credential'][0]
assert 'disallowed for scm inventory sources' in r.data['credential'][0]
- def test_vault_credential_not_allowed_via_related(
- self, project, inventory, vault_credential, admin_user, post):
+ def test_vault_credential_not_allowed_via_related(self, project, inventory, vault_credential, admin_user, post):
"""Vault credentials cannot be associated via related endpoint"""
- inv_src = InventorySource.objects.create(
- inventory=inventory, name='foobar', source='scm',
- source_project=project, source_path=''
- )
- r = post(
- url=reverse('api:inventory_source_credentials_list', kwargs={'pk': inv_src.pk}),
- data={
- 'id': vault_credential.pk
- },
- expect=400,
- user=admin_user
- )
+ inv_src = InventorySource.objects.create(inventory=inventory, name='foobar', source='scm', source_project=project, source_path='')
+ r = post(url=reverse('api:inventory_source_credentials_list', kwargs={'pk': inv_src.pk}), data={'id': vault_credential.pk}, expect=400, user=admin_user)
assert 'Credentials of type insights and vault' in r.data['msg']
assert 'disallowed for scm inventory sources' in r.data['msg']
@@ -552,74 +477,66 @@ class TestInventorySourceCredential:
"""The credentials relationship is used to manage the cloud credential
this test checks that replacement works"""
from awx.main.models.credential import Credential, CredentialType
+
openstack = CredentialType.defaults['openstack']()
openstack.save()
- os_cred = Credential.objects.create(
- credential_type=openstack, name='bar', organization=organization)
+ os_cred = Credential.objects.create(credential_type=openstack, name='bar', organization=organization)
r = post(
url=reverse('api:inventory_source_list'),
data={
- 'inventory': inventory.pk, 'name': 'fobar', 'source': 'scm',
- 'source_project': project.pk, 'source_path': '',
- 'credential': os_cred.pk, 'source_vars': 'plugin: a.b.c',
+ 'inventory': inventory.pk,
+ 'name': 'fobar',
+ 'source': 'scm',
+ 'source_project': project.pk,
+ 'source_path': '',
+ 'credential': os_cred.pk,
+ 'source_vars': 'plugin: a.b.c',
},
expect=201,
- user=admin_user
+ user=admin_user,
)
aws = CredentialType.defaults['aws']()
aws.save()
- aws_cred = Credential.objects.create(
- credential_type=aws, name='bar2', organization=organization)
+ aws_cred = Credential.objects.create(credential_type=aws, name='bar2', organization=organization)
inv_src = InventorySource.objects.get(pk=r.data['id'])
assert list(inv_src.credentials.values_list('id', flat=True)) == [os_cred.pk]
- patch(
- url=inv_src.get_absolute_url(),
- data={
- 'credential': aws_cred.pk
- },
- expect=200,
- user=admin_user
- )
+ patch(url=inv_src.get_absolute_url(), data={'credential': aws_cred.pk}, expect=200, user=admin_user)
assert list(inv_src.credentials.values_list('id', flat=True)) == [aws_cred.pk]
@pytest.mark.django_db
class TestControlledBySCM:
- '''
+ """
Check that various actions are correctly blocked if object is controlled
by an SCM follow-project inventory source
- '''
+ """
+
def test_safe_method_works(self, get, options, scm_inventory, admin_user):
get(scm_inventory.get_absolute_url(), admin_user, expect=200)
options(scm_inventory.get_absolute_url(), admin_user, expect=200)
assert InventorySource.objects.get(inventory=scm_inventory.pk).scm_last_revision != ''
def test_vars_edit_reset(self, patch, scm_inventory, admin_user):
- patch(scm_inventory.get_absolute_url(), {'variables': 'hello: world'},
- admin_user, expect=200)
+ patch(scm_inventory.get_absolute_url(), {'variables': 'hello: world'}, admin_user, expect=200)
assert InventorySource.objects.get(inventory=scm_inventory.pk).scm_last_revision == ''
def test_name_edit_allowed(self, patch, scm_inventory, admin_user):
- patch(scm_inventory.get_absolute_url(), {'variables': '---', 'name': 'newname'},
- admin_user, expect=200)
+ patch(scm_inventory.get_absolute_url(), {'variables': '---', 'name': 'newname'}, admin_user, expect=200)
assert InventorySource.objects.get(inventory=scm_inventory.pk).scm_last_revision != ''
def test_host_associations_reset(self, post, scm_inventory, admin_user):
inv_src = scm_inventory.inventory_sources.first()
h = inv_src.hosts.create(name='barfoo', inventory=scm_inventory)
g = inv_src.groups.create(name='fooland', inventory=scm_inventory)
- post(reverse('api:host_groups_list', kwargs={'pk': h.id}), {'id': g.id},
- admin_user, expect=204)
- post(reverse('api:group_hosts_list', kwargs={'pk': g.id}), {'id': h.id},
- admin_user, expect=204)
+ post(reverse('api:host_groups_list', kwargs={'pk': h.id}), {'id': g.id}, admin_user, expect=204)
+ post(reverse('api:group_hosts_list', kwargs={'pk': g.id}), {'id': h.id}, admin_user, expect=204)
assert InventorySource.objects.get(inventory=scm_inventory.pk).scm_last_revision == ''
def test_group_group_associations_reset(self, post, scm_inventory, admin_user):
inv_src = scm_inventory.inventory_sources.first()
g1 = inv_src.groups.create(name='barland', inventory=scm_inventory)
g2 = inv_src.groups.create(name='fooland', inventory=scm_inventory)
- post(reverse('api:group_children_list', kwargs={'pk': g1.id}), {'id': g2.id},
- admin_user, expect=204)
+ post(reverse('api:group_children_list', kwargs={'pk': g1.id}), {'id': g2.id}, admin_user, expect=204)
assert InventorySource.objects.get(inventory=scm_inventory.pk).scm_last_revision == ''
def test_host_group_delete_reset(self, delete, scm_inventory, admin_user):
@@ -636,56 +553,66 @@ class TestControlledBySCM:
assert scm_inventory.inventory_sources.count() == 0
def test_adding_inv_src_ok(self, post, scm_inventory, project, admin_user):
- post(reverse('api:inventory_inventory_sources_list',
- kwargs={'pk': scm_inventory.id}),
- {'name': 'new inv src',
- 'source_project': project.pk,
- 'update_on_project_update': False,
- 'source': 'scm',
- 'overwrite_vars': True,
- 'source_vars': 'plugin: a.b.c'},
- admin_user, expect=201)
+ post(
+ reverse('api:inventory_inventory_sources_list', kwargs={'pk': scm_inventory.id}),
+ {
+ 'name': 'new inv src',
+ 'source_project': project.pk,
+ 'update_on_project_update': False,
+ 'source': 'scm',
+ 'overwrite_vars': True,
+ 'source_vars': 'plugin: a.b.c',
+ },
+ admin_user,
+ expect=201,
+ )
def test_adding_inv_src_prohibited(self, post, scm_inventory, project, admin_user):
- post(reverse('api:inventory_inventory_sources_list', kwargs={'pk': scm_inventory.id}),
- {'name': 'new inv src', 'source_project': project.pk, 'update_on_project_update': True, 'source': 'scm', 'overwrite_vars': True},
- admin_user, expect=400)
+ post(
+ reverse('api:inventory_inventory_sources_list', kwargs={'pk': scm_inventory.id}),
+ {'name': 'new inv src', 'source_project': project.pk, 'update_on_project_update': True, 'source': 'scm', 'overwrite_vars': True},
+ admin_user,
+ expect=400,
+ )
def test_two_update_on_project_update_inv_src_prohibited(self, patch, scm_inventory, factory_scm_inventory, project, admin_user):
scm_inventory2 = factory_scm_inventory(name="scm_inventory2")
- res = patch(reverse('api:inventory_source_detail', kwargs={'pk': scm_inventory2.id}),
- {'update_on_project_update': True,},
- admin_user, expect=400)
+ res = patch(
+ reverse('api:inventory_source_detail', kwargs={'pk': scm_inventory2.id}),
+ {
+ 'update_on_project_update': True,
+ },
+ admin_user,
+ expect=400,
+ )
content = json.loads(res.content)
- assert content['update_on_project_update'] == ["More than one SCM-based inventory source with update on project update "
- "per-inventory not allowed."]
+    assert content['update_on_project_update'] == ["More than one SCM-based inventory source with update on project update per-inventory not allowed."]
def test_adding_inv_src_without_proj_access_prohibited(self, post, project, inventory, rando):
inventory.admin_role.members.add(rando)
- post(reverse('api:inventory_inventory_sources_list', kwargs={'pk': inventory.id}),
- {'name': 'new inv src', 'source_project': project.pk, 'source': 'scm', 'overwrite_vars': True, 'source_vars': 'plugin: a.b.c'},
- rando, expect=403)
+ post(
+ reverse('api:inventory_inventory_sources_list', kwargs={'pk': inventory.id}),
+ {'name': 'new inv src', 'source_project': project.pk, 'source': 'scm', 'overwrite_vars': True, 'source_vars': 'plugin: a.b.c'},
+ rando,
+ expect=403,
+ )
@pytest.mark.django_db
class TestInsightsCredential:
def test_insights_credential(self, patch, insights_inventory, admin_user, insights_credential):
- patch(insights_inventory.get_absolute_url(),
- {'insights_credential': insights_credential.id}, admin_user,
- expect=200)
+ patch(insights_inventory.get_absolute_url(), {'insights_credential': insights_credential.id}, admin_user, expect=200)
def test_insights_credential_protection(self, post, patch, insights_inventory, alice, insights_credential):
insights_inventory.organization.admin_role.members.add(alice)
insights_inventory.admin_role.members.add(alice)
- post(reverse('api:inventory_list'), {
- "name": "test",
- "organization": insights_inventory.organization.id,
- "insights_credential": insights_credential.id
- }, alice, expect=403)
- patch(insights_inventory.get_absolute_url(),
- {'insights_credential': insights_credential.id}, alice, expect=403)
+ post(
+ reverse('api:inventory_list'),
+ {"name": "test", "organization": insights_inventory.organization.id, "insights_credential": insights_credential.id},
+ alice,
+ expect=403,
+ )
+ patch(insights_inventory.get_absolute_url(), {'insights_credential': insights_credential.id}, alice, expect=403)
def test_non_insights_credential(self, patch, insights_inventory, admin_user, scm_credential):
- patch(insights_inventory.get_absolute_url(),
- {'insights_credential': scm_credential.id}, admin_user,
- expect=400)
+ patch(insights_inventory.get_absolute_url(), {'insights_credential': scm_credential.id}, admin_user, expect=400)
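A pattern worth naming before moving on: throughout test_inventory.py, black collapses a parametrize table onto a single line when it fits the configured line length and explodes it one entry per line, with a trailing "magic" comma, when it does not. Running black directly reproduces the choice; a sketch assuming black is installed, with the 160-character line length and disabled string normalization inferred from the width of the collapsed lines and the preserved single quotes:

import black

src = (
    "@pytest.mark.parametrize('role,expect', [('admin_role', 200), ('use_role', 403), ('adhoc_role', 403), ('read_role', 403)])\n"
    "def test_action_view_permissions(role, expect): ...\n"
)
mode = black.Mode(line_length=160, string_normalization=False)
# The decorator fits in 160 columns, so it stays on one line exactly as in
# the hunk above; shrink line_length to see it explode with trailing commas.
print(black.format_str(src, mode=mode))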
diff --git a/awx/main/tests/functional/api/test_job.py b/awx/main/tests/functional/api/test_job.py
index a0467bd6c0..f7fa087372 100644
--- a/awx/main/tests/functional/api/test_job.py
+++ b/awx/main/tests/functional/api/test_job.py
@@ -23,8 +23,7 @@ from awx.main.models import (
@pytest.mark.django_db
-def test_job_relaunch_permission_denied_response(
- post, get, inventory, project, credential, net_credential, machine_credential):
+def test_job_relaunch_permission_denied_response(post, get, inventory, project, credential, net_credential, machine_credential):
jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project, ask_credential_on_launch=True)
jt.credentials.add(machine_credential)
jt_user = User.objects.create(username='jobtemplateuser')
@@ -38,13 +37,12 @@ def test_job_relaunch_permission_denied_response(
# Job has prompted credential, launch denied w/ message
job.launch_config.credentials.add(net_credential)
- r = post(reverse('api:job_relaunch', kwargs={'pk':job.pk}), {}, jt_user, expect=403)
+ r = post(reverse('api:job_relaunch', kwargs={'pk': job.pk}), {}, jt_user, expect=403)
assert 'launched with prompted fields you do not have access to' in r.data['detail']
@pytest.mark.django_db
-def test_job_relaunch_prompts_not_accepted_response(
- post, get, inventory, project, credential, net_credential, machine_credential):
+def test_job_relaunch_prompts_not_accepted_response(post, get, inventory, project, credential, net_credential, machine_credential):
jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project)
jt.credentials.add(machine_credential)
jt_user = User.objects.create(username='jobtemplateuser')
@@ -58,21 +56,23 @@ def test_job_relaunch_prompts_not_accepted_response(
# Job has prompted credential, launch denied w/ message
job.launch_config.credentials.add(net_credential)
- r = post(reverse('api:job_relaunch', kwargs={'pk':job.pk}), {}, jt_user, expect=403)
+ r = post(reverse('api:job_relaunch', kwargs={'pk': job.pk}), {}, jt_user, expect=403)
@pytest.mark.django_db
def test_job_relaunch_permission_denied_response_other_user(get, post, inventory, project, alice, bob, survey_spec_factory):
- '''
+ """
Asserts custom permission denied message corresponding to
awx/main/tests/functional/test_rbac_job.py::TestJobRelaunchAccess::test_other_user_prompts
- '''
+ """
jt = JobTemplate.objects.create(
- name='testjt', inventory=inventory, project=project,
+ name='testjt',
+ inventory=inventory,
+ project=project,
ask_credential_on_launch=True,
ask_variables_on_launch=True,
survey_spec=survey_spec_factory([{'variable': 'secret_key', 'default': '6kQngg3h8lgiSTvIEb21', 'type': 'password'}]),
- survey_enabled=True
+ survey_enabled=True,
)
jt.execute_role.members.add(alice, bob)
with impersonate(bob):
@@ -83,43 +83,30 @@ def test_job_relaunch_permission_denied_response_other_user(get, post, inventory
assert r.data['summary_fields']['user_capabilities']['start']
# Job has prompted data, launch denied w/ message
- r = post(
- url=reverse('api:job_relaunch', kwargs={'pk':job.pk}),
- data={},
- user=alice,
- expect=403
- )
+ r = post(url=reverse('api:job_relaunch', kwargs={'pk': job.pk}), data={}, user=alice, expect=403)
assert 'Job was launched with secret prompts provided by another user' in r.data['detail']
@pytest.mark.django_db
def test_job_relaunch_without_creds(post, inventory, project, admin_user):
- jt = JobTemplate.objects.create(
- name='testjt', inventory=inventory,
- project=project
- )
+ jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project)
job = jt.create_unified_job()
- post(
- url=reverse('api:job_relaunch', kwargs={'pk':job.pk}),
- data={},
- user=admin_user,
- expect=201
- )
+ post(url=reverse('api:job_relaunch', kwargs={'pk': job.pk}), data={}, user=admin_user, expect=201)
@pytest.mark.django_db
-@pytest.mark.parametrize("status,hosts", [
- ('all', 'host1,host2,host3'),
- ('failed', 'host3'),
-])
+@pytest.mark.parametrize(
+ "status,hosts",
+ [
+ ('all', 'host1,host2,host3'),
+ ('failed', 'host3'),
+ ],
+)
def test_job_relaunch_on_failed_hosts(post, inventory, project, machine_credential, admin_user, status, hosts):
h1 = inventory.hosts.create(name='host1') # no-op
h2 = inventory.hosts.create(name='host2') # changed host
h3 = inventory.hosts.create(name='host3') # failed host
- jt = JobTemplate.objects.create(
- name='testjt', inventory=inventory,
- project=project
- )
+ jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project)
jt.credentials.add(machine_credential)
job = jt.create_unified_job(_eager_fields={'status': 'failed'}, limit='host1,host2,host3')
job.job_events.create(event='playbook_on_stats')
@@ -127,12 +114,7 @@ def test_job_relaunch_on_failed_hosts(post, inventory, project, machine_credenti
job.job_host_summaries.create(host=h2, failed=False, ok=0, changed=1, failures=0, host_name=h2.name)
job.job_host_summaries.create(host=h3, failed=False, ok=0, changed=0, failures=1, host_name=h3.name)
- r = post(
- url=reverse('api:job_relaunch', kwargs={'pk':job.pk}),
- data={'hosts': status},
- user=admin_user,
- expect=201
- )
+ r = post(url=reverse('api:job_relaunch', kwargs={'pk': job.pk}), data={'hosts': status}, user=admin_user, expect=201)
assert r.data.get('limit') == hosts
@@ -140,37 +122,25 @@ def test_job_relaunch_on_failed_hosts(post, inventory, project, machine_credenti
def test_summary_fields_recent_jobs(job_template, admin_user, get):
jobs = []
for i in range(13):
- jobs.append(Job.objects.create(
- job_template=job_template,
- status='failed',
- created=timezone.make_aware(datetime.datetime(2017, 3, 21, 9, i)),
- finished=timezone.make_aware(datetime.datetime(2017, 3, 21, 10, i))
- ))
- r = get(
- url = job_template.get_absolute_url(),
- user = admin_user,
- exepect = 200
- )
+ jobs.append(
+ Job.objects.create(
+ job_template=job_template,
+ status='failed',
+ created=timezone.make_aware(datetime.datetime(2017, 3, 21, 9, i)),
+ finished=timezone.make_aware(datetime.datetime(2017, 3, 21, 10, i)),
+ )
+ )
+ r = get(url=job_template.get_absolute_url(), user=admin_user, expect=200)
recent_jobs = r.data['summary_fields']['recent_jobs']
assert len(recent_jobs) == 10
- assert recent_jobs == [{
- 'id': job.id,
- 'status': 'failed',
- 'finished': job.finished,
- 'canceled_on': None,
- 'type': 'job'
- } for job in jobs[-10:][::-1]]
+ assert recent_jobs == [{'id': job.id, 'status': 'failed', 'finished': job.finished, 'canceled_on': None, 'type': 'job'} for job in jobs[-10:][::-1]]
@pytest.mark.django_db
def test_slice_jt_recent_jobs(slice_job_factory, admin_user, get):
workflow_job = slice_job_factory(3, spawn=True)
slice_jt = workflow_job.job_template
- r = get(
- url=slice_jt.get_absolute_url(),
- user=admin_user,
- expect=200
- )
+ r = get(url=slice_jt.get_absolute_url(), user=admin_user, expect=200)
job_ids = [entry['id'] for entry in r.data['summary_fields']['recent_jobs']]
# decision is that workflow job should be shown in the related jobs
# joblets of the workflow job should NOT be shown
@@ -180,11 +150,7 @@ def test_slice_jt_recent_jobs(slice_job_factory, admin_user, get):
@pytest.mark.django_db
def test_block_unprocessed_events(delete, admin_user, mocker):
time_of_finish = parse("Thu Feb 28 09:10:20 2013 -0500")
- job = Job.objects.create(
- emitted_events=1,
- status='finished',
- finished=time_of_finish
- )
+ job = Job.objects.create(emitted_events=1, status='finished', finished=time_of_finish)
request = mock.MagicMock()
class MockView(UnifiedJobDeletionMixin):
@@ -203,18 +169,10 @@ def test_block_unprocessed_events(delete, admin_user, mocker):
@pytest.mark.django_db
def test_block_related_unprocessed_events(mocker, organization, project, delete, admin_user):
- job_template = JobTemplate.objects.create(
- project=project,
- playbook='helloworld.yml'
- )
+ job_template = JobTemplate.objects.create(project=project, playbook='helloworld.yml')
time_of_finish = parse("Thu Feb 23 14:17:24 2012 -0500")
Job.objects.create(
- emitted_events=1,
- status='finished',
- finished=time_of_finish,
- job_template=job_template,
- project=project,
- organization=project.organization
+ emitted_events=1, status='finished', finished=time_of_finish, job_template=job_template, project=project, organization=project.organization
)
view = RelatedJobsPreventDeleteMixin()
time_of_request = time_of_finish + relativedelta(seconds=2)
@@ -225,32 +183,14 @@ def test_block_related_unprocessed_events(mocker, organization, project, delete,
@pytest.mark.django_db
def test_disallowed_http_update_methods(put, patch, post, inventory, project, admin_user):
- jt = JobTemplate.objects.create(
- name='test_disallowed_methods', inventory=inventory,
- project=project
- )
+ jt = JobTemplate.objects.create(name='test_disallowed_methods', inventory=inventory, project=project)
job = jt.create_unified_job()
- post(
- url=reverse('api:job_detail', kwargs={'pk': job.pk}),
- data={},
- user=admin_user,
- expect=405
- )
- put(
- url=reverse('api:job_detail', kwargs={'pk': job.pk}),
- data={},
- user=admin_user,
- expect=405
- )
- patch(
- url=reverse('api:job_detail', kwargs={'pk': job.pk}),
- data={},
- user=admin_user,
- expect=405
- )
+ post(url=reverse('api:job_detail', kwargs={'pk': job.pk}), data={}, user=admin_user, expect=405)
+ put(url=reverse('api:job_detail', kwargs={'pk': job.pk}), data={}, user=admin_user, expect=405)
+ patch(url=reverse('api:job_detail', kwargs={'pk': job.pk}), data={}, user=admin_user, expect=405)
-class TestControllerNode():
+class TestControllerNode:
@pytest.fixture
def project_update(self, project):
return ProjectUpdate.objects.create(project=project)
@@ -264,9 +204,7 @@ class TestControllerNode():
return AdHocCommand.objects.create(inventory=inventory)
@pytest.mark.django_db
- def test_field_controller_node_exists(self, sqlite_copy_expert,
- admin_user, job, project_update,
- inventory_update, adhoc, get, system_job_factory):
+ def test_field_controller_node_exists(self, sqlite_copy_expert, admin_user, job, project_update, inventory_update, adhoc, get, system_job_factory):
system_job = system_job_factory()
r = get(reverse('api:unified_job_list') + '?id={}'.format(job.id), admin_user, expect=200)
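
Black's "magic trailing comma" drives most of the reshaping above: a call whose arguments fit within the configured line length is joined onto a single line, while a trailing comma after the last argument keeps it exploded, one argument per line. A minimal sketch of both shapes, using the post fixture from the tests above and assuming the 160-column line length the collapsed calls suggest (this excerpt does not state the configured value):

    # no trailing comma and the call fits: black joins it onto one line
    post(url=reverse('api:job_relaunch', kwargs={'pk': job.pk}), data={}, user=admin_user, expect=201)

    # trailing comma after the last argument: black keeps one argument per line
    post(
        url=reverse('api:job_relaunch', kwargs={'pk': job.pk}),
        data={},
        user=admin_user,
        expect=201,
    )

The same rule explains the exploded @pytest.mark.parametrize lists in these hunks.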
diff --git a/awx/main/tests/functional/api/test_job_runtime_params.py b/awx/main/tests/functional/api/test_job_runtime_params.py
index 80b2fcadfb..33d91ded58 100644
--- a/awx/main/tests/functional/api/test_job_runtime_params.py
+++ b/awx/main/tests/functional/api/test_job_runtime_params.py
@@ -13,14 +13,7 @@ from awx.api.versioning import reverse
@pytest.fixture
def runtime_data(organization, credentialtype_ssh):
- cred_obj = Credential.objects.create(
- name='runtime-cred',
- credential_type=credentialtype_ssh,
- inputs={
- 'username': 'test_user2',
- 'password': 'pas4word2'
- }
- )
+ cred_obj = Credential.objects.create(name='runtime-cred', credential_type=credentialtype_ssh, inputs={'username': 'test_user2', 'password': 'pas4word2'})
inv_obj = organization.inventories.create(name="runtime-inv")
return dict(
extra_vars='{"job_launch_var": 4}',
@@ -31,7 +24,7 @@ def runtime_data(organization, credentialtype_ssh):
inventory=inv_obj.pk,
credentials=[cred_obj.pk],
diff_mode=True,
- verbosity=2
+ verbosity=2,
)
@@ -50,8 +43,8 @@ def job_template_prompts(project, inventory, machine_credential):
name='deploy-job-template',
# JT values must differ from prompted vals in order to register
limit='webservers',
- job_tags = 'foobar',
- skip_tags = 'barfoo',
+ job_tags='foobar',
+ skip_tags='barfoo',
ask_variables_on_launch=on_off,
ask_tags_on_launch=on_off,
ask_skip_tags_on_launch=on_off,
@@ -64,6 +57,7 @@ def job_template_prompts(project, inventory, machine_credential):
)
jt.credentials.add(machine_credential)
return jt
+
return rf
@@ -87,10 +81,10 @@ def job_template_prompts_null(project):
def data_to_internal(data):
- '''
+ """
returns internal representation, model objects, dictionaries, etc
as opposed to integer primary keys and JSON strings
- '''
+ """
internal = data.copy()
if 'extra_vars' in data:
internal['extra_vars'] = json.loads(data['extra_vars'])
@@ -111,8 +105,7 @@ def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, ad
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
- response = post(reverse('api:job_template_launch', kwargs={'pk':job_template.pk}),
- runtime_data, admin_user, expect=201)
+ response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
assert JobTemplate.create_unified_job.called
assert JobTemplate.create_unified_job.call_args == ()
@@ -142,8 +135,7 @@ def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, admi
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
- response = post(reverse('api:job_template_launch', kwargs={'pk':job_template.pk}),
- runtime_data, admin_user, expect=201)
+ response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
assert JobTemplate.create_unified_job.called
called_with = data_to_internal(runtime_data)
JobTemplate.create_unified_job.assert_called_with(**called_with)
@@ -163,10 +155,9 @@ def test_job_accept_empty_tags(job_template_prompts, post, admin_user, mocker):
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
- post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
- {'job_tags': '', 'skip_tags': ''}, admin_user, expect=201)
+ post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'job_tags': '', 'skip_tags': ''}, admin_user, expect=201)
assert JobTemplate.create_unified_job.called
- assert JobTemplate.create_unified_job.call_args == ({'job_tags':'', 'skip_tags':''},)
+ assert JobTemplate.create_unified_job.call_args == ({'job_tags': '', 'skip_tags': ''},)
mock_job.signal_start.assert_called_once()
@@ -189,8 +180,7 @@ def test_job_accept_prompted_vars_null(runtime_data, job_template_prompts_null,
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
- response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
- runtime_data, rando, expect=201)
+ response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, rando, expect=201)
assert JobTemplate.create_unified_job.called
expected_call = data_to_internal(runtime_data)
assert JobTemplate.create_unified_job.call_args == (expected_call,)
@@ -206,9 +196,11 @@ def test_job_reject_invalid_prompted_vars(runtime_data, job_template_prompts, po
job_template = job_template_prompts(True)
response = post(
- reverse('api:job_template_launch', kwargs={'pk':job_template.pk}),
- dict(job_type='foobicate', # foobicate is not a valid job type
- inventory=87865, credentials=[48474]), admin_user, expect=400)
+ reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
+ dict(job_type='foobicate', inventory=87865, credentials=[48474]), # foobicate is not a valid job type
+ admin_user,
+ expect=400,
+ )
assert response.data['job_type'] == [u'"foobicate" is not a valid choice.']
assert response.data['inventory'] == [u'Invalid pk "87865" - object does not exist.']
@@ -220,9 +212,7 @@ def test_job_reject_invalid_prompted_vars(runtime_data, job_template_prompts, po
def test_job_reject_invalid_prompted_extra_vars(runtime_data, job_template_prompts, post, admin_user):
job_template = job_template_prompts(True)
- response = post(
- reverse('api:job_template_launch', kwargs={'pk':job_template.pk}),
- dict(extra_vars='{"unbalanced brackets":'), admin_user, expect=400)
+ response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), dict(extra_vars='{"unbalanced brackets":'), admin_user, expect=400)
assert 'extra_vars' in response.data
assert 'Cannot parse as' in str(response.data['extra_vars'][0])
@@ -234,8 +224,7 @@ def test_job_launch_fails_without_inventory(deploy_jobtemplate, post, admin_user
deploy_jobtemplate.inventory = None
deploy_jobtemplate.save()
- response = post(reverse('api:job_template_launch',
- kwargs={'pk': deploy_jobtemplate.pk}), {}, admin_user, expect=400)
+ response = post(reverse('api:job_template_launch', kwargs={'pk': deploy_jobtemplate.pk}), {}, admin_user, expect=400)
assert 'inventory' in response.data['resources_needed_to_start'][0]
@@ -247,8 +236,7 @@ def test_job_launch_fails_without_inventory_access(job_template_prompts, runtime
job_template.execute_role.members.add(rando)
# Assure that giving an inventory without access to the inventory blocks the launch
- response = post(reverse('api:job_template_launch', kwargs={'pk':job_template.pk}),
- dict(inventory=runtime_data['inventory']), rando, expect=403)
+ response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), dict(inventory=runtime_data['inventory']), rando, expect=403)
assert response.data['detail'] == u'You do not have permission to perform this action.'
@@ -260,8 +248,7 @@ def test_job_launch_fails_without_credential_access(job_template_prompts, runtim
job_template.execute_role.members.add(rando)
# Assure that giving a credential without access blocks the launch
- post(reverse('api:job_template_launch', kwargs={'pk':job_template.pk}),
- dict(credentials=runtime_data['credentials']), rando, expect=403)
+ post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), dict(credentials=runtime_data['credentials']), rando, expect=403)
@pytest.mark.django_db
@@ -308,14 +295,7 @@ def test_job_launch_JT_enforces_unique_credentials_kinds(machine_credential, cre
"""
creds = []
for i in range(2):
- aws = Credential.objects.create(
- name='cred-%d' % i,
- credential_type=credentialtype_aws,
- inputs={
- 'username': 'test_user',
- 'password': 'pas4word'
- }
- )
+ aws = Credential.objects.create(name='cred-%d' % i, credential_type=credentialtype_aws, inputs={'username': 'test_user', 'password': 'pas4word'})
aws.save()
creds.append(aws)
@@ -343,25 +323,19 @@ def test_job_launch_with_empty_creds(machine_credential, vault_credential, deplo
@pytest.mark.django_db
-def test_job_launch_fails_with_missing_vault_password(machine_credential, vault_credential,
- deploy_jobtemplate, post, rando):
+def test_job_launch_fails_with_missing_vault_password(machine_credential, vault_credential, deploy_jobtemplate, post, rando):
vault_credential.inputs['vault_password'] = 'ASK'
vault_credential.save()
deploy_jobtemplate.credentials.add(vault_credential)
deploy_jobtemplate.execute_role.members.add(rando)
deploy_jobtemplate.save()
- response = post(
- reverse('api:job_template_launch', kwargs={'pk': deploy_jobtemplate.pk}),
- rando,
- expect=400
- )
+ response = post(reverse('api:job_template_launch', kwargs={'pk': deploy_jobtemplate.pk}), rando, expect=400)
assert response.data['passwords_needed_to_start'] == ['vault_password']
@pytest.mark.django_db
-def test_job_launch_with_added_cred_and_vault_password(credential, machine_credential, vault_credential,
- deploy_jobtemplate, post, admin):
+def test_job_launch_with_added_cred_and_vault_password(credential, machine_credential, vault_credential, deploy_jobtemplate, post, admin):
# see: https://github.com/ansible/awx/issues/8202
vault_credential.inputs['vault_password'] = 'ASK'
vault_credential.save()
@@ -382,14 +356,11 @@ def test_job_launch_with_added_cred_and_vault_password(credential, machine_crede
admin,
expect=201,
)
- signal_start.assert_called_with(**{
- 'vault_password': 'vault-me'
- })
+ signal_start.assert_called_with(**{'vault_password': 'vault-me'})
@pytest.mark.django_db
-def test_job_launch_with_multiple_launch_time_passwords(credential, machine_credential, vault_credential,
- deploy_jobtemplate, post, admin):
+def test_job_launch_with_multiple_launch_time_passwords(credential, machine_credential, vault_credential, deploy_jobtemplate, post, admin):
# see: https://github.com/ansible/awx/issues/8202
deploy_jobtemplate.ask_credential_on_launch = True
deploy_jobtemplate.credentials.remove(credential)
@@ -397,11 +368,7 @@ def test_job_launch_with_multiple_launch_time_passwords(credential, machine_cred
deploy_jobtemplate.credentials.add(vault_credential)
deploy_jobtemplate.save()
- second_machine_credential = Credential(
- name='SSH #2',
- credential_type=machine_credential.credential_type,
- inputs={'password': 'ASK'}
- )
+ second_machine_credential = Credential(name='SSH #2', credential_type=machine_credential.credential_type, inputs={'password': 'ASK'})
second_machine_credential.save()
vault_credential.inputs['vault_password'] = 'ASK'
@@ -418,37 +385,26 @@ def test_job_launch_with_multiple_launch_time_passwords(credential, machine_cred
admin,
expect=201,
)
- signal_start.assert_called_with(**{
- 'ssh_password': 'ssh-me',
- 'vault_password': 'vault-me',
- })
+ signal_start.assert_called_with(
+ **{
+ 'ssh_password': 'ssh-me',
+ 'vault_password': 'vault-me',
+ }
+ )
@pytest.mark.django_db
-@pytest.mark.parametrize('launch_kwargs', [
- {'vault_password.abc': 'vault-me-1', 'vault_password.xyz': 'vault-me-2'},
- {'credential_passwords': {'vault_password.abc': 'vault-me-1', 'vault_password.xyz': 'vault-me-2'}}
-])
-def test_job_launch_fails_with_missing_multivault_password(machine_credential, vault_credential,
- deploy_jobtemplate, launch_kwargs,
- get, post, rando):
- vault_cred_first = Credential(
- name='Vault #1',
- credential_type=vault_credential.credential_type,
- inputs={
- 'vault_password': 'ASK',
- 'vault_id': 'abc'
- }
- )
+@pytest.mark.parametrize(
+ 'launch_kwargs',
+ [
+ {'vault_password.abc': 'vault-me-1', 'vault_password.xyz': 'vault-me-2'},
+ {'credential_passwords': {'vault_password.abc': 'vault-me-1', 'vault_password.xyz': 'vault-me-2'}},
+ ],
+)
+def test_job_launch_fails_with_missing_multivault_password(machine_credential, vault_credential, deploy_jobtemplate, launch_kwargs, get, post, rando):
+ vault_cred_first = Credential(name='Vault #1', credential_type=vault_credential.credential_type, inputs={'vault_password': 'ASK', 'vault_id': 'abc'})
vault_cred_first.save()
- vault_cred_second = Credential(
- name='Vault #2',
- credential_type=vault_credential.credential_type,
- inputs={
- 'vault_password': 'ASK',
- 'vault_id': 'xyz'
- }
- )
+ vault_cred_second = Credential(name='Vault #2', credential_type=vault_credential.credential_type, inputs={'vault_password': 'ASK', 'vault_id': 'xyz'})
vault_cred_second.save()
deploy_jobtemplate.credentials.add(vault_cred_first)
deploy_jobtemplate.credentials.add(vault_cred_second)
@@ -463,53 +419,43 @@ def test_job_launch_fails_with_missing_multivault_password(machine_credential, v
'passwords_needed': ['vault_password.abc'],
'vault_id': u'abc',
'name': u'Vault #1',
- 'id': vault_cred_first.id
+ 'id': vault_cred_first.id,
} in resp.data['defaults']['credentials']
assert {
'credential_type': vault_cred_second.credential_type_id,
'passwords_needed': ['vault_password.xyz'],
'vault_id': u'xyz',
'name': u'Vault #2',
- 'id': vault_cred_second.id
+ 'id': vault_cred_second.id,
} in resp.data['defaults']['credentials']
assert resp.data['passwords_needed_to_start'] == ['vault_password.abc', 'vault_password.xyz']
- assert sum([
- cred['passwords_needed'] for cred in resp.data['defaults']['credentials']
- if cred['credential_type'] == vault_credential.credential_type_id
- ], []) == ['vault_password.abc', 'vault_password.xyz']
+ assert sum(
+ [cred['passwords_needed'] for cred in resp.data['defaults']['credentials'] if cred['credential_type'] == vault_credential.credential_type_id], []
+ ) == ['vault_password.abc', 'vault_password.xyz']
resp = post(url, rando, expect=400)
assert resp.data['passwords_needed_to_start'] == ['vault_password.abc', 'vault_password.xyz']
with mock.patch.object(Job, 'signal_start') as signal_start:
post(url, launch_kwargs, rando, expect=201)
- signal_start.assert_called_with(**{
- 'vault_password.abc': 'vault-me-1',
- 'vault_password.xyz': 'vault-me-2'
- })
+ signal_start.assert_called_with(**{'vault_password.abc': 'vault-me-1', 'vault_password.xyz': 'vault-me-2'})
@pytest.mark.django_db
-def test_job_launch_fails_with_missing_ssh_password(machine_credential, deploy_jobtemplate, post,
- rando):
+def test_job_launch_fails_with_missing_ssh_password(machine_credential, deploy_jobtemplate, post, rando):
machine_credential.inputs['password'] = 'ASK'
machine_credential.save()
deploy_jobtemplate.credentials.add(machine_credential)
deploy_jobtemplate.execute_role.members.add(rando)
deploy_jobtemplate.save()
- response = post(
- reverse('api:job_template_launch', kwargs={'pk': deploy_jobtemplate.pk}),
- rando,
- expect=400
- )
+ response = post(reverse('api:job_template_launch', kwargs={'pk': deploy_jobtemplate.pk}), rando, expect=400)
assert response.data['passwords_needed_to_start'] == ['ssh_password']
@pytest.mark.django_db
-def test_job_launch_fails_with_missing_vault_and_ssh_password(machine_credential, vault_credential,
- deploy_jobtemplate, post, rando):
+def test_job_launch_fails_with_missing_vault_and_ssh_password(machine_credential, vault_credential, deploy_jobtemplate, post, rando):
vault_credential.inputs['vault_password'] = 'ASK'
vault_credential.save()
machine_credential.inputs['password'] = 'ASK'
@@ -519,17 +465,12 @@ def test_job_launch_fails_with_missing_vault_and_ssh_password(machine_credential
deploy_jobtemplate.execute_role.members.add(rando)
deploy_jobtemplate.save()
- response = post(
- reverse('api:job_template_launch', kwargs={'pk': deploy_jobtemplate.pk}),
- rando,
- expect=400
- )
+ response = post(reverse('api:job_template_launch', kwargs={'pk': deploy_jobtemplate.pk}), rando, expect=400)
assert sorted(response.data['passwords_needed_to_start']) == ['ssh_password', 'vault_password']
@pytest.mark.django_db
-def test_job_launch_pass_with_prompted_vault_password(machine_credential, vault_credential,
- deploy_jobtemplate, post, rando):
+def test_job_launch_pass_with_prompted_vault_password(machine_credential, vault_credential, deploy_jobtemplate, post, rando):
vault_credential.inputs['vault_password'] = 'ASK'
vault_credential.save()
deploy_jobtemplate.credentials.add(machine_credential)
@@ -538,12 +479,7 @@ def test_job_launch_pass_with_prompted_vault_password(machine_credential, vault_
deploy_jobtemplate.save()
with mock.patch.object(Job, 'signal_start') as signal_start:
- post(
- reverse('api:job_template_launch', kwargs={'pk': deploy_jobtemplate.pk}),
- {'vault_password': 'vault-me'},
- rando,
- expect=201
- )
+ post(reverse('api:job_template_launch', kwargs={'pk': deploy_jobtemplate.pk}), {'vault_password': 'vault-me'}, rando, expect=201)
signal_start.assert_called_with(vault_password='vault-me')
@@ -558,8 +494,7 @@ def test_job_launch_JT_with_credentials(machine_credential, credential, net_cred
assert validated, serializer.errors
kv['credentials'] = [credential, net_credential, machine_credential, kube_credential] # convert to internal value
- prompted_fields, ignored_fields, errors = deploy_jobtemplate._accept_or_ignore_job_kwargs(
- _exclude_errors=['required', 'prompts'], **kv)
+ prompted_fields, ignored_fields, errors = deploy_jobtemplate._accept_or_ignore_job_kwargs(_exclude_errors=['required', 'prompts'], **kv)
job_obj = deploy_jobtemplate.create_unified_job(**prompted_fields)
creds = job_obj.credentials.all()
@@ -574,17 +509,13 @@ def test_job_launch_JT_with_credentials(machine_credential, credential, net_cred
def test_job_branch_rejected_and_accepted(deploy_jobtemplate):
deploy_jobtemplate.ask_scm_branch_on_launch = True
deploy_jobtemplate.save()
- prompted_fields, ignored_fields, errors = deploy_jobtemplate._accept_or_ignore_job_kwargs(
- scm_branch='foobar'
- )
+ prompted_fields, ignored_fields, errors = deploy_jobtemplate._accept_or_ignore_job_kwargs(scm_branch='foobar')
assert 'scm_branch' in ignored_fields
assert 'does not allow override of branch' in errors['scm_branch']
deploy_jobtemplate.project.allow_override = True
deploy_jobtemplate.project.save()
- prompted_fields, ignored_fields, errors = deploy_jobtemplate._accept_or_ignore_job_kwargs(
- scm_branch='foobar'
- )
+ prompted_fields, ignored_fields, errors = deploy_jobtemplate._accept_or_ignore_job_kwargs(scm_branch='foobar')
assert not ignored_fields
assert prompted_fields['scm_branch'] == 'foobar'
@@ -602,11 +533,13 @@ def test_job_launch_unprompted_vars_with_survey(mocker, survey_spec_factory, job
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
response = post(
- reverse('api:job_template_launch', kwargs={'pk':job_template.pk}),
+ reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
- admin_user, expect=201)
+ admin_user,
+ expect=201,
+ )
assert JobTemplate.create_unified_job.called
- assert JobTemplate.create_unified_job.call_args == ({'extra_vars':{'survey_var': 4}},)
+ assert JobTemplate.create_unified_job.call_args == ({'extra_vars': {'survey_var': 4}},)
job_id = response.data['job']
assert job_id == 968
@@ -632,14 +565,14 @@ def test_callback_accept_prompted_extra_var(mocker, survey_spec_factory, job_tem
post(
reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
- admin_user, expect=201, format='json')
+ admin_user,
+ expect=201,
+ format='json',
+ )
assert UnifiedJobTemplate.create_unified_job.called
call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
call_args.pop('_eager_fields', None) # internal purposes
- assert call_args == {
- 'extra_vars': {'survey_var': 4, 'job_launch_var': 3},
- 'limit': 'single-host'
- }
+ assert call_args == {'extra_vars': {'survey_var': 4, 'job_launch_var': 3}, 'limit': 'single-host'}
mock_job.signal_start.assert_called_once()
@@ -657,15 +590,16 @@ def test_callback_ignore_unprompted_extra_var(mocker, survey_spec_factory, job_t
with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]):
post(
- reverse('api:job_template_callback', kwargs={'pk':job_template.pk}),
+ reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
- admin_user, expect=201, format='json')
+ admin_user,
+ expect=201,
+ format='json',
+ )
assert UnifiedJobTemplate.create_unified_job.called
call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
call_args.pop('_eager_fields', None) # internal purposes
- assert call_args == {
- 'limit': 'single-host'
- }
+ assert call_args == {'limit': 'single-host'}
mock_job.signal_start.assert_called_once()
@@ -679,8 +613,7 @@ def test_callback_find_matching_hosts(mocker, get, job_template_prompts, admin_u
host_with_alias = Host(name='localhost', inventory=job_template.inventory)
host_with_alias.save()
with mocker.patch('awx.main.access.BaseAccess.check_license'):
- r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
- user=admin_user, expect=200)
+ r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
assert tuple(r.data['matching_hosts']) == ('localhost',)
@@ -693,6 +626,5 @@ def test_callback_extra_var_takes_priority_over_host_name(mocker, get, job_templ
host_with_alias = Host(name='localhost', variables={'ansible_host': 'foobar'}, inventory=job_template.inventory)
host_with_alias.save()
with mocker.patch('awx.main.access.BaseAccess.check_license'):
- r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
- user=admin_user, expect=200)
+ r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
assert not r.data['matching_hosts']
diff --git a/awx/main/tests/functional/api/test_job_template.py b/awx/main/tests/functional/api/test_job_template.py
index 2eeefa7eed..dc6ce0c7a0 100644
--- a/awx/main/tests/functional/api/test_job_template.py
+++ b/awx/main/tests/functional/api/test_job_template.py
@@ -15,11 +15,12 @@ from rest_framework.exceptions import ValidationError
@pytest.mark.django_db
@pytest.mark.parametrize(
- "grant_project, grant_inventory, expect", [
+ "grant_project, grant_inventory, expect",
+ [
(True, True, 201),
(True, False, 403),
(False, True, 403),
- ]
+ ],
)
def test_create(post, project, machine_credential, inventory, alice, grant_project, grant_inventory, expect):
if grant_project:
@@ -30,14 +31,9 @@ def test_create(post, project, machine_credential, inventory, alice, grant_proje
post(
url=reverse('api:job_template_list'),
- data={
- 'name': 'Some name',
- 'project': project.id,
- 'inventory': inventory.id,
- 'playbook': 'helloworld.yml'
- },
+ data={'name': 'Some name', 'project': project.id, 'inventory': inventory.id, 'playbook': 'helloworld.yml'},
user=alice,
- expect=expect
+ expect=expect,
)
@@ -45,20 +41,24 @@ def test_create(post, project, machine_credential, inventory, alice, grant_proje
@pytest.mark.parametrize('kind', ['scm', 'insights'])
def test_invalid_credential_kind_xfail(get, post, organization_factory, job_template_factory, kind):
objs = organization_factory("org", superusers=['admin'])
- jt = job_template_factory("jt", organization=objs.organization,
- inventory='test_inv', project='test_proj').job_template
+ jt = job_template_factory("jt", organization=objs.organization, inventory='test_inv', project='test_proj').job_template
url = reverse('api:job_template_credentials_list', kwargs={'pk': jt.pk})
cred_type = CredentialType.defaults[kind]()
cred_type.save()
- response = post(url, {
- 'name': 'My Cred',
- 'credential_type': cred_type.pk,
- 'inputs': {
- 'username': 'bob',
- 'password': 'secret',
- }
- }, objs.superusers.admin, expect=400)
+ response = post(
+ url,
+ {
+ 'name': 'My Cred',
+ 'credential_type': cred_type.pk,
+ 'inputs': {
+ 'username': 'bob',
+ 'password': 'secret',
+ },
+ },
+ objs.superusers.admin,
+ expect=400,
+ )
assert 'Cannot assign a Credential of kind `{}`.'.format(kind) in response.data.values()
@@ -77,18 +77,19 @@ def test_create_with_forks_exceeding_maximum_xfail(alice, post, project, invento
'forks': 11,
},
user=alice,
- expect=400
+ expect=400,
)
assert 'Maximum number of forks (10) exceeded' in str(response.data)
@pytest.mark.django_db
@pytest.mark.parametrize(
- "grant_project, grant_inventory, expect", [
+ "grant_project, grant_inventory, expect",
+ [
(True, True, 200),
(True, False, 403),
(False, True, 403),
- ]
+ ],
)
def test_edit_sensitive_fields(patch, job_template_factory, alice, grant_project, grant_inventory, expect):
objs = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred')
@@ -99,23 +100,25 @@ def test_edit_sensitive_fields(patch, job_template_factory, alice, grant_project
if grant_inventory:
objs.inventory.use_role.members.add(alice)
- patch(url=reverse('api:job_template_detail', kwargs={'pk': objs.job_template.id}), data={
- 'name': 'Some name',
- 'project': objs.project.id,
- 'inventory': objs.inventory.id,
- 'playbook': 'alt-helloworld.yml',
- }, user=alice, expect=expect)
+ patch(
+ url=reverse('api:job_template_detail', kwargs={'pk': objs.job_template.id}),
+ data={
+ 'name': 'Some name',
+ 'project': objs.project.id,
+ 'inventory': objs.inventory.id,
+ 'playbook': 'alt-helloworld.yml',
+ },
+ user=alice,
+ expect=expect,
+ )
@pytest.mark.django_db
def test_reject_dict_extra_vars_patch(patch, job_template_factory, admin_user):
# Expect a string for extra_vars, raise 400 in this case that would
# otherwise have been saved incorrectly
- jt = job_template_factory(
- 'jt', organization='org1', project='prj', inventory='inv', credential='cred'
- ).job_template
- patch(reverse('api:job_template_detail', kwargs={'pk': jt.id}),
- {'extra_vars': {'foo': 5}}, admin_user, expect=400)
+ jt = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred').job_template
+ patch(reverse('api:job_template_detail', kwargs={'pk': jt.id}), {'extra_vars': {'foo': 5}}, admin_user, expect=400)
@pytest.mark.django_db
@@ -126,31 +129,33 @@ def test_edit_playbook(patch, job_template_factory, alice):
objs.credential.use_role.members.add(alice)
objs.inventory.use_role.members.add(alice)
- patch(reverse('api:job_template_detail', kwargs={'pk': objs.job_template.id}), {
- 'playbook': 'alt-helloworld.yml',
- }, alice, expect=200)
+ patch(
+ reverse('api:job_template_detail', kwargs={'pk': objs.job_template.id}),
+ {
+ 'playbook': 'alt-helloworld.yml',
+ },
+ alice,
+ expect=200,
+ )
objs.inventory.use_role.members.remove(alice)
- patch(reverse('api:job_template_detail', kwargs={'pk': objs.job_template.id}), {
- 'playbook': 'helloworld.yml',
- }, alice, expect=403)
+ patch(
+ reverse('api:job_template_detail', kwargs={'pk': objs.job_template.id}),
+ {
+ 'playbook': 'helloworld.yml',
+ },
+ alice,
+ expect=403,
+ )
@pytest.mark.django_db
-@pytest.mark.parametrize('json_body',
- ["abc", True, False, "{\"name\": \"test\"}", 100, .5])
+@pytest.mark.parametrize('json_body', ["abc", True, False, "{\"name\": \"test\"}", 100, 0.5])
def test_invalid_json_body(patch, job_template_factory, alice, json_body):
objs = job_template_factory('jt', organization='org1')
objs.job_template.admin_role.members.add(alice)
- resp = patch(
- reverse('api:job_template_detail', kwargs={'pk': objs.job_template.id}),
- json_body,
- alice,
- expect=400
- )
- assert resp.data['detail'] == (
- u'JSON parse error - not a JSON object'
- )
+ resp = patch(reverse('api:job_template_detail', kwargs={'pk': objs.job_template.id}), json_body, alice, expect=400)
+ assert resp.data['detail'] == (u'JSON parse error - not a JSON object')
@pytest.mark.django_db
@@ -159,44 +164,42 @@ def test_edit_nonsenstive(patch, job_template_factory, alice):
jt = objs.job_template
jt.admin_role.members.add(alice)
- res = patch(reverse('api:job_template_detail', kwargs={'pk': jt.id}), {
- 'name': 'updated',
- 'description': 'bar',
- 'forks': 14,
- 'limit': 'something',
- 'verbosity': 5,
- 'extra_vars': '---',
- 'job_tags': 'sometags',
- 'force_handlers': True,
- 'skip_tags': 'thistag,thattag',
- 'ask_variables_on_launch':True,
- 'ask_tags_on_launch':True,
- 'ask_skip_tags_on_launch':True,
- 'ask_job_type_on_launch':True,
- 'ask_inventory_on_launch':True,
- 'ask_credential_on_launch': True,
- }, alice, expect=200)
+ res = patch(
+ reverse('api:job_template_detail', kwargs={'pk': jt.id}),
+ {
+ 'name': 'updated',
+ 'description': 'bar',
+ 'forks': 14,
+ 'limit': 'something',
+ 'verbosity': 5,
+ 'extra_vars': '---',
+ 'job_tags': 'sometags',
+ 'force_handlers': True,
+ 'skip_tags': 'thistag,thattag',
+ 'ask_variables_on_launch': True,
+ 'ask_tags_on_launch': True,
+ 'ask_skip_tags_on_launch': True,
+ 'ask_job_type_on_launch': True,
+ 'ask_inventory_on_launch': True,
+ 'ask_credential_on_launch': True,
+ },
+ alice,
+ expect=200,
+ )
assert res.data['name'] == 'updated'
@pytest.fixture
def jt_copy_edit(job_template_factory, project):
- objects = job_template_factory(
- 'copy-edit-job-template',
- project=project)
+ objects = job_template_factory('copy-edit-job-template', project=project)
return objects.job_template
@pytest.mark.django_db
def test_job_template_role_user(post, organization_factory, job_template_factory):
- objects = organization_factory("org",
- superusers=['admin'],
- users=['test'])
+ objects = organization_factory("org", superusers=['admin'], users=['test'])
- jt_objects = job_template_factory("jt",
- organization=objects.organization,
- inventory='test_inv',
- project='test_proj')
+ jt_objects = job_template_factory("jt", organization=objects.organization, inventory='test_inv', project='test_proj')
url = reverse('api:user_roles_list', kwargs={'pk': objects.users.test.pk})
response = post(url, dict(id=jt_objects.job_template.execute_role.pk), objects.superusers.admin)
@@ -209,7 +212,7 @@ def test_jt_admin_copy_edit_functional(jt_copy_edit, rando, get, post):
jt_copy_edit.admin_role.members.add(rando)
jt_copy_edit.save()
- get_response = get(reverse('api:job_template_detail', kwargs={'pk':jt_copy_edit.pk}), user=rando)
+ get_response = get(reverse('api:job_template_detail', kwargs={'pk': jt_copy_edit.pk}), user=rando)
assert get_response.status_code == 200
post_data = get_response.data
@@ -219,44 +222,30 @@ def test_jt_admin_copy_edit_functional(jt_copy_edit, rando, get, post):
@pytest.mark.django_db
-def test_launch_with_pending_deletion_inventory(get, post, organization_factory,
- job_template_factory, machine_credential,
- credential, net_credential):
+def test_launch_with_pending_deletion_inventory(get, post, organization_factory, job_template_factory, machine_credential, credential, net_credential):
objs = organization_factory("org", superusers=['admin'])
- jt = job_template_factory("jt", organization=objs.organization, credential='c',
- inventory='test_inv', project='test_proj').job_template
+ jt = job_template_factory("jt", organization=objs.organization, credential='c', inventory='test_inv', project='test_proj').job_template
jt.inventory.pending_deletion = True
jt.inventory.save()
- resp = post(
- reverse('api:job_template_launch', kwargs={'pk': jt.pk}),
- objs.superusers.admin, expect=400
- )
+ resp = post(reverse('api:job_template_launch', kwargs={'pk': jt.pk}), objs.superusers.admin, expect=400)
assert resp.data['inventory'] == ['The inventory associated with this Job Template is being deleted.']
@pytest.mark.django_db
def test_launch_with_pending_deletion_inventory_workflow(get, post, organization, inventory, admin_user):
- wfjt = WorkflowJobTemplate.objects.create(
- name='wfjt',
- organization=organization,
- inventory=inventory
- )
+ wfjt = WorkflowJobTemplate.objects.create(name='wfjt', organization=organization, inventory=inventory)
inventory.pending_deletion = True
inventory.save()
- resp = post(
- url=reverse('api:workflow_job_template_launch', kwargs={'pk': wfjt.pk}),
- user=admin_user, expect=400
- )
+ resp = post(url=reverse('api:workflow_job_template_launch', kwargs={'pk': wfjt.pk}), user=admin_user, expect=400)
assert resp.data['inventory'] == ['The inventory associated with this Workflow is being deleted.']
@pytest.mark.django_db
def test_jt_without_project(inventory):
- data = dict(name="Test", job_type="run",
- inventory=inventory.pk, project=None)
+ data = dict(name="Test", job_type="run", inventory=inventory.pk, project=None)
serializer = JobTemplateSerializer(data=data)
assert not serializer.is_valid()
assert "project" in serializer.errors
@@ -268,12 +257,7 @@ def test_jt_without_project(inventory):
@pytest.mark.django_db
def test_disallow_template_delete_on_running_job(job_template_factory, delete, admin_user):
- objects = job_template_factory('jt',
- credential='c',
- job_type="run",
- project='p',
- inventory='i',
- organization='o')
+ objects = job_template_factory('jt', credential='c', job_type="run", project='p', inventory='i', organization='o')
objects.job_template.create_unified_job()
delete_response = delete(reverse('api:job_template_detail', kwargs={'pk': objects.job_template.pk}), user=admin_user)
assert delete_response.status_code == 409
@@ -307,58 +291,33 @@ def test_jt_organization_follows_project(post, patch, admin_user):
project2 = Project.objects.create(name='proj2', organization=org2, **project_common)
r = post(
url=reverse('api:job_template_list'),
- data={
- "name": "fooo",
- "ask_inventory_on_launch": True,
- "project": project1.pk,
- "playbook": "helloworld.yml"
- },
+ data={"name": "fooo", "ask_inventory_on_launch": True, "project": project1.pk, "playbook": "helloworld.yml"},
user=admin_user,
- expect=201
+ expect=201,
)
data = r.data
assert data['organization'] == project1.organization_id
data['project'] = project2.id
jt = JobTemplate.objects.get(pk=data['id'])
- r = patch(
- url=jt.get_absolute_url(),
- data=data,
- user=admin_user,
- expect=200
- )
+ r = patch(url=jt.get_absolute_url(), data=data, user=admin_user, expect=200)
assert r.data['organization'] == project2.organization_id
@pytest.mark.django_db
def test_jt_organization_field_is_read_only(patch, post, project, admin_user):
org = project.organization
- jt = JobTemplate.objects.create(
- name='foo_jt',
- ask_inventory_on_launch=True,
- project=project, playbook='helloworld.yml'
- )
+ jt = JobTemplate.objects.create(name='foo_jt', ask_inventory_on_launch=True, project=project, playbook='helloworld.yml')
org2 = Organization.objects.create(name='foo2')
- r = patch(
- url=jt.get_absolute_url(),
- data={'organization': org2.id},
- user=admin_user,
- expect=200
- )
+ r = patch(url=jt.get_absolute_url(), data={'organization': org2.id}, user=admin_user, expect=200)
assert r.data['organization'] == org.id
assert JobTemplate.objects.get(pk=jt.pk).organization == org
# similar test, but on creation
r = post(
url=reverse('api:job_template_list'),
- data={
- 'name': 'foobar',
- 'project': project.id,
- 'organization': org2.id,
- 'ask_inventory_on_launch': True,
- 'playbook': 'helloworld.yml'
- },
+ data={'name': 'foobar', 'project': project.id, 'organization': org2.id, 'ask_inventory_on_launch': True, 'playbook': 'helloworld.yml'},
user=admin_user,
- expect=201
+ expect=201,
)
assert r.data['organization'] == org.id
assert JobTemplate.objects.get(pk=r.data['id']).organization == org
@@ -366,10 +325,7 @@ def test_jt_organization_field_is_read_only(patch, post, project, admin_user):
@pytest.mark.django_db
def test_callback_disallowed_null_inventory(project):
- jt = JobTemplate.objects.create(
- name='test-jt', inventory=None,
- ask_inventory_on_launch=True,
- project=project, playbook='helloworld.yml')
+ jt = JobTemplate.objects.create(name='test-jt', inventory=None, ask_inventory_on_launch=True, project=project, playbook='helloworld.yml')
serializer = JobTemplateSerializer(jt)
assert serializer.instance == jt
with pytest.raises(ValidationError) as exc:
@@ -381,15 +337,9 @@ def test_callback_disallowed_null_inventory(project):
def test_job_template_branch_error(project, inventory, post, admin_user):
r = post(
url=reverse('api:job_template_list'),
- data={
- "name": "fooo",
- "inventory": inventory.pk,
- "project": project.pk,
- "playbook": "helloworld.yml",
- "scm_branch": "foobar"
- },
+ data={"name": "fooo", "inventory": inventory.pk, "project": project.pk, "playbook": "helloworld.yml", "scm_branch": "foobar"},
user=admin_user,
- expect=400
+ expect=400,
)
assert 'Project does not allow overriding branch' in str(r.data['scm_branch'])
@@ -398,14 +348,8 @@ def test_job_template_branch_error(project, inventory, post, admin_user):
def test_job_template_branch_prompt_error(project, inventory, post, admin_user):
r = post(
url=reverse('api:job_template_list'),
- data={
- "name": "fooo",
- "inventory": inventory.pk,
- "project": project.pk,
- "playbook": "helloworld.yml",
- "ask_scm_branch_on_launch": True
- },
+ data={"name": "fooo", "inventory": inventory.pk, "project": project.pk, "playbook": "helloworld.yml", "ask_scm_branch_on_launch": True},
user=admin_user,
- expect=400
+ expect=400,
)
assert 'Project does not allow overriding branch' in str(r.data['ask_scm_branch_on_launch'])
diff --git a/awx/main/tests/functional/api/test_notifications.py b/awx/main/tests/functional/api/test_notifications.py
index b7e9af9fcd..92f6045191 100644
--- a/awx/main/tests/functional/api/test_notifications.py
+++ b/awx/main/tests/functional/api/test_notifications.py
@@ -14,10 +14,7 @@ def test_get_org_running_notification(get, admin, organization):
@pytest.mark.django_db
def test_post_org_running_notification(get, post, admin, notification_template, organization):
url = reverse('api:organization_notification_templates_started_list', kwargs={'pk': organization.pk})
- response = post(url,
- dict(id=notification_template.id,
- associate=True),
- admin)
+ response = post(url, dict(id=notification_template.id, associate=True), admin)
assert response.status_code == 204
response = get(url, admin)
assert response.status_code == 200
@@ -35,10 +32,7 @@ def test_get_project_running_notification(get, admin, project):
@pytest.mark.django_db
def test_post_project_running_notification(get, post, admin, notification_template, project):
url = reverse('api:project_notification_templates_started_list', kwargs={'pk': project.pk})
- response = post(url,
- dict(id=notification_template.id,
- associate=True),
- admin)
+ response = post(url, dict(id=notification_template.id, associate=True), admin)
assert response.status_code == 204
response = get(url, admin)
assert response.status_code == 200
@@ -56,10 +50,7 @@ def test_get_inv_src_running_notification(get, admin, inventory_source):
@pytest.mark.django_db
def test_post_inv_src_running_notification(get, post, admin, notification_template, inventory_source):
url = reverse('api:inventory_source_notification_templates_started_list', kwargs={'pk': inventory_source.pk})
- response = post(url,
- dict(id=notification_template.id,
- associate=True),
- admin)
+ response = post(url, dict(id=notification_template.id, associate=True), admin)
assert response.status_code == 204
response = get(url, admin)
assert response.status_code == 200
@@ -77,10 +68,7 @@ def test_get_jt_running_notification(get, admin, job_template):
@pytest.mark.django_db
def test_post_jt_running_notification(get, post, admin, notification_template, job_template):
url = reverse('api:job_template_notification_templates_started_list', kwargs={'pk': job_template.pk})
- response = post(url,
- dict(id=notification_template.id,
- associate=True),
- admin)
+ response = post(url, dict(id=notification_template.id, associate=True), admin)
assert response.status_code == 204
response = get(url, admin)
assert response.status_code == 200
@@ -98,10 +86,7 @@ def test_get_sys_jt_running_notification(get, admin, system_job_template):
@pytest.mark.django_db
def test_post_sys_jt_running_notification(get, post, admin, notification_template, system_job_template):
url = reverse('api:system_job_template_notification_templates_started_list', kwargs={'pk': system_job_template.pk})
- response = post(url,
- dict(id=notification_template.id,
- associate=True),
- admin)
+ response = post(url, dict(id=notification_template.id, associate=True), admin)
assert response.status_code == 204
response = get(url, admin)
assert response.status_code == 200
@@ -119,10 +104,7 @@ def test_get_wfjt_running_notification(get, admin, workflow_job_template):
@pytest.mark.django_db
def test_post_wfjt_running_notification(get, post, admin, notification_template, workflow_job_template):
url = reverse('api:workflow_job_template_notification_templates_started_list', kwargs={'pk': workflow_job_template.pk})
- response = post(url,
- dict(id=notification_template.id,
- associate=True),
- admin)
+ response = post(url, dict(id=notification_template.id, associate=True), admin)
assert response.status_code == 204
response = get(url, admin)
assert response.status_code == 200
@@ -148,10 +130,7 @@ def test_get_wfjt_approval_notification(get, admin, workflow_job_template):
@pytest.mark.django_db
def test_post_wfjt_approval_notification(get, post, admin, notification_template, workflow_job_template):
url = reverse('api:workflow_job_template_notification_templates_approvals_list', kwargs={'pk': workflow_job_template.pk})
- response = post(url,
- dict(id=notification_template.id,
- associate=True),
- admin)
+ response = post(url, dict(id=notification_template.id, associate=True), admin)
assert response.status_code == 204
response = get(url, admin)
assert response.status_code == 200
@@ -169,10 +148,7 @@ def test_get_org_approval_notification(get, admin, organization):
@pytest.mark.django_db
def test_post_org_approval_notification(get, post, admin, notification_template, organization):
url = reverse('api:organization_notification_templates_approvals_list', kwargs={'pk': organization.pk})
- response = post(url,
- dict(id=notification_template.id,
- associate=True),
- admin)
+ response = post(url, dict(id=notification_template.id, associate=True), admin)
assert response.status_code == 204
response = get(url, admin)
assert response.status_code == 200
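
Beyond line wrapping, a few mechanical normalizations repeat throughout these hunks. The pairs below are a small runnable sketch of the ones visible here; the *_before/*_after names and the create() stand-in are illustrative, not taken from this patch:

    # before/after pairs for the mechanical rewrites seen in these hunks


    class ControllerNodeBefore():  # empty parentheses on a base-less class
        pass


    class ControllerNodeAfter:  # black drops the redundant parentheses
        pass


    half_before = .5  # bare leading-dot float literal, as in the parametrize above
    half_after = 0.5  # black adds the leading zero


    def create(**kwargs):  # illustrative stand-in for JobTemplate.objects.create
        return kwargs


    create(job_tags = 'foobar')  # spaces around '=' in a keyword argument
    create(job_tags='foobar')  # black removes them inside call sites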
diff --git a/awx/main/tests/functional/api/test_oauth.py b/awx/main/tests/functional/api/test_oauth.py
index 0c185bd386..4387f06b9c 100644
--- a/awx/main/tests/functional/api/test_oauth.py
+++ b/awx/main/tests/functional/api/test_oauth.py
@@ -10,8 +10,7 @@ from django.utils.encoding import smart_str, smart_bytes
from awx.main.utils.encryption import decrypt_value, get_encryption_key
from awx.api.versioning import reverse, drf_reverse
-from awx.main.models.oauth import (OAuth2Application as Application,
- OAuth2AccessToken as AccessToken)
+from awx.main.models.oauth import OAuth2Application as Application, OAuth2AccessToken as AccessToken
from awx.main.tests.functional import immediate_on_commit
from awx.sso.models import UserEnterpriseAuth
from oauth2_provider.models import RefreshToken
@@ -24,9 +23,7 @@ def test_personal_access_token_creation(oauth_application, post, alice):
url,
data='grant_type=password&username=alice&password=alice&scope=read',
content_type='application/x-www-form-urlencoded',
- HTTP_AUTHORIZATION='Basic ' + smart_str(base64.b64encode(smart_bytes(':'.join([
- oauth_application.client_id, oauth_application.client_secret
- ]))))
+ HTTP_AUTHORIZATION='Basic ' + smart_str(base64.b64encode(smart_bytes(':'.join([oauth_application.client_id, oauth_application.client_secret])))),
)
resp_json = smart_str(resp._container[0])
assert 'access_token' in resp_json
@@ -45,10 +42,8 @@ def test_token_creation_disabled_for_external_accounts(oauth_application, post,
url,
data='grant_type=password&username=alice&password=alice&scope=read',
content_type='application/x-www-form-urlencoded',
- HTTP_AUTHORIZATION='Basic ' + smart_str(base64.b64encode(smart_bytes(':'.join([
- oauth_application.client_id, oauth_application.client_secret
- ])))),
- status=status
+ HTTP_AUTHORIZATION='Basic ' + smart_str(base64.b64encode(smart_bytes(':'.join([oauth_application.client_id, oauth_application.client_secret])))),
+ status=status,
)
if allow_oauth:
assert AccessToken.objects.count() == 1
@@ -66,29 +61,19 @@ def test_existing_token_enabled_for_external_accounts(oauth_application, get, po
url,
data='grant_type=password&username=admin&password=admin&scope=read',
content_type='application/x-www-form-urlencoded',
- HTTP_AUTHORIZATION='Basic ' + smart_str(base64.b64encode(smart_bytes(':'.join([
- oauth_application.client_id, oauth_application.client_secret
- ])))),
- status=201
+ HTTP_AUTHORIZATION='Basic ' + smart_str(base64.b64encode(smart_bytes(':'.join([oauth_application.client_id, oauth_application.client_secret])))),
+ status=201,
)
token = json.loads(resp.content)['access_token']
assert AccessToken.objects.count() == 1
with immediate_on_commit():
- resp = get(
- drf_reverse('api:user_me_list', kwargs={'version': 'v2'}),
- HTTP_AUTHORIZATION='Bearer ' + token,
- status=200
- )
+ resp = get(drf_reverse('api:user_me_list', kwargs={'version': 'v2'}), HTTP_AUTHORIZATION='Bearer ' + token, status=200)
assert json.loads(resp.content)['results'][0]['username'] == 'admin'
with override_settings(RADIUS_SERVER='example.org', ALLOW_OAUTH2_FOR_EXTERNAL_USER=False):
with immediate_on_commit():
- resp = get(
- drf_reverse('api:user_me_list', kwargs={'version': 'v2'}),
- HTTP_AUTHORIZATION='Bearer ' + token,
- status=200
- )
+ resp = get(drf_reverse('api:user_me_list', kwargs={'version': 'v2'}), HTTP_AUTHORIZATION='Bearer ' + token, status=200)
assert json.loads(resp.content)['results'][0]['username'] == 'admin'
@@ -96,31 +81,44 @@ def test_existing_token_enabled_for_external_accounts(oauth_application, get, po
def test_pat_creation_no_default_scope(oauth_application, post, admin):
# tests that the default scope is overridden
url = reverse('api:o_auth2_token_list')
- response = post(url, {'description': 'test token',
- 'scope': 'read',
- 'application': oauth_application.pk,
- }, admin)
+ response = post(
+ url,
+ {
+ 'description': 'test token',
+ 'scope': 'read',
+ 'application': oauth_application.pk,
+ },
+ admin,
+ )
assert response.data['scope'] == 'read'
-
-
+
+
@pytest.mark.django_db
def test_pat_creation_no_scope(oauth_application, post, admin):
url = reverse('api:o_auth2_token_list')
- response = post(url, {'description': 'test token',
- 'application': oauth_application.pk,
- }, admin)
+ response = post(
+ url,
+ {
+ 'description': 'test token',
+ 'application': oauth_application.pk,
+ },
+ admin,
+ )
assert response.data['scope'] == 'write'
@pytest.mark.django_db
def test_oauth2_application_create(admin, organization, post):
response = post(
- reverse('api:o_auth2_application_list'), {
+ reverse('api:o_auth2_application_list'),
+ {
'name': 'test app',
'organization': organization.pk,
'client_type': 'confidential',
'authorization_grant_type': 'password',
- }, admin, expect=201
+ },
+ admin,
+ expect=201,
)
assert 'modified' in response.data
assert 'updated' not in response.data
@@ -131,29 +129,35 @@ def test_oauth2_application_create(admin, organization, post):
assert created_app.client_type == 'confidential'
assert created_app.authorization_grant_type == 'password'
assert created_app.organization == organization
-
-
+
+
@pytest.mark.django_db
def test_oauth2_validator(admin, oauth_application, post):
post(
- reverse('api:o_auth2_application_list'), {
- 'name': 'Write App Token',
+ reverse('api:o_auth2_application_list'),
+ {
+ 'name': 'Write App Token',
'application': oauth_application.pk,
'scope': 'Write',
- }, admin, expect=400
+ },
+ admin,
+ expect=400,
)
-
+
@pytest.mark.django_db
def test_oauth_application_update(oauth_application, organization, patch, admin, alice):
patch(
- reverse('api:o_auth2_application_detail', kwargs={'pk': oauth_application.pk}), {
+ reverse('api:o_auth2_application_detail', kwargs={'pk': oauth_application.pk}),
+ {
'name': 'Test app with immutable grant type and user',
'organization': organization.pk,
'redirect_uris': 'http://localhost/api/',
'authorization_grant_type': 'password',
'skip_authorization': True,
- }, admin, expect=200
+ },
+ admin,
+ expect=200,
)
updated_app = Application.objects.get(client_id=oauth_application.client_id)
assert updated_app.name == 'Test app with immutable grant type and user'
@@ -166,29 +170,27 @@ def test_oauth_application_update(oauth_application, organization, patch, admin,
@pytest.mark.django_db
def test_oauth_application_encryption(admin, organization, post):
response = post(
- reverse('api:o_auth2_application_list'), {
+ reverse('api:o_auth2_application_list'),
+ {
'name': 'test app',
'organization': organization.pk,
'client_type': 'confidential',
'authorization_grant_type': 'password',
- }, admin, expect=201
+ },
+ admin,
+ expect=201,
)
pk = response.data.get('id')
secret = response.data.get('client_secret')
with connection.cursor() as cursor:
- encrypted = cursor.execute(
- 'SELECT client_secret FROM main_oauth2application WHERE id={}'.format(pk)
- ).fetchone()[0]
+ encrypted = cursor.execute('SELECT client_secret FROM main_oauth2application WHERE id={}'.format(pk)).fetchone()[0]
assert encrypted.startswith('$encrypted$')
assert decrypt_value(get_encryption_key('value', pk=None), encrypted) == secret
@pytest.mark.django_db
def test_oauth_token_create(oauth_application, get, post, admin):
- response = post(
- reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}),
- {'scope': 'read'}, admin, expect=201
- )
+ response = post(reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}), {'scope': 'read'}, admin, expect=201)
assert 'modified' in response.data and response.data['modified'] is not None
assert 'updated' not in response.data
token = AccessToken.objects.get(token=response.data['token'])
@@ -199,87 +201,48 @@ def test_oauth_token_create(oauth_application, get, post, admin):
assert refresh_token.user == admin
assert refresh_token.access_token == token
assert token.scope == 'read'
- response = get(
- reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}),
- admin, expect=200
- )
+ response = get(reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}), admin, expect=200)
assert response.data['count'] == 1
- response = get(
- reverse('api:o_auth2_application_detail', kwargs={'pk': oauth_application.pk}),
- admin, expect=200
- )
+ response = get(reverse('api:o_auth2_application_detail', kwargs={'pk': oauth_application.pk}), admin, expect=200)
assert response.data['summary_fields']['tokens']['count'] == 1
- assert response.data['summary_fields']['tokens']['results'][0] == {
- 'id': token.pk, 'scope': token.scope, 'token': '************'
- }
+ assert response.data['summary_fields']['tokens']['results'][0] == {'id': token.pk, 'scope': token.scope, 'token': '************'}
- response = post(
- reverse('api:o_auth2_token_list'),
- {'scope': 'read', 'application': oauth_application.pk}, admin, expect=201
- )
+ response = post(reverse('api:o_auth2_token_list'), {'scope': 'read', 'application': oauth_application.pk}, admin, expect=201)
assert response.data['refresh_token']
response = post(
- reverse('api:user_authorized_token_list', kwargs={'pk': admin.pk}),
- {'scope': 'read', 'application': oauth_application.pk}, admin, expect=201
+ reverse('api:user_authorized_token_list', kwargs={'pk': admin.pk}), {'scope': 'read', 'application': oauth_application.pk}, admin, expect=201
)
assert response.data['refresh_token']
- response = post(
- reverse('api:application_o_auth2_token_list', kwargs={'pk': oauth_application.pk}),
- {'scope': 'read'}, admin, expect=201
- )
+ response = post(reverse('api:application_o_auth2_token_list', kwargs={'pk': oauth_application.pk}), {'scope': 'read'}, admin, expect=201)
assert response.data['refresh_token']
@pytest.mark.django_db
def test_oauth_token_update(oauth_application, post, patch, admin):
- response = post(
- reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}),
- {'scope': 'read'}, admin, expect=201
- )
+ response = post(reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}), {'scope': 'read'}, admin, expect=201)
token = AccessToken.objects.get(token=response.data['token'])
- patch(
- reverse('api:o_auth2_token_detail', kwargs={'pk': token.pk}),
- {'scope': 'write'}, admin, expect=200
- )
+ patch(reverse('api:o_auth2_token_detail', kwargs={'pk': token.pk}), {'scope': 'write'}, admin, expect=200)
token = AccessToken.objects.get(token=token.token)
assert token.scope == 'write'
@pytest.mark.django_db
def test_oauth_token_delete(oauth_application, post, delete, get, admin):
- response = post(
- reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}),
- {'scope': 'read'}, admin, expect=201
- )
+ response = post(reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}), {'scope': 'read'}, admin, expect=201)
token = AccessToken.objects.get(token=response.data['token'])
- delete(
- reverse('api:o_auth2_token_detail', kwargs={'pk': token.pk}),
- admin, expect=204
- )
+ delete(reverse('api:o_auth2_token_detail', kwargs={'pk': token.pk}), admin, expect=204)
assert AccessToken.objects.count() == 0
assert RefreshToken.objects.count() == 1
- response = get(
- reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}),
- admin, expect=200
- )
+ response = get(reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}), admin, expect=200)
assert response.data['count'] == 0
- response = get(
- reverse('api:o_auth2_application_detail', kwargs={'pk': oauth_application.pk}),
- admin, expect=200
- )
+ response = get(reverse('api:o_auth2_application_detail', kwargs={'pk': oauth_application.pk}), admin, expect=200)
assert response.data['summary_fields']['tokens']['count'] == 0
@pytest.mark.django_db
def test_oauth_application_delete(oauth_application, post, delete, admin):
- post(
- reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}),
- {'scope': 'read'}, admin, expect=201
- )
- delete(
- reverse('api:o_auth2_application_detail', kwargs={'pk': oauth_application.pk}),
- admin, expect=204
- )
+ post(reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}), {'scope': 'read'}, admin, expect=201)
+ delete(reverse('api:o_auth2_application_detail', kwargs={'pk': oauth_application.pk}), admin, expect=204)
assert Application.objects.filter(client_id=oauth_application.client_id).count() == 0
assert RefreshToken.objects.filter(application=oauth_application).count() == 0
assert AccessToken.objects.filter(application=oauth_application).count() == 0
@@ -292,27 +255,22 @@ def test_oauth_list_user_tokens(oauth_application, post, get, admin, alice):
post(url, {'scope': 'read'}, user, expect=201)
response = get(url, admin, expect=200)
assert response.data['count'] == 1
-
+
@pytest.mark.django_db
def test_refresh_accesstoken(oauth_application, post, get, delete, admin):
- response = post(
- reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}),
- {'scope': 'read'}, admin, expect=201
- )
+ response = post(reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}), {'scope': 'read'}, admin, expect=201)
assert AccessToken.objects.count() == 1
assert RefreshToken.objects.count() == 1
token = AccessToken.objects.get(token=response.data['token'])
refresh_token = RefreshToken.objects.get(token=response.data['refresh_token'])
-
- refresh_url = drf_reverse('api:oauth_authorization_root_view') + 'token/'
+
+ refresh_url = drf_reverse('api:oauth_authorization_root_view') + 'token/'
response = post(
refresh_url,
data='grant_type=refresh_token&refresh_token=' + refresh_token.token,
content_type='application/x-www-form-urlencoded',
- HTTP_AUTHORIZATION='Basic ' + smart_str(base64.b64encode(smart_bytes(':'.join([
- oauth_application.client_id, oauth_application.client_secret
- ]))))
+ HTTP_AUTHORIZATION='Basic ' + smart_str(base64.b64encode(smart_bytes(':'.join([oauth_application.client_id, oauth_application.client_secret])))),
)
assert RefreshToken.objects.filter(token=refresh_token).exists()
original_refresh_token = RefreshToken.objects.get(token=refresh_token)
@@ -325,33 +283,24 @@ def test_refresh_accesstoken(oauth_application, post, get, delete, admin):
assert AccessToken.objects.filter(token=new_token).count() == 1
# checks that RefreshTokens are rotated (new RefreshToken issued)
assert RefreshToken.objects.filter(token=new_refresh_token).count() == 1
- assert original_refresh_token.revoked # is not None
+ assert original_refresh_token.revoked # is not None
@pytest.mark.django_db
def test_refresh_token_expiration_is_respected(oauth_application, post, get, delete, admin):
- response = post(
- reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}),
- {'scope': 'read'}, admin, expect=201
- )
+ response = post(reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}), {'scope': 'read'}, admin, expect=201)
assert AccessToken.objects.count() == 1
assert RefreshToken.objects.count() == 1
refresh_token = RefreshToken.objects.get(token=response.data['refresh_token'])
refresh_url = drf_reverse('api:oauth_authorization_root_view') + 'token/'
- short_lived = {
- 'ACCESS_TOKEN_EXPIRE_SECONDS': 1,
- 'AUTHORIZATION_CODE_EXPIRE_SECONDS': 1,
- 'REFRESH_TOKEN_EXPIRE_SECONDS': 1
- }
+ short_lived = {'ACCESS_TOKEN_EXPIRE_SECONDS': 1, 'AUTHORIZATION_CODE_EXPIRE_SECONDS': 1, 'REFRESH_TOKEN_EXPIRE_SECONDS': 1}
time.sleep(1)
with override_settings(OAUTH2_PROVIDER=short_lived):
response = post(
refresh_url,
data='grant_type=refresh_token&refresh_token=' + refresh_token.token,
content_type='application/x-www-form-urlencoded',
- HTTP_AUTHORIZATION='Basic ' + smart_str(base64.b64encode(smart_bytes(':'.join([
- oauth_application.client_id, oauth_application.client_secret
- ]))))
+ HTTP_AUTHORIZATION='Basic ' + smart_str(base64.b64encode(smart_bytes(':'.join([oauth_application.client_id, oauth_application.client_secret])))),
)
assert response.status_code == 403
assert b'The refresh token has expired.' in response.content
@@ -360,38 +309,31 @@ def test_refresh_token_expiration_is_respected(oauth_application, post, get, del
assert RefreshToken.objects.count() == 1
-
@pytest.mark.django_db
def test_revoke_access_then_refreshtoken(oauth_application, post, get, delete, admin):
- response = post(
- reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}),
- {'scope': 'read'}, admin, expect=201
- )
+ response = post(reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}), {'scope': 'read'}, admin, expect=201)
token = AccessToken.objects.get(token=response.data['token'])
refresh_token = RefreshToken.objects.get(token=response.data['refresh_token'])
assert AccessToken.objects.count() == 1
assert RefreshToken.objects.count() == 1
-
+
token.revoke()
assert AccessToken.objects.count() == 0
assert RefreshToken.objects.count() == 1
assert not refresh_token.revoked
-
+
refresh_token.revoke()
assert AccessToken.objects.count() == 0
assert RefreshToken.objects.count() == 1
-
-
+
+
@pytest.mark.django_db
def test_revoke_refreshtoken(oauth_application, post, get, delete, admin):
- response = post(
- reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}),
- {'scope': 'read'}, admin, expect=201
- )
+ response = post(reverse('api:o_auth2_application_token_list', kwargs={'pk': oauth_application.pk}), {'scope': 'read'}, admin, expect=201)
refresh_token = RefreshToken.objects.get(token=response.data['refresh_token'])
assert AccessToken.objects.count() == 1
assert RefreshToken.objects.count() == 1
-
+
refresh_token.revoke()
assert AccessToken.objects.count() == 0
# the same RefreshToken is recycled
diff --git a/awx/main/tests/functional/api/test_organization_counts.py b/awx/main/tests/functional/api/test_organization_counts.py
index d45b1fa083..096fc11350 100644
--- a/awx/main/tests/functional/api/test_organization_counts.py
+++ b/awx/main/tests/functional/api/test_organization_counts.py
@@ -21,11 +21,7 @@ def organization_resource_creator(organization, user):
for i in range(inventories):
inventory = organization.inventories.create(name="associated-inv %s" % i)
for i in range(projects):
- Project.objects.create(
- name="test-proj %s" % i,
- description="test-proj-desc",
- organization=organization
- )
+ Project.objects.create(name="test-proj %s" % i, description="test-proj-desc", organization=organization)
# Mix up the inventories and projects used by the job templates
i_proj = 0
i_inv = 0
@@ -33,11 +29,9 @@ def organization_resource_creator(organization, user):
project = Project.objects.filter(organization=organization)[i_proj]
# project = organization.projects.all()[i_proj]
inventory = organization.inventories.all()[i_inv]
- project.jobtemplates.create(name="test-jt %s" % i,
- description="test-job-template-desc",
- inventory=inventory,
- playbook="test_playbook.yml",
- organization=organization)
+ project.jobtemplates.create(
+ name="test-jt %s" % i, description="test-job-template-desc", inventory=inventory, playbook="test_playbook.yml", organization=organization
+ )
i_proj += 1
i_inv += 1
if i_proj >= Project.objects.filter(organization=organization).count():
@@ -46,25 +40,12 @@ def organization_resource_creator(organization, user):
i_inv = 0
return organization
+
return rf
-COUNTS_PRIMES = {
- 'users': 11,
- 'admins': 5,
- 'job_templates': 3,
- 'projects': 3,
- 'inventories': 7,
- 'teams': 5
-}
-COUNTS_ZEROS = {
- 'users': 0,
- 'admins': 0,
- 'job_templates': 0,
- 'projects': 0,
- 'inventories': 0,
- 'teams': 0
-}
+COUNTS_PRIMES = {'users': 11, 'admins': 5, 'job_templates': 3, 'projects': 3, 'inventories': 7, 'teams': 5}
+COUNTS_ZEROS = {'users': 0, 'admins': 0, 'job_templates': 0, 'projects': 0, 'inventories': 0, 'teams': 0}
@pytest.fixture
@@ -76,8 +57,7 @@ def resourced_organization(organization_resource_creator):
def test_org_counts_detail_admin(resourced_organization, user, get):
# Check that all types of resources are counted by a superuser
external_admin = user('admin', True)
- response = get(reverse('api:organization_detail',
- kwargs={'pk': resourced_organization.pk}), external_admin)
+ response = get(reverse('api:organization_detail', kwargs={'pk': resourced_organization.pk}), external_admin)
assert response.status_code == 200
counts = response.data['summary_fields']['related_field_counts']
@@ -90,8 +70,7 @@ def test_org_counts_detail_admin(resourced_organization, user, get):
def test_org_counts_detail_member(resourced_organization, user, get):
# Check that a non-admin org member can only see users / admin in detail view
member_user = resourced_organization.member_role.members.get(username='org-member 1')
- response = get(reverse('api:organization_detail',
- kwargs={'pk': resourced_organization.pk}), member_user)
+ response = get(reverse('api:organization_detail', kwargs={'pk': resourced_organization.pk}), member_user)
assert response.status_code == 200
counts = response.data['summary_fields']['related_field_counts']
@@ -103,7 +82,7 @@ def test_org_counts_detail_member(resourced_organization, user, get):
'job_templates': 0,
'projects': 0,
'inventories': 0,
- 'teams': 0
+ 'teams': 0,
}
@@ -136,7 +115,7 @@ def test_org_counts_list_member(resourced_organization, user, get):
'job_templates': 0,
'projects': 0,
'inventories': 0,
- 'teams': 0
+ 'teams': 0,
}
@@ -145,8 +124,7 @@ def test_new_org_zero_counts(user, post):
# Check that a POST to the organization list endpoint returns
# correct counts, including the new record
org_list_url = reverse('api:organization_list')
- post_response = post(url=org_list_url, data={'name': 'test organization',
- 'description': ''}, user=user('admin', True))
+ post_response = post(url=org_list_url, data={'name': 'test organization', 'description': ''}, user=user('admin', True))
assert post_response.status_code == 201
new_org_list = post_response.render().data
@@ -216,7 +194,8 @@ def test_JT_not_double_counted(resourced_organization, user, get):
inventory=resourced_organization.inventories.all()[0],
project=proj,
name='double-linked-job-template',
- organization=resourced_organization)
+ organization=resourced_organization,
+ )
counts_dict = COUNTS_PRIMES
counts_dict['job_templates'] += 1
diff --git a/awx/main/tests/functional/api/test_organizations.py b/awx/main/tests/functional/api/test_organizations.py
index 93994d3842..3db0e619fd 100644
--- a/awx/main/tests/functional/api/test_organizations.py
+++ b/awx/main/tests/functional/api/test_organizations.py
@@ -16,6 +16,7 @@ def create_job_factory(job_factory, project):
j.project = project
j.save()
return j
+
return fn
@@ -27,19 +28,18 @@ def create_project_update_factory(organization, project):
pu.organization = organization
pu.save()
return pu
+
return fn
@pytest.fixture
def organization_jobs_successful(create_job_factory, create_project_update_factory):
- return [create_job_factory(status='successful') for i in range(0, 2)] + \
- [create_project_update_factory(status='successful') for i in range(0, 2)]
+ return [create_job_factory(status='successful') for i in range(0, 2)] + [create_project_update_factory(status='successful') for i in range(0, 2)]
@pytest.fixture
def organization_jobs_running(create_job_factory, create_project_update_factory):
- return [create_job_factory(status='running') for i in range(0, 2)] + \
- [create_project_update_factory(status='running') for i in range(0, 2)]
+ return [create_job_factory(status='running') for i in range(0, 2)] + [create_project_update_factory(status='running') for i in range(0, 2)]
@pytest.mark.django_db
@@ -129,10 +129,7 @@ def test_organization_inventory_list(organization, inventory_factory, get, alice
@pytest.mark.django_db
def test_create_organization(post, admin, alice):
- new_org = {
- 'name': 'new org',
- 'description': 'my description'
- }
+ new_org = {'name': 'new org', 'description': 'my description'}
res = post(reverse('api:organization_list'), new_org, user=admin, expect=201)
assert res.data['name'] == new_org['name']
res = post(reverse('api:organization_list'), new_org, user=admin, expect=400)
@@ -140,10 +137,7 @@ def test_create_organization(post, admin, alice):
@pytest.mark.django_db
def test_create_organization_xfail(post, alice):
- new_org = {
- 'name': 'new org',
- 'description': 'my description'
- }
+ new_org = {'name': 'new org', 'description': 'my description'}
post(reverse('api:organization_list'), new_org, user=alice, expect=403)
@@ -152,7 +146,7 @@ def test_add_user_to_organization(post, organization, alice, bob):
organization.admin_role.members.add(alice)
post(reverse('api:organization_users_list', kwargs={'pk': organization.id}), {'id': bob.id}, user=alice, expect=204)
assert bob in organization.member_role
- post(reverse('api:organization_users_list', kwargs={'pk': organization.id}), {'id': bob.id, 'disassociate': True} , user=alice, expect=204)
+ post(reverse('api:organization_users_list', kwargs={'pk': organization.id}), {'id': bob.id, 'disassociate': True}, user=alice, expect=204)
assert bob not in organization.member_role
@@ -168,7 +162,7 @@ def test_add_admin_to_organization(post, organization, alice, bob):
post(reverse('api:organization_admins_list', kwargs={'pk': organization.id}), {'id': bob.id}, user=alice, expect=204)
assert bob in organization.admin_role
assert bob in organization.member_role
- post(reverse('api:organization_admins_list', kwargs={'pk': organization.id}), {'id': bob.id, 'disassociate': True} , user=alice, expect=204)
+ post(reverse('api:organization_admins_list', kwargs={'pk': organization.id}), {'id': bob.id, 'disassociate': True}, user=alice, expect=204)
assert bob not in organization.admin_role
assert bob not in organization.member_role
@@ -264,21 +258,9 @@ def test_galaxy_credential_association_forbidden(alice, organization, post):
galaxy = CredentialType.defaults['galaxy_api_token']()
galaxy.save()
- cred = Credential.objects.create(
- credential_type=galaxy,
- name='Public Galaxy',
- organization=organization,
- inputs={
- 'url': 'https://galaxy.ansible.com/'
- }
- )
+ cred = Credential.objects.create(credential_type=galaxy, name='Public Galaxy', organization=organization, inputs={'url': 'https://galaxy.ansible.com/'})
url = reverse('api:organization_galaxy_credentials_list', kwargs={'pk': organization.id})
- post(
- url,
- {'associate': True, 'id': cred.pk},
- user=alice,
- expect=403
- )
+ post(url, {'associate': True, 'id': cred.pk}, user=alice, expect=403)
@pytest.mark.django_db
@@ -292,12 +274,7 @@ def test_galaxy_credential_type_enforcement(admin, organization, post):
organization=organization,
)
url = reverse('api:organization_galaxy_credentials_list', kwargs={'pk': organization.id})
- resp = post(
- url,
- {'associate': True, 'id': cred.pk},
- user=admin,
- expect=400
- )
+ resp = post(url, {'associate': True, 'id': cred.pk}, user=admin, expect=400)
assert resp.data['msg'] == 'Credential must be a Galaxy credential, not Machine.'
@@ -308,20 +285,10 @@ def test_galaxy_credential_association(alice, admin, organization, post, get):
for i in range(5):
cred = Credential.objects.create(
- credential_type=galaxy,
- name=f'Public Galaxy {i + 1}',
- organization=organization,
- inputs={
- 'url': 'https://galaxy.ansible.com/'
- }
+ credential_type=galaxy, name=f'Public Galaxy {i + 1}', organization=organization, inputs={'url': 'https://galaxy.ansible.com/'}
)
url = reverse('api:organization_galaxy_credentials_list', kwargs={'pk': organization.id})
- post(
- url,
- {'associate': True, 'id': cred.pk},
- user=admin,
- expect=204
- )
+ post(url, {'associate': True, 'id': cred.pk}, user=admin, expect=204)
resp = get(url, user=admin)
assert [cred['name'] for cred in resp.data['results']] == [
'Public Galaxy 1',
@@ -331,12 +298,7 @@ def test_galaxy_credential_association(alice, admin, organization, post, get):
'Public Galaxy 5',
]
- post(
- url,
- {'disassociate': True, 'id': Credential.objects.get(name='Public Galaxy 3').pk},
- user=admin,
- expect=204
- )
+ post(url, {'disassociate': True, 'id': Credential.objects.get(name='Public Galaxy 3').pk}, user=admin, expect=204)
resp = get(url, user=admin)
assert [cred['name'] for cred in resp.data['results']] == [
'Public Galaxy 1',
diff --git a/awx/main/tests/functional/api/test_pagination.py b/awx/main/tests/functional/api/test_pagination.py
index 3bbe469d59..92bb8e70e5 100644
--- a/awx/main/tests/functional/api/test_pagination.py
+++ b/awx/main/tests/functional/api/test_pagination.py
@@ -19,6 +19,7 @@ def host(inventory):
h.save()
h = Host.objects.get(name=name, inventory=inventory)
return h
+
return handler
@@ -29,6 +30,7 @@ def group(inventory):
g.save()
g = Group.objects.get(name=name, inventory=inventory)
return g
+
return handler
diff --git a/awx/main/tests/functional/api/test_project.py b/awx/main/tests/functional/api/test_project.py
index 71d685ce7b..08bb760224 100644
--- a/awx/main/tests/functional/api/test_project.py
+++ b/awx/main/tests/functional/api/test_project.py
@@ -7,14 +7,10 @@ from awx.main.models import Project, JobTemplate
@pytest.mark.django_db
class TestInsightsCredential:
def test_insights_credential(self, patch, insights_project, admin_user, insights_credential):
- patch(insights_project.get_absolute_url(),
- {'credential': insights_credential.id}, admin_user,
- expect=200)
+ patch(insights_project.get_absolute_url(), {'credential': insights_credential.id}, admin_user, expect=200)
def test_non_insights_credential(self, patch, insights_project, admin_user, scm_credential):
- patch(insights_project.get_absolute_url(),
- {'credential': scm_credential.id}, admin_user,
- expect=400)
+ patch(insights_project.get_absolute_url(), {'credential': scm_credential.id}, admin_user, expect=400)
@pytest.mark.django_db
@@ -26,22 +22,13 @@ def test_no_changing_overwrite_behavior_if_used(post, patch, organization, admin
'organization': organization.id,
'allow_override': True,
'scm_type': 'git',
- 'scm_url': 'https://github.com/ansible/test-playbooks.git'
+ 'scm_url': 'https://github.com/ansible/test-playbooks.git',
},
user=admin_user,
- expect=201
- )
- jt = JobTemplate.objects.create(
- name='provides branch', project_id=r1.data['id'],
- playbook='helloworld.yml',
- scm_branch='foobar'
- )
- r2 = patch(
- url=reverse('api:project_detail', kwargs={'pk': r1.data['id']}),
- data={'allow_override': False},
- user=admin_user,
- expect=400
+ expect=201,
)
+ jt = JobTemplate.objects.create(name='provides branch', project_id=r1.data['id'], playbook='helloworld.yml', scm_branch='foobar')
+ r2 = patch(url=reverse('api:project_detail', kwargs={'pk': r1.data['id']}), data={'allow_override': False}, user=admin_user, expect=400)
p = Project.objects.get(pk=r1.data['id'])
assert 'job templates depend on branch override behavior for this project' in str(r2.data['allow_override'])
assert 'ids: {}'.format(jt.id) in str(r2.data['allow_override'])
@@ -57,17 +44,12 @@ def test_changing_overwrite_behavior_okay_if_not_used(post, patch, organization,
'organization': organization.id,
'allow_override': True,
'scm_type': 'git',
- 'scm_url': 'https://github.com/ansible/test-playbooks.git'
+ 'scm_url': 'https://github.com/ansible/test-playbooks.git',
},
user=admin_user,
- expect=201
- )
- patch(
- url=reverse('api:project_detail', kwargs={'pk': r1.data['id']}),
- data={'allow_override': False},
- user=admin_user,
- expect=200
+ expect=201,
)
+ patch(url=reverse('api:project_detail', kwargs={'pk': r1.data['id']}), data={'allow_override': False}, user=admin_user, expect=200)
assert Project.objects.get(pk=r1.data['id']).allow_override is False
@@ -75,6 +57,4 @@ def test_changing_overwrite_behavior_okay_if_not_used(post, patch, organization,
def test_scm_project_local_path_invalid(get, patch, project, admin):
url = reverse('api:project_detail', kwargs={'pk': project.id})
resp = patch(url, {'local_path': '/foo/bar'}, user=admin, expect=400)
- assert resp.data['local_path'] == [
- 'Cannot change local_path for git-based projects'
- ]
+ assert resp.data['local_path'] == ['Cannot change local_path for git-based projects']
diff --git a/awx/main/tests/functional/api/test_rbac_displays.py b/awx/main/tests/functional/api/test_rbac_displays.py
index d0a0cb4f98..8178da672c 100644
--- a/awx/main/tests/functional/api/test_rbac_displays.py
+++ b/awx/main/tests/functional/api/test_rbac_displays.py
@@ -59,9 +59,7 @@ class TestJobTemplateCopyEdit:
@pytest.fixture
def jt_copy_edit(self, job_template_factory, project):
- objects = job_template_factory(
- 'copy-edit-job-template',
- project=project)
+ objects = job_template_factory('copy-edit-job-template', project=project)
return objects.job_template
def fake_context(self, user):
@@ -88,8 +86,10 @@ class TestJobTemplateCopyEdit:
jt_res = JobTemplate.objects.create(
job_type='run',
project=project,
- inventory=None, ask_inventory_on_launch=False, # not allowed
- ask_credential_on_launch=True, name='deploy-job-template'
+ inventory=None,
+ ask_inventory_on_launch=False, # not allowed
+ ask_credential_on_launch=True,
+ name='deploy-job-template',
)
serializer = JobTemplateSerializer(jt_res, context=self.fake_context(admin_user))
response = serializer.to_representation(jt_res)
@@ -143,7 +143,7 @@ class TestJobTemplateCopyEdit:
def mock_access_method(mocker):
mock_method = mocker.MagicMock()
mock_method.return_value = 'foobar'
- mock_method.__name__ = 'bars' # Required for a logging statement
+ mock_method.__name__ = 'bars' # Required for a logging statement
return mock_method
@@ -162,8 +162,7 @@ class TestAccessListCapabilities:
assert len(data['results']) == 1
assert len(data['results'][0]['summary_fields'][sublist]) == 1
- def test_access_list_direct_access_capability(
- self, inventory, rando, get, mocker, mock_access_method):
+ def test_access_list_direct_access_capability(self, inventory, rando, get, mocker, mock_access_method):
inventory.admin_role.members.add(rando)
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
@@ -174,8 +173,7 @@ class TestAccessListCapabilities:
direct_access_list = response.data['results'][0]['summary_fields']['direct_access']
assert direct_access_list[0]['role']['user_capabilities']['unattach'] == 'foobar'
- def test_access_list_indirect_access_capability(
- self, inventory, organization, org_admin, get, mocker, mock_access_method):
+ def test_access_list_indirect_access_capability(self, inventory, organization, org_admin, get, mocker, mock_access_method):
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), org_admin)
@@ -184,8 +182,7 @@ class TestAccessListCapabilities:
indirect_access_list = response.data['results'][0]['summary_fields']['indirect_access']
assert indirect_access_list[0]['role']['user_capabilities']['unattach'] == 'foobar'
- def test_access_list_team_direct_access_capability(
- self, inventory, team, team_member, get, mocker, mock_access_method):
+ def test_access_list_team_direct_access_capability(self, inventory, team, team_member, get, mocker, mock_access_method):
team.member_role.children.add(inventory.admin_role)
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
@@ -205,8 +202,7 @@ def test_team_roles_unattach(mocker, team, team_member, inventory, mock_access_m
response = get(reverse('api:team_roles_list', kwargs={'pk': team.id}), team_member)
# Did we assess whether team_member can remove team's permission to the inventory?
- mock_access_method.assert_called_once_with(
- inventory.admin_role, team.member_role, 'parents', skip_sub_obj_read_check=True, data={})
+ mock_access_method.assert_called_once_with(inventory.admin_role, team.member_role, 'parents', skip_sub_obj_read_check=True, data={})
assert response.data['results'][0]['summary_fields']['user_capabilities']['unattach'] == 'foobar'
@@ -220,8 +216,7 @@ def test_user_roles_unattach(mocker, organization, alice, bob, mock_access_metho
response = get(reverse('api:user_roles_list', kwargs={'pk': alice.id}), bob)
# Did we assess whether bob can remove alice's permission to the inventory?
- mock_access_method.assert_called_once_with(
- organization.member_role, alice, 'members', skip_sub_obj_read_check=True, data={})
+ mock_access_method.assert_called_once_with(organization.member_role, alice, 'members', skip_sub_obj_read_check=True, data={})
assert response.data['results'][0]['summary_fields']['user_capabilities']['unattach'] == 'foobar'
@@ -298,17 +293,37 @@ def test_prefetch_jt_copy_capability(job_template, project, inventory, rando):
job_template.save()
qs = JobTemplate.objects.all()
- mapping = prefetch_page_capabilities(JobTemplate, qs, [{'copy': [
- 'project.use', 'inventory.use',
- ]}], rando)
+ mapping = prefetch_page_capabilities(
+ JobTemplate,
+ qs,
+ [
+ {
+ 'copy': [
+ 'project.use',
+ 'inventory.use',
+ ]
+ }
+ ],
+ rando,
+ )
assert mapping[job_template.id] == {'copy': False}
project.use_role.members.add(rando)
inventory.use_role.members.add(rando)
- mapping = prefetch_page_capabilities(JobTemplate, qs, [{'copy': [
- 'project.use', 'inventory.use',
- ]}], rando)
+ mapping = prefetch_page_capabilities(
+ JobTemplate,
+ qs,
+ [
+ {
+ 'copy': [
+ 'project.use',
+ 'inventory.use',
+ ]
+ }
+ ],
+ rando,
+ )
assert mapping[job_template.id] == {'copy': True}
@@ -317,10 +332,7 @@ def test_workflow_orphaned_capabilities(rando):
wfjt = WorkflowJobTemplate.objects.create(name='test', organization=None)
wfjt.admin_role.members.add(rando)
access = WorkflowJobTemplateAccess(rando)
- assert not access.get_user_capabilities(
- wfjt, method_list=['edit', 'copy'],
- capabilities_cache={'copy': True}
- )['copy']
+ assert not access.get_user_capabilities(wfjt, method_list=['edit', 'copy'], capabilities_cache={'copy': True})['copy']
@pytest.mark.django_db
diff --git a/awx/main/tests/functional/api/test_resource_access_lists.py b/awx/main/tests/functional/api/test_resource_access_lists.py
index 8ec0ddc264..71d107dbda 100644
--- a/awx/main/tests/functional/api/test_resource_access_lists.py
+++ b/awx/main/tests/functional/api/test_resource_access_lists.py
@@ -31,7 +31,7 @@ def test_indirect_access_list(get, organization, project, team_factory, user, ad
project_admin_res = [r for r in result.data['results'] if r['id'] == project_admin.id][0]
team_admin_res = [r for r in result.data['results'] if r['id'] == team_admin.id][0]
- project_admin_team_member_res = [r for r in result.data['results'] if r['id'] == project_admin_team_member.id][0]
+ project_admin_team_member_res = [r for r in result.data['results'] if r['id'] == project_admin_team_member.id][0]
admin_res = [r for r in result.data['results'] if r['id'] == admin.id][0]
assert len(project_admin_res['summary_fields']['direct_access']) == 1
@@ -55,4 +55,3 @@ def test_indirect_access_list(get, organization, project, team_factory, user, ad
admin_entry = admin_res['summary_fields']['indirect_access'][0]['role']
assert admin_entry['name'] == Role.singleton('system_administrator').name
-
diff --git a/awx/main/tests/functional/api/test_role.py b/awx/main/tests/functional/api/test_role.py
index d398263c37..cec31d9d7e 100644
--- a/awx/main/tests/functional/api/test_role.py
+++ b/awx/main/tests/functional/api/test_role.py
@@ -15,14 +15,8 @@ def test_admin_visible_to_orphaned_users(get, alice):
@pytest.mark.django_db
-@pytest.mark.parametrize('role,code', [
- ('member_role', 400),
- ('admin_role', 400),
- ('inventory_admin_role', 204)
-])
-@pytest.mark.parametrize('reversed', [
- True, False
-])
+@pytest.mark.parametrize('role,code', [('member_role', 400), ('admin_role', 400), ('inventory_admin_role', 204)])
+@pytest.mark.parametrize('reversed', [True, False])
def test_org_object_role_assigned_to_team(post, team, organization, org_admin, role, code, reversed):
if reversed:
url = reverse('api:role_teams_list', kwargs={'pk': getattr(organization, role).id})
@@ -31,9 +25,4 @@ def test_org_object_role_assigned_to_team(post, team, organization, org_admin, r
url = reverse('api:team_roles_list', kwargs={'pk': team.id})
sub_id = getattr(organization, role).id
- post(
- url=url,
- data={'id': sub_id},
- user=org_admin,
- expect=code
- )
+ post(url=url, data={'id': sub_id}, user=org_admin, expect=code)
diff --git a/awx/main/tests/functional/api/test_schedules.py b/awx/main/tests/functional/api/test_schedules.py
index bdaa6aa4a6..bda8e3efbe 100644
--- a/awx/main/tests/functional/api/test_schedules.py
+++ b/awx/main/tests/functional/api/test_schedules.py
@@ -26,8 +26,7 @@ def get_rrule(tz=None):
@pytest.mark.django_db
def test_non_job_extra_vars_prohibited(post, project, admin_user):
url = reverse('api:project_schedules_list', kwargs={'pk': project.id})
- r = post(url, {'name': 'test sch', 'rrule': RRULE_EXAMPLE, 'extra_data': '{"a": 5}'},
- admin_user, expect=400)
+ r = post(url, {'name': 'test sch', 'rrule': RRULE_EXAMPLE, 'extra_data': '{"a": 5}'}, admin_user, expect=400)
assert 'not allowed on launch' in str(r.data['extra_data'][0])
@@ -43,7 +42,7 @@ def test_wfjt_unprompted_inventory_rejected(post, workflow_job_template, invento
url=reverse('api:workflow_job_template_schedules_list', kwargs={'pk': workflow_job_template.id}),
data={'name': 'test sch', 'rrule': RRULE_EXAMPLE, 'inventory': inventory.pk},
user=admin_user,
- expect=400
+ expect=400,
)
assert r.data['inventory'] == ['Field is not configured to prompt on launch.']
@@ -56,27 +55,21 @@ def test_wfjt_unprompted_inventory_accepted(post, workflow_job_template, invento
url=reverse('api:workflow_job_template_schedules_list', kwargs={'pk': workflow_job_template.id}),
data={'name': 'test sch', 'rrule': RRULE_EXAMPLE, 'inventory': inventory.pk},
user=admin_user,
- expect=201
+ expect=201,
)
assert Schedule.objects.get(pk=r.data['id']).inventory == inventory
@pytest.mark.django_db
def test_valid_survey_answer(post, admin_user, project, inventory, survey_spec_factory):
- job_template = JobTemplate.objects.create(
- name='test-jt',
- project=project,
- playbook='helloworld.yml',
- inventory=inventory
- )
+ job_template = JobTemplate.objects.create(name='test-jt', project=project, playbook='helloworld.yml', inventory=inventory)
job_template.ask_variables_on_launch = False
job_template.survey_enabled = True
job_template.survey_spec = survey_spec_factory('var1')
assert job_template.survey_spec['spec'][0]['type'] == 'integer'
job_template.save()
url = reverse('api:job_template_schedules_list', kwargs={'pk': job_template.id})
- post(url, {'name': 'test sch', 'rrule': RRULE_EXAMPLE, 'extra_data': '{"var1": 54}'},
- admin_user, expect=201)
+ post(url, {'name': 'test sch', 'rrule': RRULE_EXAMPLE, 'extra_data': '{"var1": 54}'}, admin_user, expect=201)
@pytest.mark.django_db
@@ -88,73 +81,65 @@ def test_encrypted_survey_answer(post, patch, admin_user, project, inventory, su
inventory=inventory,
ask_variables_on_launch=False,
survey_enabled=True,
- survey_spec=survey_spec_factory([{'variable': 'var1', 'type': 'password'}])
+ survey_spec=survey_spec_factory([{'variable': 'var1', 'type': 'password'}]),
)
# test encrypted-on-create
url = reverse('api:job_template_schedules_list', kwargs={'pk': job_template.id})
- r = post(url, {'name': 'test sch', 'rrule': RRULE_EXAMPLE, 'extra_data': '{"var1": "foo"}'},
- admin_user, expect=201)
+ r = post(url, {'name': 'test sch', 'rrule': RRULE_EXAMPLE, 'extra_data': '{"var1": "foo"}'}, admin_user, expect=201)
assert r.data['extra_data']['var1'] == "$encrypted$"
schedule = Schedule.objects.get(pk=r.data['id'])
assert schedule.extra_data['var1'].startswith('$encrypted$')
assert decrypt_value(get_encryption_key('value', pk=None), schedule.extra_data['var1']) == 'foo'
# test a no-op change
- r = patch(
- schedule.get_absolute_url(),
- data={'extra_data': {'var1': '$encrypted$'}},
- user=admin_user,
- expect=200
- )
+ r = patch(schedule.get_absolute_url(), data={'extra_data': {'var1': '$encrypted$'}}, user=admin_user, expect=200)
assert r.data['extra_data']['var1'] == '$encrypted$'
schedule.refresh_from_db()
assert decrypt_value(get_encryption_key('value', pk=None), schedule.extra_data['var1']) == 'foo'
# change to a different value
- r = patch(
- schedule.get_absolute_url(),
- data={'extra_data': {'var1': 'bar'}},
- user=admin_user,
- expect=200
- )
+ r = patch(schedule.get_absolute_url(), data={'extra_data': {'var1': 'bar'}}, user=admin_user, expect=200)
assert r.data['extra_data']['var1'] == '$encrypted$'
schedule.refresh_from_db()
assert decrypt_value(get_encryption_key('value', pk=None), schedule.extra_data['var1']) == 'bar'
@pytest.mark.django_db
-@pytest.mark.parametrize('rrule, error', [
- ("", "This field may not be blank"),
- ("DTSTART:NONSENSE", "Valid DTSTART required in rrule"),
- ("DTSTART:20300308T050000Z DTSTART:20310308T050000", "Multiple DTSTART is not supported"),
- ("DTSTART:20300308T050000Z", "RRULE required in rrule"),
- ("DTSTART:20300308T050000Z RRULE:NONSENSE", "INTERVAL required in rrule"),
- ("DTSTART:20300308T050000Z RRULE:FREQ=SECONDLY;INTERVAL=5;COUNT=6", "SECONDLY is not supported"),
- ("DTSTART:20300308T050000Z RRULE:FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=3,4", "Multiple BYMONTHDAYs not supported"), # noqa
- ("DTSTART:20300308T050000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYMONTH=1,2", "Multiple BYMONTHs not supported"), # noqa
- ("DTSTART:20300308T050000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=5MO", "BYDAY with numeric prefix not supported"), # noqa
- ("DTSTART:20300308T050000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYYEARDAY=100", "BYYEARDAY not supported"), # noqa
- ("DTSTART:20300308T050000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYWEEKNO=20", "BYWEEKNO not supported"),
- ("DTSTART:20300308T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=2000", "COUNT > 999 is unsupported"), # noqa
- ("DTSTART;TZID=US-Eastern:19961105T090000 RRULE:FREQ=MINUTELY;INTERVAL=10;COUNT=5", "A valid TZID must be provided"), # noqa
- ("DTSTART:20300308T050000Z RRULE:FREQ=REGULARLY;INTERVAL=1", "rrule parsing failed validation: invalid 'FREQ': REGULARLY"), # noqa
- ("DTSTART:20030925T104941Z RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z", "RRULE may not contain both COUNT and UNTIL"), # noqa
- ("DTSTART;TZID=America/New_York:20300308T050000Z RRULE:FREQ=DAILY;INTERVAL=1", "rrule parsing failed validation"),
- ("DTSTART:20300308T050000 RRULE:FREQ=DAILY;INTERVAL=1", "DTSTART cannot be a naive datetime"),
-])
+@pytest.mark.parametrize(
+ 'rrule, error',
+ [
+ ("", "This field may not be blank"),
+ ("DTSTART:NONSENSE", "Valid DTSTART required in rrule"),
+ ("DTSTART:20300308T050000Z DTSTART:20310308T050000", "Multiple DTSTART is not supported"),
+ ("DTSTART:20300308T050000Z", "RRULE required in rrule"),
+ ("DTSTART:20300308T050000Z RRULE:NONSENSE", "INTERVAL required in rrule"),
+ ("DTSTART:20300308T050000Z RRULE:FREQ=SECONDLY;INTERVAL=5;COUNT=6", "SECONDLY is not supported"),
+ ("DTSTART:20300308T050000Z RRULE:FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=3,4", "Multiple BYMONTHDAYs not supported"), # noqa
+ ("DTSTART:20300308T050000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYMONTH=1,2", "Multiple BYMONTHs not supported"), # noqa
+ ("DTSTART:20300308T050000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=5MO", "BYDAY with numeric prefix not supported"), # noqa
+ ("DTSTART:20300308T050000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYYEARDAY=100", "BYYEARDAY not supported"), # noqa
+ ("DTSTART:20300308T050000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYWEEKNO=20", "BYWEEKNO not supported"),
+ ("DTSTART:20300308T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=2000", "COUNT > 999 is unsupported"), # noqa
+ ("DTSTART;TZID=US-Eastern:19961105T090000 RRULE:FREQ=MINUTELY;INTERVAL=10;COUNT=5", "A valid TZID must be provided"), # noqa
+ ("DTSTART:20300308T050000Z RRULE:FREQ=REGULARLY;INTERVAL=1", "rrule parsing failed validation: invalid 'FREQ': REGULARLY"), # noqa
+ ("DTSTART:20030925T104941Z RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z", "RRULE may not contain both COUNT and UNTIL"), # noqa
+ ("DTSTART;TZID=America/New_York:20300308T050000Z RRULE:FREQ=DAILY;INTERVAL=1", "rrule parsing failed validation"),
+ ("DTSTART:20300308T050000 RRULE:FREQ=DAILY;INTERVAL=1", "DTSTART cannot be a naive datetime"),
+ ],
+)
def test_invalid_rrules(post, admin_user, project, inventory, rrule, error):
- job_template = JobTemplate.objects.create(
- name='test-jt',
- project=project,
- playbook='helloworld.yml',
- inventory=inventory
- )
+ job_template = JobTemplate.objects.create(name='test-jt', project=project, playbook='helloworld.yml', inventory=inventory)
url = reverse('api:job_template_schedules_list', kwargs={'pk': job_template.id})
- resp = post(url, {
- 'name': 'Some Schedule',
- 'rrule': rrule,
- }, admin_user, expect=400)
+ resp = post(
+ url,
+ {
+ 'name': 'Some Schedule',
+ 'rrule': rrule,
+ },
+ admin_user,
+ expect=400,
+ )
assert error in smart_str(resp.content)
@@ -345,12 +330,15 @@ def test_months_with_31_days(post, admin_user):
@pytest.mark.django_db
@pytest.mark.timeout(3)
-@pytest.mark.parametrize('freq, delta, total_seconds', (
- ('MINUTELY', 1, 60),
- ('MINUTELY', 15, 15 * 60),
- ('HOURLY', 1, 3600),
- ('HOURLY', 2, 3600 * 2),
-))
+@pytest.mark.parametrize(
+ 'freq, delta, total_seconds',
+ (
+ ('MINUTELY', 1, 60),
+ ('MINUTELY', 15, 15 * 60),
+ ('HOURLY', 1, 3600),
+ ('HOURLY', 2, 3600 * 2),
+ ),
+)
def test_really_old_dtstart(post, admin_user, freq, delta, total_seconds):
url = reverse('api:schedule_rrule')
# every <interval>, at the :30 second mark
@@ -364,16 +352,14 @@ def test_really_old_dtstart(post, admin_user, freq, delta, total_seconds):
assert next_ten[0] >= start
# ...but *no more than* <interval> into the future
- assert now() + datetime.timedelta(**{
- 'minutes' if freq == 'MINUTELY' else 'hours': delta
- })
+ assert now() + datetime.timedelta(**{'minutes' if freq == 'MINUTELY' else 'hours': delta})
# every date in the list is <interval> greater than the last
for i, x in enumerate(next_ten):
if i == 0:
continue
assert x.second == 30
- delta = (x - next_ten[i - 1])
+ delta = x - next_ten[i - 1]
assert delta.total_seconds() == total_seconds
@@ -404,12 +390,7 @@ def test_zoneinfo(get, admin_user):
@pytest.mark.django_db
def test_normal_user_can_create_jt_schedule(options, post, project, inventory, alice):
- jt = JobTemplate.objects.create(
- name='test-jt',
- project=project,
- playbook='helloworld.yml',
- inventory=inventory
- )
+ jt = JobTemplate.objects.create(name='test-jt', project=project, playbook='helloworld.yml', inventory=inventory)
jt.save()
url = reverse('api:schedule_list')
diff --git a/awx/main/tests/functional/api/test_script_endpoint.py b/awx/main/tests/functional/api/test_script_endpoint.py
index 64023c5b22..f4d1465ba8 100644
--- a/awx/main/tests/functional/api/test_script_endpoint.py
+++ b/awx/main/tests/functional/api/test_script_endpoint.py
@@ -7,9 +7,7 @@ from awx.main.models import Inventory, Host
@pytest.mark.django_db
def test_empty_inventory(post, get, admin_user, organization, group_factory):
- inventory = Inventory(name='basic_inventory',
- kind='',
- organization=organization)
+ inventory = Inventory(name='basic_inventory', kind='', organization=organization)
inventory.save()
resp = get(reverse('api:inventory_script_view', kwargs={'pk': inventory.pk}), admin_user)
jdata = json.loads(resp.content)
@@ -21,9 +19,7 @@ def test_empty_inventory(post, get, admin_user, organization, group_factory):
@pytest.mark.django_db
def test_ungrouped_hosts(post, get, admin_user, organization, group_factory):
- inventory = Inventory(name='basic_inventory',
- kind='',
- organization=organization)
+ inventory = Inventory(name='basic_inventory', kind='', organization=organization)
inventory.save()
Host.objects.create(name='first_host', inventory=inventory)
Host.objects.create(name='second_host', inventory=inventory)
diff --git a/awx/main/tests/functional/api/test_search_filter.py b/awx/main/tests/functional/api/test_search_filter.py
index 4e67ed834a..1bf2788859 100644
--- a/awx/main/tests/functional/api/test_search_filter.py
+++ b/awx/main/tests/functional/api/test_search_filter.py
@@ -32,7 +32,7 @@ class TestSearchFilter:
# Actually test the endpoint.
host_list_url = reverse('api:host_list')
- # Test if the OR releation works.
+        # Test if the OR relation works.
request = factory.get(host_list_url, data={'groups__search': ['g1', 'g2']})
request.user = admin
response = HostList.as_view()(request)
diff --git a/awx/main/tests/functional/api/test_settings.py b/awx/main/tests/functional/api/test_settings.py
index c478b70817..fa53c65aa9 100644
--- a/awx/main/tests/functional/api/test_settings.py
+++ b/awx/main/tests/functional/api/test_settings.py
@@ -13,9 +13,9 @@ from awx.conf.models import Setting
from awx.conf.registry import settings_registry
-TEST_GIF_LOGO = 'data:image/gif;base64,R0lGODlhIQAjAPIAAP//////AP8AAMzMAJmZADNmAAAAAAAAACH/C05FVFNDQVBFMi4wAwEAAAAh+QQJCgAHACwAAAAAIQAjAAADo3i63P4wykmrvTjrzZsxXfR94WMQBFh6RECuixHMLyzPQ13ewZCvow9OpzEAjIBj79cJJmU+FceIVEZ3QRozxBttmyOBwPBtisdX4Bha3oxmS+llFIPHQXQKkiSEXz9PeklHBzx3hYNyEHt4fmmAhHp8Nz45KgV5FgWFOFEGmwWbGqEfniChohmoQZ+oqRiZDZhEgk81I4mwg4EKVbxzrDHBEAkAIfkECQoABwAsAAAAACEAIwAAA6V4utz+MMpJq724GpP15p1kEAQYQmOwnWjgrmxjuMEAx8rsDjZ+fJvdLWQAFAHGWo8FRM54JqIRmYTigDrDMqZTbbbMj0CgjTLHZKvPQH6CTx+a2vKR0XbbOsoZ7SphG057gjl+c0dGgzeGNiaBiSgbBQUHBV08NpOVlkMSk0FKjZuURHiiOJxQnSGfQJuoEKREejK0dFRGjoiQt7iOuLx0rgxYEQkAIfkECQoABwAsAAAAACEAIwAAA7h4utxnxslJDSGR6nrz/owxYB64QUEwlGaVqlB7vrAJscsd3Lhy+wBArGEICo3DUFH4QDqK0GMy51xOgcGlEAfJ+iAFie62chR+jYKaSAuQGOqwJp7jGQRDuol+F/jxZWsyCmoQfwYwgoM5Oyg1i2w0A2WQIW2TPYOIkleQmy+UlYygoaIPnJmapKmqKiusMmSdpjxypnALtrcHioq3ury7hGm3dnVosVpMWFmwREZbddDOSsjVswcJACH5BAkKAAcALAAAAAAhACMAAAOxeLrc/jDKSZUxNS9DCNYV54HURQwfGRlDEFwqdLVuGjOsW9/Odb0wnsUAKBKNwsMFQGwyNUHckVl8bqI4o43lA26PNkv1S9DtNuOeVirw+aTI3qWAQwnud1vhLSnQLS0GeFF+GoVKNF0fh4Z+LDQ6Bn5/MTNmL0mAl2E3j2aclTmRmYCQoKEDiaRDKFhJez6UmbKyQowHtzy1uEl8DLCnEktrQ2PBD1NxSlXKIW5hz6cJACH5BAkKAAcALAAAAAAhACMAAAOkeLrc/jDKSau9OOvNlTFd9H3hYxAEWDJfkK5LGwTq+g0zDR/GgM+10A04Cm56OANgqTRmkDTmSOiLMgFOTM9AnFJHuexzYBAIijZf2SweJ8ttbbXLmd5+wBiJosSCoGF/fXEeS1g8gHl9hxODKkh4gkwVIwUekESIhA4FlgV3PyCWG52WI2oGnR2lnUWpqhqVEF4Xi7QjhpsshpOFvLosrnpoEAkAIfkECQoABwAsAAAAACEAIwAAA6l4utz+MMpJq71YGpPr3t1kEAQXQltQnk8aBCa7bMMLy4wx1G8s072PL6SrGQDI4zBThCU/v50zCVhidIYgNPqxWZkDg0AgxB2K4vEXbBSvr1JtZ3uOext0x7FqovF6OXtfe1UzdjAxhINPM013ChtJER8FBQeVRX8GlpggFZWWfjwblTiigGZnfqRmpUKbljKxDrNMeY2eF4R8jUiSur6/Z8GFV2WBtwwJACH5BAkKAAcALAAAAAAhACMAAAO6eLrcZi3KyQwhkGpq8f6ONWQgaAxB8JTfg6YkO50pzD5xhaurhCsGAKCnEw6NucNDCAkyI8ugdAhFKpnJJdMaeiofBejowUseCr9GYa0j1GyMdVgjBxoEuPSZXWKf7gKBeHtzMms0gHgGfDIVLztmjScvNZEyk28qjT40b5aXlHCbDgOhnzedoqOOlKeopaqrCy56sgtotbYKhYW6e7e9tsHBssO6eSTIm1peV0iuFUZDyU7NJnmcuQsJACH5BAkKAAcALAAAAAAhACMAAAOteLrc/jDKSZsxNS9DCNYV54Hh4H0kdAXBgKaOwbYX/Miza1vrVe8KA2AoJL5gwiQgeZz4GMXlcHl8xozQ3kW3KTajL9zsBJ1+sV2fQfALem+XAlRApxu4ioI1UpC76zJ4fRqDBzI+LFyFhH1iiS59fkgziW07jjRAG5QDeECOLk2Tj6KjnZafW6hAej6Smgevr6yysza2tiCuMasUF2Yov2gZUUQbU8YaaqjLpQkAOw==' # NOQA
-TEST_PNG_LOGO = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACEAAAAjCAYAAAAaLGNkAAAAAXNSR0IB2cksfwAAAdVpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDUuNC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNvbS90aWZmLzEuMC8iPgogICAgICAgICA8dGlmZjpDb21wcmVzc2lvbj4xPC90aWZmOkNvbXByZXNzaW9uPgogICAgICAgICA8dGlmZjpQaG90b21ldHJpY0ludGVycHJldGF0aW9uPjI8L3RpZmY6UGhvdG9tZXRyaWNJbnRlcnByZXRhdGlvbj4KICAgICAgICAgPHRpZmY6T3JpZW50YXRpb24+MTwvdGlmZjpPcmllbnRhdGlvbj4KICAgICAgPC9yZGY6RGVzY3JpcHRpb24+CiAgIDwvcmRmOlJERj4KPC94OnhtcG1ldGE+Cjl0tmoAAAHVSURBVFgJ7VZRsoMgDNTOu5E9U+/Ud6Z6JssGNg2oNKD90xkHCNnNkgTbYbieKwNXBn6bgSXQ4+16xi5UDiqDN3Pecr6+1fM5DHh7n1NEIPjjoRLKzOjG3qQ5dRtEy2LCjh/Gz2wDZE2nZYKkrxdn/kY9XQQkGCGqqDY5IgJFkEKgBCzDNGXhTKEye7boFRH6IPJj5EshiNCSjV4R4eSx7zhmR2tcdIuwmWiMeao7e0JHViZEWUI5aP8a9O+rx74D6sGEiJftiX3YeueIiFXg2KrhpqzjVC3dPZFYJZ7NOwwtNwM8R0UkLfH0sT5qck+OlkMq0BucKr0iWG7gpAQksD9esM1z3Lnf6SHjLh67nnKEGxC/iomWhByTeXOQJGHHcKxwHhHKnt1HIdYtmexkIb/HOURWTSJqn2gKMDG0bDUc/D0iAseovxUBoylmQCug6IVhSv+4DIeKI94jAr4AjiSEgQ25JYB+YWT9BZ94AM8erwgFkRifaArA6U0G5KT0m//z26REZuK9okgrT6VwE1jTHjbVzyNAyRwTEPOtuiex9FVBNZCkruaA4PZqFp1u8Rpww9/6rcK5y0EkAxRiZJt79PWOVYWGRE9pbJhavMengMflGyumk0akMsQnAAAAAElFTkSuQmCC' # NOQA
-TEST_JPEG_LOGO = 'data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAASABIAAD/4QBkRXhpZgAATU0AKgAAAAgAAwEGAAMAAAABAAIAAAESAAMAAAABAAEAAIdpAAQAAAABAAAAMgAAAAAAA6ABAAMAAAABAAEAAKACAAQAAAABAAAAIaADAAQAAAABAAAAIwAAAAD/4QkhaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLwA8P3hwYWNrZXQgYmVnaW49Iu+7vyIgaWQ9Ilc1TTBNcENlaGlIenJlU3pOVGN6a2M5ZCI/PiA8eDp4bXBtZXRhIHhtbG5zOng9ImFkb2JlOm5zOm1ldGEvIiB4OnhtcHRrPSJYTVAgQ29yZSA1LjQuMCI+IDxyZGY6UkRGIHhtbG5zOnJkZj0iaHR0cDovL3d3dy53My5vcmcvMTk5OS8wMi8yMi1yZGYtc3ludGF4LW5zIyI+IDxyZGY6RGVzY3JpcHRpb24gcmRmOmFib3V0PSIiLz4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA8P3hwYWNrZXQgZW5kPSJ3Ij8+AP/tADhQaG90b3Nob3AgMy4wADhCSU0EBAAAAAAAADhCSU0EJQAAAAAAENQdjNmPALIE6YAJmOz4Qn7/wAARCAAjACEDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYG
RolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9sAQwAGBgYGBgYKBgYKDgoKCg4SDg4ODhIXEhISEhIXHBcXFxcXFxwcHBwcHBwcIiIiIiIiJycnJycsLCwsLCwsLCws/9sAQwEHBwcLCgsTCgoTLh8aHy4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4u/90ABAAD/9oADAMBAAIRAxEAPwD6poormvFfivSvB2lHVtWLGMtsRE2hnYKzlVLsi52oxALDdjauWKqQCXQfFXh7xP8Aaf7AvYrz7HL5U3lk/K3YjIGVODtcZVsHBODXQV806bcT+E9L03XbCOS2udMsLQanbB4po72xYMfOQpKYyV2zPEwcNwVK7WAr6WriwWMWIUvdcZRdmnuu33rVFSjYKKKK7ST/0PqmuF8Vv4X8S+HNZ0+e/gIsYJvtEsL+bJZsI3UuyxNvBA3gpxvXchyCRXdV8ta3bW667DoloW1y10tLLTJxZWP2hoLSGYzNHclGZpJC0ESk8IAZcRB8is61T2cHK1/1DrY526h8YXHh691vxCz6dafY5Q0U7yGSeQxSxohNzJLcbUeQ4VnVNxBRCWL19b2eraVqE9xa2F3BcS2jbJ0ikV2ibJG1wpJU5UjBx0PpXzrrniy4k17TrrWrGex022ufMijvd9m11PGH8naXKqsUcgR3MhB5U7MA16x4L8F3vhq2sY9Ru4rg6day2tusEAhCrcOkknmEMRI2Y1AcLGT8xYMzZHjZFGu6cquKjaUnt2XS76vv/SN8RVjOdoKyXY9Cooor3TA//9H6pr4gfxRrMvxJ0/whLJE+maVrcVnZRtBCzwQQ3SIipMU80fKignflgPmJr7fr4A/5rf8A9zJ/7eUAdX8SfGviPwl8TtaPh6eK1eTyN0n2eCSUg28OV8ySNn2/KDtztzzjNfZVhY2umWMGm2KeXb2sSQxJknakYCqMkknAHUnNfBXxt/5Kdq//AG7/APpPFX3/AEAFFFFAH//Z' # NOQA
+TEST_GIF_LOGO = 'data:image/gif;base64,R0lGODlhIQAjAPIAAP//////AP8AAMzMAJmZADNmAAAAAAAAACH/C05FVFNDQVBFMi4wAwEAAAAh+QQJCgAHACwAAAAAIQAjAAADo3i63P4wykmrvTjrzZsxXfR94WMQBFh6RECuixHMLyzPQ13ewZCvow9OpzEAjIBj79cJJmU+FceIVEZ3QRozxBttmyOBwPBtisdX4Bha3oxmS+llFIPHQXQKkiSEXz9PeklHBzx3hYNyEHt4fmmAhHp8Nz45KgV5FgWFOFEGmwWbGqEfniChohmoQZ+oqRiZDZhEgk81I4mwg4EKVbxzrDHBEAkAIfkECQoABwAsAAAAACEAIwAAA6V4utz+MMpJq724GpP15p1kEAQYQmOwnWjgrmxjuMEAx8rsDjZ+fJvdLWQAFAHGWo8FRM54JqIRmYTigDrDMqZTbbbMj0CgjTLHZKvPQH6CTx+a2vKR0XbbOsoZ7SphG057gjl+c0dGgzeGNiaBiSgbBQUHBV08NpOVlkMSk0FKjZuURHiiOJxQnSGfQJuoEKREejK0dFRGjoiQt7iOuLx0rgxYEQkAIfkECQoABwAsAAAAACEAIwAAA7h4utxnxslJDSGR6nrz/owxYB64QUEwlGaVqlB7vrAJscsd3Lhy+wBArGEICo3DUFH4QDqK0GMy51xOgcGlEAfJ+iAFie62chR+jYKaSAuQGOqwJp7jGQRDuol+F/jxZWsyCmoQfwYwgoM5Oyg1i2w0A2WQIW2TPYOIkleQmy+UlYygoaIPnJmapKmqKiusMmSdpjxypnALtrcHioq3ury7hGm3dnVosVpMWFmwREZbddDOSsjVswcJACH5BAkKAAcALAAAAAAhACMAAAOxeLrc/jDKSZUxNS9DCNYV54HURQwfGRlDEFwqdLVuGjOsW9/Odb0wnsUAKBKNwsMFQGwyNUHckVl8bqI4o43lA26PNkv1S9DtNuOeVirw+aTI3qWAQwnud1vhLSnQLS0GeFF+GoVKNF0fh4Z+LDQ6Bn5/MTNmL0mAl2E3j2aclTmRmYCQoKEDiaRDKFhJez6UmbKyQowHtzy1uEl8DLCnEktrQ2PBD1NxSlXKIW5hz6cJACH5BAkKAAcALAAAAAAhACMAAAOkeLrc/jDKSau9OOvNlTFd9H3hYxAEWDJfkK5LGwTq+g0zDR/GgM+10A04Cm56OANgqTRmkDTmSOiLMgFOTM9AnFJHuexzYBAIijZf2SweJ8ttbbXLmd5+wBiJosSCoGF/fXEeS1g8gHl9hxODKkh4gkwVIwUekESIhA4FlgV3PyCWG52WI2oGnR2lnUWpqhqVEF4Xi7QjhpsshpOFvLosrnpoEAkAIfkECQoABwAsAAAAACEAIwAAA6l4utz+MMpJq71YGpPr3t1kEAQXQltQnk8aBCa7bMMLy4wx1G8s072PL6SrGQDI4zBThCU/v50zCVhidIYgNPqxWZkDg0AgxB2K4vEXbBSvr1JtZ3uOext0x7FqovF6OXtfe1UzdjAxhINPM013ChtJER8FBQeVRX8GlpggFZWWfjwblTiigGZnfqRmpUKbljKxDrNMeY2eF4R8jUiSur6/Z8GFV2WBtwwJACH5BAkKAAcALAAAAAAhACMAAAO6eLrcZi3KyQwhkGpq8f6ONWQgaAxB8JTfg6YkO50pzD5xhaurhCsGAKCnEw6NucNDCAkyI8ugdAhFKpnJJdMaeiofBejowUseCr9GYa0j1GyMdVgjBxoEuPSZXWKf7gKBeHtzMms0gHgGfDIVLztmjScvNZEyk28qjT40b5aXlHCbDgOhnzedoqOOlKeopaqrCy56sgtotbYKhYW6e7e9tsHBssO6eSTIm1peV0iuFUZDyU7NJnmcuQsJACH5BAkKAAcALAAAAAAhACMAAAOteLrc/jDKSZsxNS9DCNYV54Hh4H0kdAXBgKaOwbYX/Miza1vrVe8KA2AoJL5gwiQgeZz4GMXlcHl8xozQ3kW3KTajL9zsBJ1+sV2fQfALem+XAlRApxu4ioI1UpC76zJ4fRqDBzI+LFyFhH1iiS59fkgziW07jjRAG5QDeECOLk2Tj6KjnZafW6hAej6Smgevr6yysza2tiCuMasUF2Yov2gZUUQbU8YaaqjLpQkAOw==' # NOQA
+TEST_PNG_LOGO = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACEAAAAjCAYAAAAaLGNkAAAAAXNSR0IB2cksfwAAAdVpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDUuNC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNvbS90aWZmLzEuMC8iPgogICAgICAgICA8dGlmZjpDb21wcmVzc2lvbj4xPC90aWZmOkNvbXByZXNzaW9uPgogICAgICAgICA8dGlmZjpQaG90b21ldHJpY0ludGVycHJldGF0aW9uPjI8L3RpZmY6UGhvdG9tZXRyaWNJbnRlcnByZXRhdGlvbj4KICAgICAgICAgPHRpZmY6T3JpZW50YXRpb24+MTwvdGlmZjpPcmllbnRhdGlvbj4KICAgICAgPC9yZGY6RGVzY3JpcHRpb24+CiAgIDwvcmRmOlJERj4KPC94OnhtcG1ldGE+Cjl0tmoAAAHVSURBVFgJ7VZRsoMgDNTOu5E9U+/Ud6Z6JssGNg2oNKD90xkHCNnNkgTbYbieKwNXBn6bgSXQ4+16xi5UDiqDN3Pecr6+1fM5DHh7n1NEIPjjoRLKzOjG3qQ5dRtEy2LCjh/Gz2wDZE2nZYKkrxdn/kY9XQQkGCGqqDY5IgJFkEKgBCzDNGXhTKEye7boFRH6IPJj5EshiNCSjV4R4eSx7zhmR2tcdIuwmWiMeao7e0JHViZEWUI5aP8a9O+rx74D6sGEiJftiX3YeueIiFXg2KrhpqzjVC3dPZFYJZ7NOwwtNwM8R0UkLfH0sT5qck+OlkMq0BucKr0iWG7gpAQksD9esM1z3Lnf6SHjLh67nnKEGxC/iomWhByTeXOQJGHHcKxwHhHKnt1HIdYtmexkIb/HOURWTSJqn2gKMDG0bDUc/D0iAseovxUBoylmQCug6IVhSv+4DIeKI94jAr4AjiSEgQ25JYB+YWT9BZ94AM8erwgFkRifaArA6U0G5KT0m//z26REZuK9okgrT6VwE1jTHjbVzyNAyRwTEPOtuiex9FVBNZCkruaA4PZqFp1u8Rpww9/6rcK5y0EkAxRiZJt79PWOVYWGRE9pbJhavMengMflGyumk0akMsQnAAAAAElFTkSuQmCC' # NOQA
+TEST_JPEG_LOGO = 'data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAASABIAAD/4QBkRXhpZgAATU0AKgAAAAgAAwEGAAMAAAABAAIAAAESAAMAAAABAAEAAIdpAAQAAAABAAAAMgAAAAAAA6ABAAMAAAABAAEAAKACAAQAAAABAAAAIaADAAQAAAABAAAAIwAAAAD/4QkhaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLwA8P3hwYWNrZXQgYmVnaW49Iu+7vyIgaWQ9Ilc1TTBNcENlaGlIenJlU3pOVGN6a2M5ZCI/PiA8eDp4bXBtZXRhIHhtbG5zOng9ImFkb2JlOm5zOm1ldGEvIiB4OnhtcHRrPSJYTVAgQ29yZSA1LjQuMCI+IDxyZGY6UkRGIHhtbG5zOnJkZj0iaHR0cDovL3d3dy53My5vcmcvMTk5OS8wMi8yMi1yZGYtc3ludGF4LW5zIyI+IDxyZGY6RGVzY3JpcHRpb24gcmRmOmFib3V0PSIiLz4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA8P3hwYWNrZXQgZW5kPSJ3Ij8+AP/tADhQaG90b3Nob3AgMy4wADhCSU0EBAAAAAAAADhCSU0EJQAAAAAAENQdjNmPALIE6YAJmOz4Qn7/wAARCAAjACEDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9sAQwAGBgYGBgYKBgYKDgoKCg4SDg4ODhIXEhISEhIXHBcXFxcXFxwcHBwcHBwcIiIiIiIiJycnJycsLCwsLCwsLCws/9sAQwEHBwcLCgsTCgoTLh8aHy4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4u/90ABAAD/9oADAMBAAIRAxEAPwD6poormvFfivSvB2lHVtWLGMtsRE2hnYKzlVLsi52oxALDdjauWKqQCXQfFXh7xP8Aaf7AvYrz7HL5U3lk/K3YjIGVODtcZVsHBODXQV806bcT+E9L03XbCOS2udMsLQanbB4po72xYMfOQpKYyV2zPEwcNwVK7WAr6WriwWMWIUvdcZRdmnuu33rVFSjYKKKK7ST/0PqmuF8Vv4X8S+HNZ0+e/gIsYJvtEsL+bJZsI3UuyxNvBA3gpxvXchyCRXdV8ta3bW667DoloW1y10tLLTJxZWP2hoLSGYzNHclGZpJC0ESk8IAZcRB8is61T2cHK1/1DrY526h8YXHh691vxCz6dafY5Q0U7yGSeQxSxohNzJLcbUeQ4VnVNxBRCWL19b2eraVqE9xa2F3BcS2jbJ0ikV2ibJG1wpJU5UjBx0PpXzrrniy4k17TrrWrGex022ufMijvd9m11PGH8naXKqsUcgR3MhB5U7MA16x4L8F3vhq2sY9Ru4rg6day2tusEAhCrcOkknmEMRI2Y1AcLGT8xYMzZHjZFGu6cquKjaUnt2XS76vv/SN8RVjOdoKyXY9Cooor3TA//9H6pr4gfxRrMvxJ0/whLJE+maVrcVnZRtBCzwQQ3SIipMU80fKignflgPmJr7fr4A/5rf8A9zJ/7eUAdX8SfGviPwl8TtaPh6eK1eTyN0n2eCSUg28OV8ySNn2/KDtztzzjNfZVhY2umWMGm2KeXb2sSQxJknakYCqMkknAHUnNfBXxt/5Kdq//AG7/APpPFX3/AEAFFFFAH//Z' # NOQA
@pytest.mark.django_db
@@ -47,16 +47,19 @@ def test_jobs_settings(get, put, patch, delete, admin):
@pytest.mark.django_db
-@pytest.mark.parametrize('value, expected', [
- [True, 400],
- ['invalid', 400],
- [['also', 'invalid'], 400],
- [{}, 200],
- [{'X_FOO': 'VALID'}, 200],
- [{'X_TOTAL': 100}, 200],
- [{'X_FOO': ['ALSO', 'INVALID']}, 400],
- [{'X_FOO': {'ALSO': 'INVALID'}}, 400],
-])
+@pytest.mark.parametrize(
+ 'value, expected',
+ [
+ [True, 400],
+ ['invalid', 400],
+ [['also', 'invalid'], 400],
+ [{}, 200],
+ [{'X_FOO': 'VALID'}, 200],
+ [{'X_TOTAL': 100}, 200],
+ [{'X_FOO': ['ALSO', 'INVALID']}, 400],
+ [{'X_FOO': {'ALSO': 'INVALID'}}, 400],
+ ],
+)
def test_awx_task_env_validity(get, patch, admin, value, expected):
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'jobs'})
patch(url, user=admin, data={'AWX_TASK_ENV': value}, expect=expected)
@@ -90,46 +93,48 @@ def test_ldap_settings(get, put, patch, delete, admin):
@pytest.mark.django_db
-@pytest.mark.parametrize('value', [
- None, '', 'INVALID', 1, [1], ['INVALID'],
-])
+@pytest.mark.parametrize(
+ 'value',
+ [
+ None,
+ '',
+ 'INVALID',
+ 1,
+ [1],
+ ['INVALID'],
+ ],
+)
def test_ldap_user_flags_by_group_invalid_dn(get, patch, admin, value):
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'ldap'})
- patch(url, user=admin,
- data={'AUTH_LDAP_USER_FLAGS_BY_GROUP': {'is_superuser': value}},
- expect=400)
+ patch(url, user=admin, data={'AUTH_LDAP_USER_FLAGS_BY_GROUP': {'is_superuser': value}}, expect=400)
@pytest.mark.django_db
def test_ldap_user_flags_by_group_string(get, patch, admin):
expected = 'CN=Admins,OU=Groups,DC=example,DC=com'
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'ldap'})
- patch(url, user=admin,
- data={'AUTH_LDAP_USER_FLAGS_BY_GROUP': {'is_superuser': expected}},
- expect=200)
+ patch(url, user=admin, data={'AUTH_LDAP_USER_FLAGS_BY_GROUP': {'is_superuser': expected}}, expect=200)
resp = get(url, user=admin)
assert resp.data['AUTH_LDAP_USER_FLAGS_BY_GROUP']['is_superuser'] == [expected]
@pytest.mark.django_db
def test_ldap_user_flags_by_group_list(get, patch, admin):
- expected = [
- 'CN=Admins,OU=Groups,DC=example,DC=com',
- 'CN=Superadmins,OU=Groups,DC=example,DC=com'
- ]
+ expected = ['CN=Admins,OU=Groups,DC=example,DC=com', 'CN=Superadmins,OU=Groups,DC=example,DC=com']
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'ldap'})
- patch(url, user=admin,
- data={'AUTH_LDAP_USER_FLAGS_BY_GROUP': {'is_superuser': expected}},
- expect=200)
+ patch(url, user=admin, data={'AUTH_LDAP_USER_FLAGS_BY_GROUP': {'is_superuser': expected}}, expect=200)
resp = get(url, user=admin)
assert resp.data['AUTH_LDAP_USER_FLAGS_BY_GROUP']['is_superuser'] == expected
-@pytest.mark.parametrize('setting', [
- 'AUTH_LDAP_USER_DN_TEMPLATE',
- 'AUTH_LDAP_REQUIRE_GROUP',
- 'AUTH_LDAP_DENY_GROUP',
-])
+@pytest.mark.parametrize(
+ 'setting',
+ [
+ 'AUTH_LDAP_USER_DN_TEMPLATE',
+ 'AUTH_LDAP_REQUIRE_GROUP',
+ 'AUTH_LDAP_DENY_GROUP',
+ ],
+)
@pytest.mark.django_db
def test_empty_ldap_dn(get, put, patch, delete, admin, setting):
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'ldap'})
@@ -239,6 +244,7 @@ def _mock_logging_defaults():
# Pre-populate settings obj with defaults
class MockSettings:
pass
+
mock_settings_obj = MockSettings()
mock_settings_json = dict()
for key in settings_registry.get_registered_settings(category_slug='logging'):
@@ -249,11 +255,13 @@ def _mock_logging_defaults():
return mock_settings_obj, mock_settings_json
-
-@pytest.mark.parametrize('key, value, error', [
- ['LOG_AGGREGATOR_TYPE', 'logstash', 'Cannot enable log aggregator without providing host.'],
- ['LOG_AGGREGATOR_HOST', 'https://logstash', 'Cannot enable log aggregator without providing type.']
-])
+@pytest.mark.parametrize(
+ 'key, value, error',
+ [
+ ['LOG_AGGREGATOR_TYPE', 'logstash', 'Cannot enable log aggregator without providing host.'],
+ ['LOG_AGGREGATOR_HOST', 'https://logstash', 'Cannot enable log aggregator without providing type.'],
+ ],
+)
@pytest.mark.django_db
def test_logging_aggregator_missing_settings(put, post, admin, key, value, error):
_, mock_settings = _mock_logging_defaults()
@@ -264,14 +272,16 @@ def test_logging_aggregator_missing_settings(put, post, admin, key, value, error
assert error in str(response.data)
-@pytest.mark.parametrize('type, host, port, username, password', [
- ['logstash', 'localhost', 8080, 'logger', 'mcstash'],
- ['loggly', 'http://logs-01.loggly.com/inputs/1fd38090-hash-h4a$h-8d80-t0k3n71/tag/http/', None, None, None],
- ['splunk', 'https://yoursplunk:8088/services/collector/event', None, None, None],
- ['other', '97.221.40.41', 9000, 'logger', 'mcstash'],
- ['sumologic', 'https://endpoint5.collection.us2.sumologic.com/receiver/v1/http/Zagnw_f9XGr_zZgd-_EPM0hb8_rUU7_RU8Q==',
- None, None, None]
-])
+@pytest.mark.parametrize(
+ 'type, host, port, username, password',
+ [
+ ['logstash', 'localhost', 8080, 'logger', 'mcstash'],
+ ['loggly', 'http://logs-01.loggly.com/inputs/1fd38090-hash-h4a$h-8d80-t0k3n71/tag/http/', None, None, None],
+ ['splunk', 'https://yoursplunk:8088/services/collector/event', None, None, None],
+ ['other', '97.221.40.41', 9000, 'logger', 'mcstash'],
+ ['sumologic', 'https://endpoint5.collection.us2.sumologic.com/receiver/v1/http/Zagnw_f9XGr_zZgd-_EPM0hb8_rUU7_RU8Q==', None, None, None],
+ ],
+)
@pytest.mark.django_db
def test_logging_aggregator_valid_settings(put, post, admin, type, host, port, username, password):
_, mock_settings = _mock_logging_defaults()
@@ -294,7 +304,7 @@ def test_logging_aggregator_valid_settings(put, post, admin, type, host, port, u
assert port == response.data.get('LOG_AGGREGATOR_PORT')
if username:
assert username in response.data.get('LOG_AGGREGATOR_USERNAME')
- if password: # Note: password should be encrypted
+ if password: # Note: password should be encrypted
assert '$encrypted$' in response.data.get('LOG_AGGREGATOR_PASSWORD')
@@ -315,40 +325,39 @@ def test_logging_aggregator_connection_test_valid(put, post, admin):
@pytest.mark.django_db
-@pytest.mark.parametrize('setting_name', [
- 'AWX_ISOLATED_CHECK_INTERVAL',
- 'AWX_ISOLATED_LAUNCH_TIMEOUT',
- 'AWX_ISOLATED_CONNECTION_TIMEOUT',
-])
+@pytest.mark.parametrize(
+ 'setting_name',
+ [
+ 'AWX_ISOLATED_CHECK_INTERVAL',
+ 'AWX_ISOLATED_LAUNCH_TIMEOUT',
+ 'AWX_ISOLATED_CONNECTION_TIMEOUT',
+ ],
+)
def test_isolated_job_setting_validation(get, patch, admin, setting_name):
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'jobs'})
- patch(url, user=admin, data={
- setting_name: -1
- }, expect=400)
+ patch(url, user=admin, data={setting_name: -1}, expect=400)
data = get(url, user=admin).data
assert data[setting_name] != -1
@pytest.mark.django_db
-@pytest.mark.parametrize('key, expected', [
- ['AWX_ISOLATED_PRIVATE_KEY', '$encrypted$'],
- ['AWX_ISOLATED_PUBLIC_KEY', 'secret'],
-])
+@pytest.mark.parametrize(
+ 'key, expected',
+ [
+ ['AWX_ISOLATED_PRIVATE_KEY', '$encrypted$'],
+ ['AWX_ISOLATED_PUBLIC_KEY', 'secret'],
+ ],
+)
def test_isolated_keys_readonly(get, patch, delete, admin, key, expected):
- Setting.objects.create(
- key=key,
- value='secret'
- ).save()
+ Setting.objects.create(key=key, value='secret').save()
assert getattr(settings, key) == 'secret'
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'jobs'})
resp = get(url, user=admin)
assert resp.data[key] == expected
- patch(url, user=admin, data={
- key: 'new-secret'
- })
+ patch(url, user=admin, data={key: 'new-secret'})
assert getattr(settings, key) == 'secret'
delete(url, user=admin)
@@ -362,9 +371,7 @@ def test_isolated_key_flag_readonly(get, patch, delete, admin):
resp = get(url, user=admin)
assert resp.data['AWX_ISOLATED_KEY_GENERATION'] is True
- patch(url, user=admin, data={
- 'AWX_ISOLATED_KEY_GENERATION': False
- })
+ patch(url, user=admin, data={'AWX_ISOLATED_KEY_GENERATION': False})
assert settings.AWX_ISOLATED_KEY_GENERATION is True
delete(url, user=admin)
@@ -378,20 +385,24 @@ def test_saml_x509cert_validation(patch, get, admin, headers):
if headers:
cert = '-----BEGIN CERTIFICATE-----\n' + cert + '\n-----END CERTIFICATE-----'
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'saml'})
- resp = patch(url, user=admin, data={
- 'SOCIAL_AUTH_SAML_ENABLED_IDPS': {
- "okta": {
- "attr_last_name": "LastName",
- "attr_username": "login",
- "entity_id": "http://www.okta.com/abc123",
- "attr_user_permanent_id": "login",
- "url": "https://example.okta.com/app/abc123/xyz123/sso/saml",
- "attr_email": "Email",
- "x509cert": cert,
- "attr_first_name": "FirstName"
+ resp = patch(
+ url,
+ user=admin,
+ data={
+ 'SOCIAL_AUTH_SAML_ENABLED_IDPS': {
+ "okta": {
+ "attr_last_name": "LastName",
+ "attr_username": "login",
+ "entity_id": "http://www.okta.com/abc123",
+ "attr_user_permanent_id": "login",
+ "url": "https://example.okta.com/app/abc123/xyz123/sso/saml",
+ "attr_email": "Email",
+ "x509cert": cert,
+ "attr_first_name": "FirstName",
+ }
}
- }
- })
+ },
+ )
assert resp.status_code == 200
@@ -420,10 +431,15 @@ def test_github_enterprise_settings(get, put, patch, delete, admin):
response = get(url, user=admin, expect=200)
data = dict(response.data.items())
put(url, user=admin, data=data, expect=200)
- patch(url, user=admin, data={
- 'SOCIAL_AUTH_GITHUB_ENTERPRISE_URL': 'example.com',
- 'SOCIAL_AUTH_GITHUB_ENTERPRISE_API_URL': 'example.com',
- }, expect=200)
+ patch(
+ url,
+ user=admin,
+ data={
+ 'SOCIAL_AUTH_GITHUB_ENTERPRISE_URL': 'example.com',
+ 'SOCIAL_AUTH_GITHUB_ENTERPRISE_API_URL': 'example.com',
+ },
+ expect=200,
+ )
response = get(url, user=admin, expect=200)
assert response.data['SOCIAL_AUTH_GITHUB_ENTERPRISE_URL'] == 'example.com'
assert response.data['SOCIAL_AUTH_GITHUB_ENTERPRISE_API_URL'] == 'example.com'
diff --git a/awx/main/tests/functional/api/test_survey_spec.py b/awx/main/tests/functional/api/test_survey_spec.py
index 0a5d009d25..cbb22b3bdc 100644
--- a/awx/main/tests/functional/api/test_survey_spec.py
+++ b/awx/main/tests/functional/api/test_survey_spec.py
@@ -10,7 +10,6 @@ from awx.main.access import JobTemplateAccess
from awx.main.utils.common import get_type_for_model
-
@pytest.fixture
def job_template_with_survey(job_template_factory):
objects = job_template_factory('jt', project='prj', survey='submitted_email')
@@ -19,155 +18,116 @@ def job_template_with_survey(job_template_factory):
@pytest.mark.django_db
@pytest.mark.survey
-@pytest.mark.parametrize("role_field,expected_status_code", [
- ('admin_role', 200),
- ('execute_role', 403),
- ('read_role', 403)
-])
-def test_survey_edit_access(job_template, workflow_job_template, survey_spec_factory, rando, post,
- role_field, expected_status_code):
+@pytest.mark.parametrize("role_field,expected_status_code", [('admin_role', 200), ('execute_role', 403), ('read_role', 403)])
+def test_survey_edit_access(job_template, workflow_job_template, survey_spec_factory, rando, post, role_field, expected_status_code):
survey_input_data = survey_spec_factory('new_question')
for template in (job_template, workflow_job_template):
role = getattr(template, role_field)
role.members.add(rando)
- post(reverse('api:{}_survey_spec'.format(get_type_for_model(template.__class__)),
- kwargs={'pk': template.id}),
- user=rando, data=survey_input_data, expect=expected_status_code)
+ post(
+ reverse('api:{}_survey_spec'.format(get_type_for_model(template.__class__)), kwargs={'pk': template.id}),
+ user=rando,
+ data=survey_input_data,
+ expect=expected_status_code,
+ )
# Test normal operations with survey license work
@pytest.mark.django_db
@pytest.mark.survey
def test_survey_spec_view_allowed(deploy_jobtemplate, get, admin_user):
- get(reverse('api:job_template_survey_spec', kwargs={'pk': deploy_jobtemplate.id}),
- admin_user, expect=200)
+ get(reverse('api:job_template_survey_spec', kwargs={'pk': deploy_jobtemplate.id}), admin_user, expect=200)
@pytest.mark.django_db
@pytest.mark.survey
def test_survey_spec_sucessful_creation(survey_spec_factory, job_template, post, admin_user):
survey_input_data = survey_spec_factory('new_question')
- post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}),
- data=survey_input_data, user=admin_user, expect=200)
+ post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}), data=survey_input_data, user=admin_user, expect=200)
updated_jt = JobTemplate.objects.get(pk=job_template.pk)
assert updated_jt.survey_spec == survey_input_data
@pytest.mark.django_db
@pytest.mark.parametrize('with_default', [True, False])
-@pytest.mark.parametrize('value, status', [
- ('SUPERSECRET', 201),
- (['some', 'invalid', 'list'], 400),
- ({'some-invalid': 'dict'}, 400),
- (False, 400)
-])
+@pytest.mark.parametrize('value, status', [('SUPERSECRET', 201), (['some', 'invalid', 'list'], 400), ({'some-invalid': 'dict'}, 400), (False, 400)])
def test_survey_spec_passwords_are_encrypted_on_launch(job_template_factory, post, admin_user, with_default, value, status):
- objects = job_template_factory('jt', organization='org1', project='prj',
- inventory='inv', credential='cred')
+ objects = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred')
job_template = objects.job_template
job_template.survey_enabled = True
job_template.save()
input_data = {
'description': 'A survey',
- 'spec': [{
- 'index': 0,
- 'question_name': 'What is your password?',
- 'required': True,
- 'variable': 'secret_value',
- 'type': 'password'
- }],
- 'name': 'my survey'
+ 'spec': [{'index': 0, 'question_name': 'What is your password?', 'required': True, 'variable': 'secret_value', 'type': 'password'}],
+ 'name': 'my survey',
}
if with_default:
input_data['spec'][0]['default'] = 'some-default'
- post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}),
- data=input_data, user=admin_user, expect=200)
- resp = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
- dict(extra_vars=dict(secret_value=value)), admin_user, expect=status)
+ post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}), data=input_data, user=admin_user, expect=200)
+ resp = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), dict(extra_vars=dict(secret_value=value)), admin_user, expect=status)
if status == 201:
job = Job.objects.get(pk=resp.data['id'])
assert json.loads(job.extra_vars)['secret_value'].startswith('$encrypted$')
- assert json.loads(job.decrypted_extra_vars()) == {
- 'secret_value': value
- }
+ assert json.loads(job.decrypted_extra_vars()) == {'secret_value': value}
else:
assert "for 'secret_value' expected to be a string." in json.dumps(resp.data)
@pytest.mark.django_db
def test_survey_spec_passwords_with_empty_default(job_template_factory, post, admin_user):
- objects = job_template_factory('jt', organization='org1', project='prj',
- inventory='inv', credential='cred')
+ objects = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred')
job_template = objects.job_template
job_template.survey_enabled = True
job_template.save()
input_data = {
'description': 'A survey',
- 'spec': [{
- 'index': 0,
- 'question_name': 'What is your password?',
- 'required': False,
- 'variable': 'secret_value',
- 'type': 'password',
- 'default': ''
- }],
- 'name': 'my survey'
+ 'spec': [{'index': 0, 'question_name': 'What is your password?', 'required': False, 'variable': 'secret_value', 'type': 'password', 'default': ''}],
+ 'name': 'my survey',
}
- post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}),
- data=input_data, user=admin_user, expect=200)
+ post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}), data=input_data, user=admin_user, expect=200)
- resp = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
- {}, admin_user, expect=201)
+ resp = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {}, admin_user, expect=201)
job = Job.objects.get(pk=resp.data['id'])
assert json.loads(job.extra_vars)['secret_value'] == ''
- assert json.loads(job.decrypted_extra_vars()) == {
- 'secret_value': ''
- }
+ assert json.loads(job.decrypted_extra_vars()) == {'secret_value': ''}
@pytest.mark.django_db
-@pytest.mark.parametrize('default, launch_value, expected_extra_vars, status', [
- ['', '$encrypted$', {'secret_value': ''}, 201],
- ['', 'y', {'secret_value': 'y'}, 201],
- ['', 'y' * 100, None, 400],
- [None, '$encrypted$', {}, 201],
- [None, 'y', {'secret_value': 'y'}, 201],
- [None, 'y' * 100, {}, 400],
- ['x', '$encrypted$', {'secret_value': 'x'}, 201],
- ['x', 'y', {'secret_value': 'y'}, 201],
- ['x', 'y' * 100, {}, 400],
- ['x' * 100, '$encrypted$', {}, 201],
- ['x' * 100, 'y', {'secret_value': 'y'}, 201],
- ['x' * 100, 'y' * 100, {}, 400],
-])
-def test_survey_spec_passwords_with_default_optional(job_template_factory, post, admin_user,
- default, launch_value,
- expected_extra_vars, status):
- objects = job_template_factory('jt', organization='org1', project='prj',
- inventory='inv', credential='cred')
+@pytest.mark.parametrize(
+ 'default, launch_value, expected_extra_vars, status',
+ [
+ ['', '$encrypted$', {'secret_value': ''}, 201],
+ ['', 'y', {'secret_value': 'y'}, 201],
+ ['', 'y' * 100, None, 400],
+ [None, '$encrypted$', {}, 201],
+ [None, 'y', {'secret_value': 'y'}, 201],
+ [None, 'y' * 100, {}, 400],
+ ['x', '$encrypted$', {'secret_value': 'x'}, 201],
+ ['x', 'y', {'secret_value': 'y'}, 201],
+ ['x', 'y' * 100, {}, 400],
+ ['x' * 100, '$encrypted$', {}, 201],
+ ['x' * 100, 'y', {'secret_value': 'y'}, 201],
+ ['x' * 100, 'y' * 100, {}, 400],
+ ],
+)
+def test_survey_spec_passwords_with_default_optional(job_template_factory, post, admin_user, default, launch_value, expected_extra_vars, status):
+ objects = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred')
job_template = objects.job_template
job_template.survey_enabled = True
job_template.save()
input_data = {
'description': 'A survey',
- 'spec': [{
- 'index': 0,
- 'question_name': 'What is your password?',
- 'required': False,
- 'variable': 'secret_value',
- 'type': 'password',
- 'max': 3
- }],
- 'name': 'my survey'
+ 'spec': [{'index': 0, 'question_name': 'What is your password?', 'required': False, 'variable': 'secret_value', 'type': 'password', 'max': 3}],
+ 'name': 'my survey',
}
if default is not None:
input_data['spec'][0]['default'] = default
- post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}),
- data=input_data, user=admin_user, expect=200)
+ post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}), data=input_data, user=admin_user, expect=200)
- resp = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
- data={'extra_vars': {'secret_value': launch_value}}, user=admin_user, expect=status)
+ resp = post(
+ reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), data={'extra_vars': {'secret_value': launch_value}}, user=admin_user, expect=status
+ )
if status == 201:
job = Job.objects.get(pk=resp.data['job'])
@@ -178,38 +138,31 @@ def test_survey_spec_passwords_with_default_optional(job_template_factory, post,
@pytest.mark.django_db
-@pytest.mark.parametrize('default, launch_value, expected_extra_vars, status', [
- ['', '$encrypted$', {'secret_value': ''}, 201],
- [None, '$encrypted$', {}, 400],
- [None, 'y', {'secret_value': 'y'}, 201],
-])
-def test_survey_spec_passwords_with_default_required(job_template_factory, post, admin_user,
- default, launch_value,
- expected_extra_vars, status):
- objects = job_template_factory('jt', organization='org1', project='prj',
- inventory='inv', credential='cred')
+@pytest.mark.parametrize(
+ 'default, launch_value, expected_extra_vars, status',
+ [
+ ['', '$encrypted$', {'secret_value': ''}, 201],
+ [None, '$encrypted$', {}, 400],
+ [None, 'y', {'secret_value': 'y'}, 201],
+ ],
+)
+def test_survey_spec_passwords_with_default_required(job_template_factory, post, admin_user, default, launch_value, expected_extra_vars, status):
+ objects = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred')
job_template = objects.job_template
job_template.survey_enabled = True
job_template.save()
input_data = {
'description': 'A survey',
- 'spec': [{
- 'index': 0,
- 'question_name': 'What is your password?',
- 'required': True,
- 'variable': 'secret_value',
- 'type': 'password',
- 'max': 3
- }],
- 'name': 'my survey'
+ 'spec': [{'index': 0, 'question_name': 'What is your password?', 'required': True, 'variable': 'secret_value', 'type': 'password', 'max': 3}],
+ 'name': 'my survey',
}
if default is not None:
input_data['spec'][0]['default'] = default
- post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}),
- data=input_data, user=admin_user, expect=200)
+ post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}), data=input_data, user=admin_user, expect=200)
- resp = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
- data={'extra_vars': {'secret_value': launch_value}}, user=admin_user, expect=status)
+ resp = post(
+ reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), data={'extra_vars': {'secret_value': launch_value}}, user=admin_user, expect=status
+ )
if status == 201:
job = Job.objects.get(pk=resp.data['job'])
@@ -223,48 +176,39 @@ def test_survey_spec_passwords_with_default_required(job_template_factory, post,
def test_survey_spec_default_not_allowed(job_template, post, admin_user):
survey_input_data = {
'description': 'A survey',
- 'spec': [{
- 'question_name': 'You must choose wisely',
- 'variable': 'your_choice',
- 'default': 'blue',
- 'required': False,
- 'type': 'multiplechoice',
- "choices": ["red", "green", "purple"]
- }],
- 'name': 'my survey'
+ 'spec': [
+ {
+ 'question_name': 'You must choose wisely',
+ 'variable': 'your_choice',
+ 'default': 'blue',
+ 'required': False,
+ 'type': 'multiplechoice',
+ "choices": ["red", "green", "purple"],
+ }
+ ],
+ 'name': 'my survey',
}
- r = post(
- url=reverse(
- 'api:job_template_survey_spec',
- kwargs={'pk': job_template.id}
- ),
- data=survey_input_data, user=admin_user, expect=400
- )
+ r = post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}), data=survey_input_data, user=admin_user, expect=400)
assert r.data['error'] == 'Default choice must be answered from the choices listed.'
@pytest.mark.django_db
-@pytest.mark.parametrize('default, status', [
- ('SUPERSECRET', 200),
- ({'some-invalid': 'dict'}, 400),
-])
+@pytest.mark.parametrize(
+ 'default, status',
+ [
+ ('SUPERSECRET', 200),
+ ({'some-invalid': 'dict'}, 400),
+ ],
+)
def test_survey_spec_default_passwords_are_encrypted(job_template, post, admin_user, default, status):
job_template.survey_enabled = True
job_template.save()
input_data = {
'description': 'A survey',
- 'spec': [{
- 'index': 0,
- 'question_name': 'What is your password?',
- 'required': True,
- 'variable': 'secret_value',
- 'default': default,
- 'type': 'password'
- }],
- 'name': 'my survey'
+ 'spec': [{'index': 0, 'question_name': 'What is your password?', 'required': True, 'variable': 'secret_value', 'default': default, 'type': 'password'}],
+ 'name': 'my survey',
}
- resp = post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}),
- data=input_data, user=admin_user, expect=status)
+ resp = post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}), data=input_data, user=admin_user, expect=status)
if status == 200:
updated_jt = JobTemplate.objects.get(pk=job_template.pk)
@@ -272,9 +216,7 @@ def test_survey_spec_default_passwords_are_encrypted(job_template, post, admin_u
job = updated_jt.create_unified_job()
assert json.loads(job.extra_vars)['secret_value'].startswith('$encrypted$')
- assert json.loads(job.decrypted_extra_vars()) == {
- 'secret_value': default
- }
+ assert json.loads(job.decrypted_extra_vars()) == {'secret_value': default}
else:
assert "expected to be string." in str(resp.data)
@@ -283,24 +225,17 @@ def test_survey_spec_default_passwords_are_encrypted(job_template, post, admin_u
def test_survey_spec_default_passwords_encrypted_on_update(job_template, post, put, admin_user):
input_data = {
'description': 'A survey',
- 'spec': [{
- 'index': 0,
- 'question_name': 'What is your password?',
- 'required': True,
- 'variable': 'secret_value',
- 'default': 'SUPERSECRET',
- 'type': 'password'
- }],
- 'name': 'my survey'
+ 'spec': [
+ {'index': 0, 'question_name': 'What is your password?', 'required': True, 'variable': 'secret_value', 'default': 'SUPERSECRET', 'type': 'password'}
+ ],
+ 'name': 'my survey',
}
- post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}),
- data=input_data, user=admin_user, expect=200)
+ post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}), data=input_data, user=admin_user, expect=200)
updated_jt = JobTemplate.objects.get(pk=job_template.pk)
# simulate a survey field edit where we're not changing the default value
input_data['spec'][0]['default'] = '$encrypted$'
- post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}),
- data=input_data, user=admin_user, expect=200)
+ post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}), data=input_data, user=admin_user, expect=200)
assert updated_jt.survey_spec == JobTemplate.objects.get(pk=job_template.pk).survey_spec
@@ -317,26 +252,22 @@ def test_job_template_delete_access_with_survey(job_template_with_survey, admin_
@pytest.mark.survey
def test_delete_survey_spec(job_template_with_survey, delete, admin_user):
"""Functional delete test through the survey_spec view."""
- delete(reverse('api:job_template_survey_spec', kwargs={'pk': job_template_with_survey.pk}),
- admin_user, expect=200)
+ delete(reverse('api:job_template_survey_spec', kwargs={'pk': job_template_with_survey.pk}), admin_user, expect=200)
new_jt = JobTemplate.objects.get(pk=job_template_with_survey.pk)
assert new_jt.survey_spec == {}
-@mock.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job',
- lambda self, **kwargs: mock.MagicMock(spec=Job, id=968))
+@mock.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job', lambda self, **kwargs: mock.MagicMock(spec=Job, id=968))
@mock.patch('awx.api.serializers.JobSerializer.to_representation', lambda self, obj: {})
@pytest.mark.django_db
@pytest.mark.survey
def test_launch_survey_enabled_but_no_survey_spec(job_template_factory, post, admin_user):
"""False-ish values for survey_spec are interpreted as a survey with 0 questions."""
- objects = job_template_factory('jt', organization='org1', project='prj',
- inventory='inv', credential='cred')
+ objects = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred')
obj = objects.job_template
obj.survey_enabled = True
obj.save()
- response = post(reverse('api:job_template_launch', kwargs={'pk':obj.pk}),
- dict(extra_vars=dict(survey_var=7)), admin_user, expect=201)
+ response = post(reverse('api:job_template_launch', kwargs={'pk': obj.pk}), dict(extra_vars=dict(survey_var=7)), admin_user, expect=201)
assert 'survey_var' in response.data['ignored_fields']['extra_vars']
diff --git a/awx/main/tests/functional/api/test_unified_job_template.py b/awx/main/tests/functional/api/test_unified_job_template.py
index 3aa6c4024e..1a9adc3965 100644
--- a/awx/main/tests/functional/api/test_unified_job_template.py
+++ b/awx/main/tests/functional/api/test_unified_job_template.py
@@ -13,23 +13,12 @@ def test_aliased_forward_reverse_field_searches(instance, options, get, admin):
@pytest.mark.django_db
-@pytest.mark.parametrize('model', (
- 'Project',
- 'JobTemplate',
- 'WorkflowJobTemplate'
-))
+@pytest.mark.parametrize('model', ('Project', 'JobTemplate', 'WorkflowJobTemplate'))
class TestUnifiedOrganization:
-
def data_for_model(self, model, orm_style=False):
- data = {
- 'name': 'foo',
- 'organization': None
- }
+ data = {'name': 'foo', 'organization': None}
if model == 'JobTemplate':
- proj = models.Project.objects.create(
- name="test-proj",
- playbook_files=['helloworld.yml']
- )
+ proj = models.Project.objects.create(name="test-proj", playbook_files=['helloworld.yml'])
if orm_style:
data['project_id'] = proj.id
else:
@@ -42,12 +31,7 @@ class TestUnifiedOrganization:
cls = getattr(models, model)
data = self.data_for_model(model, orm_style=True)
obj = cls.objects.create(**data)
- patch(
- url=obj.get_absolute_url(),
- data={'name': 'foooooo'},
- user=admin_user,
- expect=200
- )
+ patch(url=obj.get_absolute_url(), data={'name': 'foooooo'}, user=admin_user, expect=200)
obj.refresh_from_db()
assert obj.name == 'foooooo'
@@ -61,12 +45,7 @@ class TestUnifiedOrganization:
if model == 'JobTemplate':
obj.project.admin_role.members.add(rando)
obj.admin_role.members.add(rando)
- patch(
- url=obj.get_absolute_url(),
- data={'name': 'foooooo'},
- user=rando,
- expect=200
- )
+ patch(url=obj.get_absolute_url(), data={'name': 'foooooo'}, user=rando, expect=200)
obj.refresh_from_db()
assert obj.name == 'foooooo'
@@ -75,11 +54,6 @@ class TestUnifiedOrganization:
data = self.data_for_model(model, orm_style=True)
data['organization'] = organization
obj = cls.objects.create(**data)
- patch(
- url=obj.get_absolute_url(),
- data={'name': 'foooooo'},
- user=admin_user,
- expect=200
- )
+ patch(url=obj.get_absolute_url(), data={'name': 'foooooo'}, user=admin_user, expect=200)
obj.refresh_from_db()
assert obj.name == 'foooooo'
diff --git a/awx/main/tests/functional/api/test_unified_jobs_stdout.py b/awx/main/tests/functional/api/test_unified_jobs_stdout.py
index e228f502a6..962ec9b4b5 100644
--- a/awx/main/tests/functional/api/test_unified_jobs_stdout.py
+++ b/awx/main/tests/functional/api/test_unified_jobs_stdout.py
@@ -10,10 +10,20 @@ from unittest import mock
import pytest
from awx.api.versioning import reverse
-from awx.main.models import (Job, JobEvent, AdHocCommand, AdHocCommandEvent,
- Project, ProjectUpdate, ProjectUpdateEvent,
- InventoryUpdate, InventorySource,
- InventoryUpdateEvent, SystemJob, SystemJobEvent)
+from awx.main.models import (
+ Job,
+ JobEvent,
+ AdHocCommand,
+ AdHocCommandEvent,
+ Project,
+ ProjectUpdate,
+ ProjectUpdateEvent,
+ InventoryUpdate,
+ InventorySource,
+ InventoryUpdateEvent,
+ SystemJob,
+ SystemJobEvent,
+)
def _mk_project_update():
@@ -30,12 +40,15 @@ def _mk_inventory_update():
@pytest.mark.django_db
-@pytest.mark.parametrize('Parent, Child, relation, view', [
- [Job, JobEvent, 'job', 'api:job_stdout'],
- [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
- [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
- [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
-])
+@pytest.mark.parametrize(
+ 'Parent, Child, relation, view',
+ [
+ [Job, JobEvent, 'job', 'api:job_stdout'],
+ [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
+ [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
+ [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
+ ],
+)
def test_text_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
job = Parent()
job.save()
@@ -48,23 +61,21 @@ def test_text_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, adm
@pytest.mark.django_db
-@pytest.mark.parametrize('Parent, Child, relation, view', [
- [Job, JobEvent, 'job', 'api:job_stdout'],
- [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
- [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
- [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
-])
+@pytest.mark.parametrize(
+ 'Parent, Child, relation, view',
+ [
+ [Job, JobEvent, 'job', 'api:job_stdout'],
+ [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
+ [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
+ [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
+ ],
+)
@pytest.mark.parametrize('download', [True, False])
-def test_ansi_stdout_filtering(sqlite_copy_expert, Parent, Child, relation,
- view, download, get, admin):
+def test_ansi_stdout_filtering(sqlite_copy_expert, Parent, Child, relation, view, download, get, admin):
job = Parent()
job.save()
for i in range(3):
- Child(**{
- relation: job,
- 'stdout': '\x1B[0;36mTesting {}\x1B[0m\n'.format(i),
- 'start_line': i
- }).save()
+ Child(**{relation: job, 'stdout': '\x1B[0;36mTesting {}\x1B[0m\n'.format(i), 'start_line': i}).save()
url = reverse(view, kwargs={'pk': job.pk})
# ansi codes in ?format=txt should get filtered
@@ -83,21 +94,20 @@ def test_ansi_stdout_filtering(sqlite_copy_expert, Parent, Child, relation,
@pytest.mark.django_db
-@pytest.mark.parametrize('Parent, Child, relation, view', [
- [Job, JobEvent, 'job', 'api:job_stdout'],
- [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
- [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
- [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
-])
+@pytest.mark.parametrize(
+ 'Parent, Child, relation, view',
+ [
+ [Job, JobEvent, 'job', 'api:job_stdout'],
+ [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
+ [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
+ [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
+ ],
+)
def test_colorized_html_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
job = Parent()
job.save()
for i in range(3):
- Child(**{
- relation: job,
- 'stdout': '\x1B[0;36mTesting {}\x1B[0m\n'.format(i),
- 'start_line': i
- }).save()
+ Child(**{relation: job, 'stdout': '\x1B[0;36mTesting {}\x1B[0m\n'.format(i), 'start_line': i}).save()
url = reverse(view, kwargs={'pk': job.pk}) + '?format=html'
response = get(url, user=admin, expect=200)
@@ -107,12 +117,15 @@ def test_colorized_html_stdout(sqlite_copy_expert, Parent, Child, relation, view
@pytest.mark.django_db
-@pytest.mark.parametrize('Parent, Child, relation, view', [
- [Job, JobEvent, 'job', 'api:job_stdout'],
- [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
- [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
- [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
-])
+@pytest.mark.parametrize(
+ 'Parent, Child, relation, view',
+ [
+ [Job, JobEvent, 'job', 'api:job_stdout'],
+ [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
+ [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
+ [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
+ ],
+)
def test_stdout_line_range(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
job = Parent()
job.save()
@@ -146,20 +159,20 @@ def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin):
response = get(url, user=admin, expect=200)
assert response.data['result_stdout'] == (
'Standard Output too large to display ({actual} bytes), only download '
- 'supported for sizes over {max} bytes.'.format(
- actual=total_bytes,
- max=settings.STDOUT_MAX_BYTES_DISPLAY
- )
+ 'supported for sizes over {max} bytes.'.format(actual=total_bytes, max=settings.STDOUT_MAX_BYTES_DISPLAY)
)
@pytest.mark.django_db
-@pytest.mark.parametrize('Parent, Child, relation, view', [
- [Job, JobEvent, 'job', 'api:job_stdout'],
- [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
- [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
- [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
-])
+@pytest.mark.parametrize(
+ 'Parent, Child, relation, view',
+ [
+ [Job, JobEvent, 'job', 'api:job_stdout'],
+ [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
+ [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
+ [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
+ ],
+)
@pytest.mark.parametrize('fmt', ['txt', 'ansi'])
@mock.patch('awx.main.redact.UriCleaner.SENSITIVE_URI_PATTERN', mock.Mock(**{'search.return_value': None})) # really slow for large strings
def test_max_bytes_display(sqlite_copy_expert, Parent, Child, relation, view, fmt, get, admin):
@@ -173,10 +186,7 @@ def test_max_bytes_display(sqlite_copy_expert, Parent, Child, relation, view, fm
response = get(url + '?format={}'.format(fmt), user=admin, expect=200)
assert smart_str(response.content) == (
'Standard Output too large to display ({actual} bytes), only download '
- 'supported for sizes over {max} bytes.'.format(
- actual=total_bytes,
- max=settings.STDOUT_MAX_BYTES_DISPLAY
- )
+ 'supported for sizes over {max} bytes.'.format(actual=total_bytes, max=settings.STDOUT_MAX_BYTES_DISPLAY)
)
response = get(url + '?format={}_download'.format(fmt), user=admin, expect=200)
@@ -184,10 +194,7 @@ def test_max_bytes_display(sqlite_copy_expert, Parent, Child, relation, view, fm
@pytest.mark.django_db
-@pytest.mark.parametrize('Cls, view', [
- [_mk_project_update, 'api:project_update_stdout'],
- [_mk_inventory_update, 'api:inventory_update_stdout']
-])
+@pytest.mark.parametrize('Cls, view', [[_mk_project_update, 'api:project_update_stdout'], [_mk_inventory_update, 'api:inventory_update_stdout']])
@pytest.mark.parametrize('fmt', ['txt', 'ansi', 'txt_download', 'ansi_download'])
def test_legacy_result_stdout_text_fallback(Cls, view, fmt, get, admin):
# older versions of stored raw stdout in a raw text blob at
@@ -204,10 +211,7 @@ def test_legacy_result_stdout_text_fallback(Cls, view, fmt, get, admin):
@pytest.mark.django_db
-@pytest.mark.parametrize('Cls, view', [
- [_mk_project_update, 'api:project_update_stdout'],
- [_mk_inventory_update, 'api:inventory_update_stdout']
-])
+@pytest.mark.parametrize('Cls, view', [[_mk_project_update, 'api:project_update_stdout'], [_mk_inventory_update, 'api:inventory_update_stdout']])
@pytest.mark.parametrize('fmt', ['txt', 'ansi'])
@mock.patch('awx.main.redact.UriCleaner.SENSITIVE_URI_PATTERN', mock.Mock(**{'search.return_value': None})) # really slow for large strings
def test_legacy_result_stdout_with_max_bytes(Cls, view, fmt, get, admin):
@@ -222,10 +226,7 @@ def test_legacy_result_stdout_with_max_bytes(Cls, view, fmt, get, admin):
response = get(url + '?format={}'.format(fmt), user=admin, expect=200)
assert smart_str(response.content) == (
'Standard Output too large to display ({actual} bytes), only download '
- 'supported for sizes over {max} bytes.'.format(
- actual=total_bytes,
- max=settings.STDOUT_MAX_BYTES_DISPLAY
- )
+ 'supported for sizes over {max} bytes.'.format(actual=total_bytes, max=settings.STDOUT_MAX_BYTES_DISPLAY)
)
response = get(url + '?format={}'.format(fmt + '_download'), user=admin, expect=200)
@@ -233,15 +234,17 @@ def test_legacy_result_stdout_with_max_bytes(Cls, view, fmt, get, admin):
@pytest.mark.django_db
-@pytest.mark.parametrize('Parent, Child, relation, view', [
- [Job, JobEvent, 'job', 'api:job_stdout'],
- [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
- [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
- [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
-])
+@pytest.mark.parametrize(
+ 'Parent, Child, relation, view',
+ [
+ [Job, JobEvent, 'job', 'api:job_stdout'],
+ [AdHocCommand, AdHocCommandEvent, 'ad_hoc_command', 'api:ad_hoc_command_stdout'],
+ [_mk_project_update, ProjectUpdateEvent, 'project_update', 'api:project_update_stdout'],
+ [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
+ ],
+)
@pytest.mark.parametrize('fmt', ['txt', 'ansi', 'txt_download', 'ansi_download'])
-def test_text_with_unicode_stdout(sqlite_copy_expert, Parent, Child, relation,
- view, get, admin, fmt):
+def test_text_with_unicode_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin, fmt):
job = Parent()
job.save()
for i in range(3):
@@ -258,10 +261,7 @@ def test_unicode_with_base64_ansi(sqlite_copy_expert, get, admin):
job.save()
for i in range(3):
JobEvent(job=job, stdout='オ{}\n'.format(i), start_line=i).save()
- url = reverse(
- 'api:job_stdout',
- kwargs={'pk': job.pk}
- ) + '?format=json&content_encoding=base64'
+ url = reverse('api:job_stdout', kwargs={'pk': job.pk}) + '?format=json&content_encoding=base64'
response = get(url, user=admin, expect=200)
content = base64.b64decode(json.loads(smart_str(response.content))['content'])
diff --git a/awx/main/tests/functional/api/test_unified_jobs_view.py b/awx/main/tests/functional/api/test_unified_jobs_view.py
index 554a0cfc63..a8c7b53461 100644
--- a/awx/main/tests/functional/api/test_unified_jobs_view.py
+++ b/awx/main/tests/functional/api/test_unified_jobs_view.py
@@ -14,21 +14,17 @@ TEST_STATES.remove('new')
TEST_STDOUTS = []
uri = URI(scheme="https", username="Dhh3U47nmC26xk9PKscV", password="PXPfWW8YzYrgS@E5NbQ2H@", host="github.ginger.com/theirrepo.git/info/refs")
-TEST_STDOUTS.append({
- 'description': 'uri in a plain text document',
- 'uri' : uri,
- 'text' : 'hello world %s goodbye world' % uri,
- 'occurrences' : 1
-})
+TEST_STDOUTS.append({'description': 'uri in a plain text document', 'uri': uri, 'text': 'hello world %s goodbye world' % uri, 'occurrences': 1})
uri = URI(scheme="https", username="applepie@@@", password="thatyouknow@@@@", host="github.ginger.com/theirrepo.git/info/refs")
-TEST_STDOUTS.append({
- 'description': 'uri appears twice in a multiline plain text document',
- 'uri' : uri,
- 'text' : 'hello world %s \n\nyoyo\n\nhello\n%s' % (uri, uri),
- 'occurrences' : 2
-})
-
+TEST_STDOUTS.append(
+ {
+ 'description': 'uri appears twice in a multiline plain text document',
+ 'uri': uri,
+ 'text': 'hello world %s \n\nyoyo\n\nhello\n%s' % (uri, uri),
+ 'occurrences': 2,
+ }
+)
@pytest.fixture
@@ -123,11 +119,7 @@ def test_delete_project_update_in_active_state(project, delete, admin, status):
@pytest.mark.parametrize("status", list(TEST_STATES))
@pytest.mark.django_db
def test_delete_inventory_update_in_active_state(inventory_source, delete, admin, status):
- i = InventoryUpdate.objects.create(
- inventory_source=inventory_source,
- status=status,
- source=inventory_source.source
- )
+ i = InventoryUpdate.objects.create(inventory_source=inventory_source, status=status, source=inventory_source.source)
url = reverse('api:inventory_update_detail', kwargs={'pk': i.pk})
delete(url, None, admin, expect=403)
diff --git a/awx/main/tests/functional/api/test_user.py b/awx/main/tests/functional/api/test_user.py
index 821b37d6ae..a201d4e1cc 100644
--- a/awx/main/tests/functional/api/test_user.py
+++ b/awx/main/tests/functional/api/test_user.py
@@ -12,14 +12,7 @@ from awx.api.versioning import reverse
# user creation
#
-EXAMPLE_USER_DATA = {
- "username": "affable",
- "first_name": "a",
- "last_name": "a",
- "email": "a@a.com",
- "is_superuser": False,
- "password": "r$TyKiOCb#ED"
-}
+EXAMPLE_USER_DATA = {"username": "affable", "first_name": "a", "last_name": "a", "email": "a@a.com", "is_superuser": False, "password": "r$TyKiOCb#ED"}
@pytest.mark.django_db
@@ -44,8 +37,7 @@ def test_create_delete_create_user(post, delete, admin):
response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware())
assert response.status_code == 201
- response = delete(reverse('api:user_detail', kwargs={'pk': response.data['id']}), admin,
- middleware=SessionMiddleware())
+ response = delete(reverse('api:user_detail', kwargs={'pk': response.data['id']}), admin, middleware=SessionMiddleware())
assert response.status_code == 204
response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware())
@@ -56,28 +48,17 @@ def test_create_delete_create_user(post, delete, admin):
@pytest.mark.django_db
def test_user_cannot_update_last_login(patch, admin):
assert admin.last_login is None
- patch(
- reverse('api:user_detail', kwargs={'pk': admin.pk}),
- {'last_login': '2020-03-13T16:39:47.303016Z'},
- admin,
- middleware=SessionMiddleware()
- )
+ patch(reverse('api:user_detail', kwargs={'pk': admin.pk}), {'last_login': '2020-03-13T16:39:47.303016Z'}, admin, middleware=SessionMiddleware())
assert User.objects.get(pk=admin.pk).last_login is None
@pytest.mark.django_db
def test_user_verify_attribute_created(admin, get):
assert admin.created == admin.date_joined
- resp = get(
- reverse('api:user_detail', kwargs={'pk': admin.pk}),
- admin
- )
+ resp = get(reverse('api:user_detail', kwargs={'pk': admin.pk}), admin)
assert resp.data['created'] == admin.date_joined
past = date(2020, 1, 1).isoformat()
for op, count in (('gt', 1), ('lt', 0)):
- resp = get(
- reverse('api:user_list') + f'?created__{op}={past}',
- admin
- )
+ resp = get(reverse('api:user_list') + f'?created__{op}={past}', admin)
assert resp.data['count'] == count
diff --git a/awx/main/tests/functional/api/test_webhooks.py b/awx/main/tests/functional/api/test_webhooks.py
index 971ea22b4f..e47f4fe15b 100644
--- a/awx/main/tests/functional/api/test_webhooks.py
+++ b/awx/main/tests/functional/api/test_webhooks.py
@@ -7,18 +7,18 @@ from awx.main.models.credential import Credential, CredentialType
@pytest.mark.django_db
@pytest.mark.parametrize(
- "user_role, expect", [
+ "user_role, expect",
+ [
('superuser', 200),
('org admin', 200),
('jt admin', 200),
('jt execute', 403),
('org member', 403),
- ]
+ ],
)
def test_get_webhook_key_jt(organization_factory, job_template_factory, get, user_role, expect):
objs = organization_factory("org", superusers=['admin'], users=['user'])
- jt = job_template_factory("jt", organization=objs.organization,
- inventory='test_inv', project='test_proj').job_template
+ jt = job_template_factory("jt", organization=objs.organization, inventory='test_inv', project='test_proj').job_template
if user_role == 'superuser':
user = objs.superusers.admin
else:
@@ -34,13 +34,14 @@ def test_get_webhook_key_jt(organization_factory, job_template_factory, get, use
@pytest.mark.django_db
@pytest.mark.parametrize(
- "user_role, expect", [
+ "user_role, expect",
+ [
('superuser', 200),
('org admin', 200),
('jt admin', 200),
('jt execute', 403),
('org member', 403),
- ]
+ ],
)
def test_get_webhook_key_wfjt(organization_factory, workflow_job_template_factory, get, user_role, expect):
objs = organization_factory("org", superusers=['admin'], users=['user'])
@@ -60,18 +61,18 @@ def test_get_webhook_key_wfjt(organization_factory, workflow_job_template_factor
@pytest.mark.django_db
@pytest.mark.parametrize(
- "user_role, expect", [
+ "user_role, expect",
+ [
('superuser', 201),
('org admin', 201),
('jt admin', 201),
('jt execute', 403),
('org member', 403),
- ]
+ ],
)
def test_post_webhook_key_jt(organization_factory, job_template_factory, post, user_role, expect):
objs = organization_factory("org", superusers=['admin'], users=['user'])
- jt = job_template_factory("jt", organization=objs.organization,
- inventory='test_inv', project='test_proj').job_template
+ jt = job_template_factory("jt", organization=objs.organization, inventory='test_inv', project='test_proj').job_template
if user_role == 'superuser':
user = objs.superusers.admin
else:
@@ -87,13 +88,14 @@ def test_post_webhook_key_jt(organization_factory, job_template_factory, post, u
@pytest.mark.django_db
@pytest.mark.parametrize(
- "user_role, expect", [
+ "user_role, expect",
+ [
('superuser', 201),
('org admin', 201),
('jt admin', 201),
('jt execute', 403),
('org member', 403),
- ]
+ ],
)
def test_post_webhook_key_wfjt(organization_factory, workflow_job_template_factory, post, user_role, expect):
objs = organization_factory("org", superusers=['admin'], users=['user'])
@@ -112,13 +114,10 @@ def test_post_webhook_key_wfjt(organization_factory, workflow_job_template_facto
@pytest.mark.django_db
-@pytest.mark.parametrize(
- "service", [s for s, _ in WebhookTemplateMixin.SERVICES]
-)
+@pytest.mark.parametrize("service", [s for s, _ in WebhookTemplateMixin.SERVICES])
def test_set_webhook_service(organization_factory, job_template_factory, patch, service):
objs = organization_factory("org", superusers=['admin'])
- jt = job_template_factory("jt", organization=objs.organization,
- inventory='test_inv', project='test_proj').job_template
+ jt = job_template_factory("jt", organization=objs.organization, inventory='test_inv', project='test_proj').job_template
admin = objs.superusers.admin
assert (jt.webhook_service, jt.webhook_key) == ('', '')
@@ -131,13 +130,10 @@ def test_set_webhook_service(organization_factory, job_template_factory, patch,
@pytest.mark.django_db
-@pytest.mark.parametrize(
- "service", [s for s, _ in WebhookTemplateMixin.SERVICES]
-)
+@pytest.mark.parametrize("service", [s for s, _ in WebhookTemplateMixin.SERVICES])
def test_unset_webhook_service(organization_factory, job_template_factory, patch, service):
objs = organization_factory("org", superusers=['admin'])
- jt = job_template_factory("jt", organization=objs.organization, webhook_service=service,
- inventory='test_inv', project='test_proj').job_template
+ jt = job_template_factory("jt", organization=objs.organization, webhook_service=service, inventory='test_inv', project='test_proj').job_template
admin = objs.superusers.admin
assert jt.webhook_service == service
assert jt.webhook_key != ''
@@ -150,21 +146,17 @@ def test_unset_webhook_service(organization_factory, job_template_factory, patch
@pytest.mark.django_db
-@pytest.mark.parametrize(
- "service", [s for s, _ in WebhookTemplateMixin.SERVICES]
-)
+@pytest.mark.parametrize("service", [s for s, _ in WebhookTemplateMixin.SERVICES])
def test_set_webhook_credential(organization_factory, job_template_factory, patch, service):
objs = organization_factory("org", superusers=['admin'])
- jt = job_template_factory("jt", organization=objs.organization, webhook_service=service,
- inventory='test_inv', project='test_proj').job_template
+ jt = job_template_factory("jt", organization=objs.organization, webhook_service=service, inventory='test_inv', project='test_proj').job_template
admin = objs.superusers.admin
assert jt.webhook_service == service
assert jt.webhook_key != ''
cred_type = CredentialType.defaults['{}_token'.format(service)]()
cred_type.save()
- cred = Credential.objects.create(credential_type=cred_type, name='test-cred',
- inputs={'token': 'secret'})
+ cred = Credential.objects.create(credential_type=cred_type, name='test-cred', inputs={'token': 'secret'})
url = reverse('api:job_template_detail', kwargs={'pk': jt.pk})
patch(url, {'webhook_credential': cred.pk}, user=admin, expect=200)
@@ -176,23 +168,17 @@ def test_set_webhook_credential(organization_factory, job_template_factory, patc
@pytest.mark.django_db
-@pytest.mark.parametrize(
- "service,token", [
- (s, WebhookTemplateMixin.SERVICES[i - 1][0]) for i, (s, _) in enumerate(WebhookTemplateMixin.SERVICES)
- ]
-)
+@pytest.mark.parametrize("service,token", [(s, WebhookTemplateMixin.SERVICES[i - 1][0]) for i, (s, _) in enumerate(WebhookTemplateMixin.SERVICES)])
def test_set_wrong_service_webhook_credential(organization_factory, job_template_factory, patch, service, token):
objs = organization_factory("org", superusers=['admin'])
- jt = job_template_factory("jt", organization=objs.organization, webhook_service=service,
- inventory='test_inv', project='test_proj').job_template
+ jt = job_template_factory("jt", organization=objs.organization, webhook_service=service, inventory='test_inv', project='test_proj').job_template
admin = objs.superusers.admin
assert jt.webhook_service == service
assert jt.webhook_key != ''
cred_type = CredentialType.defaults['{}_token'.format(token)]()
cred_type.save()
- cred = Credential.objects.create(credential_type=cred_type, name='test-cred',
- inputs={'token': 'secret'})
+ cred = Credential.objects.create(credential_type=cred_type, name='test-cred', inputs={'token': 'secret'})
url = reverse('api:job_template_detail', kwargs={'pk': jt.pk})
response = patch(url, {'webhook_credential': cred.pk}, user=admin, expect=400)
@@ -205,21 +191,17 @@ def test_set_wrong_service_webhook_credential(organization_factory, job_template
@pytest.mark.django_db
-@pytest.mark.parametrize(
- "service", [s for s, _ in WebhookTemplateMixin.SERVICES]
-)
+@pytest.mark.parametrize("service", [s for s, _ in WebhookTemplateMixin.SERVICES])
def test_set_webhook_credential_without_service(organization_factory, job_template_factory, patch, service):
objs = organization_factory("org", superusers=['admin'])
- jt = job_template_factory("jt", organization=objs.organization,
- inventory='test_inv', project='test_proj').job_template
+ jt = job_template_factory("jt", organization=objs.organization, inventory='test_inv', project='test_proj').job_template
admin = objs.superusers.admin
assert jt.webhook_service == ''
assert jt.webhook_key == ''
cred_type = CredentialType.defaults['{}_token'.format(service)]()
cred_type.save()
- cred = Credential.objects.create(credential_type=cred_type, name='test-cred',
- inputs={'token': 'secret'})
+ cred = Credential.objects.create(credential_type=cred_type, name='test-cred', inputs={'token': 'secret'})
url = reverse('api:job_template_detail', kwargs={'pk': jt.pk})
response = patch(url, {'webhook_credential': cred.pk}, user=admin, expect=400)
@@ -232,21 +214,17 @@ def test_set_webhook_credential_without_service(organization_factory, job_templa
@pytest.mark.django_db
-@pytest.mark.parametrize(
- "service", [s for s, _ in WebhookTemplateMixin.SERVICES]
-)
+@pytest.mark.parametrize("service", [s for s, _ in WebhookTemplateMixin.SERVICES])
def test_unset_webhook_service_with_credential(organization_factory, job_template_factory, patch, service):
objs = organization_factory("org", superusers=['admin'])
- jt = job_template_factory("jt", organization=objs.organization, webhook_service=service,
- inventory='test_inv', project='test_proj').job_template
+ jt = job_template_factory("jt", organization=objs.organization, webhook_service=service, inventory='test_inv', project='test_proj').job_template
admin = objs.superusers.admin
assert jt.webhook_service == service
assert jt.webhook_key != ''
cred_type = CredentialType.defaults['{}_token'.format(service)]()
cred_type.save()
- cred = Credential.objects.create(credential_type=cred_type, name='test-cred',
- inputs={'token': 'secret'})
+ cred = Credential.objects.create(credential_type=cred_type, name='test-cred', inputs={'token': 'secret'})
jt.webhook_credential = cred
jt.save()
diff --git a/awx/main/tests/functional/api/test_workflow_node.py b/awx/main/tests/functional/api/test_workflow_node.py
index 6253548d60..ecbe284b6a 100644
--- a/awx/main/tests/functional/api/test_workflow_node.py
+++ b/awx/main/tests/functional/api/test_workflow_node.py
@@ -19,128 +19,98 @@ from awx.main.scheduler import TaskManager
@pytest.fixture
def job_template(inventory, project):
# need related resources set for these tests
- return JobTemplate.objects.create(
- name='test-job_template',
- inventory=inventory,
- project=project
- )
+ return JobTemplate.objects.create(name='test-job_template', inventory=inventory, project=project)
@pytest.fixture
def node(workflow_job_template, admin_user, job_template):
- return WorkflowJobTemplateNode.objects.create(
- workflow_job_template=workflow_job_template,
- unified_job_template=job_template
- )
+ return WorkflowJobTemplateNode.objects.create(workflow_job_template=workflow_job_template, unified_job_template=job_template)
@pytest.fixture
def approval_node(workflow_job_template, admin_user):
- return WorkflowJobTemplateNode.objects.create(
- workflow_job_template=workflow_job_template
- )
+ return WorkflowJobTemplateNode.objects.create(workflow_job_template=workflow_job_template)
@pytest.mark.django_db
def test_node_rejects_unprompted_fields(inventory, project, workflow_job_template, post, admin_user):
- job_template = JobTemplate.objects.create(
- inventory = inventory,
- project = project,
- playbook = 'helloworld.yml',
- ask_limit_on_launch = False
- )
- url = reverse('api:workflow_job_template_workflow_nodes_list',
- kwargs={'pk': workflow_job_template.pk})
- r = post(url, {'unified_job_template': job_template.pk, 'limit': 'webservers'},
- user=admin_user, expect=400)
+ job_template = JobTemplate.objects.create(inventory=inventory, project=project, playbook='helloworld.yml', ask_limit_on_launch=False)
+ url = reverse('api:workflow_job_template_workflow_nodes_list', kwargs={'pk': workflow_job_template.pk})
+ r = post(url, {'unified_job_template': job_template.pk, 'limit': 'webservers'}, user=admin_user, expect=400)
assert 'limit' in r.data
assert 'not configured to prompt on launch' in r.data['limit'][0]
@pytest.mark.django_db
def test_node_accepts_prompted_fields(inventory, project, workflow_job_template, post, admin_user):
- job_template = JobTemplate.objects.create(
- inventory = inventory,
- project = project,
- playbook = 'helloworld.yml',
- ask_limit_on_launch = True
- )
- url = reverse('api:workflow_job_template_workflow_nodes_list',
- kwargs={'pk': workflow_job_template.pk})
- post(url, {'unified_job_template': job_template.pk, 'limit': 'webservers'},
- user=admin_user, expect=201)
+ job_template = JobTemplate.objects.create(inventory=inventory, project=project, playbook='helloworld.yml', ask_limit_on_launch=True)
+ url = reverse('api:workflow_job_template_workflow_nodes_list', kwargs={'pk': workflow_job_template.pk})
+ post(url, {'unified_job_template': job_template.pk, 'limit': 'webservers'}, user=admin_user, expect=201)
@pytest.mark.django_db
-@pytest.mark.parametrize("field_name, field_value", [
- ('all_parents_must_converge', True),
- ('all_parents_must_converge', False),
-])
+@pytest.mark.parametrize(
+ "field_name, field_value",
+ [
+ ('all_parents_must_converge', True),
+ ('all_parents_must_converge', False),
+ ],
+)
def test_create_node_with_field(field_name, field_value, workflow_job_template, post, admin_user):
- url = reverse('api:workflow_job_template_workflow_nodes_list',
- kwargs={'pk': workflow_job_template.pk})
+ url = reverse('api:workflow_job_template_workflow_nodes_list', kwargs={'pk': workflow_job_template.pk})
res = post(url, {field_name: field_value}, user=admin_user, expect=201)
assert res.data[field_name] == field_value
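
The parametrize hunks above show both directions of the same rule: calls without a trailing comma inside the brackets are collapsed onto one line when they fit (as with the SERVICES list earlier), while argument lists that end in a trailing comma are kept exploded, one element per line. A minimal pure-Python illustration of black's "magic trailing comma", outside the AWX tree:

# No trailing comma and it fits within the line limit: black joins it.
flat = [('all_parents_must_converge', True), ('all_parents_must_converge', False)]

# Trailing comma after the last element: black keeps one element per line.
exploded = [
    ('all_parents_must_converge', True),
    ('all_parents_must_converge', False),
]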
@pytest.mark.django_db
-class TestApprovalNodes():
+class TestApprovalNodes:
def test_approval_node_creation(self, post, approval_node, admin_user):
- url = reverse('api:workflow_job_template_node_create_approval',
- kwargs={'pk': approval_node.pk, 'version': 'v2'})
- post(url, {'name': 'Test', 'description': 'Approval Node', 'timeout': 0},
- user=admin_user, expect=201)
+ url = reverse('api:workflow_job_template_node_create_approval', kwargs={'pk': approval_node.pk, 'version': 'v2'})
+ post(url, {'name': 'Test', 'description': 'Approval Node', 'timeout': 0}, user=admin_user, expect=201)
approval_node = WorkflowJobTemplateNode.objects.get(pk=approval_node.pk)
assert isinstance(approval_node.unified_job_template, WorkflowApprovalTemplate)
- assert approval_node.unified_job_template.name=='Test'
- assert approval_node.unified_job_template.description=='Approval Node'
- assert approval_node.unified_job_template.timeout==0
+ assert approval_node.unified_job_template.name == 'Test'
+ assert approval_node.unified_job_template.description == 'Approval Node'
+ assert approval_node.unified_job_template.timeout == 0
def test_approval_node_creation_failure(self, post, approval_node, admin_user):
# This test leaves off a required param to assert that the user will get a 400.
# This test leaves off a required param to assert that the user will get a 400.
- url = reverse('api:workflow_job_template_node_create_approval',
- kwargs={'pk': approval_node.pk, 'version': 'v2'})
- r = post(url, {'name': '', 'description': 'Approval Node', 'timeout': 0},
- user=admin_user, expect=400)
+ url = reverse('api:workflow_job_template_node_create_approval', kwargs={'pk': approval_node.pk, 'version': 'v2'})
+ r = post(url, {'name': '', 'description': 'Approval Node', 'timeout': 0}, user=admin_user, expect=400)
approval_node = WorkflowJobTemplateNode.objects.get(pk=approval_node.pk)
assert isinstance(approval_node.unified_job_template, WorkflowApprovalTemplate) is False
assert {'name': ['This field may not be blank.']} == json.loads(r.content)
- @pytest.mark.parametrize("is_admin, is_org_admin, status", [
- [True, False, 201], # if they're a WFJT admin, they get a 201
- [False, False, 403], # if they're not a WFJT *nor* org admin, they get a 403
- [False, True, 201], # if they're an organization admin, they get a 201
- ])
+ @pytest.mark.parametrize(
+ "is_admin, is_org_admin, status",
+ [
+ [True, False, 201], # if they're a WFJT admin, they get a 201
+ [False, False, 403], # if they're not a WFJT *nor* org admin, they get a 403
+ [False, True, 201], # if they're an organization admin, they get a 201
+ ],
+ )
def test_approval_node_creation_rbac(self, post, approval_node, alice, is_admin, is_org_admin, status):
- url = reverse('api:workflow_job_template_node_create_approval',
- kwargs={'pk': approval_node.pk, 'version': 'v2'})
+ url = reverse('api:workflow_job_template_node_create_approval', kwargs={'pk': approval_node.pk, 'version': 'v2'})
if is_admin is True:
approval_node.workflow_job_template.admin_role.members.add(alice)
if is_org_admin is True:
approval_node.workflow_job_template.organization.admin_role.members.add(alice)
- post(url, {'name': 'Test', 'description': 'Approval Node', 'timeout': 0},
- user=alice, expect=status)
+ post(url, {'name': 'Test', 'description': 'Approval Node', 'timeout': 0}, user=alice, expect=status)
@pytest.mark.django_db
def test_approval_node_exists(self, post, admin_user, get):
workflow_job_template = WorkflowJobTemplate.objects.create()
- approval_node = WorkflowJobTemplateNode.objects.create(
- workflow_job_template=workflow_job_template
- )
- url = reverse('api:workflow_job_template_node_create_approval',
- kwargs={'pk': approval_node.pk, 'version': 'v2'})
- post(url, {'name': 'URL Test', 'description': 'An approval', 'timeout': 0},
- user=admin_user)
+ approval_node = WorkflowJobTemplateNode.objects.create(workflow_job_template=workflow_job_template)
+ url = reverse('api:workflow_job_template_node_create_approval', kwargs={'pk': approval_node.pk, 'version': 'v2'})
+ post(url, {'name': 'URL Test', 'description': 'An approval', 'timeout': 0}, user=admin_user)
get(url, admin_user, expect=200)
@pytest.mark.django_db
def test_activity_stream_create_wf_approval(self, post, admin_user, workflow_job_template):
wfjn = WorkflowJobTemplateNode.objects.create(workflow_job_template=workflow_job_template)
- url = reverse('api:workflow_job_template_node_create_approval',
- kwargs={'pk': wfjn.pk, 'version': 'v2'})
- post(url, {'name': 'Activity Stream Test', 'description': 'Approval Node', 'timeout': 0},
- user=admin_user)
+ url = reverse('api:workflow_job_template_node_create_approval', kwargs={'pk': wfjn.pk, 'version': 'v2'})
+ post(url, {'name': 'Activity Stream Test', 'description': 'Approval Node', 'timeout': 0}, user=admin_user)
qs1 = ActivityStream.objects.filter(organization__isnull=False)
assert qs1.count() == 1
@@ -148,12 +118,13 @@ class TestApprovalNodes():
qs2 = ActivityStream.objects.filter(organization__isnull=True)
assert qs2.count() == 5
- assert list(qs2.values_list('operation', 'object1')) == [('create', 'user'),
- ('create', 'workflow_job_template'),
- ('create', 'workflow_job_template_node'),
- ('create', 'workflow_approval_template'),
- ('update', 'workflow_job_template_node'),
- ]
+ assert list(qs2.values_list('operation', 'object1')) == [
+ ('create', 'user'),
+ ('create', 'workflow_job_template'),
+ ('create', 'workflow_job_template_node'),
+ ('create', 'workflow_approval_template'),
+ ('update', 'workflow_job_template_node'),
+ ]
@pytest.mark.django_db
def test_approval_node_approve(self, post, admin_user, job_template):
@@ -162,28 +133,23 @@ class TestApprovalNodes():
# that have already been dealt with will throw an error.
wfjt = WorkflowJobTemplate.objects.create(name='foobar')
node = wfjt.workflow_nodes.create(unified_job_template=job_template)
- url = reverse('api:workflow_job_template_node_create_approval',
- kwargs={'pk': node.pk, 'version': 'v2'})
- post(url, {'name': 'Approve Test', 'description': '', 'timeout': 0},
- user=admin_user, expect=201)
- post(reverse('api:workflow_job_template_launch', kwargs={'pk': wfjt.pk}),
- user=admin_user, expect=201)
+ url = reverse('api:workflow_job_template_node_create_approval', kwargs={'pk': node.pk, 'version': 'v2'})
+ post(url, {'name': 'Approve Test', 'description': '', 'timeout': 0}, user=admin_user, expect=201)
+ post(reverse('api:workflow_job_template_launch', kwargs={'pk': wfjt.pk}), user=admin_user, expect=201)
wf_job = WorkflowJob.objects.first()
TaskManager().schedule()
TaskManager().schedule()
wfj_node = wf_job.workflow_nodes.first()
approval = wfj_node.job
assert approval.name == 'Approve Test'
- post(reverse('api:workflow_approval_approve', kwargs={'pk': approval.pk}),
- user=admin_user, expect=204)
+ post(reverse('api:workflow_approval_approve', kwargs={'pk': approval.pk}), user=admin_user, expect=204)
# Test that there is an activity stream entry that was created for the "approve" action.
qs = ActivityStream.objects.order_by('-timestamp').first()
assert qs.object1 == 'workflow_approval'
assert qs.changes == '{"status": ["pending", "successful"]}'
assert WorkflowApproval.objects.get(pk=approval.pk).status == 'successful'
assert qs.operation == 'update'
- post(reverse('api:workflow_approval_approve', kwargs={'pk': approval.pk}),
- user=admin_user, expect=400)
+ post(reverse('api:workflow_approval_approve', kwargs={'pk': approval.pk}), user=admin_user, expect=400)
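
A reading note on test_approval_node_approve: the two back-to-back TaskManager().schedule() calls presumably let one pass start the workflow job and a second pass advance its graph far enough to spawn the approval; the test itself doesn't say, so that is an inference. The contract the asserts pin down, with paths assuming AWX's standard v2 routing:

#   POST /api/v2/workflow_approvals/<pk>/approve/   -> 204 while still pending
#   POST the same URL again                         -> 400 once already resolved
# The approval's ActivityStream entry records the status transition, e.g.
#   '{"status": ["pending", "successful"]}'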
@pytest.mark.django_db
def test_approval_node_deny(self, post, admin_user, job_template):
@@ -192,39 +158,30 @@ class TestApprovalNodes():
# that have already been dealt with will throw an error.
wfjt = WorkflowJobTemplate.objects.create(name='foobar')
node = wfjt.workflow_nodes.create(unified_job_template=job_template)
- url = reverse('api:workflow_job_template_node_create_approval',
- kwargs={'pk': node.pk, 'version': 'v2'})
- post(url, {'name': 'Deny Test', 'description': '', 'timeout': 0},
- user=admin_user, expect=201)
- post(reverse('api:workflow_job_template_launch', kwargs={'pk': wfjt.pk}),
- user=admin_user, expect=201)
+ url = reverse('api:workflow_job_template_node_create_approval', kwargs={'pk': node.pk, 'version': 'v2'})
+ post(url, {'name': 'Deny Test', 'description': '', 'timeout': 0}, user=admin_user, expect=201)
+ post(reverse('api:workflow_job_template_launch', kwargs={'pk': wfjt.pk}), user=admin_user, expect=201)
wf_job = WorkflowJob.objects.first()
TaskManager().schedule()
TaskManager().schedule()
wfj_node = wf_job.workflow_nodes.first()
approval = wfj_node.job
assert approval.name == 'Deny Test'
- post(reverse('api:workflow_approval_deny', kwargs={'pk': approval.pk}),
- user=admin_user, expect=204)
+ post(reverse('api:workflow_approval_deny', kwargs={'pk': approval.pk}), user=admin_user, expect=204)
# Test that there is an activity stream entry that was created for the "deny" action.
qs = ActivityStream.objects.order_by('-timestamp').first()
assert qs.object1 == 'workflow_approval'
assert qs.changes == '{"status": ["pending", "failed"]}'
assert WorkflowApproval.objects.get(pk=approval.pk).status == 'failed'
assert qs.operation == 'update'
- post(reverse('api:workflow_approval_deny', kwargs={'pk': approval.pk}),
- user=admin_user, expect=400)
+ post(reverse('api:workflow_approval_deny', kwargs={'pk': approval.pk}), user=admin_user, expect=400)
def test_approval_node_cleanup(self, post, approval_node, admin_user, get):
workflow_job_template = WorkflowJobTemplate.objects.create()
- approval_node = WorkflowJobTemplateNode.objects.create(
- workflow_job_template=workflow_job_template
- )
- url = reverse('api:workflow_job_template_node_create_approval',
- kwargs={'pk': approval_node.pk, 'version': 'v2'})
+ approval_node = WorkflowJobTemplateNode.objects.create(workflow_job_template=workflow_job_template)
+ url = reverse('api:workflow_job_template_node_create_approval', kwargs={'pk': approval_node.pk, 'version': 'v2'})
- post(url, {'name': 'URL Test', 'description': 'An approval', 'timeout': 0},
- user=admin_user)
+ post(url, {'name': 'URL Test', 'description': 'An approval', 'timeout': 0}, user=admin_user)
assert WorkflowApprovalTemplate.objects.count() == 1
workflow_job_template.delete()
assert WorkflowApprovalTemplate.objects.count() == 0
@@ -235,13 +192,9 @@ class TestApprovalNodes():
# (in this case, a job template), then the previously-set WorkflowApprovalTemplate
# is automatically deleted.
workflow_job_template = WorkflowJobTemplate.objects.create()
- approval_node = WorkflowJobTemplateNode.objects.create(
- workflow_job_template=workflow_job_template
- )
- url = reverse('api:workflow_job_template_node_create_approval',
- kwargs={'pk': approval_node.pk, 'version': 'v2'})
- post(url, {'name': 'URL Test', 'description': 'An approval', 'timeout': 0},
- user=admin_user)
+ approval_node = WorkflowJobTemplateNode.objects.create(workflow_job_template=workflow_job_template)
+ url = reverse('api:workflow_job_template_node_create_approval', kwargs={'pk': approval_node.pk, 'version': 'v2'})
+ post(url, {'name': 'URL Test', 'description': 'An approval', 'timeout': 0}, user=admin_user)
assert WorkflowApprovalTemplate.objects.count() == 1
approval_node.unified_job_template = job_template
approval_node.save()
@@ -251,13 +204,9 @@ class TestApprovalNodes():
# Verifying that when a WorkflowApprovalTemplate is deleted, any/all of
# its pending approvals are auto-denied (vs left in 'pending' state).
workflow_job_template = WorkflowJobTemplate.objects.create()
- approval_node = WorkflowJobTemplateNode.objects.create(
- workflow_job_template=workflow_job_template
- )
- url = reverse('api:workflow_job_template_node_create_approval',
- kwargs={'pk': approval_node.pk, 'version': 'v2'})
- post(url, {'name': 'URL Test', 'description': 'An approval', 'timeout': 0},
- user=admin_user)
+ approval_node = WorkflowJobTemplateNode.objects.create(workflow_job_template=workflow_job_template)
+ url = reverse('api:workflow_job_template_node_create_approval', kwargs={'pk': approval_node.pk, 'version': 'v2'})
+ post(url, {'name': 'URL Test', 'description': 'An approval', 'timeout': 0}, user=admin_user)
assert WorkflowApprovalTemplate.objects.count() == 1
approval_template = WorkflowApprovalTemplate.objects.first()
approval = approval_template.create_unified_job()
@@ -269,7 +218,7 @@ class TestApprovalNodes():
@pytest.mark.django_db
-class TestExclusiveRelationshipEnforcement():
+class TestExclusiveRelationshipEnforcement:
@pytest.fixture
def n1(self, workflow_job_template):
return WorkflowJobTemplateNode.objects.create(workflow_job_template=workflow_job_template)
@@ -279,8 +228,7 @@ class TestExclusiveRelationshipEnforcement():
return WorkflowJobTemplateNode.objects.create(workflow_job_template=workflow_job_template)
def generate_url(self, relationship, id):
- return reverse('api:workflow_job_template_node_{}_nodes_list'.format(relationship),
- kwargs={'pk': id})
+ return reverse('api:workflow_job_template_node_{}_nodes_list'.format(relationship), kwargs={'pk': id})
relationship_permutations = [
['success', 'failure', 'always'],
@@ -294,89 +242,60 @@ class TestExclusiveRelationshipEnforcement():
@pytest.mark.parametrize("relationships", relationship_permutations, ids=["-".join(item) for item in relationship_permutations])
def test_multi_connections_same_parent_disallowed(self, post, admin_user, n1, n2, relationships):
for index, relationship in enumerate(relationships):
- r = post(self.generate_url(relationship, n1.id),
- data={'associate': True, 'id': n2.id},
- user=admin_user,
- expect=204 if index == 0 else 400)
+ r = post(self.generate_url(relationship, n1.id), data={'associate': True, 'id': n2.id}, user=admin_user, expect=204 if index == 0 else 400)
if index != 0:
assert {'Error': 'Relationship not allowed.'} == json.loads(r.content)
@pytest.mark.parametrize("relationship", ['success', 'failure', 'always'])
def test_existing_relationship_allowed(self, post, admin_user, n1, n2, relationship):
- post(self.generate_url(relationship, n1.id),
- data={'associate': True, 'id': n2.id},
- user=admin_user,
- expect=204)
- post(self.generate_url(relationship, n1.id),
- data={'associate': True, 'id': n2.id},
- user=admin_user,
- expect=204)
+ post(self.generate_url(relationship, n1.id), data={'associate': True, 'id': n2.id}, user=admin_user, expect=204)
+ post(self.generate_url(relationship, n1.id), data={'associate': True, 'id': n2.id}, user=admin_user, expect=204)
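
The permutation tests above encode one invariant: a child node may be attached to a given parent through at most one of the success/failure/always lists (the second association returns 400 with 'Relationship not allowed.'), while re-posting an identical existing link is idempotent (204 both times). As a side note, the hand-written permutation table could equivalently be generated; a standalone, runnable sketch:

from itertools import permutations

# ids like 'success-failure-always' mirror the parametrize ids used above
relationship_permutations = [list(p) for p in permutations(['success', 'failure', 'always'])]
ids = ["-".join(item) for item in relationship_permutations]
assert len(ids) == 6 and 'success-failure-always' in ids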
@pytest.mark.django_db
class TestNodeCredentials:
- '''
+ """
The supported way to provide credentials on launch is through a list
under the "credentials" key - WFJT nodes have a many-to-many relationship
corresponding to this, and it must follow rules consistent with other prompts
- '''
+ """
+
@pytest.fixture
def job_template_ask(self, job_template):
job_template.ask_credential_on_launch = True
job_template.save()
return job_template
- def test_not_allows_non_job_models(self, post, admin_user, workflow_job_template,
- project, machine_credential):
- node = WorkflowJobTemplateNode.objects.create(
- workflow_job_template=workflow_job_template,
- unified_job_template=project
- )
+ def test_not_allows_non_job_models(self, post, admin_user, workflow_job_template, project, machine_credential):
+ node = WorkflowJobTemplateNode.objects.create(workflow_job_template=workflow_job_template, unified_job_template=project)
r = post(
- reverse(
- 'api:workflow_job_template_node_credentials_list',
- kwargs = {'pk': node.pk}
- ),
- data = {'id': machine_credential.pk},
- user = admin_user,
- expect = 400
+ reverse('api:workflow_job_template_node_credentials_list', kwargs={'pk': node.pk}), data={'id': machine_credential.pk}, user=admin_user, expect=400
)
assert 'cannot accept credentials on launch' in str(r.data['msg'])
- def test_credential_accepted_create(self, workflow_job_template, post, admin_user,
- job_template_ask, machine_credential):
+ def test_credential_accepted_create(self, workflow_job_template, post, admin_user, job_template_ask, machine_credential):
r = post(
- reverse(
- 'api:workflow_job_template_workflow_nodes_list',
- kwargs = {'pk': workflow_job_template.pk}
- ),
- data = {'unified_job_template': job_template_ask.pk},
- user = admin_user,
- expect = 201
+ reverse('api:workflow_job_template_workflow_nodes_list', kwargs={'pk': workflow_job_template.pk}),
+ data={'unified_job_template': job_template_ask.pk},
+ user=admin_user,
+ expect=201,
)
node = WorkflowJobTemplateNode.objects.get(pk=r.data['id'])
post(url=r.data['related']['credentials'], data={'id': machine_credential.pk}, user=admin_user, expect=204)
assert list(node.credentials.all()) == [machine_credential]
- @pytest.mark.parametrize('role,code', [
- ['use_role', 204],
- ['read_role', 403]
- ])
- def test_credential_rbac(self, role, code, workflow_job_template, post, rando,
- job_template_ask, machine_credential):
+ @pytest.mark.parametrize('role,code', [['use_role', 204], ['read_role', 403]])
+ def test_credential_rbac(self, role, code, workflow_job_template, post, rando, job_template_ask, machine_credential):
role_obj = getattr(machine_credential, role)
role_obj.members.add(rando)
job_template_ask.execute_role.members.add(rando)
workflow_job_template.admin_role.members.add(rando)
r = post(
- reverse(
- 'api:workflow_job_template_workflow_nodes_list',
- kwargs = {'pk': workflow_job_template.pk}
- ),
- data = {'unified_job_template': job_template_ask.pk},
- user = rando,
- expect = 201
+ reverse('api:workflow_job_template_workflow_nodes_list', kwargs={'pk': workflow_job_template.pk}),
+ data={'unified_job_template': job_template_ask.pk},
+ user=rando,
+ expect=201,
)
creds_url = r.data['related']['credentials']
post(url=creds_url, data={'id': machine_credential.pk}, user=rando, expect=code)
@@ -386,34 +305,18 @@ class TestNodeCredentials:
node.unified_job_template.save()
url = node.get_absolute_url()
r = get(url=url, user=admin_user, expect=200)
- post(
- url = r.data['related']['credentials'],
- data = {'id': machine_credential.pk},
- user = admin_user,
- expect = 204
- )
+ post(url=r.data['related']['credentials'], data={'id': machine_credential.pk}, user=admin_user, expect=204)
node.refresh_from_db()
- post(
- url = r.data['related']['credentials'],
- data = {'id': machine_credential.pk, 'disassociate': True},
- user = admin_user,
- expect = 204
- )
+ post(url=r.data['related']['credentials'], data={'id': machine_credential.pk, 'disassociate': True}, user=admin_user, expect=204)
node.refresh_from_db()
assert list(node.credentials.values_list('pk', flat=True)) == []
def test_credential_replace(self, node, get, post, credentialtype_ssh, admin_user):
node.unified_job_template.ask_credential_on_launch = True
node.unified_job_template.save()
- cred1 = Credential.objects.create(
- credential_type=credentialtype_ssh,
- name='machine-cred1',
- inputs={'username': 'test_user', 'password': 'pas4word'})
- cred2 = Credential.objects.create(
- credential_type=credentialtype_ssh,
- name='machine-cred2',
- inputs={'username': 'test_user', 'password': 'pas4word'})
+ cred1 = Credential.objects.create(credential_type=credentialtype_ssh, name='machine-cred1', inputs={'username': 'test_user', 'password': 'pas4word'})
+ cred2 = Credential.objects.create(credential_type=credentialtype_ssh, name='machine-cred2', inputs={'username': 'test_user', 'password': 'pas4word'})
node.credentials.add(cred1)
url = node.get_absolute_url()
r = get(url=url, user=admin_user, expect=200)
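
The credential tests above all lean on AWX's generic sub-list association convention: POST {'id': <pk>} to a related 'credentials' URL to associate, and add 'disassociate': True to remove. Condensed into one sketch, assuming this module's fixtures (node, get, post, machine_credential, admin_user) and imports:

@pytest.mark.django_db
def test_associate_then_disassociate(node, get, post, machine_credential, admin_user):
    # prompting must be enabled before the node will accept a credential
    node.unified_job_template.ask_credential_on_launch = True
    node.unified_job_template.save()
    creds_url = get(url=node.get_absolute_url(), user=admin_user, expect=200).data['related']['credentials']
    post(url=creds_url, data={'id': machine_credential.pk}, user=admin_user, expect=204)  # associate
    post(url=creds_url, data={'id': machine_credential.pk, 'disassociate': True}, user=admin_user, expect=204)  # remove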
diff --git a/awx/main/tests/functional/commands/test_cleanup_jobs.py b/awx/main/tests/functional/commands/test_cleanup_jobs.py
index 98be403d1f..0b934a43ed 100644
--- a/awx/main/tests/functional/commands/test_cleanup_jobs.py
+++ b/awx/main/tests/functional/commands/test_cleanup_jobs.py
@@ -7,19 +7,16 @@ from django.db.models.deletion import Collector, SET_NULL, CASCADE
from django.core.management import call_command
from awx.main.utils.deletion import AWXCollector
-from awx.main.models import (
- JobTemplate, User, Job, JobEvent, Notification,
- WorkflowJobNode, JobHostSummary
-)
+from awx.main.models import JobTemplate, User, Job, JobEvent, Notification, WorkflowJobNode, JobHostSummary
@pytest.fixture
def setup_environment(inventory, project, machine_credential, host, notification_template, label):
- '''
+ """
Create old jobs and new jobs, with various other objects to hit the
related fields of Jobs. This makes sure on_delete() effects are tested
properly.
- '''
+ """
old_jobs = []
new_jobs = []
days = 10
@@ -36,11 +33,10 @@ def setup_environment(inventory, project, machine_credential, host, notification_template, label):
for i in range(3):
job1 = jt.create_job()
- job1.created =datetime.now(tz=timezone('UTC'))
+ job1.created = datetime.now(tz=timezone('UTC'))
job1.save()
# create jobs with current time
- JobEvent.create_from_data(job_id=job1.pk, uuid='abc123', event='runner_on_start',
- stdout='a' * 1025).save()
+ JobEvent.create_from_data(job_id=job1.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025).save()
new_jobs.append(job1)
job2 = jt.create_job()
@@ -48,8 +44,7 @@ def setup_environment(inventory, project, machine_credential, host, notification_template, label):
job2.created = datetime.now(tz=timezone('UTC')) - timedelta(days=days)
job2.save()
job2.dependent_jobs.add(job1)
- JobEvent.create_from_data(job_id=job2.pk, uuid='abc123', event='runner_on_start',
- stdout='a' * 1025).save()
+ JobEvent.create_from_data(job_id=job2.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025).save()
old_jobs.append(job2)
jt.last_job = job2
@@ -72,25 +67,20 @@ def test_cleanup_jobs(setup_environment):
(old_jobs, new_jobs, days_str) = setup_environment
# related_fields
- related = [f for f in Job._meta.get_fields(include_hidden=True)
- if f.auto_created and not
- f.concrete and
- (f.one_to_one or f.one_to_many)]
+ related = [f for f in Job._meta.get_fields(include_hidden=True) if f.auto_created and not f.concrete and (f.one_to_one or f.one_to_many)]
- job = old_jobs[-1] # last job
+ job = old_jobs[-1] # last job
# gather related objects for job
related_should_be_removed = {}
related_should_be_null = {}
for r in related:
- qs = r.related_model._base_manager.using('default').filter(
- **{"%s__in" % r.field.name: [job.pk]}
- )
+ qs = r.related_model._base_manager.using('default').filter(**{"%s__in" % r.field.name: [job.pk]})
if qs.exists():
if r.field.remote_field.on_delete == CASCADE:
related_should_be_removed[qs.model] = set(qs.values_list('pk', flat=True))
if r.field.remote_field.on_delete == SET_NULL:
- related_should_be_null[(qs.model,r.field.name)] = set(qs.values_list('pk', flat=True))
+ related_should_be_null[(qs.model, r.field.name)] = set(qs.values_list('pk', flat=True))
assert related_should_be_removed
assert related_should_be_null
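
The joined comprehension above is dense after the reformat; an equivalent expansion with the intent of each predicate spelled out (same logic, using this module's Job import, just easier to read):

related = [
    f
    for f in Job._meta.get_fields(include_hidden=True)
    if f.auto_created                     # auto-created reverse accessors, not declared fields
    and not f.concrete                    # skip real columns on the Job table itself
    and (f.one_to_one or f.one_to_many)   # reverse FK/O2O: the relations on_delete acts through
]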
@@ -106,14 +96,14 @@ def test_cleanup_jobs(setup_environment):
for model, values in related_should_be_removed.items():
assert not model.objects.filter(pk__in=values).exists()
- for (model,fieldname), values in related_should_be_null.items():
+ for (model, fieldname), values in related_should_be_null.items():
for v in values:
assert not getattr(model.objects.get(pk=v), fieldname)
@pytest.mark.django_db
def test_awxcollector(setup_environment):
- '''
+ """
Efforts to improve the performance of cleanup_jobs involved
sub-classing the django Collector class. This unit test will
check for parity between the django Collector and the modified
@@ -127,7 +117,7 @@ def test_awxcollector(setup_environment):
(after running .collect on jobs), from querysets to sets of
objects. The final result should be a dictionary that is
equivalent to Django's Collector.
- '''
+ """
(old_jobs, new_jobs, days_str) = setup_environment
collector = Collector('default')
@@ -154,18 +144,18 @@ def test_awxcollector(setup_environment):
for model, instances_for_fieldvalues in awx_col.field_updates.items():
awx_del_dict.setdefault(model, {})
for (field, value), instances in instances_for_fieldvalues.items():
- awx_del_dict[model].setdefault((field,value), set())
+ awx_del_dict[model].setdefault((field, value), set())
for inst in instances:
- awx_del_dict[model][(field,value)].update(inst)
+ awx_del_dict[model][(field, value)].update(inst)
# collector field updates don't use the base (polymorphic parent) model, e.g.
# it will use JobTemplate instead of UnifiedJobTemplate. Therefore,
# we need to rebuild the dictionary and grab the model from the field
collector_del_dict = OrderedDict()
for model, instances_for_fieldvalues in collector.field_updates.items():
- for (field,value), instances in instances_for_fieldvalues.items():
+ for (field, value), instances in instances_for_fieldvalues.items():
collector_del_dict.setdefault(field.model, {})
- collector_del_dict[field.model][(field, value)] = collector.field_updates[model][(field,value)]
+ collector_del_dict[field.model][(field, value)] = collector.field_updates[model][(field, value)]
assert awx_del_dict == collector_del_dict
# check that fast deletes are the same
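
Why the re-keying above is needed, as inferred from the test's own comments: Django's Collector keys field_updates by the model it collected against, which may be a polymorphic parent such as UnifiedJobTemplate, while AWXCollector works from querysets; re-keying both sides by field.model makes the dictionaries comparable. The shape both loops normalize to:

#   {model: {(field, value): set_of_instances}}
# where SET_NULL handling produces (field, None)-style pairs.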
diff --git a/awx/main/tests/functional/commands/test_commands.py b/awx/main/tests/functional/commands/test_commands.py
index 078ca96f2d..69f584c287 100644
--- a/awx/main/tests/functional/commands/test_commands.py
+++ b/awx/main/tests/functional/commands/test_commands.py
@@ -34,12 +34,13 @@ def run_command(name, *args, **options):
@pytest.mark.parametrize(
- "username,password,expected,changed", [
+ "username,password,expected,changed",
+ [
('admin', 'dingleberry', 'Password updated', True),
('admin', 'admin', 'Password not updated', False),
(None, 'foo', 'username required', False),
('admin', None, 'password required', False),
- ]
+ ],
)
def test_update_password_command(mocker, username, password, expected, changed):
with mocker.patch.object(UpdatePassword, 'update_password', return_value=changed):
diff --git a/awx/main/tests/functional/commands/test_inventory_import.py b/awx/main/tests/functional/commands/test_inventory_import.py
index 0500ef197c..c53630bcb5 100644
--- a/awx/main/tests/functional/commands/test_inventory_import.py
+++ b/awx/main/tests/functional/commands/test_inventory_import.py
@@ -19,66 +19,28 @@ from awx.main.utils.mem_inventory import MemGroup
TEST_INVENTORY_CONTENT = {
- "_meta": {
- "hostvars": {}
- },
- "all": {
- "children": [
- "others",
- "servers",
- "ungrouped"
- ],
- "vars": {
- "vara": "A"
- }
- },
- "dbservers": {
- "hosts": [
- "db1.example.com",
- "db2.example.com"
- ],
- "vars": {
- "dbvar": "ugh"
- }
- },
+ "_meta": {"hostvars": {}},
+ "all": {"children": ["others", "servers", "ungrouped"], "vars": {"vara": "A"}},
+ "dbservers": {"hosts": ["db1.example.com", "db2.example.com"], "vars": {"dbvar": "ugh"}},
"others": {
"hosts": {
"10.11.12.13": {},
"10.12.14.16": {"ansible_port": 8022},
"::1": {},
"fe80::1610:9fff:fedd:654b": {},
- "fe80::1610:9fff:fedd:b654": {"ansible_port": 1022}
- }
- },
- "servers": {
- "children": [
- "dbservers",
- "webservers"
- ],
- "vars": {
- "varb": "B"
+ "fe80::1610:9fff:fedd:b654": {"ansible_port": 1022},
}
},
+ "servers": {"children": ["dbservers", "webservers"], "vars": {"varb": "B"}},
"ungrouped": {},
"webservers": {
- "hosts": {
- "web1.example.com": {
- "ansible_ssh_host": "w1.example.net"
- },
- "web2.example.com": {},
- "web3.example.com": {
- "ansible_port": 1022
- }
- },
- "vars": {
- "webvar": "blah"
- }
- }
+ "hosts": {"web1.example.com": {"ansible_ssh_host": "w1.example.net"}, "web2.example.com": {}, "web3.example.com": {"ansible_port": 1022}},
+ "vars": {"webvar": "blah"},
+ },
}
class MockLoader:
-
def __init__(self, *args, **kwargs):
pass
@@ -95,15 +57,12 @@ def mock_logging(self, level):
@mock.patch.object(inventory_import.Command, 'check_license', mock.MagicMock())
@mock.patch.object(inventory_import.Command, 'set_logging_level', mock_logging)
class TestInvalidOptionsFunctional:
-
def test_invalid_options_invalid_source(self, inventory):
# Give an invalid file to the command
cmd = inventory_import.Command()
with mock.patch('django.db.transaction.rollback'):
with pytest.raises(OSError) as err:
- cmd.handle(
- inventory_id=inventory.id,
- source='/tmp/pytest-of-root/pytest-7/inv_files0-invalid')
+ cmd.handle(inventory_id=inventory.id, source='/tmp/pytest-of-root/pytest-7/inv_files0-invalid')
assert 'Source does not exist' in str(err.value)
def test_invalid_inventory_id(self):
@@ -126,27 +85,31 @@ class TestInvalidOptionsFunctional:
@mock.patch.object(inventory_import.Command, 'check_license', new=mock.MagicMock())
@mock.patch.object(inventory_import.Command, 'set_logging_level', new=mock_logging)
class TestINIImports:
-
@mock.patch.object(inventory_import, 'AnsibleInventoryLoader', MockLoader)
def test_inventory_single_ini_import(self, inventory, capsys):
inventory_import.AnsibleInventoryLoader._data = TEST_INVENTORY_CONTENT
cmd = inventory_import.Command()
- r = cmd.handle(
- inventory_id=inventory.pk, source=__file__,
- method='backport')
+ r = cmd.handle(inventory_id=inventory.pk, source=__file__, method='backport')
out, err = capsys.readouterr()
assert r is None
assert out == ''
- assert set(inventory.groups.values_list('name', flat=True)) == set([
- 'servers', 'dbservers', 'webservers', 'others'])
-
- assert set(inventory.hosts.values_list('name', flat=True)) == set([
- 'web1.example.com', 'web2.example.com',
- 'web3.example.com', 'db1.example.com',
- 'db2.example.com', '10.11.12.13',
- '10.12.14.16', 'fe80::1610:9fff:fedd:654b',
- 'fe80::1610:9fff:fedd:b654', '::1'])
+ assert set(inventory.groups.values_list('name', flat=True)) == set(['servers', 'dbservers', 'webservers', 'others'])
+
+ assert set(inventory.hosts.values_list('name', flat=True)) == set(
+ [
+ 'web1.example.com',
+ 'web2.example.com',
+ 'web3.example.com',
+ 'db1.example.com',
+ 'db2.example.com',
+ '10.11.12.13',
+ '10.12.14.16',
+ 'fe80::1610:9fff:fedd:654b',
+ 'fe80::1610:9fff:fedd:b654',
+ '::1',
+ ]
+ )
reloaded_inv = Inventory.objects.get(pk=inventory.pk)
assert reloaded_inv.variables_dict == {'vara': 'A'}
@@ -166,12 +129,12 @@ class TestINIImports:
servers = Group.objects.get(name='dbservers')
assert servers.variables_dict == {'dbvar': 'ugh'}
assert servers.children.count() == 0
- assert set(servers.hosts.values_list('name', flat=True)) == set(['db1.example.com','db2.example.com'])
+ assert set(servers.hosts.values_list('name', flat=True)) == set(['db1.example.com', 'db2.example.com'])
servers = Group.objects.get(name='webservers')
assert servers.variables_dict == {'webvar': 'blah'}
assert servers.children.count() == 0
- assert set(servers.hosts.values_list('name', flat=True)) == set(['web1.example.com','web2.example.com', 'web3.example.com'])
+ assert set(servers.hosts.values_list('name', flat=True)) == set(['web1.example.com', 'web2.example.com', 'web3.example.com'])
assert reloaded_inv.inventory_sources.filter().count() == 1
invsrc = reloaded_inv.inventory_sources.first()
@@ -186,15 +149,9 @@ class TestINIImports:
@mock.patch.object(inventory_import, 'AnsibleInventoryLoader', MockLoader)
def test_hostvars_are_saved(self, inventory):
inventory_import.AnsibleInventoryLoader._data = {
- "_meta": {
- "hostvars": {"foo": {"some_hostvar": "foobar"}}
- },
- "all": {
- "children": ["ungrouped"]
- },
- "ungrouped": {
- "hosts": ["foo"]
- }
+ "_meta": {"hostvars": {"foo": {"some_hostvar": "foobar"}}},
+ "all": {"children": ["ungrouped"]},
+ "ungrouped": {"hosts": ["foo"]},
}
cmd = inventory_import.Command()
cmd.handle(inventory_id=inventory.pk, source=__file__)
@@ -210,29 +167,17 @@ class TestINIImports:
importing the same parent groups
"""
inventory_import.AnsibleInventoryLoader._data = {
- "_meta": {
- "hostvars": {"foo": {}}
- },
- "all": {
- "children": ["ungrouped", "is_a_parent", "has_a_host", "is_a_child"]
- },
- "is_a_parent": {
- "children": ["is_a_child"]
- },
- "has_a_host": {
- "hosts": ["foo"]
- },
- "ungrouped": {
- "hosts": []
- }
+ "_meta": {"hostvars": {"foo": {}}},
+ "all": {"children": ["ungrouped", "is_a_parent", "has_a_host", "is_a_child"]},
+ "is_a_parent": {"children": ["is_a_child"]},
+ "has_a_host": {"hosts": ["foo"]},
+ "ungrouped": {"hosts": []},
}
cmd = inventory_import.Command()
cmd.handle(inventory_id=inventory.pk, source=__file__)
assert inventory.hosts.count() == 1 # baseline worked
- inv_src2 = inventory.inventory_sources.create(
- name='bar', overwrite=True, source='ec2'
- )
+ inv_src2 = inventory.inventory_sources.create(name='bar', overwrite=True, source='ec2')
os.environ['INVENTORY_SOURCE_ID'] = str(inv_src2.pk)
os.environ['INVENTORY_UPDATE_ID'] = str(inv_src2.create_unified_job().pk)
# scenario where groups are already imported, and overwrite is true
@@ -240,15 +185,9 @@ class TestINIImports:
inv_src2.groups.add(inventory.groups.get(name='has_a_host'))
inventory_import.AnsibleInventoryLoader._data = {
- "_meta": {
- "hostvars": {"bar": {}}
- },
- "all": {
- "children": ["ungrouped", "is_a_parent", "has_a_host"]
- },
- "ungrouped": {
- "hosts": ["bar"]
- }
+ "_meta": {"hostvars": {"bar": {}}},
+ "all": {"children": ["ungrouped", "is_a_parent", "has_a_host"]},
+ "ungrouped": {"hosts": ["bar"]},
}
cmd = inventory_import.Command()
cmd.handle(inventory_id=inventory.pk, source=__file__, overwrite=True)
@@ -265,18 +204,10 @@ class TestINIImports:
@mock.patch.object(inventory_import, 'AnsibleInventoryLoader', MockLoader)
def test_recursive_group_error(self, inventory):
inventory_import.AnsibleInventoryLoader._data = {
- "_meta": {
- "hostvars": {}
- },
- "all": {
- "children": ["fooland", "barland"]
- },
- "fooland": {
- "children": ["barland"]
- },
- "barland": {
- "children": ["fooland"]
- }
+ "_meta": {"hostvars": {}},
+ "all": {"children": ["fooland", "barland"]},
+ "fooland": {"children": ["barland"]},
+ "barland": {"children": ["fooland"]},
}
cmd = inventory_import.Command()
cmd.handle(inventory_id=inventory.pk, source=__file__)
@@ -285,12 +216,12 @@ class TestINIImports:
@pytest.mark.django_db
@pytest.mark.inventory_import
class TestEnabledVar:
- '''
+ """
Meaning of return values
None - import script did not give an indication of enablement
True - host is enabled
False - host is not enabled
- '''
+ """
@pytest.fixture
def cmd(self):
@@ -318,13 +249,7 @@ def test_tower_version_compare():
cmd.all_group = MemGroup('all')
# mimic example from https://github.com/ansible/ansible/pull/52747
# until that is merged, this is the best testing we can do
- cmd.all_group.variables = {
- 'tower_metadata': {
- "ansible_version": "2.7.5",
- "license_type": "open",
- "version": "2.0.1-1068-g09684e2c41"
- }
- }
+ cmd.all_group.variables = {'tower_metadata': {"ansible_version": "2.7.5", "license_type": "open", "version": "2.0.1-1068-g09684e2c41"}}
with pytest.raises(PermissionDenied):
cmd.remote_tower_license_compare('very_supported')
cmd.remote_tower_license_compare('open')
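
TestEnabledVar's docstring above defines a three-valued result for host enablement. A minimal pure-Python sketch of that contract; the helper name and signature here are hypothetical, and the real logic lives on the import command:

def enabled_state(hostvars, enabled_var, enabled_value):
    # None: the import script said nothing about enablement
    if enabled_var not in hostvars:
        return None
    # True/False: the variable is present and does or does not match
    return hostvars[enabled_var] == enabled_value

assert enabled_state({}, 'status', 'up') is None
assert enabled_state({'status': 'up'}, 'status', 'up') is True
assert enabled_state({'status': 'down'}, 'status', 'up') is False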
diff --git a/awx/main/tests/functional/commands/test_oauth2_token_revoke.py b/awx/main/tests/functional/commands/test_oauth2_token_revoke.py
index 4e576e5558..69b25fd0a8 100644
--- a/awx/main/tests/functional/commands/test_oauth2_token_revoke.py
+++ b/awx/main/tests/functional/commands/test_oauth2_token_revoke.py
@@ -17,7 +17,6 @@ from awx.api.versioning import reverse
@pytest.mark.django_db
class TestOAuth2RevokeCommand:
-
def test_non_existing_user(self):
out = StringIO()
fake_username = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
@@ -30,22 +29,14 @@ class TestOAuth2RevokeCommand:
def test_revoke_all_access_tokens(self, post, admin, alice):
url = reverse('api:o_auth2_token_list')
for user in (admin, alice):
- post(
- url,
- {'description': 'test token', 'scope': 'read'},
- user
- )
+ post(url, {'description': 'test token', 'scope': 'read'}, user)
assert OAuth2AccessToken.objects.count() == 2
call_command('revoke_oauth2_tokens')
assert OAuth2AccessToken.objects.count() == 0
def test_revoke_access_token_for_user(self, post, admin, alice):
url = reverse('api:o_auth2_token_list')
- post(
- url,
- {'description': 'test token', 'scope': 'read'},
- alice
- )
+ post(url, {'description': 'test token', 'scope': 'read'}, alice)
assert OAuth2AccessToken.objects.count() == 1
call_command('revoke_oauth2_tokens', '--user=admin')
assert OAuth2AccessToken.objects.count() == 1
@@ -54,15 +45,7 @@ class TestOAuth2RevokeCommand:
def test_revoke_all_refresh_tokens(self, post, admin, oauth_application):
url = reverse('api:o_auth2_token_list')
- post(
- url,
- {
- 'description': 'test token for',
- 'scope': 'read',
- 'application': oauth_application.pk
- },
- admin
- )
+ post(url, {'description': 'test token for', 'scope': 'read', 'application': oauth_application.pk}, admin)
assert OAuth2AccessToken.objects.count() == 1
assert RefreshToken.objects.count() == 1
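
The management command exercised through call_command above is also reachable from the CLI via AWX's standard awx-manage entry point; the only flag shown in these tests is --user:

# awx-manage revoke_oauth2_tokens                # revoke every access token
# awx-manage revoke_oauth2_tokens --user=admin   # only the named user's tokens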
diff --git a/awx/main/tests/functional/commands/test_secret_key_regeneration.py b/awx/main/tests/functional/commands/test_secret_key_regeneration.py
index d27b4329cd..c90894c663 100644
--- a/awx/main/tests/functional/commands/test_secret_key_regeneration.py
+++ b/awx/main/tests/functional/commands/test_secret_key_regeneration.py
@@ -16,7 +16,6 @@ PREFIX = '$encrypted$UTF8$AESCBC$'
@pytest.mark.django_db
class TestKeyRegeneration:
-
def test_encrypted_ssh_password(self, credential):
# test basic decryption
assert credential.inputs['password'].startswith(PREFIX)
@@ -67,7 +66,6 @@ class TestKeyRegeneration:
Slack = nt.CLASS_FOR_NOTIFICATION_TYPE[nt.notification_type]
class TestBackend(Slack):
-
def __init__(self, *args, **kw):
assert kw['token'] == 'token'
@@ -112,9 +110,7 @@ class TestKeyRegeneration:
# verify that the new SECRET_KEY *does* work
with override_settings(SECRET_KEY=new_key):
- assert json.loads(
- decrypt_field(new_job, field_name='start_args')
- ) == {'foo': 'bar'}
+ assert json.loads(decrypt_field(new_job, field_name='start_args')) == {'foo': 'bar'}
@pytest.mark.parametrize('cls', ('JobTemplate', 'WorkflowJobTemplate'))
def test_survey_spec(self, inventory, project, survey_spec_factory, cls):
@@ -125,11 +121,7 @@ class TestKeyRegeneration:
# test basic decryption
jt = getattr(models, cls).objects.create(
name='Example Template',
- survey_spec=survey_spec_factory([{
- 'variable': 'secret_key',
- 'default': encrypt_value('donttell', pk=None),
- 'type': 'password'
- }]),
+ survey_spec=survey_spec_factory([{'variable': 'secret_key', 'default': encrypt_value('donttell', pk=None), 'type': 'password'}]),
survey_enabled=True,
**params
)
@@ -149,9 +141,7 @@ class TestKeyRegeneration:
# verify that the new SECRET_KEY *does* work
with override_settings(SECRET_KEY=new_key):
- assert json.loads(
- new_job.decrypted_extra_vars()
- )['secret_key'] == 'donttell'
+ assert json.loads(new_job.decrypted_extra_vars())['secret_key'] == 'donttell'
def test_oauth2_application_client_secret(self, oauth_application):
# test basic decryption
@@ -163,12 +153,8 @@ class TestKeyRegeneration:
# verify that the old SECRET_KEY doesn't work
with pytest.raises(InvalidToken):
- models.OAuth2Application.objects.get(
- pk=oauth_application.pk
- ).client_secret
+ models.OAuth2Application.objects.get(pk=oauth_application.pk).client_secret
# verify that the new SECRET_KEY *does* work
with override_settings(SECRET_KEY=new_key):
- assert models.OAuth2Application.objects.get(
- pk=oauth_application.pk
- ).client_secret == secret
+ assert models.OAuth2Application.objects.get(pk=oauth_application.pk).client_secret == secret
diff --git a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py
index 4cbd5a40d3..96101ffb41 100644
--- a/awx/main/tests/functional/conftest.py
+++ b/awx/main/tests/functional/conftest.py
@@ -26,22 +26,13 @@ from rest_framework.test import (
from awx.main.models.credential import CredentialType, Credential
from awx.main.models.jobs import JobTemplate, SystemJobTemplate
-from awx.main.models.inventory import (
- Group,
- Inventory,
- InventoryUpdate,
- InventorySource,
- CustomInventoryScript
-)
+from awx.main.models.inventory import Group, Inventory, InventoryUpdate, InventorySource, CustomInventoryScript
from awx.main.models.organization import (
Organization,
Team,
)
from awx.main.models.rbac import Role
-from awx.main.models.notifications import (
- NotificationTemplate,
- Notification
-)
+from awx.main.models.notifications import NotificationTemplate, Notification
from awx.main.models.events import (
JobEvent,
AdHocCommandEvent,
@@ -72,29 +63,20 @@ def user():
user.set_password(name)
user.save()
return user
+
return u
@pytest.fixture
def check_jobtemplate(project, inventory, credential):
- jt = JobTemplate.objects.create(
- job_type='check',
- project=project,
- inventory=inventory,
- name='check-job-template'
- )
+ jt = JobTemplate.objects.create(job_type='check', project=project, inventory=inventory, name='check-job-template')
jt.credentials.add(credential)
return jt
@pytest.fixture
def deploy_jobtemplate(project, inventory, credential):
- jt = JobTemplate.objects.create(
- job_type='run',
- project=project,
- inventory=inventory,
- name='deploy-job-template'
- )
+ jt = JobTemplate.objects.create(job_type='run', project=project, inventory=inventory, name='deploy-job-template')
jt.credentials.add(credential)
return jt
@@ -113,27 +95,25 @@ def team_member(user, team):
@pytest.fixture(scope="session", autouse=True)
def project_playbooks():
- '''
+ """
Return playbook_files as playbooks for manual projects when testing.
- '''
+ """
+
class PlaybooksMock(mock.PropertyMock):
def __get__(self, obj, obj_type):
return obj.playbook_files
+
mocked = mock.patch.object(Project, 'playbooks', new_callable=PlaybooksMock)
mocked.start()
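
The project_playbooks fixture above patches a property at class level by handing mock.patch.object a PropertyMock subclass whose __get__ is overridden. The same pattern in a standalone, runnable form; the names here are illustrative only:

from unittest import mock

class Box:
    @property
    def value(self):
        return 1

class ValueMock(mock.PropertyMock):
    def __get__(self, obj, obj_type):
        return 42  # bypass the real property entirely

with mock.patch.object(Box, 'value', new_callable=ValueMock):
    assert Box().value == 42  # descriptor protocol routes through ValueMock
assert Box().value == 1      # patch undone on exit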
@pytest.fixture
def run_computed_fields_right_away(request):
-
def run_me(inventory_id):
i = Inventory.objects.get(id=inventory_id)
i.update_computed_fields()
- mocked = mock.patch(
- 'awx.main.signals.update_inventory_computed_fields.delay',
- new=run_me
- )
+ mocked = mock.patch('awx.main.signals.update_inventory_computed_fields.delay', new=run_me)
mocked.start()
request.addfinalizer(mocked.stop)
@@ -142,26 +122,28 @@ def run_computed_fields_right_away(request):
@pytest.fixture
@mock.patch.object(Project, "update", lambda self, **kwargs: None)
def project(instance, organization):
- prj = Project.objects.create(name="test-proj",
- description="test-proj-desc",
- organization=organization,
- playbook_files=['helloworld.yml', 'alt-helloworld.yml'],
- scm_revision='1234567890123456789012345678901234567890',
- scm_url='localhost',
- scm_type='git'
- )
+ prj = Project.objects.create(
+ name="test-proj",
+ description="test-proj-desc",
+ organization=organization,
+ playbook_files=['helloworld.yml', 'alt-helloworld.yml'],
+ scm_revision='1234567890123456789012345678901234567890',
+ scm_url='localhost',
+ scm_type='git',
+ )
return prj
@pytest.fixture
@mock.patch.object(Project, "update", lambda self, **kwargs: None)
def manual_project(instance, organization):
- prj = Project.objects.create(name="test-manual-proj",
- description="manual-proj-desc",
- organization=organization,
- playbook_files=['helloworld.yml', 'alt-helloworld.yml'],
- local_path='_92__test_proj'
- )
+ prj = Project.objects.create(
+ name="test-manual-proj",
+ description="manual-proj-desc",
+ organization=organization,
+ playbook_files=['helloworld.yml', 'alt-helloworld.yml'],
+ local_path='_92__test_proj',
+ )
return prj
@@ -171,19 +153,17 @@ def project_factory(organization):
try:
prj = Project.objects.get(name=name)
except Project.DoesNotExist:
- prj = Project.objects.create(name=name,
- description="description for " + name,
- organization=organization
- )
+ prj = Project.objects.create(name=name, description="description for " + name, organization=organization)
return prj
+
return factory
@pytest.fixture
def job_factory(jt_linked, admin):
def factory(job_template=jt_linked, initial_state='new', created_by=admin):
- return job_template.create_unified_job(_eager_fields={
- 'status': initial_state, 'created_by': created_by})
+ return job_template.create_unified_job(_eager_fields={'status': initial_state, 'created_by': created_by})
+
return factory
@@ -193,10 +173,9 @@ def team_factory(organization):
try:
t = Team.objects.get(name=name)
except Team.DoesNotExist:
- t = Team.objects.create(name=name,
- description="description for " + name,
- organization=organization)
+ t = Team.objects.create(name=name, description="description for " + name, organization=organization)
return t
+
return factory
@@ -273,27 +252,11 @@ def credentialtype_insights():
@pytest.fixture
def credentialtype_external():
external_type_inputs = {
- 'fields': [{
- 'id': 'url',
- 'label': 'Server URL',
- 'type': 'string',
- 'help_text': 'The server url.'
- }, {
- 'id': 'token',
- 'label': 'Token',
- 'type': 'string',
- 'secret': True,
- 'help_text': 'An access token for the server.'
- }],
- 'metadata': [{
- 'id': 'key',
- 'label': 'Key',
- 'type': 'string'
- }, {
- 'id': 'version',
- 'label': 'Version',
- 'type': 'string'
- }],
+ 'fields': [
+ {'id': 'url', 'label': 'Server URL', 'type': 'string', 'help_text': 'The server url.'},
+ {'id': 'token', 'label': 'Token', 'type': 'string', 'secret': True, 'help_text': 'An access token for the server.'},
+ ],
+ 'metadata': [{'id': 'key', 'label': 'Key', 'type': 'string'}, {'id': 'version', 'label': 'Version', 'type': 'string'}],
'required': ['url', 'token', 'key'],
}
@@ -303,75 +266,67 @@ def credentialtype_external():
with mock.patch('awx.main.models.credential.CredentialType.plugin', new_callable=PropertyMock) as mock_plugin:
mock_plugin.return_value = MockPlugin()
- external_type = CredentialType(
- kind='external',
- managed_by_tower=True,
- name='External Service',
- inputs=external_type_inputs
- )
+ external_type = CredentialType(kind='external', managed_by_tower=True, name='External Service', inputs=external_type_inputs)
external_type.save()
yield external_type
@pytest.fixture
def credential(credentialtype_aws):
- return Credential.objects.create(credential_type=credentialtype_aws, name='test-cred',
- inputs={'username': 'something', 'password': 'secret'})
+ return Credential.objects.create(credential_type=credentialtype_aws, name='test-cred', inputs={'username': 'something', 'password': 'secret'})
@pytest.fixture
def net_credential(credentialtype_net):
- return Credential.objects.create(credential_type=credentialtype_net, name='test-cred',
- inputs={'username': 'something', 'password': 'secret'})
+ return Credential.objects.create(credential_type=credentialtype_net, name='test-cred', inputs={'username': 'something', 'password': 'secret'})
@pytest.fixture
def vault_credential(credentialtype_vault):
- return Credential.objects.create(credential_type=credentialtype_vault, name='test-cred',
- inputs={'vault_password': 'secret'})
+ return Credential.objects.create(credential_type=credentialtype_vault, name='test-cred', inputs={'vault_password': 'secret'})
@pytest.fixture
def machine_credential(credentialtype_ssh):
- return Credential.objects.create(credential_type=credentialtype_ssh, name='machine-cred',
- inputs={'username': 'test_user', 'password': 'pas4word'})
+ return Credential.objects.create(credential_type=credentialtype_ssh, name='machine-cred', inputs={'username': 'test_user', 'password': 'pas4word'})
@pytest.fixture
def scm_credential(credentialtype_scm):
- return Credential.objects.create(credential_type=credentialtype_scm, name='scm-cred',
- inputs={'username': 'optimus', 'password': 'prime'})
+ return Credential.objects.create(credential_type=credentialtype_scm, name='scm-cred', inputs={'username': 'optimus', 'password': 'prime'})
@pytest.fixture
def insights_credential(credentialtype_insights):
- return Credential.objects.create(credential_type=credentialtype_insights, name='insights-cred',
- inputs={'username': 'morocco_mole', 'password': 'secret_squirrel'})
+ return Credential.objects.create(
+ credential_type=credentialtype_insights, name='insights-cred', inputs={'username': 'morocco_mole', 'password': 'secret_squirrel'}
+ )
@pytest.fixture
def org_credential(organization, credentialtype_aws):
- return Credential.objects.create(credential_type=credentialtype_aws, name='test-cred',
- inputs={'username': 'something', 'password': 'secret'},
- organization=organization)
+ return Credential.objects.create(
+ credential_type=credentialtype_aws, name='test-cred', inputs={'username': 'something', 'password': 'secret'}, organization=organization
+ )
@pytest.fixture
def external_credential(credentialtype_external):
- return Credential.objects.create(credential_type=credentialtype_external, name='external-cred',
- inputs={'url': 'http://testhost.com', 'token': 'secret1'})
+ return Credential.objects.create(credential_type=credentialtype_external, name='external-cred', inputs={'url': 'http://testhost.com', 'token': 'secret1'})
@pytest.fixture
def other_external_credential(credentialtype_external):
- return Credential.objects.create(credential_type=credentialtype_external, name='other-external-cred',
- inputs={'url': 'http://testhost.com', 'token': 'secret2'})
+ return Credential.objects.create(
+ credential_type=credentialtype_external, name='other-external-cred', inputs={'url': 'http://testhost.com', 'token': 'secret2'}
+ )
@pytest.fixture
def kube_credential(credentialtype_kube):
- return Credential.objects.create(credential_type=credentialtype_kube, name='kube-cred',
- inputs={'host': 'my.cluster', 'bearer_token': 'my-token', 'verify_ssl': False})
+ return Credential.objects.create(
+ credential_type=credentialtype_kube, name='kube-cred', inputs={'host': 'my.cluster', 'bearer_token': 'my-token', 'verify_ssl': False}
+ )
@pytest.fixture
@@ -395,7 +350,8 @@ def scm_inventory_source(inventory, project):
source_path='inventory_file',
update_on_project_update=True,
inventory=inventory,
- scm_last_revision=project.scm_revision)
+ scm_last_revision=project.scm_revision,
+ )
with mock.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.update'):
inv_src.save()
return inv_src
@@ -409,6 +365,7 @@ def inventory_factory(organization):
except Inventory.DoesNotExist:
inv = Inventory.objects.create(name=name, organization=org)
return inv
+
return factory
@@ -419,32 +376,41 @@ def label(organization):
@pytest.fixture
def notification_template(organization):
- return NotificationTemplate.objects.create(name='test-notification_template',
- organization=organization,
- notification_type="webhook",
- notification_configuration=dict(url="http://localhost",
- username="",
- password="",
- headers={"Test": "Header",}))
+ return NotificationTemplate.objects.create(
+ name='test-notification_template',
+ organization=organization,
+ notification_type="webhook",
+ notification_configuration=dict(
+ url="http://localhost",
+ username="",
+ password="",
+ headers={
+ "Test": "Header",
+ },
+ ),
+ )
@pytest.fixture
def notification_template_with_encrypt(organization):
- return NotificationTemplate.objects.create(name='test-notification_template_with_encrypt',
- organization=organization,
- notification_type="slack",
- notification_configuration=dict(channels=["Foo", "Bar"],
- token="token"))
+ return NotificationTemplate.objects.create(
+ name='test-notification_template_with_encrypt',
+ organization=organization,
+ notification_type="slack",
+ notification_configuration=dict(channels=["Foo", "Bar"], token="token"),
+ )
@pytest.fixture
def notification(notification_template):
- return Notification.objects.create(notification_template=notification_template,
- status='successful',
- notifications_sent=1,
- notification_type='email',
- recipients='admin@redhat.com',
- subject='email subject')
+ return Notification.objects.create(
+ notification_template=notification_template,
+ status='successful',
+ notifications_sent=1,
+ notification_type='email',
+ recipients='admin@redhat.com',
+ subject='email subject',
+ )
@pytest.fixture
@@ -511,6 +477,7 @@ def organizations(instance):
o = Organization.objects.create(name="test-org-%d" % i, description="test-org-desc")
orgs.append(o)
return orgs
+
return rf
@@ -521,6 +488,7 @@ def group_factory(inventory):
return Group.objects.get(name=name, inventory=inventory)
except Exception:
return Group.objects.create(inventory=inventory, name=name)
+
return g
@@ -537,6 +505,7 @@ def hosts(group_factory):
group1.hosts.add(host)
hosts.append(host)
return hosts
+
return rf
@@ -548,8 +517,7 @@ def group(inventory):
@pytest.fixture
def inventory_source(inventory):
# by making it ec2, the credential is not required
- return InventorySource.objects.create(name='single-inv-src',
- inventory=inventory, source='ec2')
+ return InventorySource.objects.create(name='single-inv-src', inventory=inventory, source='ec2')
@pytest.fixture
@@ -563,22 +531,18 @@ def inventory_source_factory(inventory_factory):
return inventory.inventory_sources.get(name=name)
except Exception:
return inventory.inventory_sources.create(name=name, source=source)
+
return invsrc
@pytest.fixture
def inventory_update(inventory_source):
- return InventoryUpdate.objects.create(
- inventory_source=inventory_source,
- source=inventory_source.source
- )
+ return InventoryUpdate.objects.create(inventory_source=inventory_source, source=inventory_source.source)
@pytest.fixture
def inventory_script(organization):
- return CustomInventoryScript.objects.create(name='test inv script',
- organization=organization,
- script='#!/usr/bin/python')
+ return CustomInventoryScript.objects.create(name='test inv script', organization=organization, script='#!/usr/bin/python')
@pytest.fixture
@@ -589,14 +553,36 @@ def host(group, inventory):
@pytest.fixture
def permissions():
return {
- 'admin':{'create':True, 'read':True, 'write':True,
- 'update':True, 'delete':True, 'scm_update':True, 'execute':True, 'use':True,},
-
- 'auditor':{'read':True, 'create':False, 'write':False,
- 'update':False, 'delete':False, 'scm_update':False, 'execute':False, 'use':False,},
-
- 'usage':{'read':False, 'create':False, 'write':False,
- 'update':False, 'delete':False, 'scm_update':False, 'execute':False, 'use':True,},
+ 'admin': {
+ 'create': True,
+ 'read': True,
+ 'write': True,
+ 'update': True,
+ 'delete': True,
+ 'scm_update': True,
+ 'execute': True,
+ 'use': True,
+ },
+ 'auditor': {
+ 'read': True,
+ 'create': False,
+ 'write': False,
+ 'update': False,
+ 'delete': False,
+ 'scm_update': False,
+ 'execute': False,
+ 'use': False,
+ },
+ 'usage': {
+ 'read': False,
+ 'create': False,
+ 'write': False,
+ 'update': False,
+ 'delete': False,
+ 'scm_update': False,
+ 'execute': False,
+ 'use': True,
+ },
}
@@ -644,15 +630,16 @@ def _request(verb):
response.data[key] = str(value)
except Exception:
response.data = data_copy
- assert response.status_code == expect, 'Response data: {}'.format(
- getattr(response, 'data', None)
- )
+ assert response.status_code == expect, 'Response data: {}'.format(getattr(response, 'data', None))
if hasattr(response, 'render'):
response.render()
- __SWAGGER_REQUESTS__.setdefault(request.path, {})[
- (request.method.lower(), response.status_code)
- ] = (response.get('Content-Type', None), response.content, kwargs.get('data'))
+ __SWAGGER_REQUESTS__.setdefault(request.path, {})[(request.method.lower(), response.status_code)] = (
+ response.get('Content-Type', None),
+ response.content,
+ kwargs.get('data'),
+ )
return response
+
return rf
@@ -694,12 +681,10 @@ def options():
@pytest.fixture
def ad_hoc_command_factory(inventory, machine_credential, admin):
def factory(inventory=inventory, credential=machine_credential, initial_state='new', created_by=admin):
- adhoc = AdHocCommand(
- name='test-adhoc', inventory=inventory, credential=credential,
- status=initial_state, created_by=created_by
- )
+ adhoc = AdHocCommand(name='test-adhoc', inventory=inventory, credential=credential, status=initial_state, created_by=created_by)
adhoc.save()
return adhoc
+
return factory
@@ -718,14 +703,11 @@ def job_template_labels(organization, job_template):
@pytest.fixture
def jt_linked(organization, project, inventory, machine_credential, credential, net_credential, vault_credential):
- '''
+ """
A job template with a reasonably complete set of related objects to
test RBAC and other functionality affected by related objects
- '''
- jt = JobTemplate.objects.create(
- project=project, inventory=inventory, playbook='helloworld.yml',
- organization=organization
- )
+ """
+ jt = JobTemplate.objects.create(project=project, inventory=inventory, playbook='helloworld.yml', organization=organization)
jt.credentials.add(machine_credential, vault_credential, credential, net_credential)
return jt
@@ -741,8 +723,8 @@ def workflow_job_template(organization):
@pytest.fixture
def workflow_job_factory(workflow_job_template, admin):
def factory(workflow_job_template=workflow_job_template, initial_state='new', created_by=admin):
- return workflow_job_template.create_unified_job(_eager_fields={
- 'status': initial_state, 'created_by': created_by})
+ return workflow_job_template.create_unified_job(_eager_fields={'status': initial_state, 'created_by': created_by})
+
return factory
@@ -756,8 +738,8 @@ def system_job_template():
@pytest.fixture
def system_job_factory(system_job_template, admin):
def factory(system_job_template=system_job_template, initial_state='new', created_by=admin):
- return system_job_template.create_unified_job(_eager_fields={
- 'status': initial_state, 'created_by': created_by})
+ return system_job_template.create_unified_job(_eager_fields={'status': initial_state, 'created_by': created_by})
+
return factory
@@ -785,10 +767,7 @@ def monkeypatch_jsonbfield_get_db_prep_save(mocker):
@pytest.fixture
def oauth_application(admin):
- return Application.objects.create(
- name='test app', user=admin, client_type='confidential',
- authorization_grant_type='password'
- )
+ return Application.objects.create(name='test app', user=admin, client_type='confidential', authorization_grant_type='password')
@pytest.fixture
@@ -801,8 +780,7 @@ def sqlite_copy_expert(request):
# simulate postgres copy_expert support with ORM code
parts = sql.split(' ')
tablename = parts[parts.index('from') + 1]
- for cls in (JobEvent, AdHocCommandEvent, ProjectUpdateEvent,
- InventoryUpdateEvent, SystemJobEvent):
+ for cls in (JobEvent, AdHocCommandEvent, ProjectUpdateEvent, InventoryUpdateEvent, SystemJobEvent):
if cls._meta.db_table == tablename:
for event in cls.objects.order_by('start_line').all():
fd.write(event.stdout)
@@ -826,12 +804,8 @@ def slice_jt_factory(inventory):
inventory.hosts.create(name='foo{}'.format(i))
if not jt_kwargs:
jt_kwargs = {}
- return JobTemplate.objects.create(
- name='slice-jt-from-factory',
- job_slice_count=N,
- inventory=inventory,
- **jt_kwargs
- )
+ return JobTemplate.objects.create(name='slice-jt-from-factory', job_slice_count=N, inventory=inventory, **jt_kwargs)
+
return r
@@ -850,6 +824,7 @@ def slice_job_factory(slice_jt_factory):
node.job = job
node.save()
return slice_job
+
return r
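
A note on the pattern running through the conftest.py hunks above: calls that fit within black's configured line length are collapsed onto one line, and the untouched single quotes show that string normalization is disabled for this repository. A minimal sketch of the collapse rule via black's public API (the 120-character limit is an assumption for illustration, not necessarily AWX's actual setting):

    import black

    # A call wrapped across two lines, as in the old conftest.py fixtures.
    src = (
        "InventorySource.objects.create(name='single-inv-src',\n"
        "                               inventory=inventory, source='ec2')\n"
    )
    mode = black.FileMode(line_length=120, string_normalization=False)
    print(black.format_str(src, mode=mode), end='')
    # -> InventorySource.objects.create(name='single-inv-src', inventory=inventory, source='ec2')
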
diff --git a/awx/main/tests/functional/models/test_activity_stream.py b/awx/main/tests/functional/models/test_activity_stream.py
index f220641759..9399077940 100644
--- a/awx/main/tests/functional/models/test_activity_stream.py
+++ b/awx/main/tests/functional/models/test_activity_stream.py
@@ -4,17 +4,7 @@ from unittest import mock
import json
# AWX models
-from awx.main.models import (
- ActivityStream,
- Organization,
- JobTemplate,
- Credential,
- CredentialType,
- Inventory,
- InventorySource,
- Project,
- User
-)
+from awx.main.models import ActivityStream, Organization, JobTemplate, Credential, CredentialType, Inventory, InventorySource, Project, User
# other AWX
from awx.main.utils import model_to_dict, model_instance_diff
@@ -29,12 +19,12 @@ from crum import impersonate
class TestImplicitRolesOmitted:
- '''
+ """
Test that there is exactly 1 "create" entry in the activity stream for
common items in the system.
These tests will fail if `rbac_activity_stream` creates
false-positive entries.
- '''
+ """
@pytest.mark.django_db
def test_activity_stream_create_organization(self):
@@ -79,12 +69,12 @@ class TestImplicitRolesOmitted:
@pytest.mark.django_db
class TestRolesAssociationEntries:
- '''
+ """
Test that non-implicit role associations have a corresponding
activity stream entry.
These tests will fail if `rbac_activity_stream` skipping logic
in signals is wrong.
- '''
+ """
def test_non_implicit_associations_are_recorded(self, project):
org2 = Organization.objects.create(name='test-organization2')
@@ -93,11 +83,7 @@ class TestRolesAssociationEntries:
# Not supported, should not be possible via API
# org2.admin_role.children.add(project.admin_role)
project.admin_role.parents.add(org2.admin_role)
- assert ActivityStream.objects.filter(
- role=org2.admin_role,
- organization=org2,
- project=project
- ).count() == 1, 'In loop %s' % i
+ assert ActivityStream.objects.filter(role=org2.admin_role, organization=org2, project=project).count() == 1, 'In loop %s' % i
def test_model_associations_are_recorded(self, organization):
proj1 = Project.objects.create(name='proj1', organization=organization)
@@ -136,27 +122,16 @@ def somecloud_type():
kind='cloud',
name='SomeCloud',
managed_by_tower=False,
- inputs={
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'string',
- 'secret': True
- }]
- },
- injectors={
- 'env': {
- 'MY_CLOUD_API_TOKEN': '{{api_token.foo()}}'
- }
- }
+ inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string', 'secret': True}]},
+ injectors={'env': {'MY_CLOUD_API_TOKEN': '{{api_token.foo()}}'}},
)
@pytest.mark.django_db
class TestCredentialModels:
- '''
+ """
Assure that core elements of activity stream feature are working
- '''
+ """
def test_create_credential_type(self, somecloud_type):
assert ActivityStream.objects.filter(credential_type=somecloud_type).count() == 1
@@ -164,10 +139,7 @@ class TestCredentialModels:
assert entry.operation == 'create'
def test_credential_hidden_information(self, somecloud_type):
- cred = Credential.objects.create(
- credential_type=somecloud_type,
- inputs = {'api_token': 'ABC123'}
- )
+ cred = Credential.objects.create(credential_type=somecloud_type, inputs={'api_token': 'ABC123'})
entry = ActivityStream.objects.filter(credential=cred)[0]
assert entry.operation == 'create'
assert json.loads(entry.changes)['inputs'] == 'hidden'
@@ -175,7 +147,6 @@ class TestCredentialModels:
@pytest.mark.django_db
class TestUserModels:
-
def test_user_hidden_information(self, alice):
entry = ActivityStream.objects.filter(user=alice)[0]
assert entry.operation == 'create'
@@ -235,14 +206,14 @@ def test_activity_stream_deleted_actor(alice, bob):
@pytest.mark.django_db
def test_modified_not_allowed_field(somecloud_type):
- '''
+ """
If this test fails, that means that read-only fields are showing
up in the activity stream serialization of an instance.
That _probably_ means that you just connected a new model to the
activity_stream_registrar, but did not add its serializer to
the model->serializer mapping.
- '''
+ """
from awx.main.registrar import activity_stream_registrar
for Model in activity_stream_registrar.models:
@@ -269,9 +240,7 @@ def test_survey_create_diff(job_template, survey_spec_factory):
@pytest.mark.django_db
def test_saved_passwords_hidden_activity(workflow_job_template, job_template_with_survey_passwords):
node_with_passwords = workflow_job_template.workflow_nodes.create(
- unified_job_template=job_template_with_survey_passwords,
- extra_data={'bbbb': '$encrypted$fooooo'},
- survey_passwords={'bbbb': '$encrypted$'}
+ unified_job_template=job_template_with_survey_passwords, extra_data={'bbbb': '$encrypted$fooooo'}, survey_passwords={'bbbb': '$encrypted$'}
)
node_with_passwords.delete()
entry = ActivityStream.objects.order_by('timestamp').last()
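
The hunks in this file also swap every '''-quoted docstring for the """-quoted form that PEP 257 recommends; the text itself is unchanged. A trivially runnable sketch:

    # Both spellings produce identical docstrings; the diff merely
    # standardizes on the PEP 257 double-quote form.
    def single():
        '''exactly one create entry'''

    def double():
        """exactly one create entry"""

    assert single.__doc__ == double.__doc__
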
diff --git a/awx/main/tests/functional/models/test_context_managers.py b/awx/main/tests/functional/models/test_context_managers.py
index 0e1fe024f2..9807d8a6e9 100644
--- a/awx/main/tests/functional/models/test_context_managers.py
+++ b/awx/main/tests/functional/models/test_context_managers.py
@@ -2,11 +2,7 @@ import pytest
# AWX context managers for testing
from awx.main.models.rbac import batch_role_ancestor_rebuilding
-from awx.main.signals import (
- disable_activity_stream,
- disable_computed_fields,
- update_inventory_computed_fields
-)
+from awx.main.signals import disable_activity_stream, disable_computed_fields, update_inventory_computed_fields
# AWX models
from awx.main.models.organization import Organization
@@ -32,7 +28,6 @@ def test_disable_activity_stream():
@pytest.mark.django_db
class TestComputedFields:
-
def test_computed_fields_normal_use(self, mocker, inventory):
job = Job.objects.create(name='fake-job', inventory=inventory)
with immediate_on_commit():
@@ -46,4 +41,3 @@ class TestComputedFields:
with mocker.patch.object(update_inventory_computed_fields, 'delay'):
job.delete()
update_inventory_computed_fields.delay.assert_not_called()
-
diff --git a/awx/main/tests/functional/models/test_events.py b/awx/main/tests/functional/models/test_events.py
index 943bd34654..758e69b641 100644
--- a/awx/main/tests/functional/models/test_events.py
+++ b/awx/main/tests/functional/models/test_events.py
@@ -16,20 +16,9 @@ def test_parent_changed(emit):
for e in JobEvent.objects.all():
assert e.changed is False
- JobEvent.create_from_data(
- job_id=j.pk,
- parent_uuid='abc123',
- event='runner_on_ok',
- event_data={
- 'res': {'changed': ['localhost']}
- }
- ).save()
+ JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event='runner_on_ok', event_data={'res': {'changed': ['localhost']}}).save()
# the `playbook_on_stats` event is where we update the parent changed linkage
- JobEvent.create_from_data(
- job_id=j.pk,
- parent_uuid='abc123',
- event='playbook_on_stats'
- ).save()
+ JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event='playbook_on_stats').save()
events = JobEvent.objects.filter(event__in=['playbook_on_task_start', 'runner_on_ok'])
assert events.count() == 2
for e in events.all():
@@ -47,18 +36,10 @@ def test_parent_failed(emit, event):
for e in JobEvent.objects.all():
assert e.failed is False
- JobEvent.create_from_data(
- job_id=j.pk,
- parent_uuid='abc123',
- event=event
- ).save()
+ JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event=event).save()
# the `playbook_on_stats` event is where we update the parent failed linkage
- JobEvent.create_from_data(
- job_id=j.pk,
- parent_uuid='abc123',
- event='playbook_on_stats'
- ).save()
+ JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event='playbook_on_stats').save()
events = JobEvent.objects.filter(event__in=['playbook_on_task_start', event])
assert events.count() == 2
for e in events.all():
@@ -70,10 +51,7 @@ def test_host_summary_generation():
hostnames = [f'Host {i}' for i in range(100)]
inv = Inventory()
inv.save()
- Host.objects.bulk_create([
- Host(created=now(), modified=now(), name=h, inventory_id=inv.id)
- for h in hostnames
- ])
+ Host.objects.bulk_create([Host(created=now(), modified=now(), name=h, inventory_id=inv.id) for h in hostnames])
j = Job(inventory=inv)
j.save()
host_map = dict((host.name, host.id) for host in inv.hosts.all())
@@ -91,7 +69,7 @@ def test_host_summary_generation():
'rescued': {},
'skipped': {},
},
- host_map=host_map
+ host_map=host_map,
).save()
assert j.job_host_summaries.count() == len(hostnames)
@@ -118,10 +96,7 @@ def test_host_summary_generation_with_deleted_hosts():
hostnames = [f'Host {i}' for i in range(10)]
inv = Inventory()
inv.save()
- Host.objects.bulk_create([
- Host(created=now(), modified=now(), name=h, inventory_id=inv.id)
- for h in hostnames
- ])
+ Host.objects.bulk_create([Host(created=now(), modified=now(), name=h, inventory_id=inv.id) for h in hostnames])
j = Job(inventory=inv)
j.save()
host_map = dict((host.name, host.id) for host in inv.hosts.all())
@@ -144,15 +119,13 @@ def test_host_summary_generation_with_deleted_hosts():
'rescued': {},
'skipped': {},
},
- host_map=host_map
+ host_map=host_map,
).save()
-
ids = sorted([s.host_id or -1 for s in j.job_host_summaries.order_by('id').all()])
names = sorted([s.host_name for s in j.job_host_summaries.all()])
assert ids == [-1, -1, -1, -1, -1, 6, 7, 8, 9, 10]
- assert names == ['Host 0', 'Host 1', 'Host 2', 'Host 3', 'Host 4', 'Host 5',
- 'Host 6', 'Host 7', 'Host 8', 'Host 9']
+ assert names == ['Host 0', 'Host 1', 'Host 2', 'Host 3', 'Host 4', 'Host 5', 'Host 6', 'Host 7', 'Host 8', 'Host 9']
@pytest.mark.django_db
@@ -164,10 +137,7 @@ def test_host_summary_generation_with_limit():
hostnames = [f'Host {i}' for i in range(10)]
inv = Inventory()
inv.save()
- Host.objects.bulk_create([
- Host(created=now(), modified=now(), name=h, inventory_id=inv.id)
- for h in hostnames
- ])
+ Host.objects.bulk_create([Host(created=now(), modified=now(), name=h, inventory_id=inv.id) for h in hostnames])
j = Job(inventory=inv)
j.save()
@@ -184,7 +154,7 @@ def test_host_summary_generation_with_limit():
parent_uuid='abc123',
event='playbook_on_stats',
event_data={
- 'ok': {matching_host.name: len(matching_host.name)}, # effectively, limit=Host 1
+ 'ok': {matching_host.name: len(matching_host.name)}, # effectively, limit=Host 1
'changed': {},
'dark': {},
'failures': {},
@@ -193,7 +163,7 @@ def test_host_summary_generation_with_limit():
'rescued': {},
'skipped': {},
},
- host_map=host_map
+ host_map=host_map,
).save()
# since the playbook_on_stats only references one host,
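
Alongside the collapses, every call above that stays multi-line gains a trailing comma (host_map=host_map,). That is black's "magic trailing comma": when black must split a call or literal it appends a trailing comma, and a pre-existing trailing comma forces the structure to stay one-element-per-line. A minimal sketch (line length again an assumption):

    import black

    mode = black.FileMode(line_length=120, string_normalization=False)
    # Without a trailing comma, a short literal is joined onto one line...
    print(black.format_str("pair = {'ok': 1, 'changed': 2}\n", mode=mode), end='')
    # ...while a trailing comma after the last element keeps it exploded,
    # one pair per line, with the comma retained:
    print(black.format_str("pair = {'ok': 1, 'changed': 2,}\n", mode=mode), end='')
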
diff --git a/awx/main/tests/functional/models/test_inventory.py b/awx/main/tests/functional/models/test_inventory.py
index 04b92d5a1d..8493b798fe 100644
--- a/awx/main/tests/functional/models/test_inventory.py
+++ b/awx/main/tests/functional/models/test_inventory.py
@@ -6,38 +6,22 @@ from unittest import mock
from django.core.exceptions import ValidationError
# AWX
-from awx.main.models import (
- Host,
- Inventory,
- InventorySource,
- InventoryUpdate,
- CredentialType,
- Credential,
- Job
-)
+from awx.main.models import Host, Inventory, InventorySource, InventoryUpdate, CredentialType, Credential, Job
from awx.main.constants import CLOUD_PROVIDERS
from awx.main.utils.filters import SmartFilter
@pytest.mark.django_db
class TestInventoryScript:
-
def test_hostvars(self, inventory):
inventory.hosts.create(name='ahost', variables={"foo": "bar"})
- assert inventory.get_script_data(
- hostvars=True
- )['_meta']['hostvars']['ahost'] == {
- 'foo': 'bar'
- }
+ assert inventory.get_script_data(hostvars=True)['_meta']['hostvars']['ahost'] == {'foo': 'bar'}
def test_towervars(self, inventory):
host = inventory.hosts.create(name='ahost')
- assert inventory.get_script_data(
- hostvars=True,
- towervars=True
- )['_meta']['hostvars']['ahost'] == {
+ assert inventory.get_script_data(hostvars=True, towervars=True)['_meta']['hostvars']['ahost'] == {
'remote_tower_enabled': 'true',
- 'remote_tower_id': host.id
+ 'remote_tower_id': host.id,
}
def test_all_group(self, inventory):
@@ -45,11 +29,7 @@ class TestInventoryScript:
# make sure we return a1 details in output
data = inventory.get_script_data()
assert 'all' in data
- assert data['all'] == {
- 'vars': {
- 'a1': 'a1'
- }
- }
+ assert data['all'] == {'vars': {'a1': 'a1'}}
def test_empty_group(self, inventory):
inventory.groups.create(name='ghost')
@@ -79,22 +59,14 @@ class TestInventoryScript:
data = inventory.get_script_data(hostvars=1)
assert 'g1' in data
assert 'g2' in data
- assert data['g1'] == {
- 'children': ['g2'],
- 'vars': {'v1': 'v1'}
- }
- assert data['g2'] == {
- 'hosts': ['h1'],
- 'vars': {'v2': 'v2'}
- }
+ assert data['g1'] == {'children': ['g2'], 'vars': {'v1': 'v1'}}
+ assert data['g2'] == {'hosts': ['h1'], 'vars': {'v2': 'v2'}}
def test_slice_subset(self, inventory):
for i in range(3):
inventory.hosts.create(name='host{}'.format(i))
for i in range(3):
- assert inventory.get_script_data(slice_number=i + 1, slice_count=3) == {
- 'all': {'hosts': ['host{}'.format(i)]}
- }
+ assert inventory.get_script_data(slice_number=i + 1, slice_count=3) == {'all': {'hosts': ['host{}'.format(i)]}}
def test_slice_subset_with_groups(self, inventory):
hosts = []
@@ -120,7 +92,6 @@ class TestInventoryScript:
@pytest.mark.django_db
class TestActiveCount:
-
def test_host_active_count(self, organization):
inv1 = Inventory.objects.create(name='inv1', organization=organization)
inv2 = Inventory.objects.create(name='inv2', organization=organization)
@@ -133,25 +104,15 @@ class TestActiveCount:
def test_active_count_minus_tower(self, inventory):
inventory.hosts.create(name='locally-managed-host')
- source = inventory.inventory_sources.create(
- name='tower-source', source='tower'
- )
- source.hosts.create(
- name='remotely-managed-host', inventory=inventory
- )
+ source = inventory.inventory_sources.create(name='tower-source', source='tower')
+ source.hosts.create(name='remotely-managed-host', inventory=inventory)
assert Host.objects.active_count() == 1
@pytest.mark.django_db
class TestSCMUpdateFeatures:
-
def test_automatic_project_update_on_create(self, inventory, project):
- inv_src = InventorySource(
- source_project=project,
- source_path='inventory_file',
- inventory=inventory,
- update_on_project_update=True,
- source='scm')
+ inv_src = InventorySource(source_project=project, source_path='inventory_file', inventory=inventory, update_on_project_update=True, source='scm')
with mock.patch.object(inv_src, 'update') as mck_update:
inv_src.save()
mck_update.assert_called_once_with()
@@ -165,9 +126,7 @@ class TestSCMUpdateFeatures:
def test_source_location(self, scm_inventory_source):
# Combines project directory with the inventory file specified
- inventory_update = InventoryUpdate(
- inventory_source=scm_inventory_source,
- source_path=scm_inventory_source.source_path)
+ inventory_update = InventoryUpdate(inventory_source=scm_inventory_source, source_path=scm_inventory_source.source_path)
p = scm_inventory_source.source_project
assert inventory_update.get_actual_source_path().endswith(f'_{p.id}__test_proj/inventory_file')
@@ -182,43 +141,31 @@ class TestSCMUpdateFeatures:
@pytest.mark.django_db
class TestRelatedJobs:
-
def test_inventory_related(self, inventory):
- job = Job.objects.create(
- inventory=inventory
- )
+ job = Job.objects.create(inventory=inventory)
assert job.id in [jerb.id for jerb in inventory._get_related_jobs()]
def test_related_group_jobs(self, group):
- job = Job.objects.create(
- inventory=group.inventory
- )
+ job = Job.objects.create(inventory=group.inventory)
assert job.id in [jerb.id for jerb in group._get_related_jobs()]
def test_related_group_update(self, group):
src = group.inventory_sources.create(name='foo', source='ec2')
- job = InventoryUpdate.objects.create(
- inventory_source=src,
- source=src.source
- )
+ job = InventoryUpdate.objects.create(inventory_source=src, source=src.source)
assert job.id in [jerb.id for jerb in group._get_related_jobs()]
@pytest.mark.django_db
class TestSCMClean:
def test_clean_update_on_project_update_multiple(self, inventory):
- inv_src1 = InventorySource(inventory=inventory,
- update_on_project_update=True,
- source='scm')
+ inv_src1 = InventorySource(inventory=inventory, update_on_project_update=True, source='scm')
inv_src1.clean_update_on_project_update()
inv_src1.save()
inv_src1.source_vars = '---\nhello: world'
inv_src1.clean_update_on_project_update()
- inv_src2 = InventorySource(inventory=inventory,
- update_on_project_update=True,
- source='scm')
+ inv_src2 = InventorySource(inventory=inventory, update_on_project_update=True, source='scm')
with pytest.raises(ValidationError):
inv_src2.clean_update_on_project_update()
@@ -227,9 +174,7 @@ class TestSCMClean:
@pytest.mark.django_db
class TestInventorySourceInjectors:
def test_extra_credentials(self, project, credential):
- inventory_source = InventorySource.objects.create(
- name='foo', source='custom', source_project=project
- )
+ inventory_source = InventorySource.objects.create(name='foo', source='custom', source_project=project)
inventory_source.credentials.add(credential)
assert inventory_source.get_cloud_credential() == credential # for serializer
assert inventory_source.get_extra_credentials() == [credential]
@@ -245,11 +190,7 @@ class TestInventorySourceInjectors:
"""
assert set(CLOUD_PROVIDERS) == set(InventorySource.injectors.keys())
- @pytest.mark.parametrize('source,filename', [
- ('ec2', 'aws_ec2.yml'),
- ('openstack', 'openstack.yml'),
- ('gce', 'gcp_compute.yml')
- ])
+ @pytest.mark.parametrize('source,filename', [('ec2', 'aws_ec2.yml'), ('openstack', 'openstack.yml'), ('gce', 'gcp_compute.yml')])
def test_plugin_filenames(self, source, filename):
"""It is important that the filenames for inventory plugin files
are named correctly, because Ansible will reject files that do
@@ -258,16 +199,19 @@ class TestInventorySourceInjectors:
injector = InventorySource.injectors[source]()
assert injector.filename == filename
- @pytest.mark.parametrize('source,proper_name', [
- ('ec2', 'amazon.aws.aws_ec2'),
- ('openstack', 'openstack.cloud.openstack'),
- ('gce', 'google.cloud.gcp_compute'),
- ('azure_rm', 'azure.azcollection.azure_rm'),
- ('vmware', 'community.vmware.vmware_vm_inventory'),
- ('rhv', 'ovirt.ovirt.ovirt'),
- ('satellite6', 'theforeman.foreman.foreman'),
- ('tower', 'awx.awx.tower'),
- ])
+ @pytest.mark.parametrize(
+ 'source,proper_name',
+ [
+ ('ec2', 'amazon.aws.aws_ec2'),
+ ('openstack', 'openstack.cloud.openstack'),
+ ('gce', 'google.cloud.gcp_compute'),
+ ('azure_rm', 'azure.azcollection.azure_rm'),
+ ('vmware', 'community.vmware.vmware_vm_inventory'),
+ ('rhv', 'ovirt.ovirt.ovirt'),
+ ('satellite6', 'theforeman.foreman.foreman'),
+ ('tower', 'awx.awx.tower'),
+ ],
+ )
def test_plugin_proper_names(self, source, proper_name):
injector = InventorySource.injectors[source]()
assert injector.get_proper_name() == proper_name
@@ -276,21 +220,9 @@ class TestInventorySourceInjectors:
@pytest.mark.django_db
def test_custom_source_custom_credential(organization):
credential_type = CredentialType.objects.create(
- kind='cloud',
- name='MyCloud',
- inputs = {
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'string',
- 'secret': True
- }]
- }
- )
- credential = Credential.objects.create(
- name='my cred', credential_type=credential_type, organization=organization,
- inputs={'api_token': 'secret'}
+ kind='cloud', name='MyCloud', inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string', 'secret': True}]}
)
+ credential = Credential.objects.create(name='my cred', credential_type=credential_type, organization=organization, inputs={'api_token': 'secret'})
inv_source = InventorySource.objects.create(source='scm')
inv_source.credentials.add(credential)
assert inv_source.get_cloud_credential() == credential
@@ -356,19 +288,14 @@ def test_inventory_update_excessively_long_name(inventory, inventory_source):
@pytest.mark.django_db
class TestHostManager:
def test_host_filter_not_smart(self, setup_ec2_gce, organization):
- smart_inventory = Inventory(name='smart',
- organization=organization,
- host_filter='inventory_sources__source=ec2')
+ smart_inventory = Inventory(name='smart', organization=organization, host_filter='inventory_sources__source=ec2')
assert len(smart_inventory.hosts.all()) == 0
def test_host_distinctness(self, setup_inventory_groups, organization):
"""
Two criteria would both yield the same host; check that we only get one copy here.
"""
- assert (
- list(SmartFilter.query_from_string('name=single_host or name__startswith=single_')) ==
- [Host.objects.get(name='single_host')]
- )
+ assert list(SmartFilter.query_from_string('name=single_host or name__startswith=single_')) == [Host.objects.get(name='single_host')]
# Things we cannot easily test due to the SQLite backend:
# two organizations with a host of the same name yield only one entry in a smart inventory
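
The parametrize hunks above show the same length rule applied to decorators: the short (source, filename) table collapses to one line, while the longer (source, proper_name) table stays exploded with trailing commas. Behavior is unaffected; each tuple still becomes one test invocation, as in this self-contained sketch (hypothetical test, runnable under pytest):

    import pytest

    @pytest.mark.parametrize('source,filename', [('ec2', 'aws_ec2.yml'), ('openstack', 'openstack.yml')])
    def test_plugin_filename_suffix(source, filename):
        # One run per tuple; the layout of the decorator is purely cosmetic.
        assert filename.endswith('.yml')
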
diff --git a/awx/main/tests/functional/models/test_job.py b/awx/main/tests/functional/models/test_job.py
index c6c4d2d6e6..1ba79fee7c 100644
--- a/awx/main/tests/functional/models/test_job.py
+++ b/awx/main/tests/functional/models/test_job.py
@@ -1,19 +1,11 @@
import pytest
-from awx.main.models import (
- JobTemplate, Job, JobHostSummary,
- WorkflowJob, Inventory, Project, Organization
-)
+from awx.main.models import JobTemplate, Job, JobHostSummary, WorkflowJob, Inventory, Project, Organization
@pytest.mark.django_db
def test_awx_virtualenv_from_settings(inventory, project, machine_credential):
- jt = JobTemplate.objects.create(
- name='my-jt',
- inventory=inventory,
- project=project,
- playbook='helloworld.yml'
- )
+ jt = JobTemplate.objects.create(name='my-jt', inventory=inventory, project=project, playbook='helloworld.yml')
jt.credentials.add(machine_credential)
job = jt.create_unified_job()
assert job.ansible_virtualenv_path == '/var/lib/awx/venv/ansible'
@@ -21,10 +13,7 @@ def test_awx_virtualenv_from_settings(inventory, project, machine_credential):
@pytest.mark.django_db
def test_prevent_slicing():
- jt = JobTemplate.objects.create(
- name='foo',
- job_slice_count=4
- )
+ jt = JobTemplate.objects.create(name='foo', job_slice_count=4)
job = jt.create_unified_job(_prevent_slicing=True)
assert job.job_slice_count == 1
assert job.job_slice_number == 0
@@ -33,13 +22,7 @@ def test_prevent_slicing():
@pytest.mark.django_db
def test_awx_custom_virtualenv(inventory, project, machine_credential, organization):
- jt = JobTemplate.objects.create(
- name='my-jt',
- inventory=inventory,
- project=project,
- playbook='helloworld.yml',
- organization=organization
- )
+ jt = JobTemplate.objects.create(name='my-jt', inventory=inventory, project=project, playbook='helloworld.yml', organization=organization)
jt.credentials.add(machine_credential)
job = jt.create_unified_job()
@@ -70,10 +53,7 @@ def test_awx_custom_virtualenv_without_jt(project):
@pytest.mark.django_db
def test_job_host_summary_representation(host):
job = Job.objects.create(name='foo')
- jhs = JobHostSummary.objects.create(
- host=host, job=job,
- changed=1, dark=2, failures=3, ignored=4, ok=5, processed=6, rescued=7, skipped=8
- )
+ jhs = JobHostSummary.objects.create(host=host, job=job, changed=1, dark=2, failures=3, ignored=4, ok=5, processed=6, rescued=7, skipped=8)
assert 'single-host changed=1 dark=2 failures=3 ignored=4 ok=5 processed=6 rescued=7 skipped=8' == str(jhs)
# Representation should be robust to deleted related items
@@ -88,10 +68,7 @@ def test_jt_organization_follows_project():
org2 = Organization.objects.create(name='foo2')
project1 = Project.objects.create(name='proj1', organization=org1)
project2 = Project.objects.create(name='proj2', organization=org2)
- jt = JobTemplate.objects.create(
- name='foo', playbook='helloworld.yml',
- project=project1
- )
+ jt = JobTemplate.objects.create(name='foo', playbook='helloworld.yml', project=project1)
assert jt.organization == org1
jt.project = project2
jt.save()
@@ -100,7 +77,6 @@ def test_jt_organization_follows_project():
@pytest.mark.django_db
class TestSlicingModels:
-
def test_slice_workflow_spawn(self, slice_jt_factory):
slice_jt = slice_jt_factory(3)
job = slice_jt.create_unified_job()
diff --git a/awx/main/tests/functional/models/test_job_launch_config.py b/awx/main/tests/functional/models/test_job_launch_config.py
index 9b7d75d7f9..96c422af80 100644
--- a/awx/main/tests/functional/models/test_job_launch_config.py
+++ b/awx/main/tests/functional/models/test_job_launch_config.py
@@ -6,12 +6,7 @@ from awx.main.models import JobTemplate, JobLaunchConfig
@pytest.fixture
def full_jt(inventory, project, machine_credential):
- jt = JobTemplate.objects.create(
- name='my-jt',
- inventory=inventory,
- project=project,
- playbook='helloworld.yml'
- )
+ jt = JobTemplate.objects.create(name='my-jt', inventory=inventory, project=project, playbook='helloworld.yml')
jt.credentials.add(machine_credential)
return jt
@@ -24,15 +19,17 @@ def config_factory(full_jt):
return job.launch_config
except JobLaunchConfig.DoesNotExist:
return None
+
return return_config
@pytest.mark.django_db
class TestConfigCreation:
- '''
+ """
Checks cases for the auto-creation of a job configuration with the
creation of a unified job
- '''
+ """
+
def test_null_configuration(self, full_jt):
job = full_jt.create_unified_job()
assert job.launch_config.prompts_dict() == {}
@@ -49,27 +46,22 @@ class TestConfigCreation:
assert set(config.credentials.all()) == set([credential])
def test_survey_passwords_ignored(self, inventory_source):
- iu = inventory_source.create_unified_job(
- survey_passwords={'foo': '$encrypted$'}
- )
+ iu = inventory_source.create_unified_job(survey_passwords={'foo': '$encrypted$'})
assert iu.launch_config.prompts_dict() == {}
@pytest.mark.django_db
class TestConfigReversibility:
- '''
+ """
Checks that a blob of saved prompts will be re-created in the
prompts_dict for launching new jobs
- '''
+ """
+
def test_char_field_only(self, config_factory):
config = config_factory({'limit': 'foobar'})
assert config.prompts_dict() == {'limit': 'foobar'}
def test_related_objects(self, config_factory, inventory, credential):
- prompts = {
- 'limit': 'foobar',
- 'inventory': inventory,
- 'credentials': set([credential])
- }
+ prompts = {'limit': 'foobar', 'inventory': inventory, 'credentials': set([credential])}
config = config_factory(prompts)
assert config.prompts_dict() == prompts
diff --git a/awx/main/tests/functional/models/test_job_options.py b/awx/main/tests/functional/models/test_job_options.py
index 97b277c5b5..65add96de9 100644
--- a/awx/main/tests/functional/models/test_job_options.py
+++ b/awx/main/tests/functional/models/test_job_options.py
@@ -5,10 +5,7 @@ from awx.main.models import Credential
@pytest.mark.django_db
def test_clean_credential_with_ssh_type(credentialtype_ssh, job_template):
- credential = Credential(
- name='My Credential',
- credential_type=credentialtype_ssh
- )
+ credential = Credential(name='My Credential', credential_type=credentialtype_ssh)
credential.save()
job_template.credentials.add(credential)
@@ -17,15 +14,9 @@ def test_clean_credential_with_ssh_type(credentialtype_ssh, job_template):
@pytest.mark.django_db
def test_clean_credential_with_custom_types(credentialtype_aws, credentialtype_net, job_template):
- aws = Credential(
- name='AWS Credential',
- credential_type=credentialtype_aws
- )
+ aws = Credential(name='AWS Credential', credential_type=credentialtype_aws)
aws.save()
- net = Credential(
- name='Net Credential',
- credential_type=credentialtype_net
- )
+ net = Credential(name='Net Credential', credential_type=credentialtype_net)
net.save()
job_template.credentials.add(aws)
diff --git a/awx/main/tests/functional/models/test_notifications.py b/awx/main/tests/functional/models/test_notifications.py
index 6fda35c805..2d1d5e0f17 100644
--- a/awx/main/tests/functional/models/test_notifications.py
+++ b/awx/main/tests/functional/models/test_notifications.py
@@ -4,94 +4,81 @@ import datetime
import pytest
-#from awx.main.models import NotificationTemplates, Notifications, JobNotificationMixin
-from awx.main.models import (AdHocCommand, InventoryUpdate, Job, JobNotificationMixin, ProjectUpdate,
- Schedule, SystemJob, WorkflowJob)
+# from awx.main.models import NotificationTemplates, Notifications, JobNotificationMixin
+from awx.main.models import AdHocCommand, InventoryUpdate, Job, JobNotificationMixin, ProjectUpdate, Schedule, SystemJob, WorkflowJob
from awx.api.serializers import UnifiedJobSerializer
class TestJobNotificationMixin(object):
- CONTEXT_STRUCTURE = {'job': {'allow_simultaneous': bool,
- 'artifacts': {},
- 'custom_virtualenv': str,
- 'controller_node': str,
- 'created': datetime.datetime,
- 'description': str,
- 'diff_mode': bool,
- 'elapsed': float,
- 'execution_node': str,
- 'failed': bool,
- 'finished': bool,
- 'force_handlers': bool,
- 'forks': int,
- 'host_status_counts': {
- 'skipped': int, 'ok': int, 'changed': int,
- 'failures': int, 'dark': int, 'processed': int,
- 'rescued': int, 'failed': bool
- },
- 'id': int,
- 'job_explanation': str,
- 'job_slice_count': int,
- 'job_slice_number': int,
- 'job_tags': str,
- 'job_type': str,
- 'launch_type': str,
- 'limit': str,
- 'modified': datetime.datetime,
- 'name': str,
- 'playbook': str,
- 'scm_branch': str,
- 'scm_revision': str,
- 'skip_tags': str,
- 'start_at_task': str,
- 'started': str,
- 'status': str,
- 'summary_fields': {'created_by': {'first_name': str,
- 'id': int,
- 'last_name': str,
- 'username': str},
- 'instance_group': {'id': int, 'name': str},
- 'inventory': {'description': str,
- 'has_active_failures': bool,
- 'has_inventory_sources': bool,
- 'hosts_with_active_failures': int,
- 'id': int,
- 'inventory_sources_with_failures': int,
- 'kind': str,
- 'name': str,
- 'organization_id': int,
- 'total_groups': int,
- 'total_hosts': int,
- 'total_inventory_sources': int},
- 'job_template': {'description': str,
- 'id': int,
- 'name': str},
- 'labels': {'count': int, 'results': list},
- 'project': {'description': str,
- 'id': int,
- 'name': str,
- 'scm_type': str,
- 'status': str},
- 'schedule': {'description': str,
- 'id': int,
- 'name': str,
- 'next_run': datetime.datetime},
- 'unified_job_template': {'description': str,
- 'id': int,
- 'name': str,
- 'unified_job_type': str}},
-
- 'timeout': int,
- 'type': str,
- 'url': str,
- 'use_fact_cache': bool,
- 'verbosity': int},
- 'job_friendly_name': str,
- 'job_metadata': str,
- 'approval_status': str,
- 'approval_node_name': str,
- 'workflow_url': str,
- 'url': str}
+ CONTEXT_STRUCTURE = {
+ 'job': {
+ 'allow_simultaneous': bool,
+ 'artifacts': {},
+ 'custom_virtualenv': str,
+ 'controller_node': str,
+ 'created': datetime.datetime,
+ 'description': str,
+ 'diff_mode': bool,
+ 'elapsed': float,
+ 'execution_node': str,
+ 'failed': bool,
+ 'finished': bool,
+ 'force_handlers': bool,
+ 'forks': int,
+ 'host_status_counts': {'skipped': int, 'ok': int, 'changed': int, 'failures': int, 'dark': int, 'processed': int, 'rescued': int, 'failed': bool},
+ 'id': int,
+ 'job_explanation': str,
+ 'job_slice_count': int,
+ 'job_slice_number': int,
+ 'job_tags': str,
+ 'job_type': str,
+ 'launch_type': str,
+ 'limit': str,
+ 'modified': datetime.datetime,
+ 'name': str,
+ 'playbook': str,
+ 'scm_branch': str,
+ 'scm_revision': str,
+ 'skip_tags': str,
+ 'start_at_task': str,
+ 'started': str,
+ 'status': str,
+ 'summary_fields': {
+ 'created_by': {'first_name': str, 'id': int, 'last_name': str, 'username': str},
+ 'instance_group': {'id': int, 'name': str},
+ 'inventory': {
+ 'description': str,
+ 'has_active_failures': bool,
+ 'has_inventory_sources': bool,
+ 'hosts_with_active_failures': int,
+ 'id': int,
+ 'inventory_sources_with_failures': int,
+ 'kind': str,
+ 'name': str,
+ 'organization_id': int,
+ 'total_groups': int,
+ 'total_hosts': int,
+ 'total_inventory_sources': int,
+ },
+ 'job_template': {'description': str, 'id': int, 'name': str},
+ 'labels': {'count': int, 'results': list},
+ 'project': {'description': str, 'id': int, 'name': str, 'scm_type': str, 'status': str},
+ 'schedule': {'description': str, 'id': int, 'name': str, 'next_run': datetime.datetime},
+ 'unified_job_template': {'description': str, 'id': int, 'name': str, 'unified_job_type': str},
+ },
+ 'timeout': int,
+ 'type': str,
+ 'url': str,
+ 'use_fact_cache': bool,
+ 'verbosity': int,
+ },
+ 'job_friendly_name': str,
+ 'job_metadata': str,
+ 'approval_status': str,
+ 'approval_node_name': str,
+ 'workflow_url': str,
+ 'url': str,
+ }
def check_structure(self, expected_structure, obj):
if isinstance(expected_structure, dict):
@@ -129,17 +116,8 @@ class TestJobNotificationMixin(object):
@pytest.mark.django_db
def test_schedule_context(self, job_template, admin_user):
- schedule = Schedule.objects.create(
- name='job-schedule',
- rrule='DTSTART:20171129T155939z\nFREQ=MONTHLY',
- unified_job_template=job_template
- )
- job = Job.objects.create(
- name='fake-job',
- launch_type='workflow',
- schedule=schedule,
- job_template=job_template
- )
+ schedule = Schedule.objects.create(name='job-schedule', rrule='DTSTART:20171129T155939z\nFREQ=MONTHLY', unified_job_template=job_template)
+ job = Job.objects.create(name='fake-job', launch_type='workflow', schedule=schedule, job_template=job_template)
job_serialization = UnifiedJobSerializer(job).to_representation(job)
@@ -153,11 +131,11 @@ class TestJobNotificationMixin(object):
context = job.context(job_serialization)
assert '批量安装项目' in context['job_metadata']
-
def test_context_stub(self):
"""The context stub is a fake context used to validate custom notification messages. Ensure that
this also has the expected structure. Furthermore, ensure that the stub context contains
*all* fields that could possibly be included in a context."""
+
def check_structure_and_completeness(expected_structure, obj):
expected_structure = deepcopy(expected_structure)
if isinstance(expected_structure, dict):
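
The first hunk in this file turns #from ... into # from ...: black also normalizes ordinary comments to put a space after the hash. A minimal sketch, with string normalization again disabled to match this commit:

    import black

    mode = black.FileMode(string_normalization=False)
    src = "#from awx.main.models import Job\nJob = None\n"
    print(black.format_str(src, mode=mode), end='')
    # -> "# from awx.main.models import Job" followed by "Job = None"
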
diff --git a/awx/main/tests/functional/models/test_project.py b/awx/main/tests/functional/models/test_project.py
index d3c34498b0..37abdc2bf8 100644
--- a/awx/main/tests/functional/models/test_project.py
+++ b/awx/main/tests/functional/models/test_project.py
@@ -37,12 +37,7 @@ def test_sensitive_change_triggers_update(project):
@pytest.mark.django_db
def test_local_path_autoset(organization):
with mock.patch.object(Project, "update"):
- p = Project.objects.create(
- name="test-proj",
- organization=organization,
- scm_url='localhost',
- scm_type='git'
- )
+ p = Project.objects.create(name="test-proj", organization=organization, scm_url='localhost', scm_type='git')
assert p.local_path == f'_{p.id}__test_proj'
@@ -66,19 +61,12 @@ def test_galaxy_credentials(project):
galaxy.save()
for i in range(5):
cred = Credential.objects.create(
- name=f'Ansible Galaxy {i + 1}',
- organization=org,
- credential_type=galaxy,
- inputs={
- 'url': 'https://galaxy.ansible.com/'
- }
+ name=f'Ansible Galaxy {i + 1}', organization=org, credential_type=galaxy, inputs={'url': 'https://galaxy.ansible.com/'}
)
cred.save()
org.galaxy_credentials.add(cred)
- assert [
- cred.name for cred in org.galaxy_credentials.all()
- ] == [
+ assert [cred.name for cred in org.galaxy_credentials.all()] == [
'Ansible Galaxy 1',
'Ansible Galaxy 2',
'Ansible Galaxy 3',
diff --git a/awx/main/tests/functional/models/test_schedule.py b/awx/main/tests/functional/models/test_schedule.py
index fb5bfbf271..6db1b3a112 100644
--- a/awx/main/tests/functional/models/test_schedule.py
+++ b/awx/main/tests/functional/models/test_schedule.py
@@ -15,11 +15,7 @@ from crum import impersonate
@pytest.fixture
def job_template(inventory, project):
# need related resources set for these tests
- return JobTemplate.objects.create(
- name='test-job_template',
- inventory=inventory,
- project=project
- )
+ return JobTemplate.objects.create(name='test-job_template', inventory=inventory, project=project)
@pytest.mark.django_db
@@ -33,9 +29,7 @@ class TestComputedFields:
def distant_rrule(self):
# this rule should produce a next_run, but it should not overlap with test run time
this_year = now().year
- return "DTSTART;TZID=UTC:{}0520T190000 RRULE:FREQ=YEARLY;INTERVAL=1;BYMONTH=1;BYMONTHDAY=1;UNTIL={}0530T000000Z".format(
- this_year + 1, this_year + 2
- )
+ return "DTSTART;TZID=UTC:{}0520T190000 RRULE:FREQ=YEARLY;INTERVAL=1;BYMONTH=1;BYMONTHDAY=1;UNTIL={}0530T000000Z".format(this_year + 1, this_year + 2)
@contextmanager
def assert_no_unwanted_stuff(self, schedule, act_stream=True, sch_assert=True):
@@ -57,17 +51,11 @@ class TestComputedFields:
assert schedule.unified_job_template.modified == original_ujt_modified
assert schedule.unified_job_template.modified_by == original_ujt_modified_by
if act_stream:
- assert ActivityStream.objects.count() == original_AS_entries, (
- ActivityStream.objects.order_by('-timestamp').first().changes
- )
+ assert ActivityStream.objects.count() == original_AS_entries, ActivityStream.objects.order_by('-timestamp').first().changes
def test_computed_fields_modified_by_retained(self, job_template, admin_user):
with impersonate(admin_user):
- s = Schedule.objects.create(
- name='Some Schedule',
- rrule='DTSTART:20300112T210000Z RRULE:FREQ=DAILY;INTERVAL=1',
- unified_job_template=job_template
- )
+ s = Schedule.objects.create(name='Some Schedule', rrule='DTSTART:20300112T210000Z RRULE:FREQ=DAILY;INTERVAL=1', unified_job_template=job_template)
assert s.created_by == admin_user
with self.assert_no_unwanted_stuff(s):
s.update_computed_fields() # modification done by system here
@@ -75,12 +63,7 @@ class TestComputedFields:
assert s.modified_by == admin_user
def test_computed_fields_no_op(self, job_template):
- s = Schedule.objects.create(
- name='Some Schedule',
- rrule=self.dead_rrule,
- unified_job_template=job_template,
- enabled=True
- )
+ s = Schedule.objects.create(name='Some Schedule', rrule=self.dead_rrule, unified_job_template=job_template, enabled=True)
with self.assert_no_unwanted_stuff(s):
assert s.next_run is None
assert s.dtend is not None
@@ -90,12 +73,7 @@ class TestComputedFields:
assert s.dtend == prior_dtend
def test_computed_fields_time_change(self, job_template):
- s = Schedule.objects.create(
- name='Some Schedule',
- rrule=self.continuing_rrule,
- unified_job_template=job_template,
- enabled=True
- )
+ s = Schedule.objects.create(name='Some Schedule', rrule=self.continuing_rrule, unified_job_template=job_template, enabled=True)
with self.assert_no_unwanted_stuff(s):
# force update of next_run, as if schedule re-calculation had not happened
# since this time
@@ -109,12 +87,7 @@ class TestComputedFields:
assert s.modified == prior_modified
def test_computed_fields_turning_on(self, job_template):
- s = Schedule.objects.create(
- name='Some Schedule',
- rrule=self.distant_rrule,
- unified_job_template=job_template,
- enabled=False
- )
+ s = Schedule.objects.create(name='Some Schedule', rrule=self.distant_rrule, unified_job_template=job_template, enabled=False)
# we expect one activity stream entry for changing the enabled field
with self.assert_no_unwanted_stuff(s, act_stream=False):
assert s.next_run is None
@@ -125,11 +98,7 @@ class TestComputedFields:
assert job_template.next_schedule == s
def test_computed_fields_turning_on_via_rrule(self, job_template):
- s = Schedule.objects.create(
- name='Some Schedule',
- rrule=self.dead_rrule,
- unified_job_template=job_template
- )
+ s = Schedule.objects.create(name='Some Schedule', rrule=self.dead_rrule, unified_job_template=job_template)
with self.assert_no_unwanted_stuff(s, act_stream=False):
assert s.next_run is None
assert job_template.next_schedule is None
@@ -140,16 +109,8 @@ class TestComputedFields:
assert job_template.next_schedule == s
def test_computed_fields_turning_off_by_deleting(self, job_template):
- s1 = Schedule.objects.create(
- name='first schedule',
- rrule=self.distant_rrule,
- unified_job_template=job_template
- )
- s2 = Schedule.objects.create(
- name='second schedule',
- rrule=self.distant_rrule,
- unified_job_template=job_template
- )
+ s1 = Schedule.objects.create(name='first schedule', rrule=self.distant_rrule, unified_job_template=job_template)
+ s2 = Schedule.objects.create(name='second schedule', rrule=self.distant_rrule, unified_job_template=job_template)
assert job_template.next_schedule in [s1, s2]
if job_template.next_schedule == s1:
expected_schedule = s2
@@ -162,40 +123,26 @@ class TestComputedFields:
@pytest.mark.django_db
-@pytest.mark.parametrize('freq, delta', (
- ('MINUTELY', 1),
- ('HOURLY', 1)
-))
+@pytest.mark.parametrize('freq, delta', (('MINUTELY', 1), ('HOURLY', 1)))
def test_past_week_rrule(job_template, freq, delta):
# see: https://github.com/ansible/awx/issues/8071
- recent = (datetime.utcnow() - timedelta(days=3))
+ recent = datetime.utcnow() - timedelta(days=3)
recent = recent.replace(hour=0, minute=0, second=0, microsecond=0)
recent_dt = recent.strftime('%Y%m%d')
rrule = f'DTSTART;TZID=America/New_York:{recent_dt}T000000 RRULE:FREQ={freq};INTERVAL={delta};COUNT=5' # noqa
- sched = Schedule.objects.create(
- name='example schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
+ sched = Schedule.objects.create(name='example schedule', rrule=rrule, unified_job_template=job_template)
first_event = sched.rrulestr(sched.rrule)[0]
assert first_event.replace(tzinfo=None) == recent
@pytest.mark.django_db
-@pytest.mark.parametrize('freq, delta', (
- ('MINUTELY', 1),
- ('HOURLY', 1)
-))
+@pytest.mark.parametrize('freq, delta', (('MINUTELY', 1), ('HOURLY', 1)))
def test_really_old_dtstart(job_template, freq, delta):
# see: https://github.com/ansible/awx/issues/8071
# If an event is per-minute/per-hour and was created a *really long*
# time ago, we should just bump forward to start counting "in the last week"
rrule = f'DTSTART;TZID=America/New_York:20150101T000000 RRULE:FREQ={freq};INTERVAL={delta}' # noqa
- sched = Schedule.objects.create(
- name='example schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
+ sched = Schedule.objects.create(name='example schedule', rrule=rrule, unified_job_template=job_template)
last_week = (datetime.utcnow() - timedelta(days=7)).date()
first_event = sched.rrulestr(sched.rrule)[0]
assert last_week == first_event.date()
@@ -207,19 +154,13 @@ def test_really_old_dtstart(job_template, freq, delta):
last = None
for event in next_five_events:
if last:
- assert event == last + (
- timedelta(minutes=1) if freq == 'MINUTELY' else timedelta(hours=1)
- )
+ assert event == last + (timedelta(minutes=1) if freq == 'MINUTELY' else timedelta(hours=1))
last = event
@pytest.mark.django_db
def test_repeats_forever(job_template):
- s = Schedule(
- name='Some Schedule',
- rrule='DTSTART:20300112T210000Z RRULE:FREQ=DAILY;INTERVAL=1',
- unified_job_template=job_template
- )
+ s = Schedule(name='Some Schedule', rrule='DTSTART:20300112T210000Z RRULE:FREQ=DAILY;INTERVAL=1', unified_job_template=job_template)
s.save()
assert str(s.next_run) == str(s.dtstart) == '2030-01-12 21:00:00+00:00'
assert s.dtend is None
@@ -227,11 +168,7 @@ def test_repeats_forever(job_template):
@pytest.mark.django_db
def test_no_recurrence_utc(job_template):
- s = Schedule(
- name='Some Schedule',
- rrule='DTSTART:20300112T210000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1',
- unified_job_template=job_template
- )
+ s = Schedule(name='Some Schedule', rrule='DTSTART:20300112T210000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1', unified_job_template=job_template)
s.save()
assert str(s.next_run) == str(s.dtstart) == str(s.dtend) == '2030-01-12 21:00:00+00:00'
@@ -239,9 +176,7 @@ def test_no_recurrence_utc(job_template):
@pytest.mark.django_db
def test_no_recurrence_est(job_template):
s = Schedule(
- name='Some Schedule',
- rrule='DTSTART;TZID=America/New_York:20300112T210000 RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1',
- unified_job_template=job_template
+ name='Some Schedule', rrule='DTSTART;TZID=America/New_York:20300112T210000 RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1', unified_job_template=job_template
)
s.save()
assert str(s.next_run) == str(s.dtstart) == str(s.dtend) == '2030-01-13 02:00:00+00:00'
@@ -250,9 +185,7 @@ def test_no_recurrence_est(job_template):
@pytest.mark.django_db
def test_next_run_utc(job_template):
s = Schedule(
- name='Some Schedule',
- rrule='DTSTART:20300112T210000Z RRULE:FREQ=MONTHLY;INTERVAL=1;BYDAY=SA;BYSETPOS=1;COUNT=4',
- unified_job_template=job_template
+ name='Some Schedule', rrule='DTSTART:20300112T210000Z RRULE:FREQ=MONTHLY;INTERVAL=1;BYDAY=SA;BYSETPOS=1;COUNT=4', unified_job_template=job_template
)
s.save()
assert str(s.next_run) == '2030-02-02 21:00:00+00:00'
@@ -265,7 +198,7 @@ def test_next_run_est(job_template):
s = Schedule(
name='Some Schedule',
rrule='DTSTART;TZID=America/New_York:20300112T210000 RRULE:FREQ=MONTHLY;INTERVAL=1;BYDAY=SA;BYSETPOS=1;COUNT=4',
- unified_job_template=job_template
+ unified_job_template=job_template,
)
s.save()
@@ -279,11 +212,7 @@ def test_next_run_est(job_template):
@pytest.mark.django_db
def test_year_boundary(job_template):
rrule = 'DTSTART;TZID=America/New_York:20301231T230000 RRULE:FREQ=YEARLY;INTERVAL=1;BYMONTH=12;BYMONTHDAY=31;COUNT=4' # noqa
- s = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
+ s = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
s.save()
assert str(s.next_run) == '2031-01-01 04:00:00+00:00' # UTC = +5 EST
@@ -294,11 +223,7 @@ def test_year_boundary(job_template):
@pytest.mark.django_db
def test_leap_year_day(job_template):
rrule = 'DTSTART;TZID=America/New_York:20320229T050000 RRULE:FREQ=YEARLY;INTERVAL=1;BYMONTH=02;BYMONTHDAY=29;COUNT=2' # noqa
- s = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
+ s = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
s.save()
assert str(s.next_run) == '2032-02-29 10:00:00+00:00' # UTC = +5 EST
@@ -307,17 +232,16 @@ def test_leap_year_day(job_template):
@pytest.mark.django_db
-@pytest.mark.parametrize('until, dtend', [
- ['20300602T170000Z', '2030-06-02 12:00:00+00:00'],
- ['20300602T000000Z', '2030-06-01 12:00:00+00:00'],
-])
+@pytest.mark.parametrize(
+ 'until, dtend',
+ [
+ ['20300602T170000Z', '2030-06-02 12:00:00+00:00'],
+ ['20300602T000000Z', '2030-06-01 12:00:00+00:00'],
+ ],
+)
def test_utc_until(job_template, until, dtend):
rrule = 'DTSTART:20300601T120000Z RRULE:FREQ=DAILY;INTERVAL=1;UNTIL={}'.format(until)
- s = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
+ s = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
s.save()
assert str(s.next_run) == '2030-06-01 12:00:00+00:00'
@@ -326,17 +250,16 @@ def test_utc_until(job_template, until, dtend):
@pytest.mark.django_db
-@pytest.mark.parametrize('dtstart, until', [
- ['DTSTART:20380601T120000Z', '20380601T170000'], # noon UTC to 5PM UTC
- ['DTSTART;TZID=America/New_York:20380601T120000', '20380601T170000'], # noon EST to 5PM EST
-])
+@pytest.mark.parametrize(
+ 'dtstart, until',
+ [
+ ['DTSTART:20380601T120000Z', '20380601T170000'], # noon UTC to 5PM UTC
+ ['DTSTART;TZID=America/New_York:20380601T120000', '20380601T170000'], # noon EST to 5PM EST
+ ],
+)
def test_tzinfo_naive_until(job_template, dtstart, until):
rrule = '{} RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL={}'.format(dtstart, until) # noqa
- s = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
+ s = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
s.save()
gen = Schedule.rrulestr(s.rrule).xafter(now(), count=20)
assert len(list(gen)) == 6 # noon, 1PM, 2, 3, 4, 5PM
@@ -345,11 +268,7 @@ def test_tzinfo_naive_until(job_template, dtstart, until):
@pytest.mark.django_db
def test_utc_until_in_the_past(job_template):
rrule = 'DTSTART:20180601T120000Z RRULE:FREQ=DAILY;INTERVAL=1;UNTIL=20150101T100000Z'
- s = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
+ s = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
s.save()
assert s.next_run is s.dtstart is s.dtend is None
@@ -365,11 +284,7 @@ def test_dst_phantom_hour(job_template):
# Three Sundays, starting 2:30AM America/New_York, starting Mar 3, 2030,
# (which doesn't exist)
rrule = 'DTSTART;TZID=America/New_York:20300303T023000 RRULE:FREQ=WEEKLY;BYDAY=SU;INTERVAL=1;COUNT=3'
- s = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
+ s = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
s.save()
# 3/10/30 @ 2:30AM is skipped because it _doesn't exist_ <cue twilight zone music>
@@ -382,38 +297,29 @@ def test_beginning_of_time(job_template):
# ensure that really large generators don't have performance issues
start = now()
rrule = 'DTSTART:19700101T000000Z RRULE:FREQ=MINUTELY;INTERVAL=1'
- s = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
+ s = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
s.save()
assert s.next_run > start
assert (s.next_run - start).total_seconds() < 60
@pytest.mark.django_db
-@pytest.mark.parametrize('rrule, tz', [
- ['DTSTART:20300112T210000Z RRULE:FREQ=DAILY;INTERVAL=1', 'UTC'],
- ['DTSTART;TZID=America/New_York:20300112T210000 RRULE:FREQ=DAILY;INTERVAL=1', 'America/New_York']
-])
+@pytest.mark.parametrize(
+ 'rrule, tz',
+ [
+ ['DTSTART:20300112T210000Z RRULE:FREQ=DAILY;INTERVAL=1', 'UTC'],
+ ['DTSTART;TZID=America/New_York:20300112T210000 RRULE:FREQ=DAILY;INTERVAL=1', 'America/New_York'],
+ ],
+)
def test_timezone_property(job_template, rrule, tz):
- s = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
+ s = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
assert s.timezone == tz
@pytest.mark.django_db
def test_utc_until_property(job_template):
rrule = 'DTSTART:20380601T120000Z RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000Z'
- s = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
+ s = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
s.save()
assert s.rrule.endswith('20380601T170000Z')
@@ -423,11 +329,7 @@ def test_utc_until_property(job_template):
@pytest.mark.django_db
def test_localized_until_property(job_template):
rrule = 'DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T220000Z'
- s = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
+ s = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
s.save()
assert s.rrule.endswith('20380601T220000Z')
@@ -437,11 +339,7 @@ def test_localized_until_property(job_template):
@pytest.mark.django_db
def test_utc_naive_coercion(job_template):
rrule = 'DTSTART:20380601T120000Z RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000'
- s = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
+ s = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
s.save()
assert s.rrule.endswith('20380601T170000Z')
@@ -451,11 +349,7 @@ def test_utc_naive_coercion(job_template):
@pytest.mark.django_db
def test_est_naive_coercion(job_template):
rrule = 'DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000'
- s = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
+ s = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
s.save()
assert s.rrule.endswith('20380601T220000Z') # 5PM EDT = 10PM UTC
@@ -465,11 +359,7 @@ def test_est_naive_coercion(job_template):
@pytest.mark.django_db
def test_empty_until_property(job_template):
rrule = 'DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=HOURLY;INTERVAL=1'
- s = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
+ s = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
s.save()
assert s.until == ''
@@ -479,16 +369,8 @@ def test_duplicate_name_across_templates(job_template):
# Assert that duplicate name is allowed for different unified job templates.
rrule = 'DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=HOURLY;INTERVAL=1'
job_template_2 = JobTemplate.objects.create(name='test-job_template_2')
- s1 = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
- s2 = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template_2
- )
+ s1 = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
+ s2 = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template_2)
s1.save()
s2.save()
@@ -499,16 +381,8 @@ def test_duplicate_name_across_templates(job_template):
def test_duplicate_name_within_template(job_template):
    # Assert that duplicate name is not allowed for the same unified job template.
rrule = 'DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=HOURLY;INTERVAL=1'
- s1 = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
- s2 = Schedule(
- name='Some Schedule',
- rrule=rrule,
- unified_job_template=job_template
- )
+ s1 = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
+ s2 = Schedule(name='Some Schedule', rrule=rrule, unified_job_template=job_template)
s1.save()
with pytest.raises(IntegrityError) as ierror:
diff --git a/awx/main/tests/functional/models/test_unified_job.py b/awx/main/tests/functional/models/test_unified_job.py
index c8376a9728..76bd14cf38 100644
--- a/awx/main/tests/functional/models/test_unified_job.py
+++ b/awx/main/tests/functional/models/test_unified_job.py
@@ -8,53 +8,42 @@ from crum import impersonate
from django.contrib.contenttypes.models import ContentType
# AWX
-from awx.main.models import (
- UnifiedJobTemplate, Job, JobTemplate, WorkflowJobTemplate,
- WorkflowApprovalTemplate, Project, WorkflowJob, Schedule,
- Credential
-)
+from awx.main.models import UnifiedJobTemplate, Job, JobTemplate, WorkflowJobTemplate, WorkflowApprovalTemplate, Project, WorkflowJob, Schedule, Credential
from awx.api.versioning import reverse
@pytest.mark.django_db
def test_subclass_types(rando):
- assert set(UnifiedJobTemplate._submodels_with_roles()) == set([
- ContentType.objects.get_for_model(JobTemplate).id,
- ContentType.objects.get_for_model(Project).id,
- ContentType.objects.get_for_model(WorkflowJobTemplate).id,
- ContentType.objects.get_for_model(WorkflowApprovalTemplate).id
-
- ])
+ assert set(UnifiedJobTemplate._submodels_with_roles()) == set(
+ [
+ ContentType.objects.get_for_model(JobTemplate).id,
+ ContentType.objects.get_for_model(Project).id,
+ ContentType.objects.get_for_model(WorkflowJobTemplate).id,
+ ContentType.objects.get_for_model(WorkflowApprovalTemplate).id,
+ ]
+ )
@pytest.mark.django_db
def test_soft_unique_together(post, project, admin_user):
- """This tests that SOFT_UNIQUE_TOGETHER restrictions are applied correctly.
- """
- jt1 = JobTemplate.objects.create(
- name='foo_jt',
- project=project
- )
+ """This tests that SOFT_UNIQUE_TOGETHER restrictions are applied correctly."""
+ jt1 = JobTemplate.objects.create(name='foo_jt', project=project)
assert jt1.organization == project.organization
r = post(
url=reverse('api:job_template_list'),
- data=dict(
- name='foo_jt', # same as first
- project=project.id,
- ask_inventory_on_launch=True,
- playbook='helloworld.yml'
- ),
+ data=dict(name='foo_jt', project=project.id, ask_inventory_on_launch=True, playbook='helloworld.yml'), # same as first
user=admin_user,
- expect=400
+ expect=400,
)
assert 'combination already exists' in str(r.data)
@pytest.mark.django_db
class TestCreateUnifiedJob:
- '''
+ """
Ensure that copying a job template to a job handles many to many field copy
- '''
+ """
+
def test_many_to_many(self, mocker, job_template_labels):
jt = job_template_labels
_get_unified_job_field_names = mocker.patch('awx.main.models.jobs.JobTemplate._get_unified_job_field_names', return_value=['labels'])
@@ -68,6 +57,7 @@ class TestCreateUnifiedJob:
'''
Ensure that data is looked for in parameter list before looking at the object
'''
+
def test_many_to_many_kwargs(self, mocker, job_template_labels):
jt = job_template_labels
_get_unified_job_field_names = mocker.patch('awx.main.models.jobs.JobTemplate._get_unified_job_field_names', return_value=['labels'])
@@ -78,8 +68,8 @@ class TestCreateUnifiedJob:
'''
Ensure that credentials m2m field is copied to new relaunched job
'''
- def test_job_relaunch_copy_vars(self, machine_credential, inventory,
- deploy_jobtemplate, post, mocker, net_credential):
+
+ def test_job_relaunch_copy_vars(self, machine_credential, inventory, deploy_jobtemplate, post, mocker, net_credential):
job_with_links = Job(name='existing-job', inventory=inventory)
job_with_links.job_template = deploy_jobtemplate
job_with_links.limit = "my_server"
@@ -89,10 +79,7 @@ class TestCreateUnifiedJob:
second_job = job_with_links.copy_unified_job()
# Check that job data matches the original variables
- assert [c.pk for c in second_job.credentials.all()] == [
- machine_credential.pk,
- net_credential.pk
- ]
+ assert [c.pk for c in second_job.credentials.all()] == [machine_credential.pk, net_credential.pk]
assert second_job.inventory == job_with_links.inventory
assert second_job.limit == 'my_server'
assert net_credential in second_job.credentials.all()
@@ -101,11 +88,7 @@ class TestCreateUnifiedJob:
# Replace all credentials with a new one of same type
new_creds = []
for cred in jt_linked.credentials.all():
- new_creds.append(Credential.objects.create(
- name=str(cred.name) + '_new',
- credential_type=cred.credential_type,
- inputs=cred.inputs
- ))
+ new_creds.append(Credential.objects.create(name=str(cred.name) + '_new', credential_type=cred.credential_type, inputs=cred.inputs))
job = jt_linked.create_unified_job()
jt_linked.credentials.clear()
jt_linked.credentials.add(*new_creds)
@@ -115,21 +98,15 @@ class TestCreateUnifiedJob:
@pytest.mark.django_db
class TestMetaVars:
- '''
+ """
Extension of unit tests with same class name
- '''
+ """
def test_deleted_user(self, admin_user):
- job = Job.objects.create(
- name='job',
- created_by=admin_user
- )
+ job = Job.objects.create(name='job', created_by=admin_user)
job.save()
- user_vars = ['_'.join(x) for x in itertools.product(
- ['tower', 'awx'],
- ['user_name', 'user_id', 'user_email', 'user_first_name', 'user_last_name']
- )]
+ user_vars = ['_'.join(x) for x in itertools.product(['tower', 'awx'], ['user_name', 'user_id', 'user_email', 'user_first_name', 'user_last_name'])]
for key in user_vars:
assert key in job.awx_meta_vars()
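# Hedged illustration (editor's sketch, not part of this commit) of the
# product above: each ('prefix', 'suffix') pair joins into a prefixed name
# that is then checked against awx_meta_vars().
import itertools

print(['_'.join(x) for x in itertools.product(['tower', 'awx'], ['user_name', 'user_id'])])
# ['tower_user_name', 'tower_user_id', 'awx_user_name', 'awx_user_id']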
@@ -141,10 +118,7 @@ class TestMetaVars:
assert key not in job.awx_meta_vars()
def test_workflow_job_metavars(self, admin_user, job_template):
- workflow_job = WorkflowJob.objects.create(
- name='workflow-job',
- created_by=admin_user
- )
+ workflow_job = WorkflowJob.objects.create(name='workflow-job', created_by=admin_user)
node = workflow_job.workflow_nodes.create(unified_job_template=job_template)
job_kv = node.get_job_kwargs()
job = node.unified_job_template.create_unified_job(**job_kv)
@@ -157,37 +131,18 @@ class TestMetaVars:
assert data['awx_workflow_job_launch_type'] == workflow_job.launch_type
def test_scheduled_job_metavars(self, job_template, admin_user):
- schedule = Schedule.objects.create(
- name='job-schedule',
- rrule='DTSTART:20171129T155939z\nFREQ=MONTHLY',
- unified_job_template=job_template
- )
- job = Job.objects.create(
- name='fake-job',
- launch_type='workflow',
- schedule=schedule,
- job_template=job_template
- )
+ schedule = Schedule.objects.create(name='job-schedule', rrule='DTSTART:20171129T155939z\nFREQ=MONTHLY', unified_job_template=job_template)
+ job = Job.objects.create(name='fake-job', launch_type='workflow', schedule=schedule, job_template=job_template)
data = job.awx_meta_vars()
assert data['awx_schedule_id'] == schedule.pk
assert 'awx_user_name' not in data
def test_scheduled_workflow_job_node_metavars(self, workflow_job_template):
- schedule = Schedule.objects.create(
- name='job-schedule',
- rrule='DTSTART:20171129T155939z\nFREQ=MONTHLY',
- unified_job_template=workflow_job_template
- )
-
- workflow_job = WorkflowJob.objects.create(
- name='workflow-job',
- workflow_job_template=workflow_job_template,
- schedule=schedule
- )
-
- job = Job.objects.create(
- launch_type='workflow'
- )
+ schedule = Schedule.objects.create(name='job-schedule', rrule='DTSTART:20171129T155939z\nFREQ=MONTHLY', unified_job_template=workflow_job_template)
+
+ workflow_job = WorkflowJob.objects.create(name='workflow-job', workflow_job_template=workflow_job_template, schedule=schedule)
+
+ job = Job.objects.create(launch_type='workflow')
workflow_job.workflow_nodes.create(job=job)
assert job.awx_meta_vars() == {
'awx_job_id': job.id,
@@ -204,7 +159,6 @@ class TestMetaVars:
'tower_parent_job_schedule_id': schedule.id,
'awx_parent_job_schedule_name': 'job-schedule',
'tower_parent_job_schedule_name': 'job-schedule',
-
}
@@ -223,7 +177,6 @@ def test_event_model_undefined():
@pytest.mark.django_db
class TestUpdateParentInstance:
-
def test_template_modified_by_not_changed_on_launch(self, job_template, alice):
# jobs are launched as a particular user, user not saved as JT modified_by
with impersonate(alice):
@@ -238,9 +191,7 @@ class TestUpdateParentInstance:
assert job_template.modified_by is None
def check_update(self, project, status):
- pu_check = project.project_updates.create(
- job_type='check', status='new', launch_type='manual'
- )
+ pu_check = project.project_updates.create(job_type='check', status='new', launch_type='manual')
pu_check.status = 'running'
pu_check.save()
# these should always be updated for a running check job
@@ -252,9 +203,7 @@ class TestUpdateParentInstance:
return pu_check
def run_update(self, project, status):
- pu_run = project.project_updates.create(
- job_type='run', status='new', launch_type='sync'
- )
+ pu_run = project.project_updates.create(job_type='run', status='new', launch_type='sync')
pu_run.status = 'running'
pu_run.save()
@@ -298,14 +247,9 @@ class TestTaskImpact:
def r(hosts, forks):
for i in range(hosts):
inventory.hosts.create(name='foo' + str(i))
- job = Job.objects.create(
- name='fake-job',
- launch_type='workflow',
- job_template=job_template,
- inventory=inventory,
- forks=forks
- )
+ job = Job.objects.create(name='fake-job', launch_type='workflow', job_template=job_template, inventory=inventory, forks=forks)
return job
+
return r
def test_limit_task_impact(self, job_host_limit, run_computed_fields_right_away):
@@ -327,17 +271,11 @@ class TestTaskImpact:
for node in workflow_job.workflow_nodes.all():
jobs[node.job.job_slice_number - 1] = node.job
# Even distribution - all jobs run on 1 host
- assert [
- len(jobs[0].inventory.get_script_data(slice_number=i + 1, slice_count=3)['all']['hosts'])
- for i in range(3)
- ] == [1, 1, 1]
+ assert [len(jobs[0].inventory.get_script_data(slice_number=i + 1, slice_count=3)['all']['hosts']) for i in range(3)] == [1, 1, 1]
jobs[0].inventory.update_computed_fields()
assert [job.task_impact for job in jobs] == [2, 2, 2] # plus one base task impact
# Uneven distribution - first job takes the extra host
jobs[0].inventory.hosts.create(name='remainder_foo')
- assert [
- len(jobs[0].inventory.get_script_data(slice_number=i + 1, slice_count=3)['all']['hosts'])
- for i in range(3)
- ] == [2, 1, 1]
+ assert [len(jobs[0].inventory.get_script_data(slice_number=i + 1, slice_count=3)['all']['hosts']) for i in range(3)] == [2, 1, 1]
jobs[0].inventory.update_computed_fields()
assert [job.task_impact for job in jobs] == [3, 2, 2]
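# Hedged sketch (editor's addition, not part of this commit) of the slicing
# arithmetic asserted above: hosts divide across slices with the earlier
# slices absorbing the remainder, so 3 hosts -> [1, 1, 1] and 4 -> [2, 1, 1].
# AWX's real get_script_data slicing differs in mechanics; this just shows
# the distribution the assertions expect.
def slice_counts(n_hosts, n_slices):
    base, rem = divmod(n_hosts, n_slices)
    return [base + (1 if i < rem else 0) for i in range(n_slices)]

print(slice_counts(3, 3), slice_counts(4, 3))  # [1, 1, 1] [2, 1, 1]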
diff --git a/awx/main/tests/functional/models/test_workflow.py b/awx/main/tests/functional/models/test_workflow.py
index f4daf7d578..9544e43561 100644
--- a/awx/main/tests/functional/models/test_workflow.py
+++ b/awx/main/tests/functional/models/test_workflow.py
@@ -1,4 +1,3 @@
-
# Python
import pytest
from unittest import mock
@@ -52,11 +51,11 @@ class TestWorkflowDAGFunctional(TransactionTestCase):
return wfj
def test_build_WFJT_dag(self):
- '''
+ """
Test that building the graph uses 4 queries
1 to get the nodes
3 to get the related success, failure, and always connections
- '''
+ """
dag = WorkflowDAG()
wfj = self.workflow_job()
with self.assertNumQueries(4):
@@ -114,7 +113,7 @@ class TestWorkflowDAGFunctional(TransactionTestCase):
@pytest.mark.django_db
-class TestWorkflowDNR():
+class TestWorkflowDNR:
@pytest.fixture
def workflow_job_fn(self):
def fn(states=['new', 'new', 'new', 'new', 'new', 'new']):
@@ -150,10 +149,20 @@ class TestWorkflowDNR():
nodes[2].success_nodes.add(nodes[5])
nodes[4].failure_nodes.add(nodes[5])
return wfj, nodes
+
return fn
def test_workflow_dnr_because_parent(self, workflow_job_fn):
- wfj, nodes = workflow_job_fn(states=['successful', None, None, None, None, None,])
+ wfj, nodes = workflow_job_fn(
+ states=[
+ 'successful',
+ None,
+ None,
+ None,
+ None,
+ None,
+ ]
+ )
dag = WorkflowDAG(workflow_job=wfj)
workflow_nodes = dag.mark_dnr_nodes()
assert 2 == len(workflow_nodes)
@@ -196,8 +205,7 @@ class TestWorkflowJob:
wfj = WorkflowJob.objects.create(name='test-wf-job')
job = Job.objects.create(name='test-job', artifacts={'b': 43})
# Workflow job nodes
- job_node = WorkflowJobNode.objects.create(workflow_job=wfj, job=job,
- ancestor_artifacts={'a': 42})
+ job_node = WorkflowJobNode.objects.create(workflow_job=wfj, job=job, ancestor_artifacts={'a': 42})
queued_node = WorkflowJobNode.objects.create(workflow_job=wfj, unified_job_template=job_template)
# Connect old job -> new job
mocker.patch.object(queued_node, 'get_parent_nodes', lambda: [job_node])
@@ -213,8 +221,7 @@ class TestWorkflowJob:
wfj = WorkflowJob.objects.create(name='test-wf-job')
update = ProjectUpdate.objects.create(name='test-update', project=project)
# Workflow job nodes
- project_node = WorkflowJobNode.objects.create(workflow_job=wfj, job=update,
- ancestor_artifacts={'a': 42, 'b': 43})
+ project_node = WorkflowJobNode.objects.create(workflow_job=wfj, job=update, ancestor_artifacts={'a': 42, 'b': 43})
queued_node = WorkflowJobNode.objects.create(workflow_job=wfj, unified_job_template=job_template)
# Connect project update -> new job
mocker.patch.object(queued_node, 'get_parent_nodes', lambda: [project_node])
@@ -226,8 +233,7 @@ class TestWorkflowJob:
class TestWorkflowJobTemplate:
@pytest.fixture
def wfjt(self, workflow_job_template_factory, organization):
- wfjt = workflow_job_template_factory(
- 'test', organization=organization).workflow_job_template
+ wfjt = workflow_job_template_factory('test', organization=organization).workflow_job_template
wfjt.organization = organization
nodes = [WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt) for i in range(0, 3)]
nodes[0].success_nodes.add(nodes[1])
@@ -276,23 +282,16 @@ class TestWorkflowJobTemplatePrompts:
"""These are tests for prompts that live on the workflow job template model
not the node, prompts apply for entire workflow
"""
+
@pytest.fixture
def wfjt_prompts(self):
return WorkflowJobTemplate.objects.create(
- ask_inventory_on_launch=True,
- ask_variables_on_launch=True,
- ask_limit_on_launch=True,
- ask_scm_branch_on_launch=True
+ ask_inventory_on_launch=True, ask_variables_on_launch=True, ask_limit_on_launch=True, ask_scm_branch_on_launch=True
)
@pytest.fixture
def prompts_data(self, inventory):
- return dict(
- inventory=inventory,
- extra_vars={'foo': 'bar'},
- limit='webservers',
- scm_branch='release-3.3'
- )
+ return dict(inventory=inventory, extra_vars={'foo': 'bar'}, limit='webservers', scm_branch='release-3.3')
def test_apply_workflow_job_prompts(self, workflow_job_template, wfjt_prompts, prompts_data, inventory):
# null or empty fields used
@@ -317,7 +316,6 @@ class TestWorkflowJobTemplatePrompts:
assert workflow_job.inventory == inventory
assert workflow_job.scm_branch == 'bar'
-
@pytest.mark.django_db
def test_process_workflow_job_prompts(self, inventory, workflow_job_template, wfjt_prompts, prompts_data):
accepted, rejected, errors = workflow_job_template._accept_or_ignore_job_kwargs(**prompts_data)
@@ -329,41 +327,30 @@ class TestWorkflowJobTemplatePrompts:
assert rejected == {}
assert not errors
-
@pytest.mark.django_db
def test_set_all_the_prompts(self, post, organization, inventory, org_admin):
r = post(
- url = reverse('api:workflow_job_template_list'),
- data = dict(
+ url=reverse('api:workflow_job_template_list'),
+ data=dict(
name='My new workflow',
organization=organization.id,
inventory=inventory.id,
limit='foooo',
ask_limit_on_launch=True,
scm_branch='bar',
- ask_scm_branch_on_launch=True
+ ask_scm_branch_on_launch=True,
),
- user = org_admin,
- expect = 201
+ user=org_admin,
+ expect=201,
)
wfjt = WorkflowJobTemplate.objects.get(id=r.data['id'])
- assert wfjt.char_prompts == {
- 'limit': 'foooo', 'scm_branch': 'bar'
- }
+ assert wfjt.char_prompts == {'limit': 'foooo', 'scm_branch': 'bar'}
assert wfjt.ask_scm_branch_on_launch is True
assert wfjt.ask_limit_on_launch is True
launch_url = r.data['related']['launch']
with mock.patch('awx.main.queue.CallbackQueueDispatcher.dispatch', lambda self, obj: None):
- r = post(
- url = launch_url,
- data = dict(
- scm_branch = 'prompt_branch',
- limit = 'prompt_limit'
- ),
- user = org_admin,
- expect=201
- )
+ r = post(url=launch_url, data=dict(scm_branch='prompt_branch', limit='prompt_limit'), user=org_admin, expect=201)
assert r.data['limit'] == 'prompt_limit'
assert r.data['scm_branch'] == 'prompt_branch'
@@ -373,32 +360,15 @@ def test_workflow_ancestors(organization):
# Spawn order of templates grandparent -> parent -> child
# create child WFJT and workflow job
child = WorkflowJobTemplate.objects.create(organization=organization, name='child')
- child_job = WorkflowJob.objects.create(
- workflow_job_template=child,
- launch_type='workflow'
- )
+ child_job = WorkflowJob.objects.create(workflow_job_template=child, launch_type='workflow')
# create parent WFJT and workflow job, and link it up
parent = WorkflowJobTemplate.objects.create(organization=organization, name='parent')
- parent_job = WorkflowJob.objects.create(
- workflow_job_template=parent,
- launch_type='workflow'
- )
- WorkflowJobNode.objects.create(
- workflow_job=parent_job,
- unified_job_template=child,
- job=child_job
- )
+ parent_job = WorkflowJob.objects.create(workflow_job_template=parent, launch_type='workflow')
+ WorkflowJobNode.objects.create(workflow_job=parent_job, unified_job_template=child, job=child_job)
# create grandparent WFJT and workflow job and link it up
grandparent = WorkflowJobTemplate.objects.create(organization=organization, name='grandparent')
- grandparent_job = WorkflowJob.objects.create(
- workflow_job_template=grandparent,
- launch_type='schedule'
- )
- WorkflowJobNode.objects.create(
- workflow_job=grandparent_job,
- unified_job_template=parent,
- job=parent_job
- )
+ grandparent_job = WorkflowJob.objects.create(workflow_job_template=grandparent, launch_type='schedule')
+ WorkflowJobNode.objects.create(workflow_job=grandparent_job, unified_job_template=parent, job=parent_job)
# ancestors method gives a list of WFJT ids
assert child_job.get_ancestor_workflows() == [parent, grandparent]
@@ -407,14 +377,7 @@ def test_workflow_ancestors(organization):
def test_workflow_ancestors_recursion_prevention(organization):
    # This is toxic database data; this tests that it doesn't create an infinite loop
wfjt = WorkflowJobTemplate.objects.create(organization=organization, name='child')
- wfj = WorkflowJob.objects.create(
- workflow_job_template=wfjt,
- launch_type='workflow'
- )
- WorkflowJobNode.objects.create(
- workflow_job=wfj,
- unified_job_template=wfjt,
- job=wfj # well, this is a problem
- )
+ wfj = WorkflowJob.objects.create(workflow_job_template=wfjt, launch_type='workflow')
+ WorkflowJobNode.objects.create(workflow_job=wfj, unified_job_template=wfjt, job=wfj) # well, this is a problem
# mostly, we just care that this assertion finishes in finite time
assert wfj.get_ancestor_workflows() == []
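# Hedged sketch (editor's addition, not part of this commit) of the guard this
# test exercises: ancestor traversal must tolerate self-referential rows and
# still terminate. AWX's real get_ancestor_workflows differs; the names and
# structure here are purely illustrative.
def ancestors(start, parent_of):
    seen, chain = {id(start)}, []
    node = parent_of(start)
    while node is not None and id(node) not in seen:
        seen.add(id(node))
        chain.append(node)
        node = parent_of(node)
    return chain

class Node:
    def __init__(self):
        self.parent = None

n = Node()
n.parent = n  # the "toxic" self-reference from the test
print(ancestors(n, lambda x: x.parent))  # [] -- terminates despite the loop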
diff --git a/awx/main/tests/functional/task_management/test_capacity.py b/awx/main/tests/functional/task_management/test_capacity.py
index b3be1a3a77..ee05150255 100644
--- a/awx/main/tests/functional/task_management/test_capacity.py
+++ b/awx/main/tests/functional/task_management/test_capacity.py
@@ -7,7 +7,6 @@ from awx.main.models import (
class TestCapacityMapping(TransactionTestCase):
-
def sample_cluster(self):
ig_small = InstanceGroup.objects.create(name='ig_small')
ig_large = InstanceGroup.objects.create(name='ig_large')
diff --git a/awx/main/tests/functional/task_management/test_container_groups.py b/awx/main/tests/functional/task_management/test_container_groups.py
index e739ff879b..e88ef2deb1 100644
--- a/awx/main/tests/functional/task_management/test_container_groups.py
+++ b/awx/main/tests/functional/task_management/test_container_groups.py
@@ -1,7 +1,7 @@
import subprocess
import base64
-from unittest import mock # noqa
+from unittest import mock # noqa
import pytest
from awx.main.scheduler.kubernetes import PodManager
@@ -15,9 +15,7 @@ def containerized_job(default_instance_group, kube_credential, job_template_fact
default_instance_group.credential = kube_credential
default_instance_group.is_container_group = True
default_instance_group.save()
- objects = job_template_factory('jt', organization='org1', project='proj',
- inventory='inv', credential='cred',
- jobs=['my_job'])
+ objects = job_template_factory('jt', organization='org1', project='proj', inventory='inv', credential='cred', jobs=['my_job'])
jt = objects.job_template
jt.instance_groups.add(default_instance_group)
@@ -39,9 +37,7 @@ def test_containerized_job(containerized_job):
def test_kubectl_ssl_verification(containerized_job):
cred = containerized_job.instance_group.credential
cred.inputs['verify_ssl'] = True
- key_material = subprocess.run('openssl genrsa 2> /dev/null',
- shell=True, check=True,
- stdout=subprocess.PIPE)
+ key_material = subprocess.run('openssl genrsa 2> /dev/null', shell=True, check=True, stdout=subprocess.PIPE)
key = create_temporary_fifo(key_material.stdout)
cmd = f"""
openssl req -x509 -sha256 -new -nodes \
diff --git a/awx/main/tests/functional/task_management/test_rampart_groups.py b/awx/main/tests/functional/task_management/test_rampart_groups.py
index b763ef5ca3..b51fcab797 100644
--- a/awx/main/tests/functional/task_management/test_rampart_groups.py
+++ b/awx/main/tests/functional/task_management/test_rampart_groups.py
@@ -7,22 +7,17 @@ from awx.main.tasks import apply_cluster_membership_policies
@pytest.mark.django_db
-def test_multi_group_basic_job_launch(instance_factory, default_instance_group, mocker,
- instance_group_factory, job_template_factory):
+def test_multi_group_basic_job_launch(instance_factory, default_instance_group, mocker, instance_group_factory, job_template_factory):
i1 = instance_factory("i1")
i2 = instance_factory("i2")
ig1 = instance_group_factory("ig1", instances=[i1])
ig2 = instance_group_factory("ig2", instances=[i2])
- objects1 = job_template_factory('jt1', organization='org1', project='proj1',
- inventory='inv1', credential='cred1',
- jobs=["job_should_start"])
+ objects1 = job_template_factory('jt1', organization='org1', project='proj1', inventory='inv1', credential='cred1', jobs=["job_should_start"])
objects1.job_template.instance_groups.add(ig1)
j1 = objects1.jobs['job_should_start']
j1.status = 'pending'
j1.save()
- objects2 = job_template_factory('jt2', organization='org2', project='proj2',
- inventory='inv2', credential='cred2',
- jobs=["job_should_still_start"])
+ objects2 = job_template_factory('jt2', organization='org2', project='proj2', inventory='inv2', credential='cred2', jobs=["job_should_still_start"])
objects2.job_template.instance_groups.add(ig2)
j2 = objects2.jobs['job_should_still_start']
j2.status = 'pending'
@@ -34,17 +29,13 @@ def test_multi_group_basic_job_launch(instance_factory, default_instance_group,
TaskManager.start_task.assert_has_calls([mock.call(j1, ig1, [], i1), mock.call(j2, ig2, [], i2)])
-
@pytest.mark.django_db
-def test_multi_group_with_shared_dependency(instance_factory, default_instance_group, mocker,
- instance_group_factory, job_template_factory):
+def test_multi_group_with_shared_dependency(instance_factory, default_instance_group, mocker, instance_group_factory, job_template_factory):
i1 = instance_factory("i1")
i2 = instance_factory("i2")
ig1 = instance_group_factory("ig1", instances=[i1])
ig2 = instance_group_factory("ig2", instances=[i2])
- objects1 = job_template_factory('jt1', organization='org1', project='proj1',
- inventory='inv1', credential='cred1',
- jobs=["job_should_start"])
+ objects1 = job_template_factory('jt1', organization='org1', project='proj1', inventory='inv1', credential='cred1', jobs=["job_should_start"])
objects1.job_template.instance_groups.add(ig1)
p = objects1.project
p.scm_update_on_launch = True
@@ -55,9 +46,7 @@ def test_multi_group_with_shared_dependency(instance_factory, default_instance_g
j1 = objects1.jobs['job_should_start']
j1.status = 'pending'
j1.save()
- objects2 = job_template_factory('jt2', organization=objects1.organization, project=p,
- inventory='inv2', credential='cred2',
- jobs=["job_should_still_start"])
+ objects2 = job_template_factory('jt2', organization=objects1.organization, project=p, inventory='inv2', credential='cred2', jobs=["job_should_still_start"])
objects2.job_template.instance_groups.add(ig2)
j2 = objects2.jobs['job_should_still_start']
j2.status = 'pending'
@@ -65,10 +54,7 @@ def test_multi_group_with_shared_dependency(instance_factory, default_instance_g
with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
TaskManager().schedule()
pu = p.project_updates.first()
- TaskManager.start_task.assert_called_once_with(pu,
- default_instance_group,
- [j1,j2],
- default_instance_group.instances.all()[0])
+ TaskManager.start_task.assert_called_once_with(pu, default_instance_group, [j1, j2], default_instance_group.instances.all()[0])
pu.finished = pu.created + timedelta(seconds=1)
pu.status = "successful"
pu.save()
@@ -93,38 +79,33 @@ def test_workflow_job_no_instancegroup(workflow_job_template_factory, default_in
@pytest.mark.django_db
-def test_overcapacity_blocking_other_groups_unaffected(instance_factory, default_instance_group, mocker,
- instance_group_factory, job_template_factory):
+def test_overcapacity_blocking_other_groups_unaffected(instance_factory, default_instance_group, mocker, instance_group_factory, job_template_factory):
i1 = instance_factory("i1")
i1.capacity = 1000
i1.save()
i2 = instance_factory("i2")
ig1 = instance_group_factory("ig1", instances=[i1])
ig2 = instance_group_factory("ig2", instances=[i2])
- objects1 = job_template_factory('jt1', organization='org1', project='proj1',
- inventory='inv1', credential='cred1',
- jobs=["job_should_start"])
+ objects1 = job_template_factory('jt1', organization='org1', project='proj1', inventory='inv1', credential='cred1', jobs=["job_should_start"])
objects1.job_template.instance_groups.add(ig1)
j1 = objects1.jobs['job_should_start']
j1.status = 'pending'
j1.save()
- objects2 = job_template_factory('jt2', organization=objects1.organization, project='proj2',
- inventory='inv2', credential='cred2',
- jobs=["job_should_start", "job_should_also_start"])
+ objects2 = job_template_factory(
+ 'jt2', organization=objects1.organization, project='proj2', inventory='inv2', credential='cred2', jobs=["job_should_start", "job_should_also_start"]
+ )
objects2.job_template.instance_groups.add(ig1)
j1_1 = objects2.jobs['job_should_also_start']
j1_1.status = 'pending'
j1_1.save()
- objects3 = job_template_factory('jt3', organization='org2', project='proj3',
- inventory='inv3', credential='cred3',
- jobs=["job_should_still_start"])
+ objects3 = job_template_factory('jt3', organization='org2', project='proj3', inventory='inv3', credential='cred3', jobs=["job_should_still_start"])
objects3.job_template.instance_groups.add(ig2)
j2 = objects3.jobs['job_should_still_start']
j2.status = 'pending'
j2.save()
- objects4 = job_template_factory('jt4', organization=objects3.organization, project='proj4',
- inventory='inv4', credential='cred4',
- jobs=["job_should_not_start"])
+ objects4 = job_template_factory(
+ 'jt4', organization=objects3.organization, project='proj4', inventory='inv4', credential='cred4', jobs=["job_should_not_start"]
+ )
objects4.job_template.instance_groups.add(ig2)
j2_1 = objects4.jobs['job_should_not_start']
j2_1.status = 'pending'
@@ -134,29 +115,24 @@ def test_overcapacity_blocking_other_groups_unaffected(instance_factory, default
mock_task_impact.return_value = 500
with mock.patch.object(TaskManager, "start_task", wraps=tm.start_task) as mock_job:
tm.schedule()
- mock_job.assert_has_calls([mock.call(j1, ig1, [], i1),
- mock.call(j1_1, ig1, [], i1),
- mock.call(j2, ig2, [], i2)])
+ mock_job.assert_has_calls([mock.call(j1, ig1, [], i1), mock.call(j1_1, ig1, [], i1), mock.call(j2, ig2, [], i2)])
assert mock_job.call_count == 3
@pytest.mark.django_db
-def test_failover_group_run(instance_factory, default_instance_group, mocker,
- instance_group_factory, job_template_factory):
+def test_failover_group_run(instance_factory, default_instance_group, mocker, instance_group_factory, job_template_factory):
i1 = instance_factory("i1")
i2 = instance_factory("i2")
ig1 = instance_group_factory("ig1", instances=[i1])
ig2 = instance_group_factory("ig2", instances=[i2])
- objects1 = job_template_factory('jt1', organization='org1', project='proj1',
- inventory='inv1', credential='cred1',
- jobs=["job_should_start"])
+ objects1 = job_template_factory('jt1', organization='org1', project='proj1', inventory='inv1', credential='cred1', jobs=["job_should_start"])
objects1.job_template.instance_groups.add(ig1)
j1 = objects1.jobs['job_should_start']
j1.status = 'pending'
j1.save()
- objects2 = job_template_factory('jt2', organization=objects1.organization, project='proj2',
- inventory='inv2', credential='cred2',
- jobs=["job_should_start", "job_should_also_start"])
+ objects2 = job_template_factory(
+ 'jt2', organization=objects1.organization, project='proj2', inventory='inv2', credential='cred2', jobs=["job_should_start", "job_should_also_start"]
+ )
objects2.job_template.instance_groups.add(ig1)
objects2.job_template.instance_groups.add(ig2)
j1_1 = objects2.jobs['job_should_also_start']
@@ -167,8 +143,7 @@ def test_failover_group_run(instance_factory, default_instance_group, mocker,
mock_task_impact.return_value = 500
with mock.patch.object(TaskManager, "start_task", wraps=tm.start_task) as mock_job:
tm.schedule()
- mock_job.assert_has_calls([mock.call(j1, ig1, [], i1),
- mock.call(j1_1, ig2, [], i2)])
+ mock_job.assert_has_calls([mock.call(j1, ig1, [], i1), mock.call(j1_1, ig2, [], i2)])
assert mock_job.call_count == 2
diff --git a/awx/main/tests/functional/task_management/test_scheduler.py b/awx/main/tests/functional/task_management/test_scheduler.py
index 64a86127a8..64dcc97415 100644
--- a/awx/main/tests/functional/task_management/test_scheduler.py
+++ b/awx/main/tests/functional/task_management/test_scheduler.py
@@ -12,9 +12,7 @@ from awx.main.models import WorkflowJobTemplate, JobTemplate, Job
@pytest.mark.django_db
def test_single_job_scheduler_launch(default_instance_group, job_template_factory, mocker):
instance = default_instance_group.instances.all()[0]
- objects = job_template_factory('jt', organization='org1', project='proj',
- inventory='inv', credential='cred',
- jobs=["job_should_start"])
+ objects = job_template_factory('jt', organization='org1', project='proj', inventory='inv', credential='cred', jobs=["job_should_start"])
j = objects.jobs["job_should_start"]
j.status = 'pending'
j.save()
@@ -25,7 +23,6 @@ def test_single_job_scheduler_launch(default_instance_group, job_template_factor
@pytest.mark.django_db
class TestJobLifeCycle:
-
def run_tm(self, tm, expect_channel=None, expect_schedule=None, expect_commit=None):
"""Test helper method that takes parameters to assert against
expect_channel - list of expected websocket emit channel message calls
@@ -50,17 +47,10 @@ class TestJobLifeCycle:
assert mock_commit.mock_calls == expect_commit
def test_task_manager_workflow_rescheduling(self, job_template_factory, inventory, project, default_instance_group):
- jt = JobTemplate.objects.create(
- allow_simultaneous=True,
- inventory=inventory,
- project=project,
- playbook='helloworld.yml'
- )
+ jt = JobTemplate.objects.create(allow_simultaneous=True, inventory=inventory, project=project, playbook='helloworld.yml')
wfjt = WorkflowJobTemplate.objects.create(name='foo')
for i in range(2):
- wfjt.workflow_nodes.create(
- unified_job_template=jt
- )
+ wfjt.workflow_nodes.create(unified_job_template=jt)
wj = wfjt.create_unified_job()
assert wj.workflow_nodes.count() == 2
wj.signal_start()
@@ -93,9 +83,7 @@ class TestJobLifeCycle:
wfjts = [WorkflowJobTemplate.objects.create(name='foo')]
for i in range(5):
wfjt = WorkflowJobTemplate.objects.create(name='foo{}'.format(i))
- wfjts[-1].workflow_nodes.create(
- unified_job_template=wfjt
- )
+ wfjts[-1].workflow_nodes.create(unified_job_template=wfjt)
wfjts.append(wfjt)
wj = wfjts[0].create_unified_job()
@@ -115,9 +103,9 @@ class TestJobLifeCycle:
@pytest.mark.django_db
def test_single_jt_multi_job_launch_blocks_last(default_instance_group, job_template_factory, mocker):
instance = default_instance_group.instances.all()[0]
- objects = job_template_factory('jt', organization='org1', project='proj',
- inventory='inv', credential='cred',
- jobs=["job_should_start", "job_should_not_start"])
+ objects = job_template_factory(
+ 'jt', organization='org1', project='proj', inventory='inv', credential='cred', jobs=["job_should_start", "job_should_not_start"]
+ )
j1 = objects.jobs["job_should_start"]
j1.status = 'pending'
j1.save()
@@ -137,9 +125,9 @@ def test_single_jt_multi_job_launch_blocks_last(default_instance_group, job_temp
@pytest.mark.django_db
def test_single_jt_multi_job_launch_allow_simul_allowed(default_instance_group, job_template_factory, mocker):
instance = default_instance_group.instances.all()[0]
- objects = job_template_factory('jt', organization='org1', project='proj',
- inventory='inv', credential='cred',
- jobs=["job_should_start", "job_should_not_start"])
+ objects = job_template_factory(
+ 'jt', organization='org1', project='proj', inventory='inv', credential='cred', jobs=["job_should_start", "job_should_not_start"]
+ )
jt = objects.job_template
jt.save()
@@ -153,19 +141,14 @@ def test_single_jt_multi_job_launch_allow_simul_allowed(default_instance_group,
j2.save()
with mock.patch("awx.main.scheduler.TaskManager.start_task"):
TaskManager().schedule()
- TaskManager.start_task.assert_has_calls([mock.call(j1, default_instance_group, [], instance),
- mock.call(j2, default_instance_group, [], instance)])
+ TaskManager.start_task.assert_has_calls([mock.call(j1, default_instance_group, [], instance), mock.call(j2, default_instance_group, [], instance)])
@pytest.mark.django_db
def test_multi_jt_capacity_blocking(default_instance_group, job_template_factory, mocker):
instance = default_instance_group.instances.all()[0]
- objects1 = job_template_factory('jt1', organization='org1', project='proj1',
- inventory='inv1', credential='cred1',
- jobs=["job_should_start"])
- objects2 = job_template_factory('jt2', organization='org2', project='proj2',
- inventory='inv2', credential='cred2',
- jobs=["job_should_not_start"])
+ objects1 = job_template_factory('jt1', organization='org1', project='proj1', inventory='inv1', credential='cred1', jobs=["job_should_start"])
+ objects2 = job_template_factory('jt2', organization='org2', project='proj2', inventory='inv2', credential='cred2', jobs=["job_should_not_start"])
j1 = objects1.jobs["job_should_start"]
j1.status = 'pending'
j1.save()
@@ -187,9 +170,7 @@ def test_multi_jt_capacity_blocking(default_instance_group, job_template_factory
@pytest.mark.django_db
def test_single_job_dependencies_project_launch(default_instance_group, job_template_factory, mocker):
- objects = job_template_factory('jt', organization='org1', project='proj',
- inventory='inv', credential='cred',
- jobs=["job_should_start"])
+ objects = job_template_factory('jt', organization='org1', project='proj', inventory='inv', credential='cred', jobs=["job_should_start"])
instance = default_instance_group.instances.all()[0]
j = objects.jobs["job_should_start"]
j.status = 'pending'
@@ -217,9 +198,7 @@ def test_single_job_dependencies_project_launch(default_instance_group, job_temp
@pytest.mark.django_db
def test_single_job_dependencies_inventory_update_launch(default_instance_group, job_template_factory, mocker, inventory_source_factory):
- objects = job_template_factory('jt', organization='org1', project='proj',
- inventory='inv', credential='cred',
- jobs=["job_should_start"])
+ objects = job_template_factory('jt', organization='org1', project='proj', inventory='inv', credential='cred', jobs=["job_should_start"])
instance = default_instance_group.instances.all()[0]
j = objects.jobs["job_should_start"]
j.status = 'pending'
@@ -248,9 +227,7 @@ def test_single_job_dependencies_inventory_update_launch(default_instance_group,
@pytest.mark.django_db
def test_job_dependency_with_already_updated(default_instance_group, job_template_factory, mocker, inventory_source_factory):
- objects = job_template_factory('jt', organization='org1', project='proj',
- inventory='inv', credential='cred',
- jobs=["job_should_start"])
+ objects = job_template_factory('jt', organization='org1', project='proj', inventory='inv', credential='cred', jobs=["job_should_start"])
instance = default_instance_group.instances.all()[0]
j = objects.jobs["job_should_start"]
j.status = 'pending'
@@ -279,9 +256,7 @@ def test_job_dependency_with_already_updated(default_instance_group, job_templat
@pytest.mark.django_db
def test_shared_dependencies_launch(default_instance_group, job_template_factory, mocker, inventory_source_factory):
instance = default_instance_group.instances.all()[0]
- objects = job_template_factory('jt', organization='org1', project='proj',
- inventory='inv', credential='cred',
- jobs=["first_job", "second_job"])
+ objects = job_template_factory('jt', organization='org1', project='proj', inventory='inv', credential='cred', jobs=["first_job", "second_job"])
j1 = objects.jobs["first_job"]
j1.status = 'pending'
j1.save()
@@ -307,8 +282,9 @@ def test_shared_dependencies_launch(default_instance_group, job_template_factory
TaskManager().schedule()
pu = p.project_updates.first()
iu = ii.inventory_updates.first()
- TaskManager.start_task.assert_has_calls([mock.call(iu, default_instance_group, [j1, j2, pu], instance),
- mock.call(pu, default_instance_group, [j1, j2, iu], instance)])
+ TaskManager.start_task.assert_has_calls(
+ [mock.call(iu, default_instance_group, [j1, j2, pu], instance), mock.call(pu, default_instance_group, [j1, j2, iu], instance)]
+ )
pu.status = "successful"
pu.finished = pu.created + timedelta(seconds=1)
pu.save()
@@ -331,9 +307,7 @@ def test_shared_dependencies_launch(default_instance_group, job_template_factory
@pytest.mark.django_db
def test_job_not_blocking_project_update(default_instance_group, job_template_factory):
- objects = job_template_factory('jt', organization='org1', project='proj',
- inventory='inv', credential='cred',
- jobs=["job"])
+ objects = job_template_factory('jt', organization='org1', project='proj', inventory='inv', credential='cred', jobs=["job"])
job = objects.jobs["job"]
job.instance_group = default_instance_group
job.status = "running"
@@ -357,9 +331,7 @@ def test_job_not_blocking_project_update(default_instance_group, job_template_fa
@pytest.mark.django_db
def test_job_not_blocking_inventory_update(default_instance_group, job_template_factory, inventory_source_factory):
- objects = job_template_factory('jt', organization='org1', project='proj',
- inventory='inv', credential='cred',
- jobs=["job"])
+ objects = job_template_factory('jt', organization='org1', project='proj', inventory='inv', credential='cred', jobs=["job"])
job = objects.jobs["job"]
job.instance_group = default_instance_group
job.status = "running"
@@ -394,7 +366,6 @@ def test_generate_dependencies_only_once(job_template_factory):
job.name = "job_gen_dep"
job.save()
-
with mock.patch("awx.main.scheduler.TaskManager.start_task"):
# job starts with dependencies_processed as False
assert not job.dependencies_processed
diff --git a/awx/main/tests/functional/test_copy.py b/awx/main/tests/functional/test_copy.py
index 7be582d6c8..31bd250662 100644
--- a/awx/main/tests/functional/test_copy.py
+++ b/awx/main/tests/functional/test_copy.py
@@ -3,16 +3,13 @@ from unittest import mock
from awx.api.versioning import reverse
from awx.main.utils import decrypt_field
-from awx.main.models.workflow import (
- WorkflowJobTemplate, WorkflowJobTemplateNode, WorkflowApprovalTemplate
-)
+from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobTemplateNode, WorkflowApprovalTemplate
from awx.main.models.jobs import JobTemplate
from awx.main.tasks import deep_copy_model_obj
@pytest.mark.django_db
-def test_job_template_copy(post, get, project, inventory, machine_credential, vault_credential,
- credential, alice, job_template_with_survey_passwords, admin):
+def test_job_template_copy(post, get, project, inventory, machine_credential, vault_credential, credential, alice, job_template_with_survey_passwords, admin):
job_template_with_survey_passwords.project = project
job_template_with_survey_passwords.inventory = inventory
job_template_with_survey_passwords.save()
@@ -22,34 +19,23 @@ def test_job_template_copy(post, get, project, inventory, machine_credential, va
job_template_with_survey_passwords.admin_role.members.add(alice)
project.admin_role.members.add(alice)
inventory.admin_role.members.add(alice)
- assert get(
- reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}),
- alice, expect=200
- ).data['can_copy'] is False
- assert get(
- reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}),
- admin, expect=200
- ).data['can_copy'] is True
- assert post(
- reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}),
- {'name': 'new jt name'}, alice, expect=403
- ).data['detail'] == 'Insufficient access to Job Template credentials.'
- jt_copy_pk = post(
- reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}),
- {'name': 'new jt name'}, admin, expect=201
- ).data['id']
+ assert get(reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}), alice, expect=200).data['can_copy'] is False
+ assert get(reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}), admin, expect=200).data['can_copy'] is True
+ assert (
+ post(reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}), {'name': 'new jt name'}, alice, expect=403).data['detail']
+ == 'Insufficient access to Job Template credentials.'
+ )
+ jt_copy_pk = post(reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}), {'name': 'new jt name'}, admin, expect=201).data[
+ 'id'
+ ]
# give credential access to user 'alice'
for c in (credential, machine_credential, vault_credential):
c.use_role.members.add(alice)
c.save()
- assert get(
- reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}),
- alice, expect=200
- ).data['can_copy'] is True
+ assert get(reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}), alice, expect=200).data['can_copy'] is True
jt_copy_pk_alice = post(
- reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}),
- {'name': 'new jt name'}, alice, expect=201
+ reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}), {'name': 'new jt name'}, alice, expect=201
).data['id']
jt_copy_admin = type(job_template_with_survey_passwords).objects.get(pk=jt_copy_pk)
@@ -75,18 +61,11 @@ def test_project_copy(post, get, project, organization, scm_credential, alice):
project.credential = scm_credential
project.save()
project.admin_role.members.add(alice)
- assert get(
- reverse('api:project_copy', kwargs={'pk': project.pk}), alice, expect=200
- ).data['can_copy'] is False
+ assert get(reverse('api:project_copy', kwargs={'pk': project.pk}), alice, expect=200).data['can_copy'] is False
project.organization.admin_role.members.add(alice)
scm_credential.use_role.members.add(alice)
- assert get(
- reverse('api:project_copy', kwargs={'pk': project.pk}), alice, expect=200
- ).data['can_copy'] is True
- project_copy_pk = post(
- reverse('api:project_copy', kwargs={'pk': project.pk}),
- {'name': 'copied project'}, alice, expect=201
- ).data['id']
+ assert get(reverse('api:project_copy', kwargs={'pk': project.pk}), alice, expect=200).data['can_copy'] is True
+ project_copy_pk = post(reverse('api:project_copy', kwargs={'pk': project.pk}), {'name': 'copied project'}, alice, expect=201).data['id']
project_copy = type(project).objects.get(pk=project_copy_pk)
assert project_copy.created_by == alice
assert project_copy.name == 'copied project'
@@ -105,18 +84,11 @@ def test_inventory_copy(inventory, group_factory, post, get, alice, organization
host = group_1_1.hosts.create(name='host', inventory=inventory)
group_2_1.hosts.add(host)
inventory.admin_role.members.add(alice)
- assert get(
- reverse('api:inventory_copy', kwargs={'pk': inventory.pk}), alice, expect=200
- ).data['can_copy'] is False
+ assert get(reverse('api:inventory_copy', kwargs={'pk': inventory.pk}), alice, expect=200).data['can_copy'] is False
inventory.organization.admin_role.members.add(alice)
- assert get(
- reverse('api:inventory_copy', kwargs={'pk': inventory.pk}), alice, expect=200
- ).data['can_copy'] is True
+ assert get(reverse('api:inventory_copy', kwargs={'pk': inventory.pk}), alice, expect=200).data['can_copy'] is True
with mock.patch('awx.api.generics.trigger_delayed_deep_copy') as deep_copy_mock:
- inv_copy_pk = post(
- reverse('api:inventory_copy', kwargs={'pk': inventory.pk}),
- {'name': 'new inv name'}, alice, expect=201
- ).data['id']
+ inv_copy_pk = post(reverse('api:inventory_copy', kwargs={'pk': inventory.pk}), {'name': 'new inv name'}, alice, expect=201).data['id']
inventory_copy = type(inventory).objects.get(pk=inv_copy_pk)
args, kwargs = deep_copy_mock.call_args
deep_copy_model_obj(*args, **kwargs)
@@ -140,19 +112,14 @@ def test_workflow_job_template_copy(workflow_job_template, post, get, admin, org
workflow_job_template.organization = organization
workflow_job_template.save()
jts = [JobTemplate.objects.create(name='test-jt-{}'.format(i)) for i in range(0, 5)]
- nodes = [
- WorkflowJobTemplateNode.objects.create(
- workflow_job_template=workflow_job_template, unified_job_template=jts[i]
- ) for i in range(0, 5)
- ]
+ nodes = [WorkflowJobTemplateNode.objects.create(workflow_job_template=workflow_job_template, unified_job_template=jts[i]) for i in range(0, 5)]
nodes[0].success_nodes.add(nodes[1])
nodes[1].success_nodes.add(nodes[2])
nodes[0].failure_nodes.add(nodes[3])
nodes[3].failure_nodes.add(nodes[4])
with mock.patch('awx.api.generics.trigger_delayed_deep_copy') as deep_copy_mock:
wfjt_copy_id = post(
- reverse('api:workflow_job_template_copy', kwargs={'pk': workflow_job_template.pk}),
- {'name': 'new wfjt name'}, admin, expect=201
+ reverse('api:workflow_job_template_copy', kwargs={'pk': workflow_job_template.pk}), {'name': 'new wfjt name'}, admin, expect=201
).data['id']
wfjt_copy = type(workflow_job_template).objects.get(pk=wfjt_copy_id)
args, kwargs = deep_copy_mock.call_args
@@ -162,12 +129,7 @@ def test_workflow_job_template_copy(workflow_job_template, post, get, admin, org
assert wfjt_copy.name == 'new wfjt name'
copied_node_list = [x for x in wfjt_copy.workflow_job_template_nodes.all()]
copied_node_list.sort(key=lambda x: int(x.unified_job_template.name[-1]))
- for node, success_count, failure_count, always_count in zip(
- copied_node_list,
- [1, 1, 0, 0, 0],
- [1, 0, 0, 1, 0],
- [0, 0, 0, 0, 0]
- ):
+ for node, success_count, failure_count, always_count in zip(copied_node_list, [1, 1, 0, 0, 0], [1, 0, 0, 1, 0], [0, 0, 0, 0, 0]):
assert node.success_nodes.count() == success_count
assert node.failure_nodes.count() == failure_count
assert node.always_nodes.count() == always_count
@@ -181,19 +143,8 @@ def test_workflow_job_template_copy(workflow_job_template, post, get, admin, org
def test_workflow_approval_node_copy(workflow_job_template, post, get, admin, organization):
workflow_job_template.organization = organization
workflow_job_template.save()
- ajts = [
- WorkflowApprovalTemplate.objects.create(
- name='test-approval-{}'.format(i),
- description='description-{}'.format(i),
- timeout=30
- )
- for i in range(0, 5)
- ]
- nodes = [
- WorkflowJobTemplateNode.objects.create(
- workflow_job_template=workflow_job_template, unified_job_template=ajts[i]
- ) for i in range(0, 5)
- ]
+ ajts = [WorkflowApprovalTemplate.objects.create(name='test-approval-{}'.format(i), description='description-{}'.format(i), timeout=30) for i in range(0, 5)]
+ nodes = [WorkflowJobTemplateNode.objects.create(workflow_job_template=workflow_job_template, unified_job_template=ajts[i]) for i in range(0, 5)]
nodes[0].success_nodes.add(nodes[1])
nodes[1].success_nodes.add(nodes[2])
nodes[0].failure_nodes.add(nodes[3])
@@ -204,8 +155,7 @@ def test_workflow_approval_node_copy(workflow_job_template, post, get, admin, or
with mock.patch('awx.api.generics.trigger_delayed_deep_copy') as deep_copy_mock:
wfjt_copy_id = post(
- reverse('api:workflow_job_template_copy', kwargs={'pk': workflow_job_template.pk}),
- {'name': 'new wfjt name'}, admin, expect=201
+ reverse('api:workflow_job_template_copy', kwargs={'pk': workflow_job_template.pk}), {'name': 'new wfjt name'}, admin, expect=201
).data['id']
wfjt_copy = type(workflow_job_template).objects.get(pk=wfjt_copy_id)
args, kwargs = deep_copy_mock.call_args
@@ -217,12 +167,8 @@ def test_workflow_approval_node_copy(workflow_job_template, post, get, admin, or
assert WorkflowJobTemplate.objects.count() == 2
assert WorkflowJobTemplateNode.objects.count() == 10
assert WorkflowApprovalTemplate.objects.count() == 10
- original_templates = [
- x.unified_job_template for x in workflow_job_template.workflow_job_template_nodes.all()
- ]
- copied_templates = [
- x.unified_job_template for x in wfjt_copy.workflow_job_template_nodes.all()
- ]
+ original_templates = [x.unified_job_template for x in workflow_job_template.workflow_job_template_nodes.all()]
+ copied_templates = [x.unified_job_template for x in wfjt_copy.workflow_job_template_nodes.all()]
# make sure shallow fields like `timeout` are copied properly
for i, t in enumerate(original_templates):
@@ -235,78 +181,50 @@ def test_workflow_approval_node_copy(workflow_job_template, post, get, admin, or
# the Approval Template IDs on the *original* WFJT should not match *any*
# of the Approval Template IDs on the *copied* WFJT
- assert not set([x.id for x in original_templates]).intersection(
- set([x.id for x in copied_templates])
- )
+ assert not set([x.id for x in original_templates]).intersection(set([x.id for x in copied_templates]))
# if you remove the " copy" suffix from the copied template names, they
# should match the original templates
- assert (
- set([x.name for x in original_templates]) ==
- set([x.name.replace(' copy', '') for x in copied_templates])
- )
+ assert set([x.name for x in original_templates]) == set([x.name.replace(' copy', '') for x in copied_templates])
@pytest.mark.django_db
def test_credential_copy(post, get, machine_credential, credentialtype_ssh, admin):
- assert get(
- reverse('api:credential_copy', kwargs={'pk': machine_credential.pk}), admin, expect=200
- ).data['can_copy'] is True
- credential_copy_pk = post(
- reverse('api:credential_copy', kwargs={'pk': machine_credential.pk}),
- {'name': 'copied credential'}, admin, expect=201
- ).data['id']
+ assert get(reverse('api:credential_copy', kwargs={'pk': machine_credential.pk}), admin, expect=200).data['can_copy'] is True
+ credential_copy_pk = post(reverse('api:credential_copy', kwargs={'pk': machine_credential.pk}), {'name': 'copied credential'}, admin, expect=201).data['id']
credential_copy = type(machine_credential).objects.get(pk=credential_copy_pk)
assert credential_copy.created_by == admin
assert credential_copy.name == 'copied credential'
assert credential_copy.credential_type == credentialtype_ssh
assert credential_copy.inputs['username'] == machine_credential.inputs['username']
- assert (decrypt_field(credential_copy, 'password') ==
- decrypt_field(machine_credential, 'password'))
+ assert decrypt_field(credential_copy, 'password') == decrypt_field(machine_credential, 'password')
@pytest.mark.django_db
-def test_notification_template_copy(post, get, notification_template_with_encrypt,
- organization, alice):
+def test_notification_template_copy(post, get, notification_template_with_encrypt, organization, alice):
notification_template_with_encrypt.organization.auditor_role.members.add(alice)
- assert get(
- reverse(
- 'api:notification_template_copy', kwargs={'pk': notification_template_with_encrypt.pk}
- ), alice, expect=200
- ).data['can_copy'] is False
+ assert get(reverse('api:notification_template_copy', kwargs={'pk': notification_template_with_encrypt.pk}), alice, expect=200).data['can_copy'] is False
notification_template_with_encrypt.organization.admin_role.members.add(alice)
- assert get(
- reverse(
- 'api:notification_template_copy', kwargs={'pk': notification_template_with_encrypt.pk}
- ), alice, expect=200
- ).data['can_copy'] is True
+ assert get(reverse('api:notification_template_copy', kwargs={'pk': notification_template_with_encrypt.pk}), alice, expect=200).data['can_copy'] is True
nt_copy_pk = post(
- reverse(
- 'api:notification_template_copy', kwargs={'pk': notification_template_with_encrypt.pk}
- ), {'name': 'copied nt'}, alice, expect=201
+ reverse('api:notification_template_copy', kwargs={'pk': notification_template_with_encrypt.pk}), {'name': 'copied nt'}, alice, expect=201
).data['id']
notification_template_copy = type(notification_template_with_encrypt).objects.get(pk=nt_copy_pk)
assert notification_template_copy.created_by == alice
assert notification_template_copy.name == 'copied nt'
assert notification_template_copy.organization == organization
- assert (decrypt_field(notification_template_with_encrypt, 'notification_configuration', 'token') ==
- decrypt_field(notification_template_copy, 'notification_configuration', 'token'))
+ assert decrypt_field(notification_template_with_encrypt, 'notification_configuration', 'token') == decrypt_field(
+ notification_template_copy, 'notification_configuration', 'token'
+ )
@pytest.mark.django_db
def test_inventory_script_copy(post, get, inventory_script, organization, alice):
inventory_script.organization.auditor_role.members.add(alice)
- assert get(
- reverse('api:inventory_script_copy', kwargs={'pk': inventory_script.pk}), alice, expect=200
- ).data['can_copy'] is False
+ assert get(reverse('api:inventory_script_copy', kwargs={'pk': inventory_script.pk}), alice, expect=200).data['can_copy'] is False
inventory_script.organization.admin_role.members.add(alice)
- assert get(
- reverse('api:inventory_script_copy', kwargs={'pk': inventory_script.pk}), alice, expect=200
- ).data['can_copy'] is True
- is_copy_pk = post(
- reverse('api:inventory_script_copy', kwargs={'pk': inventory_script.pk}),
- {'name': 'copied inv script'}, alice, expect=201
- ).data['id']
+ assert get(reverse('api:inventory_script_copy', kwargs={'pk': inventory_script.pk}), alice, expect=200).data['can_copy'] is True
+ is_copy_pk = post(reverse('api:inventory_script_copy', kwargs={'pk': inventory_script.pk}), {'name': 'copied inv script'}, alice, expect=201).data['id']
inventory_script_copy = type(inventory_script).objects.get(pk=is_copy_pk)
assert inventory_script_copy.created_by == alice
assert inventory_script_copy.name == 'copied inv script'
diff --git a/awx/main/tests/functional/test_credential.py b/awx/main/tests/functional/test_credential.py
index f2bfa92dac..2d96a25a5f 100644
--- a/awx/main/tests/functional/test_credential.py
+++ b/awx/main/tests/functional/test_credential.py
@@ -108,21 +108,11 @@ def test_default_cred_types():
def test_credential_creation(organization_factory):
org = organization_factory('test').organization
type_ = CredentialType(
- kind='cloud',
- name='SomeCloud',
- managed_by_tower=True,
- inputs={
- 'fields': [{
- 'id': 'username',
- 'label': 'Username for SomeCloud',
- 'type': 'string'
- }]
- }
+ kind='cloud', name='SomeCloud', managed_by_tower=True, inputs={'fields': [{'id': 'username', 'label': 'Username for SomeCloud', 'type': 'string'}]}
)
type_.save()
- cred = Credential(credential_type=type_, name="Bob's Credential",
- inputs={'username': 'bob'}, organization=org)
+ cred = Credential(credential_type=type_, name="Bob's Credential", inputs={'username': 'bob'}, organization=org)
cred.save()
cred.full_clean()
assert isinstance(cred, Credential)
@@ -131,20 +121,23 @@ def test_credential_creation(organization_factory):
@pytest.mark.django_db
-@pytest.mark.parametrize('kind', ['ssh', 'net', 'scm'])
-@pytest.mark.parametrize('ssh_key_data, ssh_key_unlock, valid', [
- [EXAMPLE_PRIVATE_KEY, None, True], # unencrypted key, no unlock pass
- [EXAMPLE_PRIVATE_KEY, 'super-secret', False], # unencrypted key, unlock pass
- [EXAMPLE_ENCRYPTED_PRIVATE_KEY, 'super-secret', True], # encrypted key, unlock pass
- [EXAMPLE_ENCRYPTED_PRIVATE_KEY, None, False], # encrypted key, no unlock pass
- [PKCS8_ENCRYPTED_PRIVATE_KEY, 'passme', True], # encrypted PKCS8 key, unlock pass
- [PKCS8_ENCRYPTED_PRIVATE_KEY, None, False], # encrypted PKCS8 key, no unlock pass
- [PKCS8_PRIVATE_KEY, None, True], # unencrypted PKCS8 key, no unlock pass
- [PKCS8_PRIVATE_KEY, 'passme', False], # unencrypted PKCS8 key, unlock pass
- [None, None, True], # no key, no unlock pass
- ['INVALID-KEY-DATA', None, False], # invalid key data
- [EXAMPLE_PRIVATE_KEY.replace('=', '\u003d'), None, True], # automatically fix JSON-encoded GCE keys
-])
+@pytest.mark.parametrize('kind', ['ssh', 'net', 'scm'])
+@pytest.mark.parametrize(
+ 'ssh_key_data, ssh_key_unlock, valid',
+ [
+ [EXAMPLE_PRIVATE_KEY, None, True], # unencrypted key, no unlock pass
+ [EXAMPLE_PRIVATE_KEY, 'super-secret', False], # unencrypted key, unlock pass
+ [EXAMPLE_ENCRYPTED_PRIVATE_KEY, 'super-secret', True], # encrypted key, unlock pass
+ [EXAMPLE_ENCRYPTED_PRIVATE_KEY, None, False], # encrypted key, no unlock pass
+ [PKCS8_ENCRYPTED_PRIVATE_KEY, 'passme', True], # encrypted PKCS8 key, unlock pass
+ [PKCS8_ENCRYPTED_PRIVATE_KEY, None, False], # encrypted PKCS8 key, no unlock pass
+ [PKCS8_PRIVATE_KEY, None, True], # unencrypted PKCS8 key, no unlock pass
+ [PKCS8_PRIVATE_KEY, 'passme', False], # unencrypted PKCS8 key, unlock pass
+ [None, None, True], # no key, no unlock pass
+ ['INVALID-KEY-DATA', None, False], # invalid key data
+ [EXAMPLE_PRIVATE_KEY.replace('=', '\u003d'), None, True], # automatically fix JSON-encoded GCE keys
+ ],
+)
def test_ssh_key_data_validation(organization, kind, ssh_key_data, ssh_key_unlock, valid):
inputs = {'username': 'joe-user'}
if ssh_key_data:
@@ -153,12 +146,7 @@ def test_ssh_key_data_validation(organization, kind, ssh_key_data, ssh_key_unloc
inputs['ssh_key_unlock'] = ssh_key_unlock
cred_type = CredentialType.defaults[kind]()
cred_type.save()
- cred = Credential(
- credential_type=cred_type,
- name="Best credential ever",
- inputs=inputs,
- organization=organization
- )
+ cred = Credential(credential_type=cred_type, name="Best credential ever", inputs=inputs, organization=organization)
cred.save()
if valid:
cred.full_clean()
@@ -169,21 +157,19 @@ def test_ssh_key_data_validation(organization, kind, ssh_key_data, ssh_key_unloc
@pytest.mark.django_db
-@pytest.mark.parametrize('inputs, valid', [
- ({'vault_password': 'some-pass'}, True),
- ({}, True),
- ({'vault_password': 'dev-pass', 'vault_id': 'dev'}, True),
- ({'vault_password': 'dev-pass', 'vault_id': 'dev@prompt'}, False), # @ not allowed
-])
+@pytest.mark.parametrize(
+ 'inputs, valid',
+ [
+ ({'vault_password': 'some-pass'}, True),
+ ({}, True),
+ ({'vault_password': 'dev-pass', 'vault_id': 'dev'}, True),
+ ({'vault_password': 'dev-pass', 'vault_id': 'dev@prompt'}, False), # @ not allowed
+ ],
+)
def test_vault_validation(organization, inputs, valid):
cred_type = CredentialType.defaults['vault']()
cred_type.save()
- cred = Credential(
- credential_type=cred_type,
- name="Best credential ever",
- inputs=inputs,
- organization=organization
- )
+ cred = Credential(credential_type=cred_type, name="Best credential ever", inputs=inputs, organization=organization)
cred.save()
if valid:
cred.full_clean()
@@ -194,21 +180,19 @@ def test_vault_validation(organization, inputs, valid):
@pytest.mark.django_db
-@pytest.mark.parametrize('become_method, valid', [
- ('', True),
- ('sudo', True),
- ('custom-plugin', True),
-])
+@pytest.mark.parametrize(
+ 'become_method, valid',
+ [
+ ('', True),
+ ('sudo', True),
+ ('custom-plugin', True),
+ ],
+)
def test_choices_validity(become_method, valid, organization):
inputs = {'become_method': become_method}
cred_type = CredentialType.defaults['ssh']()
cred_type.save()
- cred = Credential(
- credential_type=cred_type,
- name="Best credential ever",
- inputs=inputs,
- organization=organization
- )
+ cred = Credential(credential_type=cred_type, name="Best credential ever", inputs=inputs, organization=organization)
cred.save()
if valid:
@@ -222,12 +206,7 @@ def test_choices_validity(become_method, valid, organization):
@pytest.mark.django_db
def test_credential_encryption(organization_factory, credentialtype_ssh):
org = organization_factory('test').organization
- cred = Credential(
- credential_type=credentialtype_ssh,
- name="Bob's Credential",
- inputs={'password': 'testing123'},
- organization=org
- )
+ cred = Credential(credential_type=credentialtype_ssh, name="Bob's Credential", inputs={'password': 'testing123'}, organization=org)
cred.save()
assert Credential.objects.count() == 1
@@ -239,12 +218,7 @@ def test_credential_encryption(organization_factory, credentialtype_ssh):
@pytest.mark.django_db
def test_credential_encryption_with_ask(organization_factory, credentialtype_ssh):
org = organization_factory('test').organization
- cred = Credential(
- credential_type=credentialtype_ssh,
- name="Bob's Credential",
- inputs={'password': 'ASK'},
- organization=org
- )
+ cred = Credential(credential_type=credentialtype_ssh, name="Bob's Credential", inputs={'password': 'ASK'}, organization=org)
cred.save()
assert Credential.objects.count() == 1
@@ -256,10 +230,7 @@ def test_credential_encryption_with_ask(organization_factory, credentialtype_ssh
def test_credential_with_multiple_secrets(organization_factory, credentialtype_ssh):
org = organization_factory('test').organization
cred = Credential(
- credential_type=credentialtype_ssh,
- name="Bob's Credential",
- inputs={'ssh_key_data': 'SOMEKEY', 'ssh_key_unlock': 'testing123'},
- organization=org
+ credential_type=credentialtype_ssh, name="Bob's Credential", inputs={'ssh_key_data': 'SOMEKEY', 'ssh_key_unlock': 'testing123'}, organization=org
)
cred.save()
@@ -275,12 +246,7 @@ def test_credential_with_multiple_secrets(organization_factory, credentialtype_s
@pytest.mark.django_db
def test_credential_update(organization_factory, credentialtype_ssh):
org = organization_factory('test').organization
- cred = Credential(
- credential_type=credentialtype_ssh,
- name="Bob's Credential",
- inputs={'password': 'testing123'},
- organization=org
- )
+ cred = Credential(credential_type=credentialtype_ssh, name="Bob's Credential", inputs={'password': 'testing123'}, organization=org)
cred.save()
assert Credential.objects.count() == 1
@@ -297,12 +263,7 @@ def test_credential_update(organization_factory, credentialtype_ssh):
@pytest.mark.django_db
def test_credential_update_with_prior(organization_factory, credentialtype_ssh):
org = organization_factory('test').organization
- cred = Credential(
- credential_type=credentialtype_ssh,
- name="Bob's Credential",
- inputs={'password': 'testing123'},
- organization=org
- )
+ cred = Credential(credential_type=credentialtype_ssh, name="Bob's Credential", inputs={'password': 'testing123'}, organization=org)
cred.save()
assert Credential.objects.count() == 1
@@ -326,29 +287,24 @@ def test_credential_get_input(organization_factory):
name='somevault',
managed_by_tower=True,
inputs={
- 'fields': [{
- 'id': 'vault_password',
- 'type': 'string',
- 'secret': True,
- }, {
- 'id': 'vault_id',
- 'type': 'string',
- 'secret': False
- }, {
- 'id': 'secret',
- 'type': 'string',
- 'secret': True,
- }]
- }
+ 'fields': [
+ {
+ 'id': 'vault_password',
+ 'type': 'string',
+ 'secret': True,
+ },
+ {'id': 'vault_id', 'type': 'string', 'secret': False},
+ {
+ 'id': 'secret',
+ 'type': 'string',
+ 'secret': True,
+ },
+ ]
+ },
)
type_.save()
- cred = Credential(
- organization=organization,
- credential_type=type_,
- name="Bob's Credential",
- inputs={'vault_password': 'testing321'}
- )
+ cred = Credential(organization=organization, credential_type=type_, name="Bob's Credential", inputs={'vault_password': 'testing321'})
cred.save()
cred.full_clean()
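
The hunks above show the two rules that drive most of this reformatting. A call or
collection with no trailing comma is joined onto a single line whenever it fits the
project's line-length limit (evidently configured well above black's default of 88
columns, given the width of the joined lines). A pre-existing trailing comma, the
"magic trailing comma", forces black to keep the collection exploded one element
per line. A minimal sketch with placeholder data, not AWX code:

    import pytest

    # No trailing comma: black joins this onto one line because it fits.
    record = dict(kind='ssh', name="Bob's Credential", inputs={}, organization='org')

    # Magic trailing comma: the comma after the last element keeps the list
    # exploded, one entry per line, and black preserves the comma.
    @pytest.mark.parametrize(
        'become_method, valid',
        [
            ('', True),
            ('sudo', True),
            ('custom-plugin', True),
        ],
    )
    def test_choices_validity(become_method, valid):
        assert valid in (True, False)
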
diff --git a/awx/main/tests/functional/test_credential_plugins.py b/awx/main/tests/functional/test_credential_plugins.py
index b8daf6b41e..8ff8093c34 100644
--- a/awx/main/tests/functional/test_credential_plugins.py
+++ b/awx/main/tests/functional/test_credential_plugins.py
@@ -1,5 +1,6 @@
def test_imported_azure_cloud_sdk_vars():
from awx.main.credential_plugins import azure_kv
+
assert len(azure_kv.clouds) > 0
assert all([hasattr(c, 'name') for c in azure_kv.clouds])
assert all([hasattr(c, 'suffixes') for c in azure_kv.clouds])
diff --git a/awx/main/tests/functional/test_dispatch.py b/awx/main/tests/functional/test_dispatch.py
index e92867d6a5..a6fcb5b6ce 100644
--- a/awx/main/tests/functional/test_dispatch.py
+++ b/awx/main/tests/functional/test_dispatch.py
@@ -36,7 +36,6 @@ def add(a, b):
class BaseTask(object):
-
def add(self, a, b):
return add(a, b)
@@ -58,19 +57,16 @@ def multiply(a, b):
class SimpleWorker(BaseWorker):
-
def perform_work(self, body, *args):
pass
class ResultWriter(BaseWorker):
-
def perform_work(self, body, result_queue):
result_queue.put(body + '!!!')
class SlowResultWriter(BaseWorker):
-
def perform_work(self, body, result_queue):
time.sleep(3)
super(SlowResultWriter, self).perform_work(body, result_queue)
@@ -78,13 +74,12 @@ class SlowResultWriter(BaseWorker):
@pytest.mark.usefixtures("disable_database_settings")
class TestPoolWorker:
-
def setup_method(self, test_method):
self.worker = StatefulPoolWorker(1000, self.tick, tuple())
def tick(self):
self.worker.finished.put(self.worker.queue.get()['uuid'])
- time.sleep(.5)
+ time.sleep(0.5)
def test_qsize(self):
assert self.worker.qsize == 0
@@ -127,7 +122,6 @@ class TestPoolWorker:
@pytest.mark.django_db
class TestWorkerPool:
-
def setup_method(self, test_method):
self.pool = WorkerPool(min_workers=3)
@@ -159,16 +153,10 @@ class TestWorkerPool:
result_queue = multiprocessing.Queue()
self.pool.init_workers(ResultWriter().work_loop, result_queue)
for i in range(10):
- self.pool.write(
- random.choice(range(len(self.pool))),
- 'Hello, Worker {}'.format(i)
- )
+ self.pool.write(random.choice(range(len(self.pool))), 'Hello, Worker {}'.format(i))
all_messages = [result_queue.get(timeout=1) for i in range(10)]
all_messages.sort()
- assert all_messages == [
- 'Hello, Worker {}!!!'.format(i)
- for i in range(10)
- ]
+ assert all_messages == ['Hello, Worker {}!!!'.format(i) for i in range(10)]
total_handled = sum([worker.messages_sent for worker in self.pool.workers])
assert total_handled == 10
@@ -176,7 +164,6 @@ class TestWorkerPool:
@pytest.mark.django_db
class TestAutoScaling:
-
def setup_method(self, test_method):
self.pool = AutoscalePool(min_workers=2, max_workers=10)
@@ -275,59 +262,45 @@ class TestAutoScaling:
@pytest.mark.usefixtures("disable_database_settings")
class TestTaskDispatcher:
-
@property
def tm(self):
return TaskWorker()
def test_function_dispatch(self):
- result = self.tm.perform_work({
- 'task': 'awx.main.tests.functional.test_dispatch.add',
- 'args': [2, 2]
- })
+ result = self.tm.perform_work({'task': 'awx.main.tests.functional.test_dispatch.add', 'args': [2, 2]})
assert result == 4
def test_function_dispatch_must_be_decorated(self):
- result = self.tm.perform_work({
- 'task': 'awx.main.tests.functional.test_dispatch.restricted',
- 'args': [2, 2]
- })
+ result = self.tm.perform_work({'task': 'awx.main.tests.functional.test_dispatch.restricted', 'args': [2, 2]})
assert isinstance(result, ValueError)
assert str(result) == 'awx.main.tests.functional.test_dispatch.restricted is not decorated with @task()' # noqa
def test_method_dispatch(self):
- result = self.tm.perform_work({
- 'task': 'awx.main.tests.functional.test_dispatch.Adder',
- 'args': [2, 2]
- })
+ result = self.tm.perform_work({'task': 'awx.main.tests.functional.test_dispatch.Adder', 'args': [2, 2]})
assert result == 4
def test_method_dispatch_must_be_decorated(self):
- result = self.tm.perform_work({
- 'task': 'awx.main.tests.functional.test_dispatch.Restricted',
- 'args': [2, 2]
- })
+ result = self.tm.perform_work({'task': 'awx.main.tests.functional.test_dispatch.Restricted', 'args': [2, 2]})
assert isinstance(result, ValueError)
assert str(result) == 'awx.main.tests.functional.test_dispatch.Restricted is not decorated with @task()' # noqa
def test_python_function_cannot_be_imported(self):
- result = self.tm.perform_work({
- 'task': 'os.system',
- 'args': ['ls'],
- })
+ result = self.tm.perform_work(
+ {
+ 'task': 'os.system',
+ 'args': ['ls'],
+ }
+ )
assert isinstance(result, ValueError)
assert str(result) == 'os.system is not a valid awx task' # noqa
def test_undefined_function_cannot_be_imported(self):
- result = self.tm.perform_work({
- 'task': 'awx.foo.bar'
- })
+ result = self.tm.perform_work({'task': 'awx.foo.bar'})
assert isinstance(result, ModuleNotFoundError)
assert str(result) == "No module named 'awx.foo'" # noqa
class TestTaskPublisher:
-
def test_function_callable(self):
assert add(2, 2) == 4
@@ -371,17 +344,19 @@ yesterday = tz_now() - datetime.timedelta(days=1)
@pytest.mark.django_db
class TestJobReaper(object):
-
- @pytest.mark.parametrize('status, execution_node, controller_node, modified, fail', [
- ('running', '', '', None, False), # running, not assigned to the instance
- ('running', 'awx', '', None, True), # running, has the instance as its execution_node
- ('running', '', 'awx', None, True), # running, has the instance as its controller_node
- ('waiting', '', '', None, False), # waiting, not assigned to the instance
- ('waiting', 'awx', '', None, False), # waiting, was edited less than a minute ago
- ('waiting', '', 'awx', None, False), # waiting, was edited less than a minute ago
- ('waiting', 'awx', '', yesterday, True), # waiting, assigned to the execution_node, stale
- ('waiting', '', 'awx', yesterday, True), # waiting, assigned to the controller_node, stale
- ])
+ @pytest.mark.parametrize(
+ 'status, execution_node, controller_node, modified, fail',
+ [
+ ('running', '', '', None, False), # running, not assigned to the instance
+ ('running', 'awx', '', None, True), # running, has the instance as its execution_node
+ ('running', '', 'awx', None, True), # running, has the instance as its controller_node
+ ('waiting', '', '', None, False), # waiting, not assigned to the instance
+ ('waiting', 'awx', '', None, False), # waiting, was edited less than a minute ago
+ ('waiting', '', 'awx', None, False), # waiting, was edited less than a minute ago
+ ('waiting', 'awx', '', yesterday, True), # waiting, assigned to the execution_node, stale
+ ('waiting', '', 'awx', yesterday, True), # waiting, assigned to the controller_node, stale
+ ],
+ )
def test_should_reap(self, status, fail, execution_node, controller_node, modified):
i = Instance(hostname='awx')
i.save()
@@ -405,10 +380,13 @@ class TestJobReaper(object):
else:
assert job.status == status
- @pytest.mark.parametrize('excluded_uuids, fail', [
- (['abc123'], False),
- ([], True),
- ])
+ @pytest.mark.parametrize(
+ 'excluded_uuids, fail',
+ [
+ (['abc123'], False),
+ ([], True),
+ ],
+ )
def test_do_not_reap_excluded_uuids(self, excluded_uuids, fail):
i = Instance(hostname='awx')
i.save()
@@ -434,10 +412,7 @@ class TestJobReaper(object):
def test_workflow_does_not_reap(self):
i = Instance(hostname='awx')
i.save()
- j = WorkflowJob(
- status='running',
- execution_node='awx'
- )
+ j = WorkflowJob(status='running', execution_node='awx')
j.save()
reaper.reap(i)
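
Two smaller normalizations recur throughout the test_dispatch.py hunks: blank lines
sitting directly under a class header are removed, and float literals gain an
explicit leading zero. A toy illustration, not AWX code:

    import time


    class Worker:  # before black, a blank line followed this header
        def work(self):
            time.sleep(0.5)  # was: time.sleep(.5)
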
diff --git a/awx/main/tests/functional/test_execution_environments.py b/awx/main/tests/functional/test_execution_environments.py
index 5f1e430fe8..c47f0d9859 100644
--- a/awx/main/tests/functional/test_execution_environments.py
+++ b/awx/main/tests/functional/test_execution_environments.py
@@ -1,17 +1,12 @@
import pytest
-from awx.main.models import (ExecutionEnvironment)
+from awx.main.models import ExecutionEnvironment
@pytest.mark.django_db
def test_execution_environment_creation(execution_environment, organization):
execution_env = ExecutionEnvironment.objects.create(
- name='Hello Environment',
- image='',
- organization=organization,
- managed_by_tower=False,
- credential=None,
- pull='missing'
+ name='Hello Environment', image='', organization=organization, managed_by_tower=False, credential=None, pull='missing'
)
assert type(execution_env) is type(execution_environment)
assert execution_env.organization == organization
diff --git a/awx/main/tests/functional/test_fixture_factories.py b/awx/main/tests/functional/test_fixture_factories.py
index 83d96fdbd3..1af7b66246 100644
--- a/awx/main/tests/functional/test_fixture_factories.py
+++ b/awx/main/tests/functional/test_fixture_factories.py
@@ -45,14 +45,13 @@ def test_roles_exc_not_user(organization_factory):
@pytest.mark.django_db
def test_org_factory_roles(organization_factory):
- objects = organization_factory('org_roles_test',
- teams=['team1', 'team2'],
- users=['team1:foo', 'bar'],
- projects=['baz', 'bang'],
- roles=['team2.member_role:foo',
- 'team1.admin_role:bar',
- 'team1.admin_role:team2.admin_role',
- 'baz.admin_role:foo'])
+ objects = organization_factory(
+ 'org_roles_test',
+ teams=['team1', 'team2'],
+ users=['team1:foo', 'bar'],
+ projects=['baz', 'bang'],
+ roles=['team2.member_role:foo', 'team1.admin_role:bar', 'team1.admin_role:team2.admin_role', 'baz.admin_role:foo'],
+ )
assert objects.users.bar in objects.teams.team2.admin_role
assert objects.users.foo in objects.projects.baz.admin_role
@@ -62,11 +61,7 @@ def test_org_factory_roles(organization_factory):
@pytest.mark.django_db
def test_org_factory(organization_factory):
- objects = organization_factory('organization1',
- teams=['team1'],
- superusers=['superuser'],
- users=['admin', 'alice', 'team1:bob'],
- projects=['proj1'])
+ objects = organization_factory('organization1', teams=['team1'], superusers=['superuser'], users=['admin', 'alice', 'team1:bob'], projects=['proj1'])
assert hasattr(objects.users, 'admin')
assert hasattr(objects.users, 'alice')
assert hasattr(objects.superusers, 'superuser')
@@ -76,12 +71,17 @@ def test_org_factory(organization_factory):
@pytest.mark.django_db
def test_job_template_factory(job_template_factory):
- jt_objects = job_template_factory('testJT', organization='org1',
- project='proj1', inventory='inventory1',
- credential='cred1', survey='test-survey',
- cloud_credential='aws1',
- network_credential='juniper1',
- jobs=[1])
+ jt_objects = job_template_factory(
+ 'testJT',
+ organization='org1',
+ project='proj1',
+ inventory='inventory1',
+ credential='cred1',
+ survey='test-survey',
+ cloud_credential='aws1',
+ network_credential='juniper1',
+ jobs=[1],
+ )
assert jt_objects.job_template.name == 'testJT'
assert jt_objects.project.name == 'proj1'
assert jt_objects.inventory.name == 'inventory1'
@@ -104,10 +104,7 @@ def test_survey_spec_generator_simple(survey_spec_factory):
def test_survey_spec_generator_mixed(survey_spec_factory):
- survey_spec = survey_spec_factory(
- [{'variable': 'question1', 'type': 'integer', 'max': 87},
- {'variable': 'question2', 'type': 'str'},
- 'some_variable'])
+ survey_spec = survey_spec_factory([{'variable': 'question1', 'type': 'integer', 'max': 87}, {'variable': 'question2', 'type': 'str'}, 'some_variable'])
assert len(survey_spec['spec']) == 3
assert [spec_item['type'] for spec_item in survey_spec['spec']] == ['integer', 'str', 'integer']
assert survey_spec['spec'][0]['max'] == 87
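
The job_template_factory hunk above shows the companion behavior: when a call grows
past the limit, black splits it one argument per line and appends the trailing
comma itself. A runnable sketch with a stub standing in for the AWX fixture:

    def job_template_factory(name, **kwargs):  # stub, not the real AWX fixture
        return (name, kwargs)


    jt_objects = job_template_factory(
        'testJT',
        organization='org1',
        project='proj1',
        inventory='inventory1',
        cloud_credential='aws1',
        jobs=[1],  # black adds this trailing comma when it explodes the call
    )
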
diff --git a/awx/main/tests/functional/test_galaxy_credential_migration.py b/awx/main/tests/functional/test_galaxy_credential_migration.py
index 110628e19c..f825874ef0 100644
--- a/awx/main/tests/functional/test_galaxy_credential_migration.py
+++ b/awx/main/tests/functional/test_galaxy_credential_migration.py
@@ -91,14 +91,16 @@ def test_fallback_galaxies():
Setting.objects.create(key='PRIMARY_GALAXY_AUTH_URL', value='https://auth.example.org/')
Setting.objects.create(key='PRIMARY_GALAXY_TOKEN', value='secret123')
try:
- settings.FALLBACK_GALAXY_SERVERS = [{
- 'id': 'abc123',
- 'url': 'https://some-other-galaxy.example.org/',
- 'auth_url': 'https://some-other-galaxy.sso.example.org/',
- 'username': 'user',
- 'password': 'pass',
- 'token': 'fallback123',
- }]
+ settings.FALLBACK_GALAXY_SERVERS = [
+ {
+ 'id': 'abc123',
+ 'url': 'https://some-other-galaxy.example.org/',
+ 'auth_url': 'https://some-other-galaxy.sso.example.org/',
+ 'username': 'user',
+ 'password': 'pass',
+ 'token': 'fallback123',
+ }
+ ]
galaxy.migrate_galaxy_settings(apps, None)
finally:
settings.FALLBACK_GALAXY_SERVERS = []
diff --git a/awx/main/tests/functional/test_instance_group_ordering.py b/awx/main/tests/functional/test_instance_group_ordering.py
index aaed0779eb..42c69ffc7f 100644
--- a/awx/main/tests/functional/test_instance_group_ordering.py
+++ b/awx/main/tests/functional/test_instance_group_ordering.py
@@ -9,25 +9,15 @@ def source_model(request):
@pytest.mark.django_db
-@pytest.mark.parametrize(
- 'source_model', ['job_template', 'inventory', 'organization'], indirect=True
-)
+@pytest.mark.parametrize('source_model', ['job_template', 'inventory', 'organization'], indirect=True)
def test_instance_group_ordering(source_model):
- groups = [
- InstanceGroup.objects.create(name='host-%d' % i)
- for i in range(5)
- ]
+ groups = [InstanceGroup.objects.create(name='host-%d' % i) for i in range(5)]
groups.reverse()
for group in groups:
source_model.instance_groups.add(group)
- assert [g.name for g in source_model.instance_groups.all()] == [
- 'host-4', 'host-3', 'host-2', 'host-1', 'host-0'
- ]
- assert [
- (row.position, row.instancegroup.name)
- for row in source_model.instance_groups.through.objects.all()
- ] == [
+ assert [g.name for g in source_model.instance_groups.all()] == ['host-4', 'host-3', 'host-2', 'host-1', 'host-0']
+ assert [(row.position, row.instancegroup.name) for row in source_model.instance_groups.through.objects.all()] == [
(0, 'host-4'),
(1, 'host-3'),
(2, 'host-2'),
@@ -36,13 +26,8 @@ def test_instance_group_ordering(source_model):
]
source_model.instance_groups.remove(groups[0])
- assert [g.name for g in source_model.instance_groups.all()] == [
- 'host-3', 'host-2', 'host-1', 'host-0'
- ]
- assert [
- (row.position, row.instancegroup.name)
- for row in source_model.instance_groups.through.objects.all()
- ] == [
+ assert [g.name for g in source_model.instance_groups.all()] == ['host-3', 'host-2', 'host-1', 'host-0']
+ assert [(row.position, row.instancegroup.name) for row in source_model.instance_groups.through.objects.all()] == [
(0, 'host-3'),
(1, 'host-2'),
(2, 'host-1'),
@@ -54,26 +39,16 @@ def test_instance_group_ordering(source_model):
@pytest.mark.django_db
-@pytest.mark.parametrize(
- 'source_model', ['job_template', 'inventory', 'organization'], indirect=True
-)
+@pytest.mark.parametrize('source_model', ['job_template', 'inventory', 'organization'], indirect=True)
def test_instance_group_middle_deletion(source_model):
- groups = [
- InstanceGroup.objects.create(name='host-%d' % i)
- for i in range(5)
- ]
+ groups = [InstanceGroup.objects.create(name='host-%d' % i) for i in range(5)]
groups.reverse()
for group in groups:
source_model.instance_groups.add(group)
source_model.instance_groups.remove(groups[2])
- assert [g.name for g in source_model.instance_groups.all()] == [
- 'host-4', 'host-3', 'host-1', 'host-0'
- ]
- assert [
- (row.position, row.instancegroup.name)
- for row in source_model.instance_groups.through.objects.all()
- ] == [
+ assert [g.name for g in source_model.instance_groups.all()] == ['host-4', 'host-3', 'host-1', 'host-0']
+ assert [(row.position, row.instancegroup.name) for row in source_model.instance_groups.through.objects.all()] == [
(0, 'host-4'),
(1, 'host-3'),
(2, 'host-1'),
@@ -82,21 +57,12 @@ def test_instance_group_middle_deletion(source_model):
@pytest.mark.django_db
-@pytest.mark.parametrize(
- 'source_model', ['job_template', 'inventory', 'organization'], indirect=True
-)
+@pytest.mark.parametrize('source_model', ['job_template', 'inventory', 'organization'], indirect=True)
def test_explicit_ordering(source_model):
- groups = [
- InstanceGroup.objects.create(name='host-%d' % i)
- for i in range(5)
- ]
+ groups = [InstanceGroup.objects.create(name='host-%d' % i) for i in range(5)]
groups.reverse()
for group in groups:
source_model.instance_groups.add(group)
- assert [g.name for g in source_model.instance_groups.all()] == [
- 'host-4', 'host-3', 'host-2', 'host-1', 'host-0'
- ]
- assert [g.name for g in source_model.instance_groups.order_by('name').all()] == [
- 'host-0', 'host-1', 'host-2', 'host-3', 'host-4'
- ]
+ assert [g.name for g in source_model.instance_groups.all()] == ['host-4', 'host-3', 'host-2', 'host-1', 'host-0']
+ assert [g.name for g in source_model.instance_groups.order_by('name').all()] == ['host-0', 'host-1', 'host-2', 'host-3', 'host-4']
diff --git a/awx/main/tests/functional/test_instances.py b/awx/main/tests/functional/test_instances.py
index 649c4c646a..7b8a2f41ab 100644
--- a/awx/main/tests/functional/test_instances.py
+++ b/awx/main/tests/functional/test_instances.py
@@ -18,13 +18,16 @@ def test_default_tower_instance_group(default_instance_group, job_factory):
class TestPolicyTaskScheduling:
"""Tests make assertions about when the policy task gets scheduled"""
- @pytest.mark.parametrize('field, value, expect', [
- ('name', 'foo-bar-foo-bar', False),
- ('policy_instance_percentage', 35, True),
- ('policy_instance_minimum', 3, True),
- ('policy_instance_list', ['bar?'], True),
- ('modified', now(), False)
- ])
+ @pytest.mark.parametrize(
+ 'field, value, expect',
+ [
+ ('name', 'foo-bar-foo-bar', False),
+ ('policy_instance_percentage', 35, True),
+ ('policy_instance_minimum', 3, True),
+ ('policy_instance_list', ['bar?'], True),
+ ('modified', now(), False),
+ ],
+ )
def test_policy_task_ran_for_ig_when_needed(self, instance_group_factory, field, value, expect):
# always run on instance group creation
with mock.patch('awx.main.models.ha.schedule_policy_task') as mock_policy:
@@ -39,13 +42,16 @@ class TestPolicyTaskScheduling:
else:
mock_policy.assert_not_called()
- @pytest.mark.parametrize('field, value, expect', [
- ('hostname', 'foo-bar-foo-bar', True),
- ('managed_by_policy', False, True),
- ('enabled', False, False),
- ('capacity_adjustment', 0.42, True),
- ('capacity', 42, False)
- ])
+ @pytest.mark.parametrize(
+ 'field, value, expect',
+ [
+ ('hostname', 'foo-bar-foo-bar', True),
+ ('managed_by_policy', False, True),
+ ('enabled', False, False),
+ ('capacity_adjustment', 0.42, True),
+ ('capacity', 42, False),
+ ],
+ )
def test_policy_task_ran_for_instance_when_needed(self, instance_group_factory, field, value, expect):
# always run on instance group creation
with mock.patch('awx.main.models.ha.schedule_policy_task') as mock_policy:
@@ -285,7 +291,6 @@ def test_instance_group_capacity(instance_factory, instance_group_factory):
@pytest.mark.django_db
class TestInstanceGroupOrdering:
-
def test_ad_hoc_instance_groups(self, instance_group_factory, inventory, default_instance_group):
ad_hoc = AdHocCommand.objects.create(inventory=inventory)
assert ad_hoc.preferred_instance_groups == [default_instance_group]
@@ -297,10 +302,7 @@ class TestInstanceGroupOrdering:
assert ad_hoc.preferred_instance_groups == [ig_inv, ig_org]
def test_inventory_update_instance_groups(self, instance_group_factory, inventory_source, default_instance_group):
- iu = InventoryUpdate.objects.create(
- inventory_source=inventory_source,
- source=inventory_source.source
- )
+ iu = InventoryUpdate.objects.create(inventory_source=inventory_source, source=inventory_source.source)
assert iu.preferred_instance_groups == [default_instance_group]
ig_org = instance_group_factory("OrgIstGrp", [default_instance_group.instances.first()])
ig_inv = instance_group_factory("InvIstGrp", [default_instance_group.instances.first()])
diff --git a/awx/main/tests/functional/test_inventory_source_injectors.py b/awx/main/tests/functional/test_inventory_source_injectors.py
index f9edfdcd22..5fd12a332d 100644
--- a/awx/main/tests/functional/test_inventory_source_injectors.py
+++ b/awx/main/tests/functional/test_inventory_source_injectors.py
@@ -16,24 +16,25 @@ DATA = os.path.join(os.path.dirname(data.__file__), 'inventory')
def generate_fake_var(element):
- """Given a credential type field element, makes up something acceptable.
- """
+ """Given a credential type field element, makes up something acceptable."""
if element['type'] == 'string':
if element.get('format', None) == 'ssh_private_key':
# this example came from the internet
- return '\n'.join([
- '-----BEGIN ENCRYPTED PRIVATE KEY-----'
- 'MIIBpjBABgkqhkiG9w0BBQ0wMzAbBgkqhkiG9w0BBQwwDgQI5yNCu9T5SnsCAggA'
- 'MBQGCCqGSIb3DQMHBAhJISTgOAxtYwSCAWDXK/a1lxHIbRZHud1tfRMR4ROqkmr4'
- 'kVGAnfqTyGptZUt3ZtBgrYlFAaZ1z0wxnhmhn3KIbqebI4w0cIL/3tmQ6eBD1Ad1'
- 'nSEjUxZCuzTkimXQ88wZLzIS9KHc8GhINiUu5rKWbyvWA13Ykc0w65Ot5MSw3cQc'
- 'w1LEDJjTculyDcRQgiRfKH5376qTzukileeTrNebNq+wbhY1kEPAHojercB7d10E'
- '+QcbjJX1Tb1Zangom1qH9t/pepmV0Hn4EMzDs6DS2SWTffTddTY4dQzvksmLkP+J'
- 'i8hkFIZwUkWpT9/k7MeklgtTiy0lR/Jj9CxAIQVxP8alLWbIqwCNRApleSmqtitt'
- 'Z+NdsuNeTm3iUaPGYSw237tjLyVE6pr0EJqLv7VUClvJvBnH2qhQEtWYB9gvE1dS'
- 'BioGu40pXVfjiLqhEKVVVEoHpI32oMkojhCGJs8Oow4bAxkzQFCtuWB1'
- '-----END ENCRYPTED PRIVATE KEY-----'
- ])
+ return '\n'.join(
+ [
+ '-----BEGIN ENCRYPTED PRIVATE KEY-----'
+ 'MIIBpjBABgkqhkiG9w0BBQ0wMzAbBgkqhkiG9w0BBQwwDgQI5yNCu9T5SnsCAggA'
+ 'MBQGCCqGSIb3DQMHBAhJISTgOAxtYwSCAWDXK/a1lxHIbRZHud1tfRMR4ROqkmr4'
+ 'kVGAnfqTyGptZUt3ZtBgrYlFAaZ1z0wxnhmhn3KIbqebI4w0cIL/3tmQ6eBD1Ad1'
+ 'nSEjUxZCuzTkimXQ88wZLzIS9KHc8GhINiUu5rKWbyvWA13Ykc0w65Ot5MSw3cQc'
+ 'w1LEDJjTculyDcRQgiRfKH5376qTzukileeTrNebNq+wbhY1kEPAHojercB7d10E'
+ '+QcbjJX1Tb1Zangom1qH9t/pepmV0Hn4EMzDs6DS2SWTffTddTY4dQzvksmLkP+J'
+ 'i8hkFIZwUkWpT9/k7MeklgtTiy0lR/Jj9CxAIQVxP8alLWbIqwCNRApleSmqtitt'
+ 'Z+NdsuNeTm3iUaPGYSw237tjLyVE6pr0EJqLv7VUClvJvBnH2qhQEtWYB9gvE1dS'
+ 'BioGu40pXVfjiLqhEKVVVEoHpI32oMkojhCGJs8Oow4bAxkzQFCtuWB1'
+ '-----END ENCRYPTED PRIVATE KEY-----'
+ ]
+ )
if element['id'] == 'host':
return 'https://foo.invalid'
return 'fooo'
@@ -43,8 +44,7 @@ def generate_fake_var(element):
def credential_kind(source):
- """Given the inventory source kind, return expected credential kind
- """
+ """Given the inventory source kind, return expected credential kind"""
return source.replace('ec2', 'aws')
@@ -64,12 +64,9 @@ def fake_credential_factory():
if source == 'tower':
inputs.pop('oauth_token') # mutually exclusive with user/pass
- return Credential.objects.create(
- credential_type=ct,
- inputs=inputs
- )
- return wrap
+ return Credential.objects.create(credential_type=ct, inputs=inputs)
+ return wrap
def read_content(private_data_dir, raw_env, inventory_update):
@@ -94,9 +91,7 @@ def read_content(private_data_dir, raw_env, inventory_update):
for key, value in env.items():
inverse_env.setdefault(value, []).append(key)
- cache_file_regex = re.compile(r'/tmp/awx_{0}_[a-zA-Z0-9_]+/{1}_cache[a-zA-Z0-9_]+'.format(
- inventory_update.id, inventory_update.source)
- )
+ cache_file_regex = re.compile(r'/tmp/awx_{0}_[a-zA-Z0-9_]+/{1}_cache[a-zA-Z0-9_]+'.format(inventory_update.id, inventory_update.source))
private_key_regex = re.compile(r'-----BEGIN ENCRYPTED PRIVATE KEY-----.*-----END ENCRYPTED PRIVATE KEY-----')
# read directory content
@@ -119,8 +114,7 @@ def read_content(private_data_dir, raw_env, inventory_update):
break
alias = 'file_reference_{}'.format(i)
else:
- raise RuntimeError('Test not able to cope with >10 references by env vars. '
- 'Something probably went very wrong.')
+ raise RuntimeError('Test not able to cope with >10 references by env vars. ' 'Something probably went very wrong.')
file_aliases[abs_file_path] = alias
for env_key in inverse_env[runner_path]:
env[env_key] = '{{{{ {} }}}}'.format(alias)
@@ -141,9 +135,7 @@ def read_content(private_data_dir, raw_env, inventory_update):
for abs_file_path, file_content in dir_contents.copy().items():
if cache_file_regex.match(file_content):
                if 'cache_dir' not in file_aliases.values() and 'cache_file' not in file_aliases.values():
- raise AssertionError(
- 'A cache file was referenced but never created, files:\n{}'.format(
- json.dumps(dir_contents, indent=4)))
+ raise AssertionError('A cache file was referenced but never created, files:\n{}'.format(json.dumps(dir_contents, indent=4)))
# if another files path appears in this file, replace it with its alias
for target_path in dir_contents.keys():
other_alias = file_aliases[target_path]
@@ -157,8 +149,8 @@ def read_content(private_data_dir, raw_env, inventory_update):
# assert that all files laid down are used
if abs_file_path not in referenced_paths:
raise AssertionError(
- "File {} is not referenced. References and files:\n{}\n{}".format(
- abs_file_path, json.dumps(env, indent=4), json.dumps(dir_contents, indent=4)))
+ "File {} is not referenced. References and files:\n{}\n{}".format(abs_file_path, json.dumps(env, indent=4), json.dumps(dir_contents, indent=4))
+ )
file_content = private_key_regex.sub('{{private_key}}', file_content)
content[file_aliases[abs_file_path]] = file_content
@@ -215,8 +207,9 @@ def test_inventory_update_injected_content(this_kind, inventory, fake_credential
# Assert inventory plugin inventory file is in private_data_dir
inventory_filename = InventorySource.injectors[inventory_update.source]().filename
- assert len([True for k in content.keys() if k.endswith(inventory_filename)]) > 0, \
- f"'{inventory_filename}' file not found in inventory update runtime files {content.keys()}"
+ assert (
+ len([True for k in content.keys() if k.endswith(inventory_filename)]) > 0
+ ), f"'{inventory_filename}' file not found in inventory update runtime files {content.keys()}"
env.pop('ANSIBLE_COLLECTIONS_PATHS', None) # collection paths not relevant to this test
base_dir = os.path.join(DATA, 'plugins')
@@ -230,9 +223,7 @@ def test_inventory_update_injected_content(this_kind, inventory, fake_credential
source_dir = os.path.join(base_dir, this_kind) # this_kind is a global
if not os.path.exists(source_dir):
- raise FileNotFoundError(
- 'Maybe you never made reference files? '
- 'MAKE_INVENTORY_REFERENCE_FILES=true py.test ...\noriginal: {}')
+ raise FileNotFoundError('Maybe you never made reference files? ' 'MAKE_INVENTORY_REFERENCE_FILES=true py.test ...\noriginal: {}')
files_dir = os.path.join(source_dir, 'files')
try:
expected_file_list = os.listdir(files_dir)
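
Two string-related behaviors appear in this file. Black pulls a docstring's closing
quotes up onto its last line of text when the result fits, and it never merges
implicitly concatenated string literals, so fragments of the old manual wrapping
survive side by side on a single line (the RuntimeError and FileNotFoundError
messages above). A sketch of both:

    def credential_kind(source):
        """Given the inventory source kind, return expected credential kind"""
        return source.replace('ec2', 'aws')


    # Adjacent literals are left as-is; fusing them into one string would
    # take a hand edit after the automated pass.
    message = 'Test not able to cope with >10 references by env vars. ' 'Something probably went very wrong.'
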
diff --git a/awx/main/tests/functional/test_inventory_source_migration.py b/awx/main/tests/functional/test_inventory_source_migration.py
index 2b1e089392..b8858614e0 100644
--- a/awx/main/tests/functional/test_inventory_source_migration.py
+++ b/awx/main/tests/functional/test_inventory_source_migration.py
@@ -8,25 +8,15 @@ from django.apps import apps
from awx.main.models import InventorySource, InventoryUpdate, ManagedCredentialType, CredentialType, Credential
-@pytest.mark.parametrize('vars,id_var,result', [
- ({'foo': {'bar': '1234'}}, 'foo.bar', '1234'),
- ({'cat': 'meow'}, 'cat', 'meow'),
- ({'dog': 'woof'}, 'cat', '')
-])
+@pytest.mark.parametrize('vars,id_var,result', [({'foo': {'bar': '1234'}}, 'foo.bar', '1234'), ({'cat': 'meow'}, 'cat', 'meow'), ({'dog': 'woof'}, 'cat', '')])
def test_instance_id(vars, id_var, result):
assert invsrc._get_instance_id(vars, id_var) == result
@pytest.mark.django_db
def test_apply_new_instance_id(inventory_source):
- host1 = inventory_source.hosts.create(
- name='foo1', inventory=inventory_source.inventory,
- variables={'foo': 'bar'}, instance_id=''
- )
- host2 = inventory_source.hosts.create(
- name='foo2', inventory=inventory_source.inventory,
- variables={'foo': 'bar'}, instance_id='bad_user'
- )
+ host1 = inventory_source.hosts.create(name='foo1', inventory=inventory_source.inventory, variables={'foo': 'bar'}, instance_id='')
+ host2 = inventory_source.hosts.create(name='foo2', inventory=inventory_source.inventory, variables={'foo': 'bar'}, instance_id='bad_user')
with mock.patch('django.conf.settings.{}_INSTANCE_ID_VAR'.format(inventory_source.source.upper()), 'foo'):
invsrc.set_new_instance_id(apps, inventory_source.source, 'foo')
host1.refresh_from_db()
diff --git a/awx/main/tests/functional/test_jobs.py b/awx/main/tests/functional/test_jobs.py
index b4754a6803..197c1197e1 100644
--- a/awx/main/tests/functional/test_jobs.py
+++ b/awx/main/tests/functional/test_jobs.py
@@ -3,9 +3,7 @@ import pytest
from unittest import mock
import json
-from awx.main.models import (Job, Instance, JobHostSummary, InventoryUpdate,
- InventorySource, Project, ProjectUpdate,
- SystemJob, AdHocCommand)
+from awx.main.models import Job, Instance, JobHostSummary, InventoryUpdate, InventorySource, Project, ProjectUpdate, SystemJob, AdHocCommand
from awx.main.tasks import cluster_node_heartbeat
from django.test.utils import override_settings
@@ -22,8 +20,8 @@ def test_orphan_unified_job_creation(instance, inventory):
@pytest.mark.django_db
-@mock.patch('awx.main.utils.common.get_cpu_capacity', lambda: (2,8))
-@mock.patch('awx.main.utils.common.get_mem_capacity', lambda: (8000,62))
+@mock.patch('awx.main.utils.common.get_cpu_capacity', lambda: (2, 8))
+@mock.patch('awx.main.utils.common.get_mem_capacity', lambda: (8000, 62))
def test_job_capacity_and_with_inactive_node():
i = Instance.objects.create(hostname='test-1')
with mock.patch.object(redis.client.Redis, 'ping', lambda self: True):
@@ -38,13 +36,14 @@ def test_job_capacity_and_with_inactive_node():
@pytest.mark.django_db
-@mock.patch('awx.main.utils.common.get_cpu_capacity', lambda: (2,8))
-@mock.patch('awx.main.utils.common.get_mem_capacity', lambda: (8000,62))
+@mock.patch('awx.main.utils.common.get_cpu_capacity', lambda: (2, 8))
+@mock.patch('awx.main.utils.common.get_mem_capacity', lambda: (8000, 62))
def test_job_capacity_with_redis_disabled():
i = Instance.objects.create(hostname='test-1')
def _raise(self):
raise redis.ConnectionError()
+
with mock.patch.object(redis.client.Redis, 'ping', _raise):
i.refresh_capacity()
assert i.capacity == 0
@@ -60,10 +59,7 @@ def test_job_type_name():
source = InventorySource.objects.create(source='ec2')
source.save()
- iu = InventoryUpdate.objects.create(
- inventory_source=source,
- source='ec2'
- )
+ iu = InventoryUpdate.objects.create(inventory_source=source, source='ec2')
assert iu.job_type_name == 'inventory_update'
proj = Project.objects.create()
@@ -79,7 +75,9 @@ def test_job_type_name():
def test_job_notification_data(inventory, machine_credential, project):
encrypted_str = "$encrypted$"
job = Job.objects.create(
- job_template=None, inventory=inventory, name='hi world',
+ job_template=None,
+ inventory=inventory,
+ name='hi world',
extra_vars=json.dumps({"SSN": "123-45-6789"}),
survey_passwords={"SSN": encrypted_str},
project=project,
@@ -91,44 +89,24 @@ def test_job_notification_data(inventory, machine_credential, project):
@pytest.mark.django_db
def test_job_notification_host_data(inventory, machine_credential, project, job_template, host):
- job = Job.objects.create(
- job_template=job_template, inventory=inventory, name='hi world', project=project
- )
+ job = Job.objects.create(job_template=job_template, inventory=inventory, name='hi world', project=project)
JobHostSummary.objects.create(job=job, host=host, changed=1, dark=2, failures=3, ok=4, processed=3, skipped=2, rescued=1, ignored=0)
- assert job.notification_data()['hosts'] == {'single-host':
- {'failed': True,
- 'changed': 1,
- 'dark': 2,
- 'failures': 3,
- 'ok': 4,
- 'processed': 3,
- 'skipped': 2,
- 'rescued': 1,
- 'ignored': 0}}
+ assert job.notification_data()['hosts'] == {
+ 'single-host': {'failed': True, 'changed': 1, 'dark': 2, 'failures': 3, 'ok': 4, 'processed': 3, 'skipped': 2, 'rescued': 1, 'ignored': 0}
+ }
@pytest.mark.django_db
class TestLaunchConfig:
-
def test_null_creation_from_prompts(self):
job = Job.objects.create()
- data = {
- "credentials": [],
- "extra_vars": {},
- "limit": None,
- "job_type": None
- }
+ data = {"credentials": [], "extra_vars": {}, "limit": None, "job_type": None}
config = job.create_config_from_prompts(data)
assert config is None
def test_only_limit_defined(self, job_template):
job = Job.objects.create(job_template=job_template)
- data = {
- "credentials": [],
- "extra_vars": {},
- "job_tags": None,
- "limit": ""
- }
+ data = {"credentials": [], "extra_vars": {}, "job_tags": None, "limit": ""}
config = job.create_config_from_prompts(data)
assert config.char_prompts == {"limit": ""}
assert not config.credentials.exists()
diff --git a/awx/main/tests/functional/test_labels.py b/awx/main/tests/functional/test_labels.py
index fad1869d0e..aaf74e41e4 100644
--- a/awx/main/tests/functional/test_labels.py
+++ b/awx/main/tests/functional/test_labels.py
@@ -6,7 +6,7 @@ from awx.api.versioning import reverse
@pytest.mark.django_db
-def test_workflow_can_add_label(org_admin,organization, post):
+def test_workflow_can_add_label(org_admin, organization, post):
# create workflow
wfjt = WorkflowJobTemplate.objects.create(name='test-wfjt')
wfjt.organization = organization
@@ -28,10 +28,7 @@ def test_workflow_can_remove_label(org_admin, organization, post, get):
label = wfjt.labels.create(name='dev-label', organization=organization)
# delete label
url = reverse('api:workflow_job_template_label_list', kwargs={'pk': wfjt.pk})
- data = {
- "id": label.pk,
- "disassociate": True
- }
+ data = {"id": label.pk, "disassociate": True}
post(url, data, org_admin, expect=204)
results = get(url, org_admin, expect=200)
assert results.data['count'] == 0
diff --git a/awx/main/tests/functional/test_ldap.py b/awx/main/tests/functional/test_ldap.py
index 9b463da664..d85e04c475 100644
--- a/awx/main/tests/functional/test_ldap.py
+++ b/awx/main/tests/functional/test_ldap.py
@@ -1,4 +1,3 @@
-
import ldap
import ldif
import pytest
@@ -26,7 +25,7 @@ def ldap_generator():
conn = ldap.initialize('ldap://{}/'.format(host))
return conn
- #mockldap.stop()
+ # mockldap.stop()
return fn
@@ -40,42 +39,16 @@ def ldap_settings_generator():
'AUTH_LDAP_SERVER_URI': 'ldap://{}'.format(host),
'AUTH_LDAP_BIND_DN': 'cn=eng_user1,ou=people,dc={},dc=com'.format(dc),
'AUTH_LDAP_BIND_PASSWORD': 'password',
- "AUTH_LDAP_USER_SEARCH": [
- "ou=people,dc={},dc=com".format(dc),
- "SCOPE_SUBTREE",
- "(cn=%(user)s)"
- ],
+ "AUTH_LDAP_USER_SEARCH": ["ou=people,dc={},dc=com".format(dc), "SCOPE_SUBTREE", "(cn=%(user)s)"],
"AUTH_LDAP_TEAM_MAP": {
- "LDAP Sales": {
- "organization": "LDAP Organization",
- "users": "cn=sales,ou=groups,dc={},dc=com".format(dc),
- "remove": True
- },
- "LDAP IT": {
- "organization": "LDAP Organization",
- "users": "cn=it,ou=groups,dc={},dc=com".format(dc),
- "remove": True
- },
- "LDAP Engineering": {
- "organization": "LDAP Organization",
- "users": "cn=engineering,ou=groups,dc={},dc=com".format(dc),
- "remove": True
- }
+ "LDAP Sales": {"organization": "LDAP Organization", "users": "cn=sales,ou=groups,dc={},dc=com".format(dc), "remove": True},
+ "LDAP IT": {"organization": "LDAP Organization", "users": "cn=it,ou=groups,dc={},dc=com".format(dc), "remove": True},
+ "LDAP Engineering": {"organization": "LDAP Organization", "users": "cn=engineering,ou=groups,dc={},dc=com".format(dc), "remove": True},
},
"AUTH_LDAP_REQUIRE_GROUP": None,
- "AUTH_LDAP_USER_ATTR_MAP": {
- "first_name": "givenName",
- "last_name": "sn",
- "email": "mail"
- },
- "AUTH_LDAP_GROUP_SEARCH": [
- "dc={},dc=com".format(dc),
- "SCOPE_SUBTREE",
- "(objectClass=groupOfNames)"
- ],
- "AUTH_LDAP_USER_FLAGS_BY_GROUP": {
- "is_superuser": "cn=superusers,ou=groups,dc={},dc=com".format(dc)
- },
+ "AUTH_LDAP_USER_ATTR_MAP": {"first_name": "givenName", "last_name": "sn", "email": "mail"},
+ "AUTH_LDAP_GROUP_SEARCH": ["dc={},dc=com".format(dc), "SCOPE_SUBTREE", "(objectClass=groupOfNames)"],
+ "AUTH_LDAP_USER_FLAGS_BY_GROUP": {"is_superuser": "cn=superusers,ou=groups,dc={},dc=com".format(dc)},
"AUTH_LDAP_ORGANIZATION_MAP": {
"LDAP Organization": {
"admins": "cn=engineering_admins,ou=groups,dc={},dc=com".format(dc),
@@ -83,22 +56,23 @@ def ldap_settings_generator():
"users": [
"cn=engineering,ou=groups,dc={},dc=com".format(dc),
"cn=sales,ou=groups,dc={},dc=com".format(dc),
- "cn=it,ou=groups,dc={},dc=com".format(dc)
+ "cn=it,ou=groups,dc={},dc=com".format(dc),
],
- "remove_users": False
+ "remove_users": False,
}
},
}
if prefix:
data_new = dict()
- for k,v in data.items():
+ for k, v in data.items():
k_new = k.replace('AUTH_LDAP', 'AUTH_LDAP{}'.format(prefix))
data_new[k_new] = v
else:
data_new = data
return data_new
+
return fn
@@ -128,4 +102,3 @@ def test_login(ldap_generator, patch, post, admin, ldap_settings_generator):
patch(ldap_settings_url, user=admin, data=ldap_settings_redhat, expect=200)
post(auth_url, data={'username': 'eng_user1', 'password': 'password'}, expect=200)
-
diff --git a/awx/main/tests/functional/test_licenses.py b/awx/main/tests/functional/test_licenses.py
index 46700d38a8..d905ba21be 100644
--- a/awx/main/tests/functional/test_licenses.py
+++ b/awx/main/tests/functional/test_licenses.py
@@ -1,4 +1,3 @@
-
import glob
import os
@@ -11,7 +10,6 @@ except ImportError:
def test_python_and_js_licenses():
-
def index_licenses(path):
# Check for GPL (forbidden) and LGPL (need to ship source)
# This is not meant to be an exhaustive check.
@@ -27,7 +25,7 @@ def test_python_and_js_licenses():
def find_embedded_source_version(path, name):
for entry in os.listdir(path):
# Check variations of '-' and '_' in filenames due to python
- for fname in [name, name.replace('-','_')]:
+ for fname in [name, name.replace('-', '_')]:
if entry.startswith(fname) and entry.endswith('.tar.gz'):
v = entry.split(name + '-')[1].split('.tar.gz')[0]
return v
@@ -43,7 +41,7 @@ def test_python_and_js_licenses():
'filename': filename,
'gpl': is_gpl,
'source_required': (is_gpl or is_lgpl),
- 'source_version': find_embedded_source_version(path, name)
+ 'source_version': find_embedded_source_version(path, name),
}
return list
@@ -56,12 +54,12 @@ def test_python_and_js_licenses():
name = reqt.name
version = str(reqt.specifier)
if version.startswith('=='):
- version=version[2:]
+ version = version[2:]
if reqt.link:
- (name, version) = reqt.link.filename.split('@',1)
+ (name, version) = reqt.link.filename.split('@', 1)
if name.endswith('.git'):
name = name[:-4]
- ret[name] = { 'name': name, 'version': version}
+ ret[name] = {'name': name, 'version': version}
return ret
def remediate_licenses_and_requirements(licenses, requirements):
@@ -80,12 +78,12 @@ def test_python_and_js_licenses():
if version != licenses[item]['source_version']:
errors.append(" embedded source for %s is %s instead of the required version %s" % (item, licenses[item]['source_version'], version))
elif licenses[item]['source_version']:
- errors.append(" embedded source version %s for %s is included despite not being needed" % (licenses[item]['source_version'],item))
+ errors.append(" embedded source version %s for %s is included despite not being needed" % (licenses[item]['source_version'], item))
items = list(requirements.keys())
items.sort()
for item in items:
if item.lower() not in licenses.keys():
- errors.append(" license for requirement %s is missing" %(item,))
+ errors.append(" license for requirement %s is missing" % (item,))
return errors
base_dir = settings.BASE_DIR
@@ -95,5 +93,4 @@ def test_python_and_js_licenses():
errors = []
errors += remediate_licenses_and_requirements(api_licenses, api_requirements)
if errors:
- raise Exception('Included licenses not consistent with requirements:\n%s' %
- '\n'.join(errors))
+ raise Exception('Included licenses not consistent with requirements:\n%s' % '\n'.join(errors))
diff --git a/awx/main/tests/functional/test_named_url.py b/awx/main/tests/functional/test_named_url.py
index 6482dac3a8..e7bd9b4fae 100644
--- a/awx/main/tests/functional/test_named_url.py
+++ b/awx/main/tests/functional/test_named_url.py
@@ -7,9 +7,20 @@ from django.conf import settings
from awx.api.versioning import reverse
from awx.main.middleware import URLModificationMiddleware
from awx.main.models import ( # noqa
- Credential, CustomInventoryScript, Group, Host, Instance, InstanceGroup,
- Inventory, InventorySource, JobTemplate, NotificationTemplate,
- Organization, Project, User, WorkflowJobTemplate,
+ Credential,
+ CustomInventoryScript,
+ Group,
+ Host,
+ Instance,
+ InstanceGroup,
+ Inventory,
+ InventorySource,
+ JobTemplate,
+ NotificationTemplate,
+ Organization,
+ Project,
+ User,
+ WorkflowJobTemplate,
)
from awx.conf import settings_registry
@@ -118,8 +129,7 @@ def test_project(get, admin_user):
@pytest.mark.django_db
def test_notification_template(get, admin_user):
test_notification_template = NotificationTemplate.objects.create(
- name='test_note', notification_type='slack',
- notification_configuration=dict(channels=["Foo", "Bar"], token="token")
+ name='test_note', notification_type='slack', notification_configuration=dict(channels=["Foo", "Bar"], token="token")
)
url = reverse('api:notification_template_detail', kwargs={'pk': test_notification_template.pk})
response = get(url, user=admin_user, expect=200)
@@ -133,9 +143,7 @@ def test_notification_template(get, admin_user):
@pytest.mark.django_db
def test_instance(get, admin_user):
- test_instance = Instance.objects.create(
- uuid=settings.SYSTEM_UUID, hostname="localhost", capacity=100
- )
+ test_instance = Instance.objects.create(uuid=settings.SYSTEM_UUID, hostname="localhost", capacity=100)
url = reverse('api:instance_detail', kwargs={'pk': test_instance.pk})
response = get(url, user=admin_user, expect=200)
assert response.data['related']['named_url'].endswith('/localhost/')
@@ -186,11 +194,7 @@ def test_group(get, admin_user):
def test_inventory_source(get, admin_user):
test_org = Organization.objects.create(name='test_org')
test_inv = Inventory.objects.create(name='test_inv', organization=test_org)
- test_source = InventorySource.objects.create(
- name='test_source',
- inventory=test_inv,
- source='ec2'
- )
+ test_source = InventorySource.objects.create(name='test_source', inventory=test_inv, source='ec2')
url = reverse('api:inventory_source_detail', kwargs={'pk': test_source.pk})
response = get(url, user=admin_user, expect=200)
assert response.data['related']['named_url'].endswith('/test_source++test_inv++test_org/')
@@ -223,16 +227,8 @@ def test_credential(get, admin_user, credentialtype_ssh):
@pytest.mark.django_db
def test_403_vs_404(get):
- cindy = User.objects.create(
- username='cindy',
- password='test_user',
- is_superuser=False
- )
- bob = User.objects.create(
- username='bob',
- password='test_user',
- is_superuser=False
- )
+ cindy = User.objects.create(username='cindy', password='test_user', is_superuser=False)
+ bob = User.objects.create(username='bob', password='test_user', is_superuser=False)
# bob cannot see cindy, pk lookup should be a 403
url = reverse('api:user_detail', kwargs={'pk': cindy.pk})
diff --git a/awx/main/tests/functional/test_notifications.py b/awx/main/tests/functional/test_notifications.py
index 1c5e46fcda..f6ae506248 100644
--- a/awx/main/tests/functional/test_notifications.py
+++ b/awx/main/tests/functional/test_notifications.py
@@ -23,14 +23,17 @@ def test_get_notification_template_list(get, user, notification_template):
def test_basic_parameterization(get, post, user, organization):
u = user('admin-poster', True)
url = reverse('api:notification_template_list')
- response = post(url,
- dict(name="test-webhook",
- description="test webhook",
- organization=organization.id,
- notification_type="webhook",
- notification_configuration=dict(url="http://localhost", disable_ssl_verification=False,
- headers={"Test": "Header"})),
- u)
+ response = post(
+ url,
+ dict(
+ name="test-webhook",
+ description="test webhook",
+ organization=organization.id,
+ notification_type="webhook",
+ notification_configuration=dict(url="http://localhost", disable_ssl_verification=False, headers={"Test": "Header"}),
+ ),
+ u,
+ )
assert response.status_code == 201
url = reverse('api:notification_template_detail', kwargs={'pk': response.data['id']})
response = get(url, u)
@@ -51,18 +54,20 @@ def test_encrypted_subfields(get, post, user, organization):
def assert_send(self, messages):
assert self.account_token == "shouldhide"
return 1
+
u = user('admin-poster', True)
url = reverse('api:notification_template_list')
- response = post(url,
- dict(name="test-twilio",
- description="test twilio",
- organization=organization.id,
- notification_type="twilio",
- notification_configuration=dict(account_sid="dummy",
- account_token="shouldhide",
- from_number="+19999999999",
- to_numbers=["9998887777"])),
- u)
+ response = post(
+ url,
+ dict(
+ name="test-twilio",
+ description="test twilio",
+ organization=organization.id,
+ notification_type="twilio",
+ notification_configuration=dict(account_sid="dummy", account_token="shouldhide", from_number="+19999999999", to_numbers=["9998887777"]),
+ ),
+ u,
+ )
assert response.status_code == 201
notification_template_actual = NotificationTemplate.objects.get(id=response.data['id'])
url = reverse('api:notification_template_detail', kwargs={'pk': response.data['id']})
@@ -78,14 +83,17 @@ def test_inherited_notification_templates(get, post, user, organization, project
url = reverse('api:notification_template_list')
notification_templates = []
for nfiers in range(3):
- response = post(url,
- dict(name="test-webhook-{}".format(nfiers),
- description="test webhook {}".format(nfiers),
- organization=organization.id,
- notification_type="webhook",
- notification_configuration=dict(url="http://localhost", disable_ssl_verification=False,
- headers={"Test": "Header"})),
- u)
+ response = post(
+ url,
+ dict(
+ name="test-webhook-{}".format(nfiers),
+ description="test webhook {}".format(nfiers),
+ organization=organization.id,
+ notification_type="webhook",
+ notification_configuration=dict(url="http://localhost", disable_ssl_verification=False, headers={"Test": "Header"}),
+ ),
+ u,
+ )
assert response.status_code == 201
notification_templates.append(response.data['id'])
i = Inventory.objects.create(name='test', organization=organization)
@@ -98,20 +106,19 @@ def test_inherited_notification_templates(get, post, user, organization, project
@pytest.mark.django_db
def test_notification_template_simple_patch(patch, notification_template, admin):
- patch(reverse('api:notification_template_detail', kwargs={'pk': notification_template.id}), { 'name': 'foo'}, admin, expect=200)
+ patch(reverse('api:notification_template_detail', kwargs={'pk': notification_template.id}), {'name': 'foo'}, admin, expect=200)
@pytest.mark.django_db
def test_notification_template_invalid_notification_type(patch, notification_template, admin):
- patch(reverse('api:notification_template_detail', kwargs={'pk': notification_template.id}), { 'notification_type': 'invalid'}, admin, expect=400)
+ patch(reverse('api:notification_template_detail', kwargs={'pk': notification_template.id}), {'notification_type': 'invalid'}, admin, expect=400)
@pytest.mark.django_db
def test_disallow_delete_when_notifications_pending(delete, user, notification_template):
u = user('superuser', True)
url = reverse('api:notification_template_detail', kwargs={'pk': notification_template.id})
- Notification.objects.create(notification_template=notification_template,
- status='pending')
+ Notification.objects.create(notification_template=notification_template, status='pending')
response = delete(url, user=u)
assert response.status_code == 405
@@ -120,21 +127,26 @@ def test_disallow_delete_when_notifications_pending(delete, user, notification_t
def test_custom_environment_injection(post, user, organization):
u = user('admin-poster', True)
url = reverse('api:notification_template_list')
- response = post(url,
- dict(name="test-webhook",
- description="test webhook",
- organization=organization.id,
- notification_type="webhook",
- notification_configuration=dict(url="https://example.org", disable_ssl_verification=False,
- http_method="POST", headers={"Test": "Header"})),
- u)
+ response = post(
+ url,
+ dict(
+ name="test-webhook",
+ description="test webhook",
+ organization=organization.id,
+ notification_type="webhook",
+ notification_configuration=dict(url="https://example.org", disable_ssl_verification=False, http_method="POST", headers={"Test": "Header"}),
+ ),
+ u,
+ )
assert response.status_code == 201
template = NotificationTemplate.objects.get(pk=response.data['id'])
- with pytest.raises(ConnectionError), \
- mock.patch('django.conf.settings.AWX_TASK_ENV', {'HTTPS_PROXY': '192.168.50.100:1234'}), \
- mock.patch.object(HTTPAdapter, 'send') as fake_send:
+ with pytest.raises(ConnectionError), mock.patch('django.conf.settings.AWX_TASK_ENV', {'HTTPS_PROXY': '192.168.50.100:1234'}), mock.patch.object(
+ HTTPAdapter, 'send'
+ ) as fake_send:
+
def _send_side_effect(request, **kw):
assert select_proxy(request.url, kw['proxies']) == '192.168.50.100:1234'
raise ConnectionError()
+
fake_send.side_effect = _send_side_effect
template.send('subject', 'message')
diff --git a/awx/main/tests/functional/test_projects.py b/awx/main/tests/functional/test_projects.py
index ccfbd06627..b8471fda5d 100644
--- a/awx/main/tests/functional/test_projects.py
+++ b/awx/main/tests/functional/test_projects.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-from unittest import mock # noqa
+from unittest import mock # noqa
import pytest
from awx.api.versioning import reverse
@@ -14,15 +14,19 @@ from django.core.exceptions import ValidationError
#
@pytest.fixture
def team_project_list(organization_factory):
- objects = organization_factory('org-test',
- superusers=['admin'],
- users=['team1:alice', 'team2:bob'],
- teams=['team1', 'team2'],
- projects=['pteam1', 'pteam2', 'pshared'],
- roles=['team1.member_role:pteam1.admin_role',
- 'team2.member_role:pteam2.admin_role',
- 'team1.member_role:pshared.admin_role',
- 'team2.member_role:pshared.admin_role'])
+ objects = organization_factory(
+ 'org-test',
+ superusers=['admin'],
+ users=['team1:alice', 'team2:bob'],
+ teams=['team1', 'team2'],
+ projects=['pteam1', 'pteam2', 'pshared'],
+ roles=[
+ 'team1.member_role:pteam1.admin_role',
+ 'team2.member_role:pteam2.admin_role',
+ 'team1.member_role:pshared.admin_role',
+ 'team2.member_role:pshared.admin_role',
+ ],
+ )
return objects
@@ -47,33 +51,28 @@ def test_user_project_paged_list(get, organization_factory):
# first page has first project and no previous page
pk = objects.users.alice.pk
- url = reverse('api:user_projects_list', kwargs={'pk':pk,})
+ url = reverse(
+ 'api:user_projects_list',
+ kwargs={
+ 'pk': pk,
+ },
+ )
results = get(url, objects.users.alice, QUERY_STRING='page_size=1').data
assert results['count'] == 3
assert len(results['results']) == 1
assert results['previous'] is None
- assert results['next'] == (
- '/api/v2/users/%s/projects/?page=2&page_size=1' % pk
- )
+ assert results['next'] == ('/api/v2/users/%s/projects/?page=2&page_size=1' % pk)
# second page has one more, a previous and next page
- results = get(url, objects.users.alice,
- QUERY_STRING='page=2&page_size=1').data
+ results = get(url, objects.users.alice, QUERY_STRING='page=2&page_size=1').data
assert len(results['results']) == 1
- assert results['previous'] == (
- '/api/v2/users/%s/projects/?page=1&page_size=1' % pk
- )
- assert results['next'] == (
- '/api/v2/users/%s/projects/?page=3&page_size=1' % pk
- )
+ assert results['previous'] == ('/api/v2/users/%s/projects/?page=1&page_size=1' % pk)
+ assert results['next'] == ('/api/v2/users/%s/projects/?page=3&page_size=1' % pk)
# third page has last project and a previous page
- results = get(url, objects.users.alice,
- QUERY_STRING='page=3&page_size=1').data
+ results = get(url, objects.users.alice, QUERY_STRING='page=3&page_size=1').data
assert len(results['results']) == 1
- assert results['previous'] == (
- '/api/v2/users/%s/projects/?page=2&page_size=1' % pk
- )
+ assert results['previous'] == ('/api/v2/users/%s/projects/?page=2&page_size=1' % pk)
assert results['next'] is None
@@ -85,61 +84,122 @@ def test_user_project_paged_list_with_unicode(get, organization_factory):
# can search it and properly generate next/previous page links
objects = organization_factory(
'org1',
- projects=['project-☁-1','project-☁-2'],
+ projects=['project-☁-1', 'project-☁-2'],
users=['alice'],
- roles=['project-☁-1.admin_role:alice','project-☁-2.admin_role:alice'],
+ roles=['project-☁-1.admin_role:alice', 'project-☁-2.admin_role:alice'],
)
pk = objects.users.alice.pk
- url = reverse('api:user_projects_list', kwargs={'pk':pk,})
+ url = reverse(
+ 'api:user_projects_list',
+ kwargs={
+ 'pk': pk,
+ },
+ )
# first on first page, next page link contains unicode char
- results = get(url, objects.users.alice,
- QUERY_STRING='page_size=1&search=%E2%98%81').data
+ results = get(url, objects.users.alice, QUERY_STRING='page_size=1&search=%E2%98%81').data
assert results['count'] == 2
assert len(results['results']) == 1
- assert results['next'] == (
- '/api/v2/users/%s/projects/?page=2&page_size=1&search=%%E2%%98%%81' % pk # noqa
- )
+ assert results['next'] == ('/api/v2/users/%s/projects/?page=2&page_size=1&search=%%E2%%98%%81' % pk) # noqa
# second project on second page, previous page link contains unicode char
- results = get(url, objects.users.alice,
- QUERY_STRING='page=2&page_size=1&search=%E2%98%81').data
+ results = get(url, objects.users.alice, QUERY_STRING='page=2&page_size=1&search=%E2%98%81').data
assert results['count'] == 2
assert len(results['results']) == 1
- assert results['previous'] == (
- '/api/v2/users/%s/projects/?page=1&page_size=1&search=%%E2%%98%%81' % pk # noqa
- )
+ assert results['previous'] == ('/api/v2/users/%s/projects/?page=1&page_size=1&search=%%E2%%98%%81' % pk) # noqa
@pytest.mark.django_db
def test_user_project_list(get, organization_factory):
'List of projects a user has access to, filtered by projects you can also see'
- objects = organization_factory('org1',
- projects=['alice project', 'bob project', 'shared project'],
- superusers=['admin'],
- users=['alice', 'bob'],
- roles=['alice project.admin_role:alice',
- 'bob project.admin_role:bob',
- 'shared project.admin_role:bob',
- 'shared project.admin_role:alice'])
- assert get(reverse(
- 'api:user_projects_list',
- kwargs={'pk':objects.superusers.admin.pk,}
- ), objects.superusers.admin).data['count'] == 3
+ objects = organization_factory(
+ 'org1',
+ projects=['alice project', 'bob project', 'shared project'],
+ superusers=['admin'],
+ users=['alice', 'bob'],
+ roles=['alice project.admin_role:alice', 'bob project.admin_role:bob', 'shared project.admin_role:bob', 'shared project.admin_role:alice'],
+ )
+ assert (
+ get(
+ reverse(
+ 'api:user_projects_list',
+ kwargs={
+ 'pk': objects.superusers.admin.pk,
+ },
+ ),
+ objects.superusers.admin,
+ ).data['count']
+ == 3
+ )
    # admins can see everyone's projects
- assert get(reverse('api:user_projects_list', kwargs={'pk':objects.users.alice.pk,}), objects.superusers.admin).data['count'] == 2
- assert get(reverse('api:user_projects_list', kwargs={'pk':objects.users.bob.pk,}), objects.superusers.admin).data['count'] == 2
+ assert (
+ get(
+ reverse(
+ 'api:user_projects_list',
+ kwargs={
+ 'pk': objects.users.alice.pk,
+ },
+ ),
+ objects.superusers.admin,
+ ).data['count']
+ == 2
+ )
+ assert (
+ get(
+ reverse(
+ 'api:user_projects_list',
+ kwargs={
+ 'pk': objects.users.bob.pk,
+ },
+ ),
+ objects.superusers.admin,
+ ).data['count']
+ == 2
+ )
# users can see their own projects
- assert get(reverse('api:user_projects_list', kwargs={'pk':objects.users.alice.pk,}), objects.users.alice).data['count'] == 2
+ assert (
+ get(
+ reverse(
+ 'api:user_projects_list',
+ kwargs={
+ 'pk': objects.users.alice.pk,
+ },
+ ),
+ objects.users.alice,
+ ).data['count']
+ == 2
+ )
    # alice should only be able to see the shared project when looking at bob's projects
- assert get(reverse('api:user_projects_list', kwargs={'pk':objects.users.bob.pk,}), objects.users.alice).data['count'] == 1
+ assert (
+ get(
+ reverse(
+ 'api:user_projects_list',
+ kwargs={
+ 'pk': objects.users.bob.pk,
+ },
+ ),
+ objects.users.alice,
+ ).data['count']
+ == 1
+ )
    # alice should see all projects she can see when viewing an admin
- assert get(reverse('api:user_projects_list', kwargs={'pk':objects.superusers.admin.pk,}), objects.users.alice).data['count'] == 2
+ assert (
+ get(
+ reverse(
+ 'api:user_projects_list',
+ kwargs={
+ 'pk': objects.superusers.admin.pk,
+ },
+ ),
+ objects.users.alice,
+ ).data['count']
+ == 2
+ )
@pytest.mark.django_db
@@ -150,37 +210,131 @@ def test_team_project_list(get, team_project_list):
alice, bob, admin = objects.users.alice, objects.users.bob, objects.superusers.admin
# admins can see all projects on a team
- assert get(reverse('api:team_projects_list', kwargs={'pk':team1.pk,}), admin).data['count'] == 2
- assert get(reverse('api:team_projects_list', kwargs={'pk':team2.pk,}), admin).data['count'] == 2
+ assert (
+ get(
+ reverse(
+ 'api:team_projects_list',
+ kwargs={
+ 'pk': team1.pk,
+ },
+ ),
+ admin,
+ ).data['count']
+ == 2
+ )
+ assert (
+ get(
+ reverse(
+ 'api:team_projects_list',
+ kwargs={
+ 'pk': team2.pk,
+ },
+ ),
+ admin,
+ ).data['count']
+ == 2
+ )
# users can see all projects on teams they are a member of
- assert get(reverse('api:team_projects_list', kwargs={'pk':team1.pk,}), alice).data['count'] == 2
+ assert (
+ get(
+ reverse(
+ 'api:team_projects_list',
+ kwargs={
+ 'pk': team1.pk,
+ },
+ ),
+ alice,
+ ).data['count']
+ == 2
+ )
    # but if alice joins team2, then she should only see the shared project
team2.read_role.members.add(alice)
- assert get(reverse('api:team_projects_list', kwargs={'pk':team2.pk,}), alice).data['count'] == 1
+ assert (
+ get(
+ reverse(
+ 'api:team_projects_list',
+ kwargs={
+ 'pk': team2.pk,
+ },
+ ),
+ alice,
+ ).data['count']
+ == 1
+ )
team2.read_role.members.remove(alice)
# admins can see all projects
- assert get(reverse('api:user_projects_list', kwargs={'pk':admin.pk,}), admin).data['count'] == 3
+ assert (
+ get(
+ reverse(
+ 'api:user_projects_list',
+ kwargs={
+ 'pk': admin.pk,
+ },
+ ),
+ admin,
+ ).data['count']
+ == 3
+ )
    # admins can see everyone's projects
- assert get(reverse('api:user_projects_list', kwargs={'pk':alice.pk,}), admin).data['count'] == 2
- assert get(reverse('api:user_projects_list', kwargs={'pk':bob.pk,}), admin).data['count'] == 2
+ assert (
+ get(
+ reverse(
+ 'api:user_projects_list',
+ kwargs={
+ 'pk': alice.pk,
+ },
+ ),
+ admin,
+ ).data['count']
+ == 2
+ )
+ assert (
+ get(
+ reverse(
+ 'api:user_projects_list',
+ kwargs={
+ 'pk': bob.pk,
+ },
+ ),
+ admin,
+ ).data['count']
+ == 2
+ )
# users can see their own projects
- assert get(reverse('api:user_projects_list', kwargs={'pk':alice.pk,}), alice).data['count'] == 2
+ assert (
+ get(
+ reverse(
+ 'api:user_projects_list',
+ kwargs={
+ 'pk': alice.pk,
+ },
+ ),
+ alice,
+ ).data['count']
+ == 2
+ )
    # alice should see all projects she can see when viewing an admin
- assert get(reverse('api:user_projects_list', kwargs={'pk':admin.pk,}), alice).data['count'] == 2
+ assert (
+ get(
+ reverse(
+ 'api:user_projects_list',
+ kwargs={
+ 'pk': admin.pk,
+ },
+ ),
+ alice,
+ ).data['count']
+ == 2
+ )
-@pytest.mark.parametrize("u,expected_status_code", [
- ('rando', 403),
- ('org_member', 403),
- ('org_admin', 201),
- ('admin', 201)
-])
+@pytest.mark.parametrize("u,expected_status_code", [('rando', 403), ('org_member', 403), ('org_admin', 201), ('admin', 201)])
@pytest.mark.django_db()
def test_create_project(post, organization, org_admin, org_member, admin, rando, u, expected_status_code):
if u == 'rando':
@@ -192,10 +346,14 @@ def test_create_project(post, organization, org_admin, org_member, admin, rando,
elif u == 'admin':
u = admin
- result = post(reverse('api:project_list'), {
- 'name': 'Project',
- 'organization': organization.id,
- }, u)
+ result = post(
+ reverse('api:project_list'),
+ {
+ 'name': 'Project',
+ 'organization': organization.id,
+ },
+ u,
+ )
print(result.data)
assert result.status_code == expected_status_code
if expected_status_code == 201:
@@ -206,30 +364,22 @@ def test_create_project(post, organization, org_admin, org_member, admin, rando,
def test_project_credential_protection(post, put, project, organization, scm_credential, org_admin):
project.save()
project.admin_role.members.add(org_admin)
- put(
- reverse('api:project_detail', kwargs={'pk':project.id}), {
- 'name': 'should not change',
- 'credential': scm_credential.id
- }, org_admin, expect=403
- )
- post(
- reverse('api:project_list'), {
- 'name': 'should not create',
- 'organization':organization.id,
- 'credential': scm_credential.id
- }, org_admin, expect=403
- )
+ put(reverse('api:project_detail', kwargs={'pk': project.id}), {'name': 'should not change', 'credential': scm_credential.id}, org_admin, expect=403)
+ post(reverse('api:project_list'), {'name': 'should not create', 'organization': organization.id, 'credential': scm_credential.id}, org_admin, expect=403)
@pytest.mark.django_db
def test_cannot_schedule_manual_project(manual_project, admin_user, post):
response = post(
- reverse('api:project_schedules_list', kwargs={'pk':manual_project.pk,}),
- {
- "name": "foo", "description": "", "enabled": True,
- "rrule": "DTSTART:20160926T040000Z RRULE:FREQ=HOURLY;INTERVAL=1",
- "extra_data": {}
- }, admin_user, expect=400
+ reverse(
+ 'api:project_schedules_list',
+ kwargs={
+ 'pk': manual_project.pk,
+ },
+ ),
+ {"name": "foo", "description": "", "enabled": True, "rrule": "DTSTART:20160926T040000Z RRULE:FREQ=HOURLY;INTERVAL=1", "extra_data": {}},
+ admin_user,
+ expect=400,
)
assert 'Manual' in response.data['unified_job_template'][0]
@@ -249,4 +399,12 @@ def test_project_unique_together_with_org(organization):
def test_project_delete(delete, organization, admin_user):
proj = Project(name='foo', organization=organization)
proj.save()
- delete(reverse('api:project_detail', kwargs={'pk':proj.id,}), admin_user)
+ delete(
+ reverse(
+ 'api:project_detail',
+ kwargs={
+ 'pk': proj.id,
+ },
+ ),
+ admin_user,
+ )
diff --git a/awx/main/tests/functional/test_python_requirements.py b/awx/main/tests/functional/test_python_requirements.py
index 205e0cd6f7..d363b91db1 100644
--- a/awx/main/tests/functional/test_python_requirements.py
+++ b/awx/main/tests/functional/test_python_requirements.py
@@ -1,4 +1,3 @@
-
import os
import re
import pytest
@@ -18,9 +17,11 @@ def test_env_matches_requirements_txt():
def skip_line(line):
return (
- line == '' or line.strip().startswith('#') or
- line.strip().startswith('git') or line.startswith('-e') or
- '## The following requirements were added by pip freeze' in line
+ line == ''
+ or line.strip().startswith('#')
+ or line.strip().startswith('git')
+ or line.startswith('-e')
+ or '## The following requirements were added by pip freeze' in line
)
base_dir = settings.BASE_DIR
@@ -60,5 +61,3 @@ def test_env_matches_requirements_txt():
if len(not_found) > 0:
raise RuntimeError("%s not found in \n\n%s" % (not_found, reqs_actual))
-
-
diff --git a/awx/main/tests/functional/test_rbac_api.py b/awx/main/tests/functional/test_rbac_api.py
index 2e699da361..b697ef3144 100644
--- a/awx/main/tests/functional/test_rbac_api.py
+++ b/awx/main/tests/functional/test_rbac_api.py
@@ -1,4 +1,4 @@
-from unittest import mock # noqa
+from unittest import mock # noqa
import pytest
from django.db import transaction
@@ -6,7 +6,6 @@ from awx.api.versioning import reverse
from awx.main.models.rbac import Role, ROLE_SINGLETON_SYSTEM_ADMINISTRATOR
-
@pytest.fixture
def role():
return Role.objects.create(role_field='admin_role')
@@ -40,7 +39,7 @@ def test_get_roles_list_user(organization, inventory, team, get, user):
assert response.status_code == 200
roles = response.data
assert roles['count'] > 0
- assert roles['count'] == len(roles['results']) # just to make sure the tests below are valid
+ assert roles['count'] == len(roles['results']) # just to make sure the tests below are valid
role_hash = {}
@@ -77,7 +76,7 @@ def test_roles_filter_visibility(get, organization, project, admin, alice, bob):
organization.auditor_role.members.add(bob)
assert get(reverse('api:user_roles_list', kwargs={'pk': admin.id}) + '?id=%d' % project.update_role.id, user=bob).data['count'] == 1
organization.auditor_role.members.remove(bob)
- project.use_role.members.add(bob) # sibling role should still grant visibility
+ project.use_role.members.add(bob) # sibling role should still grant visibility
assert get(reverse('api:user_roles_list', kwargs={'pk': admin.id}) + '?id=%d' % project.update_role.id, user=bob).data['count'] == 1
@@ -112,7 +111,7 @@ def test_get_user_roles_list(get, admin):
response = get(url, admin)
assert response.status_code == 200
roles = response.data
- assert roles['count'] > 0 # 'system_administrator' role if nothing else
+ assert roles['count'] > 0 # 'system_administrator' role if nothing else
@pytest.mark.django_db
@@ -134,17 +133,17 @@ def test_user_view_other_user_roles(organization, inventory, team, get, alice, b
assert response.status_code == 200
roles = response.data
assert roles['count'] > 0
- assert roles['count'] == len(roles['results']) # just to make sure the tests below are valid
+ assert roles['count'] == len(roles['results']) # just to make sure the tests below are valid
role_hash = {}
for r in roles['results']:
role_hash[r['id']] = r['name']
assert organization.admin_role.id in role_hash
- assert custom_role.id not in role_hash # doesn't show up in the user roles list, not an explicit grant
+ assert custom_role.id not in role_hash # doesn't show up in the user roles list, not an explicit grant
assert Role.singleton(ROLE_SINGLETON_SYSTEM_ADMINISTRATOR).id not in role_hash
assert inventory.admin_role.id not in role_hash
- assert team.member_role.id not in role_hash # alice can't see this
+ assert team.member_role.id not in role_hash # alice can't see this
# again but this time alice is part of the team, and should be able to see the team role
team.member_role.members.add(alice)
@@ -152,13 +151,13 @@ def test_user_view_other_user_roles(organization, inventory, team, get, alice, b
assert response.status_code == 200
roles = response.data
assert roles['count'] > 0
- assert roles['count'] == len(roles['results']) # just to make sure the tests below are valid
+ assert roles['count'] == len(roles['results']) # just to make sure the tests below are valid
role_hash = {}
for r in roles['results']:
role_hash[r['id']] = r['name']
- assert team.member_role.id in role_hash # Alice can now see this
+ assert team.member_role.id in role_hash # Alice can now see this
@pytest.mark.django_db
@@ -258,8 +257,8 @@ def test_put_role_405(put, admin, role):
url = reverse('api:role_detail', kwargs={'pk': role.id})
response = put(url, {'name': 'Some new name'}, admin)
assert response.status_code == 405
- #r = Role.objects.get(id=role.id)
- #assert r.name == 'Some new name'
+ # r = Role.objects.get(id=role.id)
+ # assert r.name == 'Some new name'
@pytest.mark.django_db
diff --git a/awx/main/tests/functional/test_rbac_core.py b/awx/main/tests/functional/test_rbac_core.py
index abec03da45..7029bbe544 100644
--- a/awx/main/tests/functional/test_rbac_core.py
+++ b/awx/main/tests/functional/test_rbac_core.py
@@ -92,7 +92,7 @@ def test_auto_field_adjustments(organization, inventory, team, alice):
inventory.organization = organization
inventory.save()
assert alice not in inventory.admin_role
- #assert False
+ # assert False
@pytest.mark.django_db
diff --git a/awx/main/tests/functional/test_rbac_credential.py b/awx/main/tests/functional/test_rbac_credential.py
index f37260eb65..7b208f644f 100644
--- a/awx/main/tests/functional/test_rbac_credential.py
+++ b/awx/main/tests/functional/test_rbac_credential.py
@@ -41,9 +41,7 @@ def test_credential_access_org_user(org_member, org_admin, ext_auth):
@pytest.mark.django_db
def test_credential_access_auditor(credential, organization_factory):
- objects = organization_factory("org_cred_auditor",
- users=["user1"],
- roles=['org_cred_auditor.auditor_role:user1'])
+ objects = organization_factory("org_cred_auditor", users=["user1"], roles=['org_cred_auditor.auditor_role:user1'])
credential.organization = objects.organization
credential.save()
@@ -55,9 +53,7 @@ def test_credential_access_auditor(credential, organization_factory):
def test_credential_access_member(alice, credential):
credential.admin_role.members.add(alice)
access = CredentialAccess(alice)
- assert access.can_change(credential, {
- 'description': 'New description.',
- 'organization': None})
+ assert access.can_change(credential, {'description': 'New description.', 'organization': None})
@pytest.mark.django_db
@@ -69,9 +65,7 @@ def test_org_credential_access_admin(role_name, alice, org_credential):
access = CredentialAccess(alice)
# Alice should be able to PATCH if organization is not changed
- assert access.can_change(org_credential, {
- 'description': 'New description.',
- 'organization': org_credential.organization.pk})
+ assert access.can_change(org_credential, {'description': 'New description.', 'organization': org_credential.organization.pk})
@pytest.mark.django_db
@@ -80,11 +74,7 @@ def test_org_and_user_credential_access(alice, organization):
in another org without any permissions to that org
"""
# Owner is both user and org, but org permission should still be checked
- assert not CredentialAccess(alice).can_add({
- 'name': 'New credential.',
- 'user': alice.pk,
- 'organization': organization.pk
- })
+ assert not CredentialAccess(alice).can_add({'name': 'New credential.', 'user': alice.pk, 'organization': organization.pk})
@pytest.mark.django_db
@@ -94,11 +84,8 @@ def test_org_credential_access_member(alice, org_credential):
access = CredentialAccess(alice)
# Alice should be able to PATCH if organization is not changed
- assert access.can_change(org_credential, {
- 'description': 'New description.',
- 'organization': org_credential.organization.pk})
- assert access.can_change(org_credential, {
- 'description': 'New description.'})
+ assert access.can_change(org_credential, {'description': 'New description.', 'organization': org_credential.organization.pk})
+ assert access.can_change(org_credential, {'description': 'New description.'})
@pytest.mark.django_db
diff --git a/awx/main/tests/functional/test_rbac_instance_groups.py b/awx/main/tests/functional/test_rbac_instance_groups.py
index ae2a143340..402040ea21 100644
--- a/awx/main/tests/functional/test_rbac_instance_groups.py
+++ b/awx/main/tests/functional/test_rbac_instance_groups.py
@@ -65,8 +65,7 @@ def test_ig_associability(organization, default_instance_group, admin, system_au
assert not auditor_access.can_unattach(organization, default_instance_group, 'instance_groups', None)
assert not omember_access.can_unattach(organization, default_instance_group, 'instance_groups', None)
- objects = job_template_factory('jt', organization=organization, project='p',
- inventory='i', credential='c')
+ objects = job_template_factory('jt', organization=organization, project='p', inventory='i', credential='c')
admin_access = InventoryAccess(admin)
auditor_access = InventoryAccess(system_auditor)
oadmin_access = InventoryAccess(org_admin)
diff --git a/awx/main/tests/functional/test_rbac_inventory.py b/awx/main/tests/functional/test_rbac_inventory.py
index 508b2e0773..1e4b47e45e 100644
--- a/awx/main/tests/functional/test_rbac_inventory.py
+++ b/awx/main/tests/functional/test_rbac_inventory.py
@@ -34,13 +34,11 @@ def test_custom_inv_script_access(organization, user):
@pytest.fixture
def custom_inv(organization):
- return CustomInventoryScript.objects.create(
- name='test', script='test', description='test', organization=organization)
+ return CustomInventoryScript.objects.create(name='test', script='test', description='test', organization=organization)
@pytest.mark.django_db
-def test_modify_inv_script_foreign_org_admin(
- org_admin, organization, organization_factory, project, custom_inv):
+def test_modify_inv_script_foreign_org_admin(org_admin, organization, organization_factory, project, custom_inv):
other_org = organization_factory('not-my-org').organization
access = CustomInventoryScriptAccess(org_admin)
assert not access.can_change(custom_inv, {'organization': other_org.pk, 'name': 'new-project'})
@@ -59,11 +57,7 @@ def test_copy_only_admin(org_member, organization, custom_inv):
custom_inv.admin_role.members.add(org_member)
access = CustomInventoryScriptAccess(org_member)
assert not access.can_copy(custom_inv)
- assert access.get_user_capabilities(custom_inv, method_list=['edit', 'delete', 'copy']) == {
- 'edit': True,
- 'delete': True,
- 'copy': False
- }
+ assert access.get_user_capabilities(custom_inv, method_list=['edit', 'delete', 'copy']) == {'edit': True, 'delete': True, 'copy': False}
@pytest.mark.django_db
@@ -111,13 +105,7 @@ def test_inventory_update_org_admin(inventory_update, org_admin):
assert access.can_delete(inventory_update)
-@pytest.mark.parametrize("role_field,allowed", [
- (None, False),
- ('admin_role', True),
- ('update_role', False),
- ('adhoc_role', False),
- ('use_role', False)
-])
+@pytest.mark.parametrize("role_field,allowed", [(None, False), ('admin_role', True), ('update_role', False), ('adhoc_role', False), ('use_role', False)])
@pytest.mark.django_db
def test_inventory_source_delete(inventory_source, alice, role_field, allowed):
if role_field:
@@ -126,13 +114,7 @@ def test_inventory_source_delete(inventory_source, alice, role_field, allowed):
# See companion test in tests/functional/api/test_inventory.py::test_inventory_update_access_called
-@pytest.mark.parametrize("role_field,allowed", [
- (None, False),
- ('admin_role', True),
- ('update_role', True),
- ('adhoc_role', False),
- ('use_role', False)
-])
+@pytest.mark.parametrize("role_field,allowed", [(None, False), ('admin_role', True), ('update_role', True), ('adhoc_role', False), ('use_role', False)])
@pytest.mark.django_db
def test_inventory_source_update(inventory_source, alice, role_field, allowed):
if role_field:
@@ -175,9 +157,7 @@ def test_inventory_source_credential_check(rando, inventory_source, credential):
@pytest.mark.django_db
def test_inventory_source_org_admin_schedule_access(org_admin, inventory_source):
- schedule = Schedule.objects.create(
- unified_job_template=inventory_source,
- rrule='DTSTART:20151117T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1')
+ schedule = Schedule.objects.create(unified_job_template=inventory_source, rrule='DTSTART:20151117T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1')
access = ScheduleAccess(org_admin)
assert access.get_queryset()
assert access.can_read(schedule)
@@ -191,7 +171,6 @@ def smart_inventory(organization):
@pytest.mark.django_db
class TestSmartInventory:
-
def test_host_filter_edit(self, smart_inventory, rando, org_admin):
assert InventoryAccess(org_admin).can_admin(smart_inventory, {'host_filter': 'search=foo'})
smart_inventory.admin_role.members.add(rando)
diff --git a/awx/main/tests/functional/test_rbac_job.py b/awx/main/tests/functional/test_rbac_job.py
index ed3aee47cb..f260f7b72c 100644
--- a/awx/main/tests/functional/test_rbac_job.py
+++ b/awx/main/tests/functional/test_rbac_job.py
@@ -2,24 +2,8 @@ import pytest
from rest_framework.exceptions import PermissionDenied
-from awx.main.access import (
- JobAccess,
- JobLaunchConfigAccess,
- AdHocCommandAccess,
- InventoryUpdateAccess,
- ProjectUpdateAccess
-)
-from awx.main.models import (
- Job,
- JobLaunchConfig,
- JobTemplate,
- AdHocCommand,
- InventoryUpdate,
- InventorySource,
- ProjectUpdate,
- User,
- Credential
-)
+from awx.main.access import JobAccess, JobLaunchConfigAccess, AdHocCommandAccess, InventoryUpdateAccess, ProjectUpdateAccess
+from awx.main.models import Job, JobLaunchConfig, JobTemplate, AdHocCommand, InventoryUpdate, InventorySource, ProjectUpdate, User, Credential
from crum import impersonate
@@ -30,7 +14,7 @@ def normal_job(deploy_jobtemplate):
job_template=deploy_jobtemplate,
project=deploy_jobtemplate.project,
inventory=deploy_jobtemplate.inventory,
- organization=deploy_jobtemplate.organization
+ organization=deploy_jobtemplate.organization,
)
@@ -71,8 +55,7 @@ def test_superuser_superauditor_sees_orphans(normal_job, superuser, admin_user,
normal_job.project = None
normal_job.inventory = None
access = JobAccess(u)
- assert access.can_read(normal_job), "User sys auditor: {}, sys admin: {}".format(
- u.is_system_auditor, u.is_superuser)
+ assert access.can_read(normal_job), "User sys auditor: {}, sys admin: {}".format(u.is_system_auditor, u.is_superuser)
@pytest.mark.django_db
@@ -130,37 +113,32 @@ def test_delete_job_with_orphan_proj(normal_job, rando):
@pytest.mark.django_db
def test_inventory_org_admin_delete_allowed(normal_job, org_admin):
- normal_job.project = None # do this so we test job->inventory->org->admin connection
+ normal_job.project = None # do this so we test job->inventory->org->admin connection
access = JobAccess(org_admin)
assert access.can_delete(normal_job)
@pytest.mark.django_db
def test_project_org_admin_delete_allowed(normal_job, org_admin):
- normal_job.inventory = None # do this so we test job->project->org->admin connection
+ normal_job.inventory = None # do this so we test job->project->org->admin connection
access = JobAccess(org_admin)
assert access.can_delete(normal_job)
@pytest.mark.django_db
class TestJobRelaunchAccess:
-
- @pytest.mark.parametrize("inv_access,cred_access,can_start", [
- (True, True, True), # Confirm that a user with inventory & credential access can launch
- (False, True, False), # Confirm that a user with credential access alone cannot launch
- (True, False, False), # Confirm that a user with inventory access alone cannot launch
- ])
- def test_job_relaunch_resource_access(self, user, inventory, machine_credential,
- inv_access, cred_access, can_start):
- job_template = JobTemplate.objects.create(
- ask_inventory_on_launch=True,
- ask_credential_on_launch=True
- )
+ @pytest.mark.parametrize(
+ "inv_access,cred_access,can_start",
+ [
+ (True, True, True), # Confirm that a user with inventory & credential access can launch
+ (False, True, False), # Confirm that a user with credential access alone cannot launch
+ (True, False, False), # Confirm that a user with inventory access alone cannot launch
+ ],
+ )
+ def test_job_relaunch_resource_access(self, user, inventory, machine_credential, inv_access, cred_access, can_start):
+ job_template = JobTemplate.objects.create(ask_inventory_on_launch=True, ask_credential_on_launch=True)
u = user('user1', False)
- job_with_links = Job.objects.create(
- name='existing-job', inventory=inventory, job_template=job_template,
- created_by=u
- )
+ job_with_links = Job.objects.create(name='existing-job', inventory=inventory, job_template=job_template, created_by=u)
job_with_links.credentials.add(machine_credential)
JobLaunchConfig.objects.create(job=job_with_links, inventory=inventory)
job_with_links.launch_config.credentials.add(machine_credential) # credential was prompted
@@ -177,8 +155,7 @@ class TestJobRelaunchAccess:
with pytest.raises(PermissionDenied):
access.can_start(job_with_links, validate_license=False)
- def test_job_relaunch_credential_access(
- self, inventory, project, credential, net_credential):
+ def test_job_relaunch_credential_access(self, inventory, project, credential, net_credential):
jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project)
jt.credentials.add(credential)
job = jt.create_unified_job()
@@ -193,11 +170,8 @@ class TestJobRelaunchAccess:
with pytest.raises(PermissionDenied):
jt_user.can_access(Job, 'start', job, validate_license=False)
- def test_prompted_credential_relaunch_denied(
- self, inventory, project, net_credential, rando):
- jt = JobTemplate.objects.create(
- name='testjt', inventory=inventory, project=project,
- ask_credential_on_launch=True)
+ def test_prompted_credential_relaunch_denied(self, inventory, project, net_credential, rando):
+ jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project, ask_credential_on_launch=True)
job = jt.create_unified_job()
jt.execute_role.members.add(rando)
assert rando.can_access(Job, 'start', job, validate_license=False)
@@ -207,11 +181,8 @@ class TestJobRelaunchAccess:
with pytest.raises(PermissionDenied):
rando.can_access(Job, 'start', job, validate_license=False)
- def test_prompted_credential_relaunch_allowed(
- self, inventory, project, net_credential, rando):
- jt = JobTemplate.objects.create(
- name='testjt', inventory=inventory, project=project,
- ask_credential_on_launch=True)
+ def test_prompted_credential_relaunch_allowed(self, inventory, project, net_credential, rando):
+ jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project, ask_credential_on_launch=True)
job = jt.create_unified_job()
jt.execute_role.members.add(rando)
@@ -220,11 +191,8 @@ class TestJobRelaunchAccess:
job.credentials.add(net_credential)
assert rando.can_access(Job, 'start', job, validate_license=False)
- def test_credential_relaunch_recreation_permission(
- self, inventory, project, net_credential, credential, rando):
- jt = JobTemplate.objects.create(
- name='testjt', inventory=inventory, project=project,
- ask_credential_on_launch=True)
+ def test_credential_relaunch_recreation_permission(self, inventory, project, net_credential, credential, rando):
+ jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project, ask_credential_on_launch=True)
job = jt.create_unified_job()
project.admin_role.members.add(rando)
inventory.admin_role.members.add(rando)
@@ -238,20 +206,14 @@ class TestJobRelaunchAccess:
@pytest.mark.job_runtime_vars
def test_callback_relaunchable_by_user(self, job_template, rando):
with impersonate(rando):
- job = job_template.create_unified_job(
- _eager_fields={'launch_type': 'callback'},
- limit='host2'
- )
+ job = job_template.create_unified_job(_eager_fields={'launch_type': 'callback'}, limit='host2')
assert 'limit' in job.launch_config.prompts_dict() # sanity assertion
job_template.execute_role.members.add(rando)
can_access, messages = rando.can_access_with_errors(Job, 'start', job, validate_license=False)
assert can_access, messages
def test_other_user_prompts(self, inventory, project, alice, bob):
- jt = JobTemplate.objects.create(
- name='testjt', inventory=inventory, project=project,
- ask_credential_on_launch=True,
- ask_variables_on_launch=True)
+ jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project, ask_credential_on_launch=True, ask_variables_on_launch=True)
jt.execute_role.members.add(alice, bob)
with impersonate(bob):
@@ -295,16 +257,12 @@ class TestJobAndUpdateCancels:
# used in view: inventory_source_update_view
def test_inventory_self_cancel(self, inventory, inv_updater):
- inventory_update = InventoryUpdate(inventory_source=InventorySource(
- name=inventory.name, inventory=inventory, source='gce'
- ), created_by=inv_updater)
+ inventory_update = InventoryUpdate(inventory_source=InventorySource(name=inventory.name, inventory=inventory, source='gce'), created_by=inv_updater)
access = InventoryUpdateAccess(inv_updater)
assert access.can_cancel(inventory_update)
def test_inventory_friend_cancel(self, inventory, admin_user, inv_updater):
- inventory_update = InventoryUpdate(inventory_source=InventorySource(
- name=inventory.name, inventory=inventory, source='gce'
- ), created_by=admin_user)
+ inventory_update = InventoryUpdate(inventory_source=InventorySource(name=inventory.name, inventory=inventory, source='gce'), created_by=admin_user)
access = InventoryUpdateAccess(inv_updater)
assert not access.can_cancel(inventory_update)
@@ -322,15 +280,10 @@ class TestJobAndUpdateCancels:
@pytest.mark.django_db
class TestLaunchConfigAccess:
-
def _make_two_credentials(self, cred_type):
return (
- Credential.objects.create(
- credential_type=cred_type, name='machine-cred-1',
- inputs={'username': 'test_user', 'password': 'pas4word'}),
- Credential.objects.create(
- credential_type=cred_type, name='machine-cred-2',
- inputs={'username': 'test_user', 'password': 'pas4word'})
+ Credential.objects.create(credential_type=cred_type, name='machine-cred-1', inputs={'username': 'test_user', 'password': 'pas4word'}),
+ Credential.objects.create(credential_type=cred_type, name='machine-cred-2', inputs={'username': 'test_user', 'password': 'pas4word'}),
)
def test_new_credentials_access(self, credentialtype_ssh, rando):
diff --git a/awx/main/tests/functional/test_rbac_job_start.py b/awx/main/tests/functional/test_rbac_job_start.py
index 6fa34cc874..6d03e8dc2e 100644
--- a/awx/main/tests/functional/test_rbac_job_start.py
+++ b/awx/main/tests/functional/test_rbac_job_start.py
@@ -5,11 +5,7 @@ from rest_framework.exceptions import PermissionDenied
from awx.main.models.inventory import Inventory
from awx.main.models.credential import Credential
from awx.main.models.jobs import JobTemplate, Job
-from awx.main.access import (
- UnifiedJobAccess,
- WorkflowJobAccess, WorkflowJobNodeAccess,
- JobAccess
-)
+from awx.main.access import UnifiedJobAccess, WorkflowJobAccess, WorkflowJobNodeAccess, JobAccess
@pytest.mark.django_db
@@ -99,19 +95,19 @@ class TestJobRelaunchAccess:
@pytest.fixture
def job_with_prompts(self, machine_credential, inventory, organization, credentialtype_ssh):
jt = JobTemplate.objects.create(
- name='test-job-template-prompts', inventory=inventory,
- ask_tags_on_launch=True, ask_variables_on_launch=True, ask_skip_tags_on_launch=True,
- ask_limit_on_launch=True, ask_job_type_on_launch=True, ask_verbosity_on_launch=True,
- ask_inventory_on_launch=True, ask_credential_on_launch=True)
- jt.credentials.add(machine_credential)
- new_cred = Credential.objects.create(
- name='new-cred',
- credential_type=credentialtype_ssh,
- inputs={
- 'username': 'test_user',
- 'password': 'pas4word'
- }
+ name='test-job-template-prompts',
+ inventory=inventory,
+ ask_tags_on_launch=True,
+ ask_variables_on_launch=True,
+ ask_skip_tags_on_launch=True,
+ ask_limit_on_launch=True,
+ ask_job_type_on_launch=True,
+ ask_verbosity_on_launch=True,
+ ask_inventory_on_launch=True,
+ ask_credential_on_launch=True,
)
+ jt.credentials.add(machine_credential)
+ new_cred = Credential.objects.create(name='new-cred', credential_type=credentialtype_ssh, inputs={'username': 'test_user', 'password': 'pas4word'})
new_cred.save()
new_inv = Inventory.objects.create(name='new-inv', organization=organization)
return jt.create_unified_job(credentials=[new_cred], inventory=new_inv)
diff --git a/awx/main/tests/functional/test_rbac_job_templates.py b/awx/main/tests/functional/test_rbac_job_templates.py
index 99dc1e22ce..7a9add0f37 100644
--- a/awx/main/tests/functional/test_rbac_job_templates.py
+++ b/awx/main/tests/functional/test_rbac_job_templates.py
@@ -2,11 +2,7 @@ from unittest import mock
import pytest
from awx.api.versioning import reverse
-from awx.main.access import (
- BaseAccess,
- JobTemplateAccess,
- ScheduleAccess
-)
+from awx.main.access import BaseAccess, JobTemplateAccess, ScheduleAccess
from awx.main.models.jobs import JobTemplate
from awx.main.models import Project, Organization, Inventory, Schedule, User
@@ -72,7 +68,9 @@ def test_project_use_access(project, rando):
assert access.can_add(None)
assert access.can_add({'project': project.id, 'ask_inventory_on_launch': True})
project2 = Project.objects.create(
- name='second-project', scm_type=project.scm_type, playbook_files=project.playbook_files,
+ name='second-project',
+ scm_type=project.scm_type,
+ playbook_files=project.playbook_files,
organization=project.organization,
)
project2.use_role.members.add(rando)
@@ -128,45 +126,30 @@ def test_job_template_access_admin(role_names, jt_linked, rando):
@pytest.mark.django_db
-def test_job_template_credentials_prompts_access(
- rando, post, inventory, project, machine_credential, vault_credential):
- jt = JobTemplate.objects.create(
- name = 'test-jt',
- project = project,
- playbook = 'helloworld.yml',
- inventory = inventory,
- ask_credential_on_launch = True
- )
+def test_job_template_credentials_prompts_access(rando, post, inventory, project, machine_credential, vault_credential):
+ jt = JobTemplate.objects.create(name='test-jt', project=project, playbook='helloworld.yml', inventory=inventory, ask_credential_on_launch=True)
jt.credentials.add(machine_credential)
jt.execute_role.members.add(rando)
- post(
- reverse('api:job_template_launch', kwargs={'pk': jt.id}),
- {'credentials': [machine_credential.pk, vault_credential.pk]}, rando,
- expect=403
- )
+ post(reverse('api:job_template_launch', kwargs={'pk': jt.id}), {'credentials': [machine_credential.pk, vault_credential.pk]}, rando, expect=403)
@pytest.mark.django_db
class TestJobTemplateCredentials:
-
def test_job_template_cannot_add_credentials(self, job_template, credential, rando):
job_template.admin_role.members.add(rando)
credential.read_role.members.add(rando)
# without permission to credential, user can not attach it
- assert not JobTemplateAccess(rando).can_attach(
- job_template, credential, 'credentials', {})
+ assert not JobTemplateAccess(rando).can_attach(job_template, credential, 'credentials', {})
def test_job_template_can_add_credentials(self, job_template, credential, rando):
job_template.admin_role.members.add(rando)
credential.use_role.members.add(rando)
# user has permission to apply credential
- assert JobTemplateAccess(rando).can_attach(
- job_template, credential, 'credentials', {})
+ assert JobTemplateAccess(rando).can_attach(job_template, credential, 'credentials', {})
@pytest.mark.django_db
class TestOrphanJobTemplate:
-
def test_orphan_JT_readable_by_system_auditor(self, job_template, system_auditor):
assert system_auditor.is_system_auditor
assert job_template.project is None
@@ -184,12 +167,12 @@ class TestOrphanJobTemplate:
@pytest.mark.job_permissions
def test_job_template_creator_access(project, organization, rando, post):
project.use_role.members.add(rando)
- response = post(url=reverse('api:job_template_list'), data=dict(
- name='newly-created-jt',
- ask_inventory_on_launch=True,
- project=project.pk,
- playbook='helloworld.yml'
- ), user=rando, expect=201)
+ response = post(
+ url=reverse('api:job_template_list'),
+ data=dict(name='newly-created-jt', ask_inventory_on_launch=True, project=project.pk, playbook='helloworld.yml'),
+ user=rando,
+ expect=201,
+ )
jt_pk = response.data['id']
jt_obj = JobTemplate.objects.get(pk=jt_pk)
@@ -209,12 +192,12 @@ def test_job_template_insufficient_creator_permissions(lacking, project, invento
inventory.use_role.members.add(rando)
else:
inventory.read_role.members.add(rando)
- post(url=reverse('api:job_template_list'), data=dict(
- name='newly-created-jt',
- inventory=inventory.id,
- project=project.pk,
- playbook='helloworld.yml'
- ), user=rando, expect=403)
+ post(
+ url=reverse('api:job_template_list'),
+ data=dict(name='newly-created-jt', inventory=inventory.id, project=project.pk, playbook='helloworld.yml'),
+ user=rando,
+ expect=403,
+ )
@pytest.mark.django_db
@@ -241,20 +224,17 @@ class TestJobTemplateSchedules:
access = ScheduleAccess(rando)
assert not access.can_change(schedule, data=dict(unified_job_template=jt2.pk))
-
def test_move_schedule_from_JT_no_access(self, job_template, rando, jt2):
schedule = Schedule.objects.create(unified_job_template=job_template, rrule=self.rrule)
jt2.admin_role.members.add(rando)
access = ScheduleAccess(rando)
assert not access.can_change(schedule, data=dict(unified_job_template=jt2.pk))
-
def test_can_create_schedule_with_execute(self, job_template, rando):
job_template.execute_role.members.add(rando)
access = ScheduleAccess(rando)
assert access.can_add({'unified_job_template': job_template})
-
def test_can_modify_ones_own_schedule(self, job_template, rando):
job_template.execute_role.members.add(rando)
schedule = Schedule.objects.create(unified_job_template=job_template, rrule=self.rrule, created_by=rando)
@@ -264,13 +244,7 @@ class TestJobTemplateSchedules:
def test_prompts_access_checked(self, job_template, inventory, credential, rando):
job_template.execute_role.members.add(rando)
access = ScheduleAccess(rando)
- data = dict(
- unified_job_template=job_template,
- rrule=self.rrule,
- created_by=rando,
- inventory=inventory,
- credentials=[credential]
- )
+ data = dict(unified_job_template=job_template, rrule=self.rrule, created_by=rando, inventory=inventory, credentials=[credential])
with mock.patch('awx.main.access.JobLaunchConfigAccess.can_add') as mock_add:
mock_add.return_value = True
assert access.can_add(data)
@@ -291,48 +265,24 @@ class TestProjectOrganization:
def test_new_project_org_change(self, project, patch, admin_user):
org2 = Organization.objects.create(name='bar')
- patch(
- url=project.get_absolute_url(),
- data={'organization': org2.id},
- user=admin_user,
- expect=200
- )
+ patch(url=project.get_absolute_url(), data={'organization': org2.id}, user=admin_user, expect=200)
assert Project.objects.get(pk=project.id).organization_id == org2.id
def test_jt_org_cannot_change(self, project, post, patch, admin_user):
post(
url=reverse('api:job_template_list'),
- data={
- 'name': 'foo_template',
- 'project': project.id,
- 'playbook': 'helloworld.yml',
- 'ask_inventory_on_launch': True
- },
+ data={'name': 'foo_template', 'project': project.id, 'playbook': 'helloworld.yml', 'ask_inventory_on_launch': True},
user=admin_user,
- expect=201
+ expect=201,
)
org2 = Organization.objects.create(name='bar')
- r = patch(
- url=project.get_absolute_url(),
- data={'organization': org2.id},
- user=admin_user,
- expect=400
- )
+ r = patch(url=project.get_absolute_url(), data={'organization': org2.id}, user=admin_user, expect=400)
assert 'Organization cannot be changed' in str(r.data)
def test_orphan_JT_adoption(self, project, patch, admin_user, org_admin):
- jt = JobTemplate.objects.create(
- name='bar',
- ask_inventory_on_launch=True,
- playbook='helloworld.yml'
- )
+ jt = JobTemplate.objects.create(name='bar', ask_inventory_on_launch=True, playbook='helloworld.yml')
assert org_admin not in jt.admin_role
- patch(
- url=jt.get_absolute_url(),
- data={'project': project.id},
- user=admin_user,
- expect=200
- )
+ patch(url=jt.get_absolute_url(), data={'project': project.id}, user=admin_user, expect=200)
assert org_admin in jt.admin_role
def test_inventory_read_transfer_direct(self, patch):
@@ -342,10 +292,7 @@ class TestProjectOrganization:
for i in range(2):
org = Organization.objects.create(name='org{}'.format(i))
org_admin = User.objects.create(username='user{}'.format(i))
- inv = Inventory.objects.create(
- organization=org,
- name='inv{}'.format(i)
- )
+ inv = Inventory.objects.create(organization=org, name='inv{}'.format(i))
org.auditor_role.members.add(org_admin)
orgs.append(org)
@@ -372,10 +319,7 @@ class TestProjectOrganization:
orgs.append(org)
admins.append(org_admin)
- inv = Inventory.objects.create(
- organization=orgs[0],
- name='inv{}'.format(i)
- )
+ inv = Inventory.objects.create(organization=orgs[0], name='inv{}'.format(i))
jt = JobTemplate.objects.create(name='foo', inventory=inv)
assert admins[0] in jt.read_role
diff --git a/awx/main/tests/functional/test_rbac_label.py b/awx/main/tests/functional/test_rbac_label.py
index ed819df9f0..c131934fc2 100644
--- a/awx/main/tests/functional/test_rbac_label.py
+++ b/awx/main/tests/functional/test_rbac_label.py
@@ -49,9 +49,7 @@ def test_label_access_superuser(label, user):
def test_label_access_admin(organization_factory):
'''can_change because I am an admin of that org'''
no_members = organization_factory("no_members")
- members = organization_factory("has_members",
- users=['admin'],
- labels=['test'])
+ members = organization_factory("has_members", users=['admin'], labels=['test'])
label = members.labels.test
admin = members.users.admin
diff --git a/awx/main/tests/functional/test_rbac_migration.py b/awx/main/tests/functional/test_rbac_migration.py
index 2f8e72b73b..5f1b2633e8 100644
--- a/awx/main/tests/functional/test_rbac_migration.py
+++ b/awx/main/tests/functional/test_rbac_migration.py
@@ -3,13 +3,7 @@ import pytest
from django.apps import apps
from awx.main.migrations import _rbac as rbac
-from awx.main.models import (
- UnifiedJobTemplate,
- InventorySource, Inventory,
- JobTemplate, Project,
- Organization,
- User
-)
+from awx.main.models import UnifiedJobTemplate, InventorySource, Inventory, JobTemplate, Project, Organization, User
@pytest.mark.django_db
@@ -24,14 +18,8 @@ def test_implied_organization_subquery_inventory():
inventory = Inventory.objects.create(name='foo{}'.format(i))
else:
inventory = Inventory.objects.create(name='foo{}'.format(i), organization=org)
- inv_src = InventorySource.objects.create(
- name='foo{}'.format(i),
- inventory=inventory,
- source='ec2'
- )
- sources = UnifiedJobTemplate.objects.annotate(
- test_field=rbac.implicit_org_subquery(UnifiedJobTemplate, InventorySource)
- )
+ inv_src = InventorySource.objects.create(name='foo{}'.format(i), inventory=inventory, source='ec2')
+ sources = UnifiedJobTemplate.objects.annotate(test_field=rbac.implicit_org_subquery(UnifiedJobTemplate, InventorySource))
for inv_src in sources:
assert inv_src.test_field == inv_src.inventory.organization_id
@@ -45,22 +33,14 @@ def test_implied_organization_subquery_job_template():
else:
org = None
if i <= 4:
- proj = Project.objects.create(
- name='foo{}'.format(i),
- organization=org
- )
+ proj = Project.objects.create(name='foo{}'.format(i), organization=org)
else:
proj = None
- jts.append(JobTemplate.objects.create(
- name='foo{}'.format(i),
- project=proj
- ))
+ jts.append(JobTemplate.objects.create(name='foo{}'.format(i), project=proj))
# test case of sharing same org
jts[2].project.organization = jts[3].project.organization
jts[2].save()
- ujts = UnifiedJobTemplate.objects.annotate(
- test_field=rbac.implicit_org_subquery(UnifiedJobTemplate, JobTemplate)
- )
+ ujts = UnifiedJobTemplate.objects.annotate(test_field=rbac.implicit_org_subquery(UnifiedJobTemplate, JobTemplate))
for jt in ujts:
if not isinstance(jt, JobTemplate): # some are projects
assert jt.test_field is None
@@ -81,20 +61,10 @@ def test_give_explicit_inventory_permission():
inv_org.admin_role.members.add(inv_admin, dual_admin)
proj_org.admin_role.members.add(dual_admin)
- proj = Project.objects.create(
- name="test-proj",
- organization=proj_org
- )
- inv = Inventory.objects.create(
- name='test-inv',
- organization=inv_org
- )
+ proj = Project.objects.create(name="test-proj", organization=proj_org)
+ inv = Inventory.objects.create(name='test-inv', organization=inv_org)
- jt = JobTemplate.objects.create(
- name='foo',
- project=proj,
- inventory=inv
- )
+ jt = JobTemplate.objects.create(name='foo', project=proj, inventory=inv)
assert dual_admin in jt.admin_role
diff --git a/awx/main/tests/functional/test_rbac_notifications.py b/awx/main/tests/functional/test_rbac_notifications.py
index bc1d7d9214..d05efa244c 100644
--- a/awx/main/tests/functional/test_rbac_notifications.py
+++ b/awx/main/tests/functional/test_rbac_notifications.py
@@ -1,11 +1,7 @@
import pytest
from awx.main.models import Organization, Project
-from awx.main.access import (
- NotificationTemplateAccess,
- NotificationAccess,
- JobTemplateAccess
-)
+from awx.main.access import NotificationTemplateAccess, NotificationAccess, JobTemplateAccess
@pytest.mark.django_db
@@ -70,10 +66,7 @@ def test_notification_template_access_superuser(notification_template_factory):
@pytest.mark.parametrize("role", ["present.admin_role:admin", "present.notification_admin_role:admin"])
def test_notification_template_access_admin(role, organization_factory, notification_template_factory):
other_objects = organization_factory('other')
- present_objects = organization_factory('present',
- users=['admin'],
- notification_templates=['test-notification'],
- roles=[role])
+ present_objects = organization_factory('present', users=['admin'], notification_templates=['test-notification'], roles=[role])
notification_template = present_objects.notification_templates.test_notification
other_org = other_objects.organization
@@ -133,30 +126,20 @@ def test_notification_access_system_admin(notification, admin):
def test_system_auditor_JT_attach(system_auditor, job_template, notification_template):
job_template.admin_role.members.add(system_auditor)
access = JobTemplateAccess(system_auditor)
- assert not access.can_attach(
- job_template, notification_template, 'notification_templates_success',
- {'id': notification_template.id})
+ assert not access.can_attach(job_template, notification_template, 'notification_templates_success', {'id': notification_template.id})
@pytest.mark.django_db
-@pytest.mark.parametrize("org_role,expect", [
- ('admin_role', True),
- ('notification_admin_role', True),
- ('workflow_admin_role', False),
- ('auditor_role', False),
- ('member_role', False)
-])
-def test_org_role_JT_attach(rando, job_template, project, workflow_job_template, inventory_source,
- notification_template, org_role, expect):
+@pytest.mark.parametrize(
+ "org_role,expect",
+ [('admin_role', True), ('notification_admin_role', True), ('workflow_admin_role', False), ('auditor_role', False), ('member_role', False)],
+)
+def test_org_role_JT_attach(rando, job_template, project, workflow_job_template, inventory_source, notification_template, org_role, expect):
nt_organization = Organization.objects.create(name='organization just for the notification template')
notification_template.organization = nt_organization
notification_template.save()
getattr(notification_template.organization, org_role).members.add(rando)
- kwargs = dict(
- sub_obj=notification_template,
- relationship='notification_templates_success',
- data={'id': notification_template.id}
- )
+ kwargs = dict(sub_obj=notification_template, relationship='notification_templates_success', data={'id': notification_template.id})
permissions = {}
expected_permissions = {}
organization = Organization.objects.create(name='objective organization')
@@ -178,44 +161,33 @@ def test_organization_NT_attach_permission(rando, notification_template):
notification_template.organization.notification_admin_role.members.add(rando)
target_organization = Organization.objects.create(name='objective organization')
target_organization.workflow_admin_role.members.add(rando)
- assert not rando.can_access(Organization, 'attach', obj=target_organization, sub_obj=notification_template,
- relationship='notification_templates_success', data={})
+ assert not rando.can_access(
+ Organization, 'attach', obj=target_organization, sub_obj=notification_template, relationship='notification_templates_success', data={}
+ )
target_organization.auditor_role.members.add(rando)
- assert rando.can_access(Organization, 'attach', obj=target_organization, sub_obj=notification_template,
- relationship='notification_templates_success', data={})
+ assert rando.can_access(
+ Organization, 'attach', obj=target_organization, sub_obj=notification_template, relationship='notification_templates_success', data={}
+ )
@pytest.mark.django_db
def test_project_NT_attach_permission(rando, notification_template):
notification_template.organization.notification_admin_role.members.add(rando)
- project = Project.objects.create(
- name='objective project',
- organization=Organization.objects.create(name='foo')
- )
+ project = Project.objects.create(name='objective project', organization=Organization.objects.create(name='foo'))
project.update_role.members.add(rando)
- assert not rando.can_access(Project, 'attach', obj=project, sub_obj=notification_template,
- relationship='notification_templates_success', data={})
+ assert not rando.can_access(Project, 'attach', obj=project, sub_obj=notification_template, relationship='notification_templates_success', data={})
project.admin_role.members.add(rando)
- assert rando.can_access(Project, 'attach', obj=project, sub_obj=notification_template,
- relationship='notification_templates_success', data={})
+ assert rando.can_access(Project, 'attach', obj=project, sub_obj=notification_template, relationship='notification_templates_success', data={})
@pytest.mark.django_db
-@pytest.mark.parametrize("res_role,expect", [
- ('read_role', True),
- (None, False)
-])
-def test_object_role_JT_attach(rando, job_template, workflow_job_template, inventory_source,
- notification_template, res_role, expect):
+@pytest.mark.parametrize("res_role,expect", [('read_role', True), (None, False)])
+def test_object_role_JT_attach(rando, job_template, workflow_job_template, inventory_source, notification_template, res_role, expect):
nt_organization = Organization.objects.create(name='organization just for the notification template')
nt_organization.notification_admin_role.members.add(rando)
notification_template.organization = nt_organization
notification_template.save()
- kwargs = dict(
- sub_obj=notification_template,
- relationship='notification_templates_success',
- data={'id': notification_template.id}
- )
+ kwargs = dict(sub_obj=notification_template, relationship='notification_templates_success', data={'id': notification_template.id})
permissions = {}
expected_permissions = {}
@@ -227,9 +199,7 @@ def test_object_role_JT_attach(rando, job_template, workflow_job_template, inven
if res_role is None or hasattr(permission_resource, res_role):
if res_role is not None:
getattr(permission_resource, res_role).members.add(rando)
- permissions[model_name] = rando.can_access(
- resource.__class__, 'attach', resource, **kwargs
- )
+ permissions[model_name] = rando.can_access(resource.__class__, 'attach', resource, **kwargs)
expected_permissions[model_name] = expect
else:
permissions[model_name] = None
diff --git a/awx/main/tests/functional/test_rbac_oauth.py b/awx/main/tests/functional/test_rbac_oauth.py
index e45c3ddedc..c55943adeb 100644
--- a/awx/main/tests/functional/test_rbac_oauth.py
+++ b/awx/main/tests/functional/test_rbac_oauth.py
@@ -15,40 +15,45 @@ from awx.api.versioning import reverse
@pytest.mark.django_db
class TestOAuth2Application:
-
- @pytest.mark.parametrize("user_for_access, can_access_list", [
- (0, [True, True]),
- (1, [True, True]),
- (2, [True, True]),
- (3, [False, False]),
- ])
- def test_can_read(
- self, admin, org_admin, org_member, alice, user_for_access, can_access_list, organization
- ):
+ @pytest.mark.parametrize(
+ "user_for_access, can_access_list",
+ [
+ (0, [True, True]),
+ (1, [True, True]),
+ (2, [True, True]),
+ (3, [False, False]),
+ ],
+ )
+ def test_can_read(self, admin, org_admin, org_member, alice, user_for_access, can_access_list, organization):
user_list = [admin, org_admin, org_member, alice]
access = OAuth2ApplicationAccess(user_list[user_for_access])
app_creation_user_list = [admin, org_admin]
for user, can_access in zip(app_creation_user_list, can_access_list):
app = Application.objects.create(
- name='test app for {}'.format(user.username), user=user,
- client_type='confidential', authorization_grant_type='password', organization=organization
+ name='test app for {}'.format(user.username),
+ user=user,
+ client_type='confidential',
+ authorization_grant_type='password',
+ organization=organization,
)
- assert access.can_read(app) is can_access
+ assert access.can_read(app) is can_access
def test_admin_only_can_read(self, user, organization):
user = user('org-admin', False)
organization.admin_role.members.add(user)
access = OAuth2ApplicationAccess(user)
app = Application.objects.create(
- name='test app for {}'.format(user.username), user=user,
- client_type='confidential', authorization_grant_type='password', organization=organization
+ name='test app for {}'.format(user.username), user=user, client_type='confidential', authorization_grant_type='password', organization=organization
)
assert access.can_read(app) is True
def test_app_activity_stream(self, org_admin, alice, organization):
app = Application.objects.create(
- name='test app for {}'.format(org_admin.username), user=org_admin,
- client_type='confidential', authorization_grant_type='password', organization=organization
+ name='test app for {}'.format(org_admin.username),
+ user=org_admin,
+ client_type='confidential',
+ authorization_grant_type='password',
+ organization=organization,
)
access = OAuth2ApplicationAccess(org_admin)
assert access.can_read(app) is True
@@ -58,17 +63,16 @@ class TestOAuth2Application:
access = ActivityStreamAccess(alice)
assert access.can_read(app) is False
assert access.can_read(activity_stream) is False
-
def test_token_activity_stream(self, org_admin, alice, organization, post):
app = Application.objects.create(
- name='test app for {}'.format(org_admin.username), user=org_admin,
- client_type='confidential', authorization_grant_type='password', organization=organization
- )
- response = post(
- reverse('api:o_auth2_application_token_list', kwargs={'pk': app.pk}),
- {'scope': 'read'}, org_admin, expect=201
+ name='test app for {}'.format(org_admin.username),
+ user=org_admin,
+ client_type='confidential',
+ authorization_grant_type='password',
+ organization=organization,
)
+ response = post(reverse('api:o_auth2_application_token_list', kwargs={'pk': app.pk}), {'scope': 'read'}, org_admin, expect=201)
token = AccessToken.objects.get(token=response.data['token'])
access = OAuth2ApplicationAccess(org_admin)
assert access.can_read(app) is True
@@ -78,74 +82,72 @@ class TestOAuth2Application:
access = ActivityStreamAccess(alice)
assert access.can_read(token) is False
assert access.can_read(activity_stream) is False
-
-
- def test_can_edit_delete_app_org_admin(
- self, admin, org_admin, org_member, alice, organization
- ):
+ def test_can_edit_delete_app_org_admin(self, admin, org_admin, org_member, alice, organization):
user_list = [admin, org_admin, org_member, alice]
can_access_list = [True, True, False, False]
for user, can_access in zip(user_list, can_access_list):
app = Application.objects.create(
- name='test app for {}'.format(user.username), user=org_admin,
- client_type='confidential', authorization_grant_type='password', organization=organization
+ name='test app for {}'.format(user.username),
+ user=org_admin,
+ client_type='confidential',
+ authorization_grant_type='password',
+ organization=organization,
)
access = OAuth2ApplicationAccess(user)
assert access.can_change(app, {}) is can_access
assert access.can_delete(app) is can_access
-
-
- def test_can_edit_delete_app_admin(
- self, admin, org_admin, org_member, alice, organization
- ):
+
+ def test_can_edit_delete_app_admin(self, admin, org_admin, org_member, alice, organization):
user_list = [admin, org_admin, org_member, alice]
can_access_list = [True, True, False, False]
for user, can_access in zip(user_list, can_access_list):
app = Application.objects.create(
- name='test app for {}'.format(user.username), user=admin,
- client_type='confidential', authorization_grant_type='password', organization=organization
+ name='test app for {}'.format(user.username),
+ user=admin,
+ client_type='confidential',
+ authorization_grant_type='password',
+ organization=organization,
)
access = OAuth2ApplicationAccess(user)
assert access.can_change(app, {}) is can_access
assert access.can_delete(app) is can_access
-
def test_superuser_can_always_create(self, admin, org_admin, org_member, alice, organization):
access = OAuth2ApplicationAccess(admin)
for user in [admin, org_admin, org_member, alice]:
- assert access.can_add({
- 'name': 'test app', 'user': user.pk, 'client_type': 'confidential',
- 'authorization_grant_type': 'password', 'organization': organization.id
- })
-
+ assert access.can_add(
+ {'name': 'test app', 'user': user.pk, 'client_type': 'confidential', 'authorization_grant_type': 'password', 'organization': organization.id}
+ )
+
def test_normal_user_cannot_create(self, admin, org_admin, org_member, alice, organization):
for access_user in [org_member, alice]:
access = OAuth2ApplicationAccess(access_user)
for user in [admin, org_admin, org_member, alice]:
- assert not access.can_add({
- 'name': 'test app', 'user': user.pk, 'client_type': 'confidential',
- 'authorization_grant_type': 'password', 'organization': organization.id
- })
+ assert not access.can_add(
+ {
+ 'name': 'test app',
+ 'user': user.pk,
+ 'client_type': 'confidential',
+ 'authorization_grant_type': 'password',
+ 'organization': organization.id,
+ }
+ )
@pytest.mark.django_db
class TestOAuth2Token:
-
- def test_can_read_change_delete_app_token(
- self, post, admin, org_admin, org_member, alice, organization
- ):
+ def test_can_read_change_delete_app_token(self, post, admin, org_admin, org_member, alice, organization):
user_list = [admin, org_admin, org_member, alice]
can_access_list = [True, True, False, False]
app = Application.objects.create(
- name='test app for {}'.format(admin.username), user=admin,
- client_type='confidential', authorization_grant_type='password',
- organization=organization
- )
- response = post(
- reverse('api:o_auth2_application_token_list', kwargs={'pk': app.pk}),
- {'scope': 'read'}, admin, expect=201
+ name='test app for {}'.format(admin.username),
+ user=admin,
+ client_type='confidential',
+ authorization_grant_type='password',
+ organization=organization,
)
+ response = post(reverse('api:o_auth2_application_token_list', kwargs={'pk': app.pk}), {'scope': 'read'}, admin, expect=201)
for user, can_access in zip(user_list, can_access_list):
token = AccessToken.objects.get(token=response.data['token'])
access = OAuth2TokenAccess(user)
@@ -153,41 +155,34 @@ class TestOAuth2Token:
assert access.can_change(token, {}) is can_access
assert access.can_delete(token) is can_access
-
- def test_auditor_can_read(
- self, post, admin, org_admin, org_member, alice, system_auditor, organization
- ):
+ def test_auditor_can_read(self, post, admin, org_admin, org_member, alice, system_auditor, organization):
user_list = [admin, org_admin, org_member]
can_access_list = [True, True, True]
cannot_access_list = [False, False, False]
app = Application.objects.create(
- name='test app for {}'.format(admin.username), user=admin,
- client_type='confidential', authorization_grant_type='password',
- organization=organization
+ name='test app for {}'.format(admin.username),
+ user=admin,
+ client_type='confidential',
+ authorization_grant_type='password',
+ organization=organization,
)
for user, can_access, cannot_access in zip(user_list, can_access_list, cannot_access_list):
- response = post(
- reverse('api:o_auth2_application_token_list', kwargs={'pk': app.pk}),
- {'scope': 'read'}, user, expect=201
- )
+ response = post(reverse('api:o_auth2_application_token_list', kwargs={'pk': app.pk}), {'scope': 'read'}, user, expect=201)
token = AccessToken.objects.get(token=response.data['token'])
access = OAuth2TokenAccess(system_auditor)
assert access.can_read(token) is can_access
assert access.can_change(token, {}) is cannot_access
assert access.can_delete(token) is cannot_access
-
- def test_user_auditor_can_change(
- self, post, org_member, org_admin, system_auditor, organization
- ):
+
+ def test_user_auditor_can_change(self, post, org_member, org_admin, system_auditor, organization):
app = Application.objects.create(
- name='test app for {}'.format(org_admin.username), user=org_admin,
- client_type='confidential', authorization_grant_type='password',
- organization=organization
- )
- response = post(
- reverse('api:o_auth2_application_token_list', kwargs={'pk': app.pk}),
- {'scope': 'read'}, org_member, expect=201
+ name='test app for {}'.format(org_admin.username),
+ user=org_admin,
+ client_type='confidential',
+ authorization_grant_type='password',
+ organization=organization,
)
+ response = post(reverse('api:o_auth2_application_token_list', kwargs={'pk': app.pk}), {'scope': 'read'}, org_member, expect=201)
token = AccessToken.objects.get(token=response.data['token'])
access = OAuth2TokenAccess(system_auditor)
assert access.can_read(token) is True
@@ -199,63 +194,54 @@ class TestOAuth2Token:
assert access.can_read(token) is True
assert access.can_change(token, {}) is True
assert access.can_delete(token) is True
-
-
-
- def test_can_read_change_delete_personal_token_org_member(
- self, post, admin, org_admin, org_member, alice
- ):
+
+ def test_can_read_change_delete_personal_token_org_member(self, post, admin, org_admin, org_member, alice):
# Tests who can read a token created by an org-member
user_list = [admin, org_admin, org_member, alice]
can_access_list = [True, False, True, False]
- response = post(
- reverse('api:user_personal_token_list', kwargs={'pk': org_member.pk}),
- {'scope': 'read'}, org_member, expect=201
- )
+ response = post(reverse('api:user_personal_token_list', kwargs={'pk': org_member.pk}), {'scope': 'read'}, org_member, expect=201)
token = AccessToken.objects.get(token=response.data['token'])
for user, can_access in zip(user_list, can_access_list):
access = OAuth2TokenAccess(user)
assert access.can_read(token) is can_access
assert access.can_change(token, {}) is can_access
assert access.can_delete(token) is can_access
-
-
- def test_can_read_personal_token_creator(
- self, post, admin, org_admin, org_member, alice
- ):
+
+ def test_can_read_personal_token_creator(self, post, admin, org_admin, org_member, alice):
         # Tests that the token's creator can read their tokens
user_list = [admin, org_admin, org_member, alice]
can_access_list = [True, True, True, True]
for user, can_access in zip(user_list, can_access_list):
- response = post(
- reverse('api:user_personal_token_list', kwargs={'pk': user.pk}),
- {'scope': 'read', 'application':None}, user, expect=201
- )
+ response = post(reverse('api:user_personal_token_list', kwargs={'pk': user.pk}), {'scope': 'read', 'application': None}, user, expect=201)
token = AccessToken.objects.get(token=response.data['token'])
access = OAuth2TokenAccess(user)
assert access.can_read(token) is can_access
assert access.can_change(token, {}) is can_access
assert access.can_delete(token) is can_access
-
- @pytest.mark.parametrize("user_for_access, can_access_list", [
- (0, [True, True]),
- (1, [True, True]),
- (2, [True, True]),
- (3, [False, False]),
- ])
- def test_can_create(
- self, post, admin, org_admin, org_member, alice, user_for_access, can_access_list, organization
- ):
+ @pytest.mark.parametrize(
+ "user_for_access, can_access_list",
+ [
+ (0, [True, True]),
+ (1, [True, True]),
+ (2, [True, True]),
+ (3, [False, False]),
+ ],
+ )
+ def test_can_create(self, post, admin, org_admin, org_member, alice, user_for_access, can_access_list, organization):
user_list = [admin, org_admin, org_member, alice]
for user, can_access in zip(user_list, can_access_list):
app = Application.objects.create(
- name='test app for {}'.format(user.username), user=user,
- client_type='confidential', authorization_grant_type='password', organization=organization
+ name='test app for {}'.format(user.username),
+ user=user,
+ client_type='confidential',
+ authorization_grant_type='password',
+ organization=organization,
)
post(
reverse('api:o_auth2_application_token_list', kwargs={'pk': app.pk}),
- {'scope': 'read'}, user_list[user_for_access], expect=201 if can_access else 403
+ {'scope': 'read'},
+ user_list[user_for_access],
+ expect=201 if can_access else 403,
)
-
diff --git a/awx/main/tests/functional/test_rbac_role.py b/awx/main/tests/functional/test_rbac_role.py
index e308d1a6ea..8911ba880e 100644
--- a/awx/main/tests/functional/test_rbac_role.py
+++ b/awx/main/tests/functional/test_rbac_role.py
@@ -36,9 +36,9 @@ def test_user_access_attach(rando, inventory):
@pytest.mark.django_db
def test_visible_roles(admin_user, system_auditor, rando, organization, project):
- '''
+ """
     system admin & system auditor fixtures are needed to create system roles
- '''
+ """
organization.auditor_role.members.add(rando)
access = RoleAccess(rando)
@@ -54,10 +54,10 @@ def test_visible_roles(admin_user, system_auditor, rando, organization, project)
# Permissions when adding users to org member/admin
@pytest.mark.django_db
def test_org_user_role_attach(user, organization, inventory):
- '''
+ """
     Org admins must not be able to add arbitrary users to their
     organization, because that would give them admin permissions over that user
- '''
+ """
admin = user('admin')
nonmember = user('nonmember')
other_org = Organization.objects.create(name="other_org")
@@ -77,54 +77,40 @@ def test_org_user_role_attach(user, organization, inventory):
# Permissions when adding users/teams to org special-purpose roles
@pytest.mark.django_db
def test_user_org_object_roles(organization, org_admin, org_member):
- '''
+ """
Unlike admin & member roles, the special-purpose organization roles do not
     confer any permissions related to user management.
     Normal rules about role delegation should apply; only admin access to the org is needed.
- '''
- assert RoleAccess(org_admin).can_attach(
- organization.notification_admin_role, org_member, 'members', None
- )
- assert OrganizationAccess(org_admin).can_attach(
- organization, org_member, 'notification_admin_role.members', None
- )
- assert not RoleAccess(org_member).can_attach(
- organization.notification_admin_role, org_member, 'members', None
- )
- assert not OrganizationAccess(org_member).can_attach(
- organization, org_member, 'notification_admin_role.members', None
- )
+ """
+ assert RoleAccess(org_admin).can_attach(organization.notification_admin_role, org_member, 'members', None)
+ assert OrganizationAccess(org_admin).can_attach(organization, org_member, 'notification_admin_role.members', None)
+ assert not RoleAccess(org_member).can_attach(organization.notification_admin_role, org_member, 'members', None)
+ assert not OrganizationAccess(org_member).can_attach(organization, org_member, 'notification_admin_role.members', None)
@pytest.mark.django_db
def test_team_org_object_roles(organization, team, org_admin, org_member):
- '''
+ """
the special-purpose organization roles are not ancestors of any
team roles, and can be delegated en masse through teams,
following normal admin rules
- '''
- assert RoleAccess(org_admin).can_attach(
- organization.notification_admin_role, team, 'member_role.parents', {'id': 68}
- )
+ """
+ assert RoleAccess(org_admin).can_attach(organization.notification_admin_role, team, 'member_role.parents', {'id': 68})
# Obviously team admin isn't enough to assign organization roles to the team
team.admin_role.members.add(org_member)
- assert not RoleAccess(org_member).can_attach(
- organization.notification_admin_role, team, 'member_role.parents', {'id': 68}
- )
+ assert not RoleAccess(org_member).can_attach(organization.notification_admin_role, team, 'member_role.parents', {'id': 68})
# Cannot make a team member of an org
- assert not RoleAccess(org_admin).can_attach(
- organization.member_role, team, 'member_role.parents', {'id': 68}
- )
+ assert not RoleAccess(org_admin).can_attach(organization.member_role, team, 'member_role.parents', {'id': 68})
# Singleton user editing restrictions
@pytest.mark.django_db
def test_org_superuser_role_attach(admin_user, org_admin, organization):
- '''
+ """
Ideally, you would not add superusers to roles (particularly member_role)
     but it has historically been possible.
     This checks that the situation does not grant unexpected permissions.
- '''
+ """
organization.member_role.members.add(admin_user)
role_access = RoleAccess(org_admin)
@@ -153,12 +139,12 @@ def test_org_object_role_not_sufficient(user, organization):
# Org admin user editing permission ANY to ALL change
@pytest.mark.django_db
def test_need_all_orgs_to_admin_user(user):
- '''
+ """
     Old behavior - org admin to ANY organization that a user is a member of
     grants permission to admin that user.
     New behavior enforced here - org admin to ALL organizations that a
     user is a member of grants permission to admin that user.
- '''
+ """
org1 = Organization.objects.create(name='org1')
org2 = Organization.objects.create(name='org2')
@@ -189,12 +175,12 @@ def test_need_all_orgs_to_admin_user(user):
# Orphaned user can be added to member role, only in special cases
@pytest.mark.django_db
def test_orphaned_user_allowed(org_admin, rando, organization, org_credential):
- '''
+ """
We still allow adoption of orphaned* users by assigning them to
organization member role, but only in the situation where the
     org admin already possesses indirect access to all of the user's roles
     *orphaned means the user is not a member of any organization
- '''
+ """
# give a descendent role to rando, to trigger the conditional
# where all ancestor roles of rando should be in the set of
# org_admin roles.
diff --git a/awx/main/tests/functional/test_rbac_user.py b/awx/main/tests/functional/test_rbac_user.py
index b62a0db25f..d5386343bd 100644
--- a/awx/main/tests/functional/test_rbac_user.py
+++ b/awx/main/tests/functional/test_rbac_user.py
@@ -60,12 +60,16 @@ def test_user_queryset(user):
@pytest.mark.django_db
-@pytest.mark.parametrize('ext_auth,superuser,expect', [
- (True, True, True),
- (False, True, True), # your setting can't touch me, I'm superuser
- (True, False, True), # org admin, managing my peeps
- (False, False, False), # setting blocks org admin
-], ids=['superuser', 'superuser-off', 'org', 'org-off'])
+@pytest.mark.parametrize(
+ 'ext_auth,superuser,expect',
+ [
+ (True, True, True),
+ (False, True, True), # your setting can't touch me, I'm superuser
+ (True, False, True), # org admin, managing my peeps
+ (False, False, False), # setting blocks org admin
+ ],
+ ids=['superuser', 'superuser-off', 'org', 'org-off'],
+)
def test_manage_org_auth_setting(ext_auth, superuser, expect, organization, rando, user, team):
u = user('foo-user', is_superuser=superuser)
if not superuser:
@@ -108,22 +112,22 @@ def test_team_org_resource_role(ext_auth, organization, rando, org_admin, team):
# use via /api/v2/teams/N/roles/
TeamAccess(org_admin).can_attach(team, organization.workflow_admin_role, 'roles'),
# use via /api/v2/roles/teams/
- RoleAccess(org_admin).can_attach(organization.workflow_admin_role, team, 'member_role.parents')
+ RoleAccess(org_admin).can_attach(organization.workflow_admin_role, team, 'member_role.parents'),
] == [True for i in range(2)]
assert [
# use via /api/v2/teams/N/roles/
TeamAccess(org_admin).can_unattach(team, organization.workflow_admin_role, 'roles'),
# use via /api/v2/roles/teams/
- RoleAccess(org_admin).can_unattach(organization.workflow_admin_role, team, 'member_role.parents')
+ RoleAccess(org_admin).can_unattach(organization.workflow_admin_role, team, 'member_role.parents'),
] == [True for i in range(2)]
@pytest.mark.django_db
def test_user_accessible_objects(user, organization):
- '''
+ """
     We cannot directly use accessible_objects for the User model because
     both edit and read permissions are governed by complex business logic
- '''
+ """
admin = user('admin', False)
u = user('john', False)
access = UserAccess(admin)
@@ -140,9 +144,7 @@ def test_user_accessible_objects(user, organization):
@pytest.mark.django_db
def test_org_admin_create_sys_auditor(org_admin):
access = UserAccess(org_admin)
- assert not access.can_add(data=dict(
- username='new_user', password="pa$$sowrd", email="asdf@redhat.com",
- is_system_auditor='true'))
+ assert not access.can_add(data=dict(username='new_user', password="pa$$sowrd", email="asdf@redhat.com", is_system_auditor='true'))
@pytest.mark.django_db
diff --git a/awx/main/tests/functional/test_rbac_workflow.py b/awx/main/tests/functional/test_rbac_workflow.py
index 6e92082358..0195b1adf3 100644
--- a/awx/main/tests/functional/test_rbac_workflow.py
+++ b/awx/main/tests/functional/test_rbac_workflow.py
@@ -21,7 +21,8 @@ def wfjt(workflow_job_template_factory, organization):
@pytest.fixture
def wfjt_with_nodes(workflow_job_template_factory, organization, job_template):
objects = workflow_job_template_factory(
- 'test_workflow', organization=organization, workflow_job_template_nodes=[{'unified_job_template': job_template}], persisted=True)
+ 'test_workflow', organization=organization, workflow_job_template_nodes=[{'unified_job_template': job_template}], persisted=True
+ )
return objects.workflow_job_template
@@ -37,7 +38,6 @@ def workflow_job(wfjt):
@pytest.mark.django_db
class TestWorkflowJobTemplateAccess:
-
def test_random_user_no_edit(self, wfjt, rando):
access = WorkflowJobTemplateAccess(rando)
assert not access.can_change(wfjt, {'name': 'new name'})
@@ -61,7 +61,6 @@ class TestWorkflowJobTemplateAccess:
@pytest.mark.django_db
class TestWorkflowJobTemplateNodeAccess:
-
def test_no_jt_access_to_edit(self, wfjt_node, rando):
# without access to the related job template, admin to the WFJT can
# not change the prompted parameters
@@ -76,9 +75,7 @@ class TestWorkflowJobTemplateNodeAccess:
def test_access_to_edit_non_JT(self, rando, workflow_job_template, organization, project):
workflow_job_template.admin_role.members.add(rando)
- node = workflow_job_template.workflow_job_template_nodes.create(
- unified_job_template=project
- )
+ node = workflow_job_template.workflow_job_template_nodes.create(unified_job_template=project)
assert not WorkflowJobTemplateNodeAccess(rando).can_change(node, {'limit': ''})
project.update_role.members.add(rando)
@@ -88,19 +85,14 @@ class TestWorkflowJobTemplateNodeAccess:
wfjt.admin_role.members.add(rando)
access = WorkflowJobTemplateNodeAccess(rando)
job_template.read_role.members.add(rando)
- assert not access.can_add({
- 'workflow_job_template': wfjt,
- 'unified_job_template': job_template})
+ assert not access.can_add({'workflow_job_template': wfjt, 'unified_job_template': job_template})
def test_add_node_with_minimum_permissions(self, wfjt, job_template, inventory, rando):
wfjt.admin_role.members.add(rando)
access = WorkflowJobTemplateNodeAccess(rando)
job_template.execute_role.members.add(rando)
inventory.use_role.members.add(rando)
- assert access.can_add({
- 'workflow_job_template': wfjt,
- 'inventory': inventory,
- 'unified_job_template': job_template})
+ assert access.can_add({'workflow_job_template': wfjt, 'inventory': inventory, 'unified_job_template': job_template})
def test_remove_unwanted_foreign_node(self, wfjt_node, job_template, rando):
wfjt = wfjt_node.workflow_job_template
@@ -112,7 +104,6 @@ class TestWorkflowJobTemplateNodeAccess:
@pytest.mark.django_db
class TestWorkflowJobAccess:
-
@pytest.mark.parametrize("role_name", ["admin_role", "workflow_admin_role"])
def test_org_admin_can_delete_workflow_job(self, role_name, workflow_job, org_member):
role = getattr(workflow_job.workflow_job_template.organization, role_name)
@@ -155,10 +146,7 @@ class TestWorkflowJobAccess:
def test_cannot_relaunch_friends_job(self, wfjt, rando, alice):
workflow_job = wfjt.workflow_jobs.create(name='foo', created_by=alice)
- JobLaunchConfig.objects.create(
- job=workflow_job,
- extra_data={'foo': 'fooforyou'}
- )
+ JobLaunchConfig.objects.create(job=workflow_job, extra_data={'foo': 'fooforyou'})
wfjt.execute_role.members.add(alice)
assert not WorkflowJobAccess(rando).can_start(workflow_job)
@@ -180,7 +168,6 @@ class TestWorkflowJobAccess:
@pytest.mark.django_db
class TestWFJTCopyAccess:
-
def test_copy_permissions_org_admin(self, wfjt, org_admin, org_member):
admin_access = WorkflowJobTemplateAccess(org_admin)
assert admin_access.can_copy(wfjt)
@@ -190,33 +177,28 @@ class TestWFJTCopyAccess:
assert admin_access.can_copy(wfjt)
def test_copy_permissions_user(self, wfjt, org_admin, org_member):
- '''
+ """
         Only org admins and org workflow admins are able to add WFJTs; only org admins
         are able to copy them
- '''
+ """
wfjt.admin_role.members.add(org_member)
member_access = WorkflowJobTemplateAccess(org_member)
assert not member_access.can_copy(wfjt)
def test_workflow_copy_warnings_inv(self, wfjt, rando, inventory):
- '''
+ """
The user `rando` does not have access to the prompted inventory in a
node inside the workflow - test surfacing this information
- '''
+ """
wfjt.workflow_job_template_nodes.create(inventory=inventory)
access = WorkflowJobTemplateAccess(rando, save_messages=True)
assert not access.can_copy(wfjt)
warnings = access.messages
assert 'inventories_unable_to_copy' in warnings
-
def test_workflow_copy_no_start(self, wfjt, inventory, admin_user):
         # Test that an un-startable resource doesn't block copy
- inv_src = InventorySource.objects.create(
- inventory = inventory,
- source = 'custom',
- source_script = None
- )
+ inv_src = InventorySource.objects.create(inventory=inventory, source='custom', source_script=None)
assert not inv_src.can_update
wfjt.workflow_job_template_nodes.create(unified_job_template=inv_src)
access = WorkflowJobTemplateAccess(admin_user, save_messages=True)
diff --git a/awx/main/tests/functional/test_session.py b/awx/main/tests/functional/test_session.py
index b30c5cb523..f9eb4c42a4 100644
--- a/awx/main/tests/functional/test_session.py
+++ b/awx/main/tests/functional/test_session.py
@@ -25,41 +25,28 @@ class AlwaysPassBackend(object):
@pytest.mark.django_db
-@pytest.mark.parametrize('accept, status', [
- ['*/*', 200],
- ['text/html', 200],
- ['application/json', 406]
-])
+@pytest.mark.parametrize('accept, status', [['*/*', 200], ['text/html', 200], ['application/json', 406]])
def test_login_json_not_allowed(get, accept, status):
- get(
- '/api/login/',
- HTTP_ACCEPT=accept,
- expect=status
- )
+ get('/api/login/', HTTP_ACCEPT=accept, expect=status)
@pytest.mark.skip(reason="Needs Update - CA")
@pytest.mark.django_db
def test_session_create_delete(admin, post, get):
AlwaysPassBackend.user = admin
- with override_settings(
- AUTHENTICATION_BACKENDS=(AlwaysPassBackend.get_backend_path(),),
- SESSION_COOKIE_NAME='session_id'
- ):
+ with override_settings(AUTHENTICATION_BACKENDS=(AlwaysPassBackend.get_backend_path(),), SESSION_COOKIE_NAME='session_id'):
response = post(
'/api/login/',
data={'username': admin.username, 'password': admin.password, 'next': '/api/'},
- expect=302, middleware=SessionMiddleware(), format='multipart'
+ expect=302,
+ middleware=SessionMiddleware(),
+ format='multipart',
)
assert 'session_id' in response.cookies
-        session_key = re.findall(r'session_id=[a-zA-Z0-9]+',
-            str(response.cookies['session_id']))[0][len('session_id=') :]
+        session_key = re.findall(r'session_id=[a-zA-Z0-9]+', str(response.cookies['session_id']))[0][len('session_id=') :]
session = Session.objects.get(session_key=session_key)
assert int(session.get_decoded()[SESSION_KEY]) == admin.pk
- response = get(
- '/api/logout/', middleware=SessionMiddleware(),
- cookies={'session_id': session_key}, expect=302
- )
+ response = get('/api/logout/', middleware=SessionMiddleware(), cookies={'session_id': session_key}, expect=302)
assert not Session.objects.filter(session_key=session_key).exists()
@@ -88,10 +75,7 @@ def test_session_overlimit(emit, admin, alice):
created.append(session.session_key)
assert [s.pk for s in Session.objects.all()] == created[-3:]
assert emit.call_count == 2 # 2 of 5 sessions were evicted
- emit.assert_called_with(
- 'control-limit_reached_{}'.format(admin.pk),
- {'reason': 'limit_reached', 'group_name': 'control'}
- )
+ emit.assert_called_with('control-limit_reached_{}'.format(admin.pk), {'reason': 'limit_reached', 'group_name': 'control'})
# Allow sessions for a different user to be saved
store = import_module(settings.SESSION_ENGINE).SessionStore()
@@ -103,22 +87,15 @@ def test_session_overlimit(emit, admin, alice):
@pytest.mark.django_db
def test_password_update_clears_sessions(admin, alice, post, patch):
AlwaysPassBackend.user = alice
- with override_settings(
- AUTHENTICATION_BACKENDS=(AlwaysPassBackend.get_backend_path(),),
- SESSION_COOKIE_NAME='session_id'
- ):
+ with override_settings(AUTHENTICATION_BACKENDS=(AlwaysPassBackend.get_backend_path(),), SESSION_COOKIE_NAME='session_id'):
response = post(
'/api/login/',
data={'username': alice.username, 'password': alice.password, 'next': '/api/'},
- expect=302, middleware=SessionMiddleware(), format='multipart'
+ expect=302,
+ middleware=SessionMiddleware(),
+ format='multipart',
)
-        session_key = re.findall(
-            r'session_id=[a-zA-Z0-9]+',
-            str(response.cookies['session_id'])
-        )[0][len('session_id=') :]
+        session_key = re.findall(r'session_id=[a-zA-Z0-9]+', str(response.cookies['session_id']))[0][len('session_id=') :]
assert Session.objects.filter(session_key=session_key).exists()
- patch(
- reverse('api:user_detail', kwargs={'pk': alice.pk}), admin,
- data={'password': 'new_password'}, expect=200
- )
+ patch(reverse('api:user_detail', kwargs={'pk': alice.pk}), admin, data={'password': 'new_password'}, expect=200)
assert not Session.objects.filter(session_key=session_key).exists()
diff --git a/awx/main/tests/functional/test_tasks.py b/awx/main/tests/functional/test_tasks.py
index c7bc50c8d2..23a79076bc 100644
--- a/awx/main/tests/functional/test_tasks.py
+++ b/awx/main/tests/functional/test_tasks.py
@@ -4,15 +4,8 @@ import os
from django.utils.timezone import now, timedelta
-from awx.main.tasks import (
- RunProjectUpdate, RunInventoryUpdate,
- awx_isolated_heartbeat,
- isolated_manager
-)
-from awx.main.models import (
- ProjectUpdate, InventoryUpdate, InventorySource,
- Instance, InstanceGroup
-)
+from awx.main.tasks import RunProjectUpdate, RunInventoryUpdate, awx_isolated_heartbeat, isolated_manager
+from awx.main.models import ProjectUpdate, InventoryUpdate, InventorySource, Instance, InstanceGroup
@pytest.fixture
@@ -26,7 +19,6 @@ def scm_revision_file(tmpdir_factory):
@pytest.mark.django_db
class TestDependentInventoryUpdate:
-
def test_dependent_inventory_updates_is_called(self, scm_inventory_source, scm_revision_file):
task = RunProjectUpdate()
task.revision_path = scm_revision_file
@@ -57,20 +49,14 @@ class TestDependentInventoryUpdate:
assert inv_update.source_project_update_id == proj_update.pk
def test_dependent_inventory_project_cancel(self, project, inventory):
- '''
+ """
Test that dependent inventory updates exhibit good behavior on cancel
of the source project update
- '''
+ """
task = RunProjectUpdate()
proj_update = ProjectUpdate.objects.create(project=project)
- kwargs = dict(
- source_project=project,
- source='scm',
- source_path='inventory_file',
- update_on_project_update=True,
- inventory=inventory
- )
+ kwargs = dict(source_project=project, source='scm', source_path='inventory_file', update_on_project_update=True, inventory=inventory)
is1 = InventorySource.objects.create(name="test-scm-inv", **kwargs)
is2 = InventorySource.objects.create(name="test-scm-inv2", **kwargs)
@@ -86,7 +72,6 @@ class TestDependentInventoryUpdate:
iu_run_mock.assert_called_once()
-
class MockSettings:
AWX_ISOLATED_PERIODIC_CHECK = 60
CLUSTER_HOST_ID = 'tower_1'
@@ -94,7 +79,6 @@ class MockSettings:
@pytest.mark.django_db
class TestIsolatedManagementTask:
-
@pytest.fixture
def control_group(self):
return InstanceGroup.objects.create(name='alpha')
@@ -107,7 +91,7 @@ class TestIsolatedManagementTask:
def needs_updating(self, control_group):
ig = InstanceGroup.objects.create(name='thepentagon', controller=control_group)
inst = ig.instances.create(hostname='isolated', capacity=103)
- inst.last_isolated_check=now() - timedelta(seconds=MockSettings.AWX_ISOLATED_PERIODIC_CHECK)
+ inst.last_isolated_check = now() - timedelta(seconds=MockSettings.AWX_ISOLATED_PERIODIC_CHECK)
inst.save()
return ig
@@ -115,7 +99,7 @@ class TestIsolatedManagementTask:
def just_updated(self, control_group):
ig = InstanceGroup.objects.create(name='thepentagon', controller=control_group)
inst = ig.instances.create(hostname='isolated', capacity=103)
- inst.last_isolated_check=now()
+ inst.last_isolated_check = now()
inst.save()
return inst
diff --git a/awx/main/tests/functional/utils/test_common.py b/awx/main/tests/functional/utils/test_common.py
index f375f69f62..7a2f0f850d 100644
--- a/awx/main/tests/functional/utils/test_common.py
+++ b/awx/main/tests/functional/utils/test_common.py
@@ -3,10 +3,7 @@ import pytest
import copy
import json
-from awx.main.utils.common import (
- model_instance_diff,
- model_to_dict
-)
+from awx.main.utils.common import model_instance_diff, model_to_dict
@pytest.mark.django_db
diff --git a/awx/main/tests/manual/workflows/linear.py b/awx/main/tests/manual/workflows/linear.py
index 7375d8e9ab..a283cca671 100755
--- a/awx/main/tests/manual/workflows/linear.py
+++ b/awx/main/tests/manual/workflows/linear.py
@@ -28,14 +28,14 @@ def do_init_workflow(job_template_success, job_template_fail, job_template_never
nodes_fail[0].failure_nodes.add(nodes_success[1])
nodes_fail[0].success_nodes.add(nodes_never[1])
-
+
nodes_success[1].failure_nodes.add(nodes_never[2])
def do_init():
jt_success = JobTemplate.objects.get(id=5)
- jt_fail= JobTemplate.objects.get(id=6)
- jt_never= JobTemplate.objects.get(id=7)
+ jt_fail = JobTemplate.objects.get(id=6)
+ jt_never = JobTemplate.objects.get(id=7)
do_init_workflow(jt_success, jt_fail, jt_never)
diff --git a/awx/main/tests/manual/workflows/parallel.py b/awx/main/tests/manual/workflows/parallel.py
index ff4973f8b4..f9ec99dbf7 100755
--- a/awx/main/tests/manual/workflows/parallel.py
+++ b/awx/main/tests/manual/workflows/parallel.py
@@ -18,11 +18,11 @@ def do_init_workflow(job_template_success, job_template_fail, job_template_never
nodes_never = []
for x in range(0, 3):
nodes_never.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never))
-
+
nodes_parallel = []
for jt in jts_parallel:
nodes_parallel.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=jt))
-
+
node_success.success_nodes.add(nodes_parallel[0])
node_success.success_nodes.add(nodes_parallel[1])
node_success.success_nodes.add(nodes_parallel[2])
@@ -34,9 +34,9 @@ def do_init_workflow(job_template_success, job_template_fail, job_template_never
def do_init():
jt_success = JobTemplate.objects.get(id=5)
- jt_fail= JobTemplate.objects.get(id=6)
- jt_never= JobTemplate.objects.get(id=7)
-
+ jt_fail = JobTemplate.objects.get(id=6)
+ jt_never = JobTemplate.objects.get(id=7)
+
jt_parallel = []
jt_parallel.append(JobTemplate.objects.get(id=16))
jt_parallel.append(JobTemplate.objects.get(id=17))
diff --git a/awx/main/tests/test_env.py b/awx/main/tests/test_env.py
index 135c90d99b..b63da8ed8a 100644
--- a/awx/main/tests/test_env.py
+++ b/awx/main/tests/test_env.py
@@ -1,5 +1,3 @@
-
-
# Ensure that our autouse overwrites are working
def test_cache(settings):
assert settings.CACHES['default']['BACKEND'] == 'django.core.cache.backends.locmem.LocMemCache'
diff --git a/awx/main/tests/unit/analytics/test_broadcast_websocket.py b/awx/main/tests/unit/analytics/test_broadcast_websocket.py
index 6edfe51b92..cd7f4323b5 100644
--- a/awx/main/tests/unit/analytics/test_broadcast_websocket.py
+++ b/awx/main/tests/unit/analytics/test_broadcast_websocket.py
@@ -4,8 +4,7 @@ from awx.main.analytics.broadcast_websocket import FixedSlidingWindow
from awx.main.analytics.broadcast_websocket import dt_to_seconds
-class TestFixedSlidingWindow():
-
+class TestFixedSlidingWindow:
def ts(self, **kwargs):
e = {
'year': 1985,
@@ -32,7 +31,6 @@ class TestFixedSlidingWindow():
fsw.record(self.ts(minute=0, second=i, microsecond=0))
assert (i + 1) == fsw.render(self.ts(minute=0, second=i, microsecond=0))
-
def test_record_same_minute_render_diff_minute(self):
"""
Legend:
@@ -53,17 +51,11 @@ class TestFixedSlidingWindow():
for i in range(20):
fsw.record(self.ts(minute=0, second=i, microsecond=0))
- assert 20 == fsw.render(self.ts(minute=0, second=19, microsecond=0)), \
- "A. The second of the last record() call"
- assert 20 == fsw.render(self.ts(minute=0, second=20, microsecond=0)), \
- "B. The second after the last record() call"
- assert 20 == fsw.render(self.ts(minute=0, second=59, microsecond=0)), \
- "C. Last second in the same minute that all record() called in"
- assert 20 == fsw.render(self.ts(minute=1, second=0, microsecond=0)), \
- "D. First second of the minute following the minute that all record() calls in"
+ assert 20 == fsw.render(self.ts(minute=0, second=19, microsecond=0)), "A. The second of the last record() call"
+ assert 20 == fsw.render(self.ts(minute=0, second=20, microsecond=0)), "B. The second after the last record() call"
+        assert 20 == fsw.render(self.ts(minute=0, second=59, microsecond=0)), "C. Last second in the same minute that all record() calls happened in"
+        assert 20 == fsw.render(self.ts(minute=1, second=0, microsecond=0)), "D. First second of the minute following the minute that all record() calls happened in"
for i in range(20):
- assert 20 - i == fsw.render(self.ts(minute=1, second=i, microsecond=0)), \
- "E. Sliding window where 1 record() should drop from the results each time"
+ assert 20 - i == fsw.render(self.ts(minute=1, second=i, microsecond=0)), "E. Sliding window where 1 record() should drop from the results each time"
- assert 0 == fsw.render(self.ts(minute=1, second=20, microsecond=0)), \
- "F. First second one minute after all record() calls"
+ assert 0 == fsw.render(self.ts(minute=1, second=20, microsecond=0)), "F. First second one minute after all record() calls"
diff --git a/awx/main/tests/unit/api/serializers/conftest.py b/awx/main/tests/unit/api/serializers/conftest.py
index 7c26664954..b9f14cc644 100644
--- a/awx/main/tests/unit/api/serializers/conftest.py
+++ b/awx/main/tests/unit/api/serializers/conftest.py
@@ -7,6 +7,7 @@ def get_related_assert():
def fn(model_obj, related, resource_name, related_resource_name):
assert related_resource_name in related
assert related[related_resource_name] == '/api/v2/%s/%d/%s/' % (resource_name, model_obj.pk, related_resource_name)
+
return fn
@@ -16,6 +17,7 @@ def get_related_mock_and_run():
serializer = serializer_class()
related = serializer.get_related(model_obj)
return related
+
return fn
@@ -25,6 +27,7 @@ def test_get_related(get_related_assert, get_related_mock_and_run):
related = get_related_mock_and_run(serializer_class, model_obj)
get_related_assert(model_obj, related, resource_name, related_resource_name)
return related
+
return fn
@@ -32,6 +35,7 @@ def test_get_related(get_related_assert, get_related_mock_and_run):
def get_summary_fields_assert():
def fn(summary, summary_field_name):
assert summary_field_name in summary
+
return fn
@@ -42,6 +46,7 @@ def get_summary_fields_mock_and_run():
serializer.show_capabilities = []
serializer.context['view'] = mock.Mock(kwargs={})
return serializer.get_summary_fields(model_obj)
+
return fn
@@ -51,4 +56,5 @@ def test_get_summary_fields(get_summary_fields_mock_and_run, get_summary_fields_
summary = get_summary_fields_mock_and_run(serializer_class, model_obj)
get_summary_fields_assert(summary, summary_field_name)
return summary
+
return fn
diff --git a/awx/main/tests/unit/api/serializers/test_activity_stream_serializer.py b/awx/main/tests/unit/api/serializers/test_activity_stream_serializer.py
index 50849b31c5..dfe7720ea5 100644
--- a/awx/main/tests/unit/api/serializers/test_activity_stream_serializer.py
+++ b/awx/main/tests/unit/api/serializers/test_activity_stream_serializer.py
@@ -6,7 +6,7 @@ from awx.conf.models import Setting
def test_activity_stream_related():
- '''
+ """
     If this test fails with content in `missing_models`, it means that a
model has been connected to the activity stream, but the model has not
been added to the activity stream serializer.
@@ -17,13 +17,13 @@ def test_activity_stream_related():
If, for whatever reason, the missing model should not generally be
summarized from related resources, then a special case can be carved out in
ActivityStreamSerializer._local_summarizable_fk_fields
- '''
+ """
serializer_related = set(
- ActivityStream._meta.get_field(field_name).related_model for field_name, stuff in
- ActivityStreamSerializer()._local_summarizable_fk_fields
+ ActivityStream._meta.get_field(field_name).related_model
+ for field_name, stuff in ActivityStreamSerializer()._local_summarizable_fk_fields
if hasattr(ActivityStream, field_name)
)
-
+
models = set(activity_stream_registrar.models)
models.remove(Setting)
diff --git a/awx/main/tests/unit/api/serializers/test_inventory_serializers.py b/awx/main/tests/unit/api/serializers/test_inventory_serializers.py
index a1191bea2c..dd14d9e0e9 100644
--- a/awx/main/tests/unit/api/serializers/test_inventory_serializers.py
+++ b/awx/main/tests/unit/api/serializers/test_inventory_serializers.py
@@ -14,7 +14,7 @@ from awx.main.models import (
User,
)
-#DRF
+# DRF
from rest_framework.request import Request
from rest_framework.test import (
APIRequestFactory,
@@ -25,36 +25,30 @@ from rest_framework.test import (
@pytest.fixture
def inventory_source(mocker):
obj = mocker.MagicMock(
- pk=22,
- inventory=mocker.MagicMock(pk=23),
- update=mocker.MagicMock(),
- source_project_id=None,
- current_update=None,
- last_update=None,
- spec=InventorySource
+ pk=22, inventory=mocker.MagicMock(pk=23), update=mocker.MagicMock(), source_project_id=None, current_update=None, last_update=None, spec=InventorySource
)
return obj
class TestCustomInventoryScriptSerializer(object):
- @pytest.mark.parametrize("superuser,sysaudit,admin_role,value",
- ((True, False, False, '#!/python'),
- (False, True, False, '#!/python'),
- (False, False, True, '#!/python'),
- (False, False, False, None)))
+ @pytest.mark.parametrize(
+ "superuser,sysaudit,admin_role,value",
+ ((True, False, False, '#!/python'), (False, True, False, '#!/python'), (False, False, True, '#!/python'), (False, False, False, None)),
+ )
def test_to_representation_orphan(self, superuser, sysaudit, admin_role, value):
with mock.patch.object(CustomInventoryScriptSerializer, 'get_summary_fields', return_value={}):
with mock.patch.object(User, 'is_system_auditor', return_value=sysaudit):
user = User(username="root", is_superuser=superuser)
roles = [user] if admin_role else []
- with mock.patch('awx.main.models.CustomInventoryScript.admin_role', new_callable=PropertyMock, return_value=roles),\
- mock.patch('awx.api.serializers.settings'):
+ with mock.patch('awx.main.models.CustomInventoryScript.admin_role', new_callable=PropertyMock, return_value=roles), mock.patch(
+ 'awx.api.serializers.settings'
+ ):
cis = CustomInventoryScript(pk=1, script=value)
serializer = CustomInventoryScriptSerializer()
factory = APIRequestFactory()
- wsgi_request = factory.post("/inventory_script/1", {'id':1}, format="json")
+ wsgi_request = factory.post("/inventory_script/1", {'id': 1}, format="json")
force_authenticate(wsgi_request, user)
request = Request(wsgi_request)
@@ -64,23 +58,21 @@ class TestCustomInventoryScriptSerializer(object):
assert representation['script'] == value
-@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {})
-@mock.patch('awx.api.serializers.InventorySourceOptionsSerializer.get_related', lambda x,y: {})
+@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x, y: {})
+@mock.patch('awx.api.serializers.InventorySourceOptionsSerializer.get_related', lambda x, y: {})
class TestInventorySourceSerializerGetRelated(object):
- @pytest.mark.parametrize('related_resource_name', [
- 'activity_stream',
- 'notification_templates_error',
- 'notification_templates_success',
- 'notification_templates_started',
- 'inventory_updates',
- 'update',
- 'hosts',
- 'groups',
- ])
+ @pytest.mark.parametrize(
+ 'related_resource_name',
+ [
+ 'activity_stream',
+ 'notification_templates_error',
+ 'notification_templates_success',
+ 'notification_templates_started',
+ 'inventory_updates',
+ 'update',
+ 'hosts',
+ 'groups',
+ ],
+ )
def test_get_related(self, test_get_related, inventory_source, related_resource_name):
- test_get_related(
- InventorySourceSerializer,
- inventory_source,
- 'inventory_sources',
- related_resource_name
- )
+ test_get_related(InventorySourceSerializer, inventory_source, 'inventory_sources', related_resource_name)
diff --git a/awx/main/tests/unit/api/serializers/test_job_serializers.py b/awx/main/tests/unit/api/serializers/test_job_serializers.py
index 53cc07676d..6e1a0833f8 100644
--- a/awx/main/tests/unit/api/serializers/test_job_serializers.py
+++ b/awx/main/tests/unit/api/serializers/test_job_serializers.py
@@ -39,8 +39,7 @@ def project_update(mocker):
@pytest.fixture
def job(mocker, job_template, project_update):
- return mocker.MagicMock(pk=5, job_template=job_template, project_update=project_update,
- workflow_job_id=None, execution_environment_id=None)
+ return mocker.MagicMock(pk=5, job_template=job_template, project_update=project_update, workflow_job_id=None, execution_environment_id=None)
@pytest.fixture
@@ -53,15 +52,17 @@ def jobs(mocker):
return [Job(id=x, name='job-%d' % x) for x in range(0, 25)]
-@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {})
-@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x,y: {})
-class TestJobSerializerGetRelated():
-
- @pytest.mark.parametrize("related_resource_name", [
- 'job_events',
- 'relaunch',
- 'labels',
- ])
+@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x, y: {})
+@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x, y: {})
+class TestJobSerializerGetRelated:
+ @pytest.mark.parametrize(
+ "related_resource_name",
+ [
+ 'job_events',
+ 'relaunch',
+ 'labels',
+ ],
+ )
def test_get_related(self, test_get_related, job, related_resource_name):
test_get_related(JobSerializer, job, 'jobs', related_resource_name)
@@ -77,14 +78,12 @@ class TestJobSerializerGetRelated():
assert related['job_template'] == '/api/v2/%s/%d/' % ('job_templates', job.job_template.pk)
-@mock.patch('awx.api.serializers.BaseSerializer.to_representation', lambda self,obj: {
- 'extra_vars': obj.extra_vars})
-class TestJobSerializerSubstitution():
-
+@mock.patch('awx.api.serializers.BaseSerializer.to_representation', lambda self, obj: {'extra_vars': obj.extra_vars})
+class TestJobSerializerSubstitution:
def test_survey_password_hide(self, mocker):
- job = mocker.MagicMock(**{
- 'display_extra_vars.return_value': '{\"secret_key\": \"$encrypted$\"}',
- 'extra_vars.return_value': '{\"secret_key\": \"my_password\"}'})
+ job = mocker.MagicMock(
+ **{'display_extra_vars.return_value': '{\"secret_key\": \"$encrypted$\"}', 'extra_vars.return_value': '{\"secret_key\": \"my_password\"}'}
+ )
serializer = JobSerializer(job)
rep = serializer.to_representation(job)
extra_vars = json.loads(rep['extra_vars'])
@@ -93,9 +92,8 @@ class TestJobSerializerSubstitution():
assert 'my_password' not in extra_vars
-@mock.patch('awx.api.serializers.BaseSerializer.get_summary_fields', lambda x,y: {})
-class TestJobOptionsSerializerGetSummaryFields():
-
+@mock.patch('awx.api.serializers.BaseSerializer.get_summary_fields', lambda x, y: {})
+class TestJobOptionsSerializerGetSummaryFields:
def test__summary_field_labels_10_max(self, mocker, job_template, labels):
job_template.labels.all = mocker.MagicMock(**{'return_value': labels})
@@ -110,29 +108,30 @@ class TestJobOptionsSerializerGetSummaryFields():
class TestJobDetailSerializerGetHostStatusCountFields(object):
-
def test_hosts_are_counted_once(self, job, mocker):
- mock_event = JobEvent(**{
- 'event': 'playbook_on_stats',
- 'event_data': {
- 'skipped': {
- 'localhost': 2,
- 'fiz': 1,
- },
- 'ok': {
- 'localhost': 1,
- 'foo': 2,
+ mock_event = JobEvent(
+ **{
+ 'event': 'playbook_on_stats',
+ 'event_data': {
+ 'skipped': {
+ 'localhost': 2,
+ 'fiz': 1,
+ },
+ 'ok': {
+ 'localhost': 1,
+ 'foo': 2,
+ },
+ 'changed': {
+ 'localhost': 1,
+ 'bar': 3,
+ },
+ 'dark': {
+ 'localhost': 2,
+ 'fiz': 2,
+ },
},
- 'changed': {
- 'localhost': 1,
- 'bar': 3,
- },
- 'dark': {
- 'localhost': 2,
- 'fiz': 2,
- }
}
- })
+ )
mock_qs = namedtuple('mock_qs', ['get'])(mocker.MagicMock(return_value=mock_event))
job.job_events.only = mocker.MagicMock(return_value=mock_qs)
@@ -152,29 +151,30 @@ class TestJobDetailSerializerGetHostStatusCountFields(object):
class TestProjectUpdateDetailSerializerGetHostStatusCountFields(object):
-
def test_hosts_are_counted_once(self, project_update, mocker):
- mock_event = ProjectUpdateEvent(**{
- 'event': 'playbook_on_stats',
- 'event_data': {
- 'skipped': {
- 'localhost': 2,
- 'fiz': 1,
- },
- 'ok': {
- 'localhost': 1,
- 'foo': 2,
- },
- 'changed': {
- 'localhost': 1,
- 'bar': 3,
+ mock_event = ProjectUpdateEvent(
+ **{
+ 'event': 'playbook_on_stats',
+ 'event_data': {
+ 'skipped': {
+ 'localhost': 2,
+ 'fiz': 1,
+ },
+ 'ok': {
+ 'localhost': 1,
+ 'foo': 2,
+ },
+ 'changed': {
+ 'localhost': 1,
+ 'bar': 3,
+ },
+ 'dark': {
+ 'localhost': 2,
+ 'fiz': 2,
+ },
},
- 'dark': {
- 'localhost': 2,
- 'fiz': 2,
- }
}
- })
+ )
mock_qs = namedtuple('mock_qs', ['get'])(mocker.MagicMock(return_value=mock_event))
project_update.project_update_events.only = mocker.MagicMock(return_value=mock_qs)
diff --git a/awx/main/tests/unit/api/serializers/test_job_template_serializers.py b/awx/main/tests/unit/api/serializers/test_job_template_serializers.py
index 00ba863987..51e64fd753 100644
--- a/awx/main/tests/unit/api/serializers/test_job_template_serializers.py
+++ b/awx/main/tests/unit/api/serializers/test_job_template_serializers.py
@@ -15,7 +15,7 @@ from awx.main.models import (
)
from rest_framework.test import APIRequestFactory
-#DRF
+# DRF
from rest_framework import serializers
@@ -45,22 +45,25 @@ def jobs(mocker):
return [Job(id=x, name='job-%d' % x) for x in range(0, 25)]
-@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {})
-@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x,y: {})
-class TestJobTemplateSerializerGetRelated():
- @pytest.mark.parametrize("related_resource_name", [
- 'jobs',
- 'schedules',
- 'activity_stream',
- 'launch',
- 'webhook_key',
- 'notification_templates_started',
- 'notification_templates_success',
- 'notification_templates_error',
- 'survey_spec',
- 'labels',
- 'callback',
- ])
+@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x, y: {})
+@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x, y: {})
+class TestJobTemplateSerializerGetRelated:
+ @pytest.mark.parametrize(
+ "related_resource_name",
+ [
+ 'jobs',
+ 'schedules',
+ 'activity_stream',
+ 'launch',
+ 'webhook_key',
+ 'notification_templates_started',
+ 'notification_templates_success',
+ 'notification_templates_error',
+ 'survey_spec',
+ 'labels',
+ 'callback',
+ ],
+ )
def test_get_related(self, test_get_related, job_template, related_resource_name):
test_get_related(JobTemplateSerializer, job_template, 'job_templates', related_resource_name)
@@ -70,7 +73,7 @@ class TestJobTemplateSerializerGetRelated():
assert 'callback' not in related
-class TestJobTemplateSerializerGetSummaryFields():
+class TestJobTemplateSerializerGetSummaryFields:
def test_survey_spec_exists(self, test_get_summary_fields, mocker, job_template):
job_template.survey_spec = {'name': 'blah', 'description': 'blah blah'}
with mocker.patch.object(JobTemplateSerializer, '_recent_jobs') as mock_rj:
diff --git a/awx/main/tests/unit/api/serializers/test_notification_template_serializers.py b/awx/main/tests/unit/api/serializers/test_notification_template_serializers.py
index f0bd6784d4..c9cfad00b2 100644
--- a/awx/main/tests/unit/api/serializers/test_notification_template_serializers.py
+++ b/awx/main/tests/unit/api/serializers/test_notification_template_serializers.py
@@ -6,52 +6,57 @@ from rest_framework.serializers import ValidationError
from awx.api.serializers import NotificationTemplateSerializer
-class StubNotificationTemplate():
+class StubNotificationTemplate:
notification_type = 'email'
-class TestNotificationTemplateSerializer():
-
- @pytest.mark.parametrize('valid_messages',
- [None,
- {'started': None},
- {'started': {'message': None}},
- {'started': {'message': 'valid'}},
- {'started': {'body': 'valid'}},
- {'started': {'message': 'valid', 'body': 'valid'}},
- {'started': None, 'success': None, 'error': None},
- {'started': {'message': None, 'body': None},
- 'success': {'message': None, 'body': None},
- 'error': {'message': None, 'body': None}},
- {'started': {'message': '{{ job.id }}', 'body': '{{ job.status }}'},
- 'success': {'message': None, 'body': '{{ job_friendly_name }}'},
- 'error': {'message': '{{ url }}', 'body': None}},
- {'started': {'body': '{{ job_metadata }}'}},
- {'started': {'body': '{{ job.summary_fields.inventory.total_hosts }}'}},
- {'started': {'body': u'Iñtërnâtiônàlizætiøn'}}
- ])
+class TestNotificationTemplateSerializer:
+ @pytest.mark.parametrize(
+ 'valid_messages',
+ [
+ None,
+ {'started': None},
+ {'started': {'message': None}},
+ {'started': {'message': 'valid'}},
+ {'started': {'body': 'valid'}},
+ {'started': {'message': 'valid', 'body': 'valid'}},
+ {'started': None, 'success': None, 'error': None},
+ {'started': {'message': None, 'body': None}, 'success': {'message': None, 'body': None}, 'error': {'message': None, 'body': None}},
+ {
+ 'started': {'message': '{{ job.id }}', 'body': '{{ job.status }}'},
+ 'success': {'message': None, 'body': '{{ job_friendly_name }}'},
+ 'error': {'message': '{{ url }}', 'body': None},
+ },
+ {'started': {'body': '{{ job_metadata }}'}},
+ {'started': {'body': '{{ job.summary_fields.inventory.total_hosts }}'}},
+ {'started': {'body': u'Iñtërnâtiônàlizætiøn'}},
+ ],
+ )
def test_valid_messages(self, valid_messages):
serializer = NotificationTemplateSerializer()
serializer.instance = StubNotificationTemplate()
serializer.validate_messages(valid_messages)
- @pytest.mark.parametrize('invalid_messages',
- [1,
- [],
- '',
- {'invalid_event': ''},
- {'started': 'should_be_dict'},
- {'started': {'bad_message_type': ''}},
- {'started': {'message': 1}},
- {'started': {'message': []}},
- {'started': {'message': {}}},
- {'started': {'message': '{{ unclosed_braces'}},
- {'started': {'message': '{{ undefined }}'}},
- {'started': {'message': '{{ job.undefined }}'}},
- {'started': {'message': '{{ job.id | bad_filter }}'}},
- {'started': {'message': '{{ job.__class__ }}'}},
- {'started': {'message': 'Newlines \n not allowed\n'}},
- ])
+ @pytest.mark.parametrize(
+ 'invalid_messages',
+ [
+ 1,
+ [],
+ '',
+ {'invalid_event': ''},
+ {'started': 'should_be_dict'},
+ {'started': {'bad_message_type': ''}},
+ {'started': {'message': 1}},
+ {'started': {'message': []}},
+ {'started': {'message': {}}},
+ {'started': {'message': '{{ unclosed_braces'}},
+ {'started': {'message': '{{ undefined }}'}},
+ {'started': {'message': '{{ job.undefined }}'}},
+ {'started': {'message': '{{ job.id | bad_filter }}'}},
+ {'started': {'message': '{{ job.__class__ }}'}},
+ {'started': {'message': 'Newlines \n not allowed\n'}},
+ ],
+ )
def test_invalid__messages(self, invalid_messages):
serializer = NotificationTemplateSerializer()
serializer.instance = StubNotificationTemplate()
diff --git a/awx/main/tests/unit/api/serializers/test_primary_key_related_field.py b/awx/main/tests/unit/api/serializers/test_primary_key_related_field.py
index 0be6d3312e..101bb5de4b 100644
--- a/awx/main/tests/unit/api/serializers/test_primary_key_related_field.py
+++ b/awx/main/tests/unit/api/serializers/test_primary_key_related_field.py
@@ -11,6 +11,6 @@ from awx.api.serializers import JobLaunchSerializer
def test_primary_key_related_field():
    # We are testing whether the PrimaryKeyRelatedField in this serializer can take a dictionary.
    # PrimaryKeyRelatedField should not accept a dictionary as input, and should raise a ValidationError.
- data = {'credentials' : {'1': '2', '3':'4'}}
+ data = {'credentials': {'1': '2', '3': '4'}}
with pytest.raises(ValidationError):
JobLaunchSerializer(data=data)
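
The hunks above and below illustrate black's "magic trailing comma": a trailing comma before the closing bracket makes black keep one element per line, while a call without one is collapsed onto as few lines as the configured line length allows (which is why the parametrize call in the next file ends up on a single line). A minimal sketch of both forms, illustrative only and not taken from this patch:

import pytest


@pytest.mark.parametrize('value', [1, 2, 3])  # no trailing comma inside the call: black collapses it
def test_collapsed(value):
    assert value > 0


@pytest.mark.parametrize(
    'value',
    [
        1,
        2,
        3,
    ],  # trailing comma: black keeps the exploded, one-element-per-line form
)
def test_exploded(value):
    assert value > 0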
diff --git a/awx/main/tests/unit/api/serializers/test_token_serializer.py b/awx/main/tests/unit/api/serializers/test_token_serializer.py
index 5ead166664..aa6363d47a 100644
--- a/awx/main/tests/unit/api/serializers/test_token_serializer.py
+++ b/awx/main/tests/unit/api/serializers/test_token_serializer.py
@@ -3,12 +3,6 @@ import pytest
from awx.api.serializers import OAuth2TokenSerializer
-@pytest.mark.parametrize('scope, expect', [
- ('', False),
- ('read', True),
- ('read read', False),
- ('write read', True),
- ('read rainbow', False)
-])
+@pytest.mark.parametrize('scope, expect', [('', False), ('read', True), ('read read', False), ('write read', True), ('read rainbow', False)])
def test_invalid_scopes(scope, expect):
assert OAuth2TokenSerializer()._is_valid_scope(scope) is expect
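
The five parametrized cases pin down the scope rules _is_valid_scope checks: the string must be non-empty, duplicate-free, and contain only known scope words. A hedged re-implementation matching exactly those cases (hypothetical, not the serializer's actual code):

def is_valid_scope(scope):
    # Space-separated words; must be non-empty, duplicate-free,
    # and drawn only from the known scopes 'read' and 'write'.
    words = scope.split()
    return bool(words) and len(set(words)) == len(words) and set(words) <= {'read', 'write'}


assert is_valid_scope('write read') is True
assert is_valid_scope('read read') is False
assert is_valid_scope('read rainbow') is False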
diff --git a/awx/main/tests/unit/api/serializers/test_unified_serializers.py b/awx/main/tests/unit/api/serializers/test_unified_serializers.py
index a03ceb7d7a..f5353e3324 100644
--- a/awx/main/tests/unit/api/serializers/test_unified_serializers.py
+++ b/awx/main/tests/unit/api/serializers/test_unified_serializers.py
@@ -7,12 +7,12 @@ from rest_framework.generics import ListAPIView
def test_unified_template_field_consistency():
- '''
+ """
Example of what is being tested:
The endpoints /projects/N/ and /projects/ should have the same fields as
that same project when it is serialized by the unified job template serializer
in /unified_job_templates/
- '''
+ """
for cls in UnifiedJobTemplate.__subclasses__():
detail_serializer = getattr(serializers, '{}Serializer'.format(cls.__name__))
unified_serializer = serializers.UnifiedJobTemplateSerializer().get_sub_serializer(cls())
@@ -20,28 +20,26 @@ def test_unified_template_field_consistency():
def test_unified_job_list_field_consistency():
- '''
+ """
Example of what is being tested:
The endpoint /project_updates/ should have the same fields as that
project update when it is serialized by the unified job template serializer
in /unified_jobs/
- '''
+ """
for cls in UnifiedJob.__subclasses__():
list_serializer = getattr(serializers, '{}ListSerializer'.format(cls.__name__))
unified_serializer = serializers.UnifiedJobListSerializer().get_sub_serializer(cls())
- assert set(list_serializer().fields.keys()) == set(unified_serializer().fields.keys()), (
- 'Mismatch between {} list serializer & unified list serializer'.format(cls)
- )
+ assert set(list_serializer().fields.keys()) == set(
+ unified_serializer().fields.keys()
+ ), 'Mismatch between {} list serializer & unified list serializer'.format(cls)
def test_unified_job_detail_exclusive_fields():
- '''
+ """
    For each type, assert that the only fields exclusive to the
    detail view are those in the allowed set
- '''
- allowed_detail_fields = frozenset(
- ('result_traceback', 'job_args', 'job_cwd', 'job_env', 'event_processing_finished')
- )
+ """
+ allowed_detail_fields = frozenset(('result_traceback', 'job_args', 'job_cwd', 'job_env', 'event_processing_finished'))
for cls in UnifiedJob.__subclasses__():
list_serializer = getattr(serializers, '{}ListSerializer'.format(cls.__name__))
detail_serializer = getattr(serializers, '{}Serializer'.format(cls.__name__))
@@ -51,19 +49,14 @@ def test_unified_job_detail_exclusive_fields():
def test_list_views_use_list_serializers(all_views):
- '''
+ """
Check that the list serializers are only used for list views,
and vice versa
- '''
- list_serializers = tuple(
- getattr(serializers, '{}ListSerializer'.format(cls.__name__)) for
- cls in (UnifiedJob.__subclasses__() + [UnifiedJob])
- )
+ """
+ list_serializers = tuple(getattr(serializers, '{}ListSerializer'.format(cls.__name__)) for cls in (UnifiedJob.__subclasses__() + [UnifiedJob]))
for View in all_views:
if hasattr(View, 'model') and issubclass(getattr(View, 'model'), UnifiedJob):
if issubclass(View, ListAPIView):
- assert issubclass(View.serializer_class, list_serializers), (
- 'View {} serializer {} is not a list serializer'.format(View, View.serializer_class)
- )
+ assert issubclass(View.serializer_class, list_serializers), 'View {} serializer {} is not a list serializer'.format(View, View.serializer_class)
else:
assert not issubclass(View.model, list_serializers)
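
Both consistency tests above use the same introspection pattern: walk Model.__subclasses__() and compare serializer field names as sets, so a field added to one serializer but not its unified counterpart fails loudly. A stripped-down sketch of the pattern with stand-in classes (not the AWX models):

class Base:
    pass


class ChildA(Base):
    fields = {'id', 'name'}


class ChildB(Base):
    fields = {'id', 'name', 'status'}


def test_every_subclass_exposes_the_common_fields():
    common = {'id', 'name'}
    for cls in Base.__subclasses__():
        assert common <= cls.fields, 'Mismatch for {}'.format(cls)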
diff --git a/awx/main/tests/unit/api/serializers/test_workflow_serializers.py b/awx/main/tests/unit/api/serializers/test_workflow_serializers.py
index 65837045f8..526f06c4c9 100644
--- a/awx/main/tests/unit/api/serializers/test_workflow_serializers.py
+++ b/awx/main/tests/unit/api/serializers/test_workflow_serializers.py
@@ -8,41 +8,32 @@ from awx.api.serializers import (
WorkflowJobTemplateNodeSerializer,
WorkflowJobNodeSerializer,
)
-from awx.main.models import (
- Job,
- WorkflowJobTemplateNode,
- WorkflowJob,
- WorkflowJobNode,
- WorkflowJobTemplate,
- Project,
- Inventory,
- JobTemplate
-)
+from awx.main.models import Job, WorkflowJobTemplateNode, WorkflowJob, WorkflowJobNode, WorkflowJobTemplate, Project, Inventory, JobTemplate
-@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {})
-class TestWorkflowJobTemplateSerializerGetRelated():
+@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x, y: {})
+class TestWorkflowJobTemplateSerializerGetRelated:
@pytest.fixture
def workflow_job_template(self, workflow_job_template_factory):
wfjt = workflow_job_template_factory('hello world', persisted=False).workflow_job_template
wfjt.pk = 3
return wfjt
- @pytest.mark.parametrize("related_resource_name", [
- 'workflow_jobs',
- 'launch',
- 'workflow_nodes',
- 'webhook_key',
- ])
+ @pytest.mark.parametrize(
+ "related_resource_name",
+ [
+ 'workflow_jobs',
+ 'launch',
+ 'workflow_nodes',
+ 'webhook_key',
+ ],
+ )
def test_get_related(self, mocker, test_get_related, workflow_job_template, related_resource_name):
- test_get_related(WorkflowJobTemplateSerializer,
- workflow_job_template,
- 'workflow_job_templates',
- related_resource_name)
+ test_get_related(WorkflowJobTemplateSerializer, workflow_job_template, 'workflow_job_templates', related_resource_name)
-@mock.patch('awx.api.serializers.BaseSerializer.get_related', lambda x,y: {})
-class TestWorkflowNodeBaseSerializerGetRelated():
+@mock.patch('awx.api.serializers.BaseSerializer.get_related', lambda x, y: {})
+class TestWorkflowNodeBaseSerializerGetRelated:
@pytest.fixture
def job_template(self, job_template_factory):
jt = job_template_factory(name="blah", persisted=False).job_template
@@ -67,8 +58,8 @@ class TestWorkflowNodeBaseSerializerGetRelated():
assert 'unified_job_template' not in related
-@mock.patch('awx.api.serializers.BaseSerializer.get_related', lambda x,y: {})
-class TestWorkflowJobTemplateNodeSerializerGetRelated():
+@mock.patch('awx.api.serializers.BaseSerializer.get_related', lambda x, y: {})
+class TestWorkflowJobTemplateNodeSerializerGetRelated:
@pytest.fixture
def workflow_job_template_node(self):
return WorkflowJobTemplateNode(pk=1)
@@ -90,16 +81,16 @@ class TestWorkflowJobTemplateNodeSerializerGetRelated():
workflow_job_template_node.workflow_job_template = workflow_job_template
return workflow_job_template_node
- @pytest.mark.parametrize("related_resource_name", [
- 'success_nodes',
- 'failure_nodes',
- 'always_nodes',
- ])
+ @pytest.mark.parametrize(
+ "related_resource_name",
+ [
+ 'success_nodes',
+ 'failure_nodes',
+ 'always_nodes',
+ ],
+ )
def test_get_related(self, test_get_related, workflow_job_template_node, related_resource_name):
- test_get_related(WorkflowJobTemplateNodeSerializer,
- workflow_job_template_node,
- 'workflow_job_template_nodes',
- related_resource_name)
+ test_get_related(WorkflowJobTemplateNodeSerializer, workflow_job_template_node, 'workflow_job_template_nodes', related_resource_name)
def test_workflow_job_template_present(self, get_related_mock_and_run, workflow_job_template_node_related):
related = get_related_mock_and_run(WorkflowJobTemplateNodeSerializer, workflow_job_template_node_related)
@@ -123,7 +114,7 @@ class FakeRequest:
pass
-class TestWorkflowJobTemplateNodeSerializerCharPrompts():
+class TestWorkflowJobTemplateNodeSerializerCharPrompts:
@pytest.fixture
def WFJT_serializer(self):
serializer = WorkflowJobTemplateNodeSerializer()
@@ -135,7 +126,7 @@ class TestWorkflowJobTemplateNodeSerializerCharPrompts():
view.request.method = "PATCH"
serializer = WorkflowJobTemplateNodeSerializer()
- serializer = WorkflowJobTemplateNodeSerializer(context={'view':view})
+ serializer = WorkflowJobTemplateNodeSerializer(context={'view': view})
serializer.instance = node
return serializer
@@ -156,26 +147,23 @@ class TestWorkflowJobTemplateNodeSerializerCharPrompts():
@mock.patch('awx.api.serializers.BaseSerializer.validate', lambda self, attrs: attrs)
-class TestWorkflowJobTemplateNodeSerializerSurveyPasswords():
-
+class TestWorkflowJobTemplateNodeSerializerSurveyPasswords:
@pytest.fixture
def jt(self, survey_spec_factory):
return JobTemplate(
name='fake-jt',
survey_enabled=True,
survey_spec=survey_spec_factory(variables='var1', default_type='password'),
- project=Project('fake-proj'), project_id=42,
- inventory=Inventory('fake-inv'), inventory_id=42
+ project=Project('fake-proj'),
+ project_id=42,
+ inventory=Inventory('fake-inv'),
+ inventory_id=42,
)
def test_set_survey_passwords_create(self, jt):
serializer = WorkflowJobTemplateNodeSerializer()
wfjt = WorkflowJobTemplate(name='fake-wfjt')
- attrs = serializer.validate({
- 'unified_job_template': jt,
- 'workflow_job_template': wfjt,
- 'extra_data': {'var1': 'secret_answer'}
- })
+ attrs = serializer.validate({'unified_job_template': jt, 'workflow_job_template': wfjt, 'extra_data': {'var1': 'secret_answer'}})
assert 'survey_passwords' in attrs
assert 'var1' in attrs['survey_passwords']
assert attrs['extra_data']['var1'].startswith('$encrypted$')
@@ -184,15 +172,8 @@ class TestWorkflowJobTemplateNodeSerializerSurveyPasswords():
def test_set_survey_passwords_modify(self, jt):
serializer = WorkflowJobTemplateNodeSerializer()
wfjt = WorkflowJobTemplate(name='fake-wfjt')
- serializer.instance = WorkflowJobTemplateNode(
- workflow_job_template=wfjt,
- unified_job_template=jt
- )
- attrs = serializer.validate({
- 'unified_job_template': jt,
- 'workflow_job_template': wfjt,
- 'extra_data': {'var1': 'secret_answer'}
- })
+ serializer.instance = WorkflowJobTemplateNode(workflow_job_template=wfjt, unified_job_template=jt)
+ attrs = serializer.validate({'unified_job_template': jt, 'workflow_job_template': wfjt, 'extra_data': {'var1': 'secret_answer'}})
assert 'survey_passwords' in attrs
assert 'var1' in attrs['survey_passwords']
assert attrs['extra_data']['var1'].startswith('$encrypted$')
@@ -201,42 +182,30 @@ class TestWorkflowJobTemplateNodeSerializerSurveyPasswords():
def test_use_db_answer(self, jt, mocker):
serializer = WorkflowJobTemplateNodeSerializer()
wfjt = WorkflowJobTemplate(name='fake-wfjt')
- serializer.instance = WorkflowJobTemplateNode(
- workflow_job_template=wfjt,
- unified_job_template=jt,
- extra_data={'var1': '$encrypted$foooooo'}
- )
+ serializer.instance = WorkflowJobTemplateNode(workflow_job_template=wfjt, unified_job_template=jt, extra_data={'var1': '$encrypted$foooooo'})
with mocker.patch('awx.main.models.mixins.decrypt_value', return_value='foo'):
- attrs = serializer.validate({
- 'unified_job_template': jt,
- 'workflow_job_template': wfjt,
- 'extra_data': {'var1': '$encrypted$'}
- })
+ attrs = serializer.validate({'unified_job_template': jt, 'workflow_job_template': wfjt, 'extra_data': {'var1': '$encrypted$'}})
assert 'survey_passwords' in attrs
assert 'var1' in attrs['survey_passwords']
assert attrs['extra_data']['var1'] == '$encrypted$foooooo'
def test_accept_password_default(self, jt, mocker):
- '''
+ """
        If the user provides "$encrypted$" without a corresponding DB value for the
        node, but the survey question has a default, then the variables are accepted
        with that particular var omitted, so that at launch time the default takes effect
- '''
+ """
serializer = WorkflowJobTemplateNodeSerializer()
wfjt = WorkflowJobTemplate(name='fake-wfjt')
jt.survey_spec['spec'][0]['default'] = '$encrypted$bar'
- attrs = serializer.validate({
- 'unified_job_template': jt,
- 'workflow_job_template': wfjt,
- 'extra_data': {'var1': '$encrypted$'}
- })
+ attrs = serializer.validate({'unified_job_template': jt, 'workflow_job_template': wfjt, 'extra_data': {'var1': '$encrypted$'}})
assert 'survey_passwords' in attrs
assert attrs['survey_passwords'] == {}
assert attrs['extra_data'] == {}
-@mock.patch('awx.api.serializers.WorkflowJobTemplateNodeSerializer.get_related', lambda x,y: {})
-class TestWorkflowJobNodeSerializerGetRelated():
+@mock.patch('awx.api.serializers.WorkflowJobTemplateNodeSerializer.get_related', lambda x, y: {})
+class TestWorkflowJobNodeSerializerGetRelated:
@pytest.fixture
def workflow_job_node(self):
return WorkflowJobNode(pk=1)
@@ -255,16 +224,16 @@ class TestWorkflowJobNodeSerializerGetRelated():
workflow_job_node.job = job
return workflow_job_node
- @pytest.mark.parametrize("related_resource_name", [
- 'success_nodes',
- 'failure_nodes',
- 'always_nodes',
- ])
+ @pytest.mark.parametrize(
+ "related_resource_name",
+ [
+ 'success_nodes',
+ 'failure_nodes',
+ 'always_nodes',
+ ],
+ )
def test_get_related(self, test_get_related, workflow_job_node, related_resource_name):
- test_get_related(WorkflowJobNodeSerializer,
- workflow_job_node,
- 'workflow_job_nodes',
- related_resource_name)
+ test_get_related(WorkflowJobNodeSerializer, workflow_job_node, 'workflow_job_nodes', related_resource_name)
def test_workflow_job_present(self, get_related_mock_and_run, workflow_job_node_related):
related = get_related_mock_and_run(WorkflowJobNodeSerializer, workflow_job_node_related)
diff --git a/awx/main/tests/unit/api/test_filters.py b/awx/main/tests/unit/api/test_filters.py
index 4a951890e7..5b247790e2 100644
--- a/awx/main/tests/unit/api/test_filters.py
+++ b/awx/main/tests/unit/api/test_filters.py
@@ -4,12 +4,22 @@ import pytest
from rest_framework.exceptions import PermissionDenied, ParseError
from awx.api.filters import FieldLookupBackend, OrderByBackend, get_field_from_path
-from awx.main.models import (AdHocCommand, ActivityStream,
- CustomInventoryScript, Credential, Job,
- JobTemplate, SystemJob, UnifiedJob, User,
- WorkflowJob, WorkflowJobTemplate,
- WorkflowJobOptions, InventorySource,
- JobEvent)
+from awx.main.models import (
+ AdHocCommand,
+ ActivityStream,
+ CustomInventoryScript,
+ Credential,
+ Job,
+ JobTemplate,
+ SystemJob,
+ UnifiedJob,
+ User,
+ WorkflowJob,
+ WorkflowJobTemplate,
+ WorkflowJobOptions,
+ InventorySource,
+ JobEvent,
+)
from awx.main.models.oauth import OAuth2Application
from awx.main.models.jobs import JobOptions
@@ -79,25 +89,28 @@ def test_filter_on_password_field(password_field, lookup_suffix):
assert 'not allowed' in str(excinfo.value)
-@pytest.mark.parametrize('model, query', [
- (User, 'password__icontains'),
- (User, 'settings__value__icontains'),
- (User, 'main_oauth2accesstoken__token__gt'),
- (UnifiedJob, 'job_args__icontains'),
- (UnifiedJob, 'job_env__icontains'),
- (UnifiedJob, 'start_args__icontains'),
- (AdHocCommand, 'extra_vars__icontains'),
- (JobOptions, 'extra_vars__icontains'),
- (SystemJob, 'extra_vars__icontains'),
- (WorkflowJobOptions, 'extra_vars__icontains'),
- (Job, 'survey_passwords__icontains'),
- (WorkflowJob, 'survey_passwords__icontains'),
- (JobTemplate, 'survey_spec__icontains'),
- (WorkflowJobTemplate, 'survey_spec__icontains'),
- (CustomInventoryScript, 'script__icontains'),
- (ActivityStream, 'o_auth2_application__client_secret__gt'),
- (OAuth2Application, 'grant__code__gt')
-])
+@pytest.mark.parametrize(
+ 'model, query',
+ [
+ (User, 'password__icontains'),
+ (User, 'settings__value__icontains'),
+ (User, 'main_oauth2accesstoken__token__gt'),
+ (UnifiedJob, 'job_args__icontains'),
+ (UnifiedJob, 'job_env__icontains'),
+ (UnifiedJob, 'start_args__icontains'),
+ (AdHocCommand, 'extra_vars__icontains'),
+ (JobOptions, 'extra_vars__icontains'),
+ (SystemJob, 'extra_vars__icontains'),
+ (WorkflowJobOptions, 'extra_vars__icontains'),
+ (Job, 'survey_passwords__icontains'),
+ (WorkflowJob, 'survey_passwords__icontains'),
+ (JobTemplate, 'survey_spec__icontains'),
+ (WorkflowJobTemplate, 'survey_spec__icontains'),
+ (CustomInventoryScript, 'script__icontains'),
+ (ActivityStream, 'o_auth2_application__client_secret__gt'),
+ (OAuth2Application, 'grant__code__gt'),
+ ],
+)
def test_filter_sensitive_fields_and_relations(model, query):
field_lookup = FieldLookupBackend()
with pytest.raises(PermissionDenied) as excinfo:
diff --git a/awx/main/tests/unit/api/test_generics.py b/awx/main/tests/unit/api/test_generics.py
index caac45bc3b..6f0982bfd8 100644
--- a/awx/main/tests/unit/api/test_generics.py
+++ b/awx/main/tests/unit/api/test_generics.py
@@ -1,4 +1,3 @@
-
# Python
import pytest
from unittest import mock
@@ -9,12 +8,7 @@ from rest_framework.response import Response
from rest_framework.exceptions import PermissionDenied
# AWX
-from awx.api.generics import (
- ParentMixin,
- SubListCreateAttachDetachAPIView, SubListAttachDetachAPIView,
- ResourceAccessList,
- ListAPIView
-)
+from awx.api.generics import ParentMixin, SubListCreateAttachDetachAPIView, SubListAttachDetachAPIView, ResourceAccessList, ListAPIView
from awx.main.models import Organization, Credential
@@ -45,6 +39,7 @@ def parent_relationship_factory(mocker):
serializer.relationship = relationship_name
return (serializer, mock_parent_relationship)
+
return rf
@@ -183,11 +178,8 @@ class TestParentMixin:
class TestResourceAccessList:
-
def mock_request(self):
- return mock.MagicMock(
- user=mock.MagicMock(is_anonymous=False, is_superuser=False),
- method='GET')
+ return mock.MagicMock(user=mock.MagicMock(is_anonymous=False, is_superuser=False), method='GET')
def mock_view(self, parent=None):
view = ResourceAccessList()
diff --git a/awx/main/tests/unit/api/test_logger.py b/awx/main/tests/unit/api/test_logger.py
index dc4c32f229..bdea633a07 100644
--- a/awx/main/tests/unit/api/test_logger.py
+++ b/awx/main/tests/unit/api/test_logger.py
@@ -35,7 +35,8 @@ data_loggly = {
# Test reconfigure logging settings function
# name this whatever you want
@pytest.mark.parametrize(
- 'enabled, log_type, host, port, protocol, errorfile, expected_config', [
+ 'enabled, log_type, host, port, protocol, errorfile, expected_config',
+ [
(
True,
'loggly',
@@ -43,10 +44,12 @@ data_loggly = {
None,
'https',
'/var/log/tower/rsyslog.err',
- '\n'.join([
- 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
- 'action(type="omhttp" server="logs-01.loggly.com" serverport="80" usehttps="off" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="inputs/1fd38090-2af1-4e1e-8d80-492899da0f71/tag/http/")', # noqa
- ])
+ '\n'.join(
+ [
+ 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
+ 'action(type="omhttp" server="logs-01.loggly.com" serverport="80" usehttps="off" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="inputs/1fd38090-2af1-4e1e-8d80-492899da0f71/tag/http/")', # noqa
+ ]
+ ),
),
(
True, # localhost w/ custom UDP port
@@ -55,10 +58,12 @@ data_loggly = {
9000,
'udp',
'', # empty errorfile
- '\n'.join([
- 'template(name="awx" type="string" string="%rawmsg-after-pri%")',
- 'action(type="omfwd" target="localhost" port="9000" protocol="udp" action.resumeRetryCount="-1" action.resumeInterval="5" template="awx")', # noqa
- ])
+ '\n'.join(
+ [
+ 'template(name="awx" type="string" string="%rawmsg-after-pri%")',
+ 'action(type="omfwd" target="localhost" port="9000" protocol="udp" action.resumeRetryCount="-1" action.resumeInterval="5" template="awx")', # noqa
+ ]
+ ),
),
(
True, # localhost w/ custom TCP port
@@ -67,10 +72,12 @@ data_loggly = {
9000,
'tcp',
'/var/log/tower/rsyslog.err',
- '\n'.join([
- 'template(name="awx" type="string" string="%rawmsg-after-pri%")',
- 'action(type="omfwd" target="localhost" port="9000" protocol="tcp" action.resumeRetryCount="-1" action.resumeInterval="5" template="awx")', # noqa
- ])
+ '\n'.join(
+ [
+ 'template(name="awx" type="string" string="%rawmsg-after-pri%")',
+ 'action(type="omfwd" target="localhost" port="9000" protocol="tcp" action.resumeRetryCount="-1" action.resumeInterval="5" template="awx")', # noqa
+ ]
+ ),
),
(
True, # https, default port 443
@@ -79,10 +86,12 @@ data_loggly = {
None,
None,
'/var/log/tower/rsyslog.err',
- '\n'.join([
- 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
- 'action(type="omhttp" server="yoursplunk" serverport="443" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")', # noqa
- ])
+ '\n'.join(
+ [
+ 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
+ 'action(type="omhttp" server="yoursplunk" serverport="443" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")', # noqa
+ ]
+ ),
),
(
True, # http, default port 80
@@ -91,10 +100,12 @@ data_loggly = {
None,
None,
'/var/log/tower/rsyslog.err',
- '\n'.join([
- 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
- 'action(type="omhttp" server="yoursplunk" serverport="80" usehttps="off" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")', # noqa
- ])
+ '\n'.join(
+ [
+ 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
+ 'action(type="omhttp" server="yoursplunk" serverport="80" usehttps="off" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")', # noqa
+ ]
+ ),
),
(
True, # https, custom port in URL string
@@ -103,10 +114,12 @@ data_loggly = {
None,
None,
'/var/log/tower/rsyslog.err',
- '\n'.join([
- 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
- 'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")', # noqa
- ])
+ '\n'.join(
+ [
+ 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
+ 'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")', # noqa
+ ]
+ ),
),
(
True, # https, custom port explicitly specified
@@ -115,10 +128,12 @@ data_loggly = {
8088,
None,
'/var/log/tower/rsyslog.err',
- '\n'.join([
- 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
- 'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")', # noqa
- ])
+ '\n'.join(
+ [
+ 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
+ 'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")', # noqa
+ ]
+ ),
),
(
True, # no scheme specified in URL, default to https, respect custom port
@@ -127,10 +142,12 @@ data_loggly = {
8088,
'https',
'/var/log/tower/rsyslog.err',
- '\n'.join([
- 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
- 'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")', # noqa
- ])
+ '\n'.join(
+ [
+ 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
+ 'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")', # noqa
+ ]
+ ),
),
(
True, # respect custom http-only port
@@ -139,29 +156,33 @@ data_loggly = {
8088,
None,
'/var/log/tower/rsyslog.err',
- '\n'.join([
- 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
- 'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="off" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")', # noqa
- ])
+ '\n'.join(
+ [
+ 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
+ 'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="off" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")', # noqa
+ ]
+ ),
),
(
True, # valid sumologic config
'sumologic',
- 'https://endpoint5.collection.us2.sumologic.com/receiver/v1/http/ZaVnC4dhaV0qoiETY0MrM3wwLoDgO1jFgjOxE6-39qokkj3LGtOroZ8wNaN2M6DtgYrJZsmSi4-36_Up5TbbN_8hosYonLKHSSOSKY845LuLZBCBwStrHQ==', # noqa
+ 'https://endpoint5.collection.us2.sumologic.com/receiver/v1/http/ZaVnC4dhaV0qoiETY0MrM3wwLoDgO1jFgjOxE6-39qokkj3LGtOroZ8wNaN2M6DtgYrJZsmSi4-36_Up5TbbN_8hosYonLKHSSOSKY845LuLZBCBwStrHQ==', # noqa
None,
'https',
'/var/log/tower/rsyslog.err',
- '\n'.join([
- 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
- 'action(type="omhttp" server="endpoint5.collection.us2.sumologic.com" serverport="443" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="receiver/v1/http/ZaVnC4dhaV0qoiETY0MrM3wwLoDgO1jFgjOxE6-39qokkj3LGtOroZ8wNaN2M6DtgYrJZsmSi4-36_Up5TbbN_8hosYonLKHSSOSKY845LuLZBCBwStrHQ==")', # noqa
- ])
+ '\n'.join(
+ [
+ 'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
+ 'action(type="omhttp" server="endpoint5.collection.us2.sumologic.com" serverport="443" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="receiver/v1/http/ZaVnC4dhaV0qoiETY0MrM3wwLoDgO1jFgjOxE6-39qokkj3LGtOroZ8wNaN2M6DtgYrJZsmSi4-36_Up5TbbN_8hosYonLKHSSOSKY845LuLZBCBwStrHQ==")', # noqa
+ ]
+ ),
),
- ]
+ ],
)
def test_rsyslog_conf_template(enabled, log_type, host, port, protocol, errorfile, expected_config):
-
+
mock_settings, _ = _mock_logging_defaults()
-
+
# Set test settings
logging_defaults = getattr(settings, 'LOGGING')
setattr(mock_settings, 'LOGGING', logging_defaults)
@@ -175,9 +196,9 @@ def test_rsyslog_conf_template(enabled, log_type, host, port, protocol, errorfil
if protocol:
setattr(mock_settings, 'LOG_AGGREGATOR_PROTOCOL', protocol)
- # create rsyslog conf template
+ # create rsyslog conf template
tmpl = construct_rsyslog_conf_template(mock_settings)
-
+
# check validity of created template
assert expected_config in tmpl
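
The expected strings encode how an aggregator URL is decomposed into rsyslog action parameters: the hostname becomes server=, the port falls back to 443/80 by scheme, usehttps follows the scheme (defaulting to https when none is given, with an explicit port setting taking over when the URL carries none), and the path becomes restpath=. A hedged sketch of that decomposition (a hypothetical helper, not the function under test):

from urllib.parse import urlsplit


def split_aggregator_url(url, explicit_port=None):
    # Default to https when the URL carries no scheme.
    parts = urlsplit(url if '://' in url else 'https://' + url)
    use_https = parts.scheme != 'http'
    port = parts.port or explicit_port or (443 if use_https else 80)
    return parts.hostname, port, use_https, parts.path.lstrip('/')


assert split_aggregator_url('https://yoursplunk:8088/services/collector/event') == ('yoursplunk', 8088, True, 'services/collector/event')
assert split_aggregator_url('yoursplunk.org/services/collector/event', 8088) == ('yoursplunk.org', 8088, True, 'services/collector/event')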
diff --git a/awx/main/tests/unit/api/test_parsers.py b/awx/main/tests/unit/api/test_parsers.py
index c2d221af36..508376f766 100644
--- a/awx/main/tests/unit/api/test_parsers.py
+++ b/awx/main/tests/unit/api/test_parsers.py
@@ -9,11 +9,12 @@ from rest_framework.exceptions import ParseError
@pytest.mark.parametrize(
- 'input_, output', [
+ 'input_, output',
+ [
('{"foo": "bar"}', {'foo': 'bar'}),
('null', None),
('', {}),
- ]
+ ],
)
def test_jsonparser_valid_input(input_, output):
input_stream = StringIO(input_)
diff --git a/awx/main/tests/unit/api/test_views.py b/awx/main/tests/unit/api/test_views.py
index 53ab2ececb..aff0dd2a3b 100644
--- a/awx/main/tests/unit/api/test_views.py
+++ b/awx/main/tests/unit/api/test_views.py
@@ -5,12 +5,7 @@ from unittest import mock
from collections import namedtuple
-from awx.api.views import (
- ApiVersionRootView,
- JobTemplateLabelList,
- InventoryInventorySourcesUpdate,
- JobTemplateSurveySpec
-)
+from awx.api.views import ApiVersionRootView, JobTemplateLabelList, InventoryInventorySourcesUpdate, JobTemplateSurveySpec
from awx.main.views import handle_error
@@ -74,12 +69,14 @@ class TestJobTemplateLabelList:
class TestInventoryInventorySourcesUpdate:
-
- @pytest.mark.parametrize("can_update, can_access, is_source, is_up_on_proj, expected", [
- (True, True, "ec2", False, [{'status': 'started', 'inventory_update': 1, 'inventory_source': 1}]),
- (False, True, "gce", False, [{'status': 'Could not start because `can_update` returned False', 'inventory_source': 1}]),
- (True, False, "scm", True, [{'status': 'started', 'inventory_update': 1, 'inventory_source': 1}]),
- ])
+ @pytest.mark.parametrize(
+ "can_update, can_access, is_source, is_up_on_proj, expected",
+ [
+ (True, True, "ec2", False, [{'status': 'started', 'inventory_update': 1, 'inventory_source': 1}]),
+ (False, True, "gce", False, [{'status': 'Could not start because `can_update` returned False', 'inventory_source': 1}]),
+ (True, False, "scm", True, [{'status': 'started', 'inventory_update': 1, 'inventory_source': 1}]),
+ ],
+ )
def test_post(self, mocker, can_update, can_access, is_source, is_up_on_proj, expected):
class InventoryUpdate:
id = 1
@@ -87,14 +84,20 @@ class TestInventoryInventorySourcesUpdate:
class Project:
name = 'project'
- InventorySource = namedtuple('InventorySource', ['source', 'update_on_project_update', 'pk', 'can_update',
- 'update', 'source_project'])
+ InventorySource = namedtuple('InventorySource', ['source', 'update_on_project_update', 'pk', 'can_update', 'update', 'source_project'])
class InventorySources(object):
def all(self):
- return [InventorySource(pk=1, source=is_source, source_project=Project,
- update_on_project_update=is_up_on_proj,
- can_update=can_update, update=lambda:InventoryUpdate)]
+ return [
+ InventorySource(
+ pk=1,
+ source=is_source,
+ source_project=Project,
+ update_on_project_update=is_up_on_proj,
+ can_update=can_update,
+ update=lambda: InventoryUpdate,
+ )
+ ]
def exclude(self, **kwargs):
return self.all()
@@ -117,26 +120,28 @@ class TestInventoryInventorySourcesUpdate:
class TestSurveySpecValidation:
-
def test_create_text_encrypted(self):
view = JobTemplateSurveySpec()
- resp = view._validate_spec_data({
- "name": "new survey",
- "description": "foobar",
- "spec": [
- {
- "question_description": "",
- "min": 0,
- "default": "$encrypted$",
- "max": 1024,
- "required": True,
- "choices": "",
- "variable": "openshift_username",
- "question_name": "OpenShift Username",
- "type": "text"
- }
- ]
- }, {})
+ resp = view._validate_spec_data(
+ {
+ "name": "new survey",
+ "description": "foobar",
+ "spec": [
+ {
+ "question_description": "",
+ "min": 0,
+ "default": "$encrypted$",
+ "max": 1024,
+ "required": True,
+ "choices": "",
+ "variable": "openshift_username",
+ "question_name": "OpenShift Username",
+ "type": "text",
+ }
+ ],
+ },
+ {},
+ )
assert resp.status_code == 400
assert '$encrypted$ is a reserved keyword for password question defaults' in str(resp.data['error'])
@@ -155,9 +160,9 @@ class TestSurveySpecValidation:
"choices": "",
"variable": "openshift_username",
"question_name": "OpenShift Username",
- "type": "password"
+ "type": "password",
}
- ]
+ ],
}
new = deepcopy(old)
new['spec'][0]['variable'] = 'openstack_username'
@@ -166,9 +171,9 @@ class TestSurveySpecValidation:
assert 'may not be used for new default' in str(resp.data['error'])
def test_use_saved_encrypted_default(self):
- '''
+ """
        Save is allowed, and the $encrypted$ replacement is done
- '''
+ """
view = JobTemplateSurveySpec()
old = {
"name": "old survey",
@@ -183,9 +188,9 @@ class TestSurveySpecValidation:
"choices": "",
"variable": "openshift_username",
"question_name": "OpenShift Username",
- "type": "password"
+ "type": "password",
}
- ]
+ ],
}
new = deepcopy(old)
new['spec'][0]['default'] = '$encrypted$'
@@ -205,17 +210,17 @@ class TestSurveySpecValidation:
"choices": "",
"variable": "openshift_username",
"question_name": "OpenShift Username",
- "type": "password"
+ "type": "password",
}
- ]
+ ],
}
def test_use_saved_empty_string_default(self):
- '''
+ """
        Save is allowed, and the $encrypted$ replacement is done with an empty string.
        The empty string value for the default is unencrypted,
        unlike all other password question defaults
- '''
+ """
view = JobTemplateSurveySpec()
old = {
"name": "old survey",
@@ -230,9 +235,9 @@ class TestSurveySpecValidation:
"choices": "",
"variable": "openshift_username",
"question_name": "OpenShift Username",
- "type": "password"
+ "type": "password",
}
- ]
+ ],
}
new = deepcopy(old)
new['spec'][0]['default'] = '$encrypted$'
@@ -251,12 +256,11 @@ class TestSurveySpecValidation:
"choices": "",
"variable": "openshift_username",
"question_name": "OpenShift Username",
- "type": "password"
+ "type": "password",
}
- ]
+ ],
}
-
@staticmethod
def spec_from_element(survey_item):
survey_item.setdefault('name', 'foo')
@@ -264,25 +268,23 @@ class TestSurveySpecValidation:
survey_item.setdefault('required', False)
survey_item.setdefault('question_name', 'foo')
survey_item.setdefault('type', 'text')
- spec = {
- 'name': 'test survey',
- 'description': 'foo',
- 'spec': [survey_item]
- }
+ spec = {'name': 'test survey', 'description': 'foo', 'spec': [survey_item]}
return spec
-
- @pytest.mark.parametrize("survey_item, error_text", [
- ({'type': 'password', 'default': ['some', 'invalid', 'list']}, 'expected to be string'),
- ({'type': 'password', 'default': False}, 'expected to be string'),
- ({'type': 'integer', 'default': 'foo'}, 'expected to be int'),
- ({'type': 'integer', 'default': u'🐉'}, 'expected to be int'),
- ({'type': 'foo'}, 'allowed question types'),
- ({'type': u'🐉'}, 'allowed question types'),
- ({'type': 'multiplechoice'}, 'multiplechoice must specify choices'),
- ({'type': 'integer', 'min': 'foo'}, 'min limit in survey question 0 expected to be integer'),
- ({'question_name': 42}, "'question_name' in survey question 0 expected to be string.")
- ])
+ @pytest.mark.parametrize(
+ "survey_item, error_text",
+ [
+ ({'type': 'password', 'default': ['some', 'invalid', 'list']}, 'expected to be string'),
+ ({'type': 'password', 'default': False}, 'expected to be string'),
+ ({'type': 'integer', 'default': 'foo'}, 'expected to be int'),
+ ({'type': 'integer', 'default': u'🐉'}, 'expected to be int'),
+ ({'type': 'foo'}, 'allowed question types'),
+ ({'type': u'🐉'}, 'allowed question types'),
+ ({'type': 'multiplechoice'}, 'multiplechoice must specify choices'),
+ ({'type': 'integer', 'min': 'foo'}, 'min limit in survey question 0 expected to be integer'),
+ ({'question_name': 42}, "'question_name' in survey question 0 expected to be string."),
+ ],
+ )
def test_survey_question_element_validation(self, survey_item, error_text):
spec = self.spec_from_element(survey_item)
r = JobTemplateSurveySpec._validate_spec_data(spec, {})
@@ -290,28 +292,24 @@ class TestSurveySpecValidation:
assert 'error' in r.data
assert error_text in r.data['error']
-
def test_survey_spec_non_dict_error(self):
spec = self.spec_from_element({})
spec['spec'][0] = 'foo'
r = JobTemplateSurveySpec._validate_spec_data(spec, {})
assert 'Survey question 0 is not a json object' in r.data['error']
-
def test_survey_spec_dual_names_error(self):
spec = self.spec_from_element({})
spec['spec'].append(spec['spec'][0].copy())
r = JobTemplateSurveySpec._validate_spec_data(spec, {})
assert "'variable' 'foo' duplicated in survey question 1." in r.data['error']
-
def test_survey_spec_element_missing_property(self):
spec = self.spec_from_element({})
spec['spec'][0].pop('type')
r = JobTemplateSurveySpec._validate_spec_data(spec, {})
assert "'type' missing from survey question 0" in r.data['error']
-
@pytest.mark.parametrize('_type', ['integer', 'float'])
def test_survey_spec_element_number_empty_default(self, _type):
""" Assert that empty default is allowed for answer. """
diff --git a/awx/main/tests/unit/commands/test_inventory_import.py b/awx/main/tests/unit/commands/test_inventory_import.py
index db3e01408b..96896347fd 100644
--- a/awx/main/tests/unit/commands/test_inventory_import.py
+++ b/awx/main/tests/unit/commands/test_inventory_import.py
@@ -8,14 +8,11 @@ import pytest
from django.core.management.base import CommandError
# AWX
-from awx.main.management.commands.inventory_import import (
- Command
-)
+from awx.main.management.commands.inventory_import import Command
@pytest.mark.inventory_import
class TestInvalidOptions:
-
def test_invalid_options_no_options_specified(self):
cmd = Command()
with pytest.raises(CommandError) as err:
@@ -27,9 +24,7 @@ class TestInvalidOptions:
# You can not specify both name and if of the inventory
cmd = Command()
with pytest.raises(CommandError) as err:
- cmd.handle(
- inventory_id=42, inventory_name='my-inventory'
- )
+ cmd.handle(inventory_id=42, inventory_name='my-inventory')
assert 'inventory-id' in str(err.value)
assert 'exclusive' in str(err.value)
@@ -39,4 +34,3 @@ class TestInvalidOptions:
cmd.handle(inventory_id=42)
assert '--source' in str(err.value)
assert 'required' in str(err.value)
-
diff --git a/awx/main/tests/unit/commands/test_replay_job_events.py b/awx/main/tests/unit/commands/test_replay_job_events.py
index 2dab4443ad..06c187f3e9 100644
--- a/awx/main/tests/unit/commands/test_replay_job_events.py
+++ b/awx/main/tests/unit/commands/test_replay_job_events.py
@@ -19,8 +19,7 @@ from awx.main.management.commands.replay_job_events import (
)
-class TestReplayJobEvents():
-
+class TestReplayJobEvents:
@pytest.fixture
def epoch(self):
return timezone.now()
@@ -39,14 +38,14 @@ class TestReplayJobEvents():
@pytest.fixture
def mock_serializer_fn(self):
- class MockSerializer():
+ class MockSerializer:
data = dict()
-
def fn(job_event):
serialized = MockSerializer()
serialized.data['group_name'] = 'foobar'
return serialized
+
return fn
@pytest.fixture
@@ -65,28 +64,32 @@ class TestReplayJobEvents():
replayer.run(3, 1)
assert replayer.sleep.call_count == 6
- replayer.sleep.assert_has_calls([
- mock.call(10.0),
- mock.call(10.0),
- mock.call(10.0),
- mock.call(1.0),
- mock.call(0.001),
- mock.call(0.000001),
- ])
+ replayer.sleep.assert_has_calls(
+ [
+ mock.call(10.0),
+ mock.call(10.0),
+ mock.call(10.0),
+ mock.call(1.0),
+ mock.call(0.001),
+ mock.call(0.000001),
+ ]
+ )
@mock.patch('awx.main.management.commands.replay_job_events.emit_event_detail', lambda *a, **kw: None)
def test_speed(self, mocker, replayer):
replayer.run(3, 2)
assert replayer.sleep.call_count == 6
- replayer.sleep.assert_has_calls([
- mock.call(5.0),
- mock.call(5.0),
- mock.call(5.0),
- mock.call(0.5),
- mock.call(0.0005),
- mock.call(0.0000005),
- ])
+ replayer.sleep.assert_has_calls(
+ [
+ mock.call(5.0),
+ mock.call(5.0),
+ mock.call(5.0),
+ mock.call(0.5),
+ mock.call(0.0005),
+ mock.call(0.0000005),
+ ]
+ )
# TODO: Test replay_offset()
# TODO: Test stat generation
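
The two assertions above pin down the replay pacing contract: each sleep is the recorded gap between consecutive events divided by the speed factor, so speed=2 halves every delay. A sketch of just that rule (the behavior the test asserts, not the replayer's code):

def replay_delays(gaps_seconds, speed):
    # Each recorded inter-event gap is divided by the replay speed factor.
    return [gap / speed for gap in gaps_seconds]


assert replay_delays([10.0, 1.0, 0.001, 0.000001], 2) == [5.0, 0.5, 0.0005, 0.0000005]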
diff --git a/awx/main/tests/unit/conftest.py b/awx/main/tests/unit/conftest.py
index 26b7049477..ca09d48da5 100644
--- a/awx/main/tests/unit/conftest.py
+++ b/awx/main/tests/unit/conftest.py
@@ -17,9 +17,9 @@ def _disable_database_settings(mocker):
@pytest.fixture()
def all_views():
- '''
+ """
returns a set of all views in the app
- '''
+ """
patterns = set()
url_views = set()
# Add recursive URL patterns
@@ -45,11 +45,11 @@ def all_views():
@pytest.fixture()
def dummy_log_record():
return logging.LogRecord(
- 'awx', # logger name
- 20, # loglevel INFO
- './awx/some/module.py', # pathname
- 100, # lineno
- 'User joe logged in', # msg
- tuple(), # args,
- None # exc_info
+ 'awx', # logger name
+ 20, # loglevel INFO
+ './awx/some/module.py', # pathname
+ 100, # lineno
+ 'User joe logged in', # msg
+ tuple(), # args,
+ None, # exc_info
)
diff --git a/awx/main/tests/unit/models/test_credential.py b/awx/main/tests/unit/models/test_credential.py
index bbe2e990f2..082d7df7eb 100644
--- a/awx/main/tests/unit/models/test_credential.py
+++ b/awx/main/tests/unit/models/test_credential.py
@@ -5,34 +5,11 @@ from awx.main.models import Credential, CredentialType
def test_unique_hash_with_unicode():
ct = CredentialType(name=u'Väult', kind='vault')
- cred = Credential(
- id=4,
- name=u'Iñtërnâtiônàlizætiøn',
- credential_type=ct,
- inputs={
- u'vault_id': u'🐉🐉🐉'
- },
- credential_type_id=42
- )
+ cred = Credential(id=4, name=u'Iñtërnâtiônàlizætiøn', credential_type=ct, inputs={u'vault_id': u'🐉🐉🐉'}, credential_type_id=42)
assert cred.unique_hash(display=True) == u'Väult (id=🐉🐉🐉)'
def test_custom_cred_with_empty_encrypted_field():
- ct = CredentialType(
- name='My Custom Cred',
- kind='custom',
- inputs={
- 'fields': [{
- 'id': 'some_field',
- 'label': 'My Field',
- 'secret': True
- }]
- }
- )
- cred = Credential(
- id=4,
- name='Testing 1 2 3',
- credential_type=ct,
- inputs={}
- )
+ ct = CredentialType(name='My Custom Cred', kind='custom', inputs={'fields': [{'id': 'some_field', 'label': 'My Field', 'secret': True}]})
+ cred = Credential(id=4, name='Testing 1 2 3', credential_type=ct, inputs={})
assert cred.encrypt_field('some_field', None) is None
diff --git a/awx/main/tests/unit/models/test_events.py b/awx/main/tests/unit/models/test_events.py
index 3b824da242..a38df57fff 100644
--- a/awx/main/tests/unit/models/test_events.py
+++ b/awx/main/tests/unit/models/test_events.py
@@ -2,50 +2,42 @@ from datetime import datetime
from django.utils.timezone import utc
import pytest
-from awx.main.models import (JobEvent, ProjectUpdateEvent, AdHocCommandEvent,
- InventoryUpdateEvent, SystemJobEvent)
+from awx.main.models import JobEvent, ProjectUpdateEvent, AdHocCommandEvent, InventoryUpdateEvent, SystemJobEvent
-@pytest.mark.parametrize('job_identifier, cls', [
- ['job_id', JobEvent],
- ['project_update_id', ProjectUpdateEvent],
- ['ad_hoc_command_id', AdHocCommandEvent],
- ['inventory_update_id', InventoryUpdateEvent],
- ['system_job_id', SystemJobEvent],
-])
-@pytest.mark.parametrize('created', [
- datetime(2018, 1, 1).isoformat(), datetime(2018, 1, 1)
-])
+@pytest.mark.parametrize(
+ 'job_identifier, cls',
+ [
+ ['job_id', JobEvent],
+ ['project_update_id', ProjectUpdateEvent],
+ ['ad_hoc_command_id', AdHocCommandEvent],
+ ['inventory_update_id', InventoryUpdateEvent],
+ ['system_job_id', SystemJobEvent],
+ ],
+)
+@pytest.mark.parametrize('created', [datetime(2018, 1, 1).isoformat(), datetime(2018, 1, 1)])
def test_event_parse_created(job_identifier, cls, created):
- event = cls.create_from_data(**{
- job_identifier: 123,
- 'created': created
- })
+ event = cls.create_from_data(**{job_identifier: 123, 'created': created})
assert event.created == datetime(2018, 1, 1).replace(tzinfo=utc)
-@pytest.mark.parametrize('job_identifier, cls', [
- ['job_id', JobEvent],
- ['project_update_id', ProjectUpdateEvent],
- ['ad_hoc_command_id', AdHocCommandEvent],
- ['inventory_update_id', InventoryUpdateEvent],
- ['system_job_id', SystemJobEvent],
-])
+@pytest.mark.parametrize(
+ 'job_identifier, cls',
+ [
+ ['job_id', JobEvent],
+ ['project_update_id', ProjectUpdateEvent],
+ ['ad_hoc_command_id', AdHocCommandEvent],
+ ['inventory_update_id', InventoryUpdateEvent],
+ ['system_job_id', SystemJobEvent],
+ ],
+)
def test_playbook_event_strip_invalid_keys(job_identifier, cls):
- event = cls.create_from_data(**{
- job_identifier: 123,
- 'extra_key': 'extra_value'
- })
+ event = cls.create_from_data(**{job_identifier: 123, 'extra_key': 'extra_value'})
assert getattr(event, job_identifier) == 123
assert not hasattr(event, 'extra_key')
-@pytest.mark.parametrize('field', [
- 'play', 'role', 'task', 'playbook'
-])
+@pytest.mark.parametrize('field', ['play', 'role', 'task', 'playbook'])
def test_really_long_event_fields(field):
- event = JobEvent.create_from_data(**{
- 'job_id': 123,
- 'event_data': {field: 'X' * 4096}
- })
+ event = JobEvent.create_from_data(**{'job_id': 123, 'event_data': {field: 'X' * 4096}})
assert event.event_data[field] == 'X' * 1023 + '…'
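
test_event_parse_created above stacks two parametrize decorators, so pytest runs the full cross product: five (job_identifier, cls) pairs times two created values, ten cases in all. The same stacking in miniature:

import pytest


@pytest.mark.parametrize('x', [1, 2, 3])
@pytest.mark.parametrize('y', ['a', 'b'])
def test_cross_product(x, y):
    # pytest generates all 3 * 2 = 6 combinations of (x, y)
    assert isinstance(x, int) and isinstance(y, str)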
diff --git a/awx/main/tests/unit/models/test_ha.py b/awx/main/tests/unit/models/test_ha.py
index 2534acfd15..83530f8ded 100644
--- a/awx/main/tests/unit/models/test_ha.py
+++ b/awx/main/tests/unit/models/test_ha.py
@@ -15,11 +15,11 @@ def T(impact):
def Is(param):
- '''
+ """
param:
[remaining_capacity1, remaining_capacity2, remaining_capacity3, ...]
[(jobs_running1, capacity1), (jobs_running2, capacity2), (jobs_running3, capacity3), ...]
- '''
+ """
instances = []
if isinstance(param[0], tuple):
@@ -37,13 +37,16 @@ def Is(param):
class TestInstanceGroup(object):
- @pytest.mark.parametrize('task,instances,instance_fit_index,reason', [
- (T(100), Is([100]), 0, "Only one, pick it"),
- (T(100), Is([100, 100]), 0, "Two equally good fits, pick the first"),
- (T(100), Is([50, 100]), 1, "First instance not as good as second instance"),
- (T(100), Is([50, 0, 20, 100, 100, 100, 30, 20]), 3, "Pick Instance [3] as it is the first that the task fits in."),
- (T(100), Is([50, 0, 20, 99, 11, 1, 5, 99]), None, "The task don't a fit, you must a quit!"),
- ])
+ @pytest.mark.parametrize(
+ 'task,instances,instance_fit_index,reason',
+ [
+ (T(100), Is([100]), 0, "Only one, pick it"),
+ (T(100), Is([100, 100]), 0, "Two equally good fits, pick the first"),
+ (T(100), Is([50, 100]), 1, "First instance not as good as second instance"),
+ (T(100), Is([50, 0, 20, 100, 100, 100, 30, 20]), 3, "Pick Instance [3] as it is the first that the task fits in."),
+ (T(100), Is([50, 0, 20, 99, 11, 1, 5, 99]), None, "The task don't a fit, you must a quit!"),
+ ],
+ )
def test_fit_task_to_most_remaining_capacity_instance(self, task, instances, instance_fit_index, reason):
ig = InstanceGroup(id=10)
@@ -54,13 +57,16 @@ class TestInstanceGroup(object):
else:
assert instance_picked == instances[instance_fit_index], reason
- @pytest.mark.parametrize('instances,instance_fit_index,reason', [
- (Is([(0, 100)]), 0, "One idle instance, pick it"),
- (Is([(1, 100)]), None, "One un-idle instance, pick nothing"),
- (Is([(0, 100), (0, 200), (1, 500), (0, 700)]), 3, "Pick the largest idle instance"),
- (Is([(0, 100), (0, 200), (1, 10000), (0, 700), (0, 699)]), 3, "Pick the largest idle instance"),
- (Is([(0, 0)]), None, "One idle but down instance, don't pick it"),
- ])
+ @pytest.mark.parametrize(
+ 'instances,instance_fit_index,reason',
+ [
+ (Is([(0, 100)]), 0, "One idle instance, pick it"),
+ (Is([(1, 100)]), None, "One un-idle instance, pick nothing"),
+ (Is([(0, 100), (0, 200), (1, 500), (0, 700)]), 3, "Pick the largest idle instance"),
+ (Is([(0, 100), (0, 200), (1, 10000), (0, 700), (0, 699)]), 3, "Pick the largest idle instance"),
+ (Is([(0, 0)]), None, "One idle but down instance, don't pick it"),
+ ],
+ )
def test_find_largest_idle_instance(self, instances, instance_fit_index, reason):
def filter_offline_instances(*args):
return filter(lambda i: i.capacity > 0, instances)
@@ -71,6 +77,4 @@ class TestInstanceGroup(object):
if instance_fit_index is None:
assert ig.find_largest_idle_instance(instances_online_only) is None, reason
else:
- assert ig.find_largest_idle_instance(instances_online_only) == \
- instances[instance_fit_index], reason
-
+ assert ig.find_largest_idle_instance(instances_online_only) == instances[instance_fit_index], reason
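
The T/Is fixtures reduce the placement question to numbers: a task with a given impact and a list of instances with remaining capacities. The parametrized expectations describe a first-fit scan, sketched here under the same simplification (plain ints standing in for Instance objects, not the InstanceGroup implementation):

def fit_task_to_instance(task_impact, remaining_capacities):
    # Return the index of the first instance with room for the task, else None.
    for index, remaining in enumerate(remaining_capacities):
        if remaining >= task_impact:
            return index
    return None


assert fit_task_to_instance(100, [50, 0, 20, 100, 100, 100, 30, 20]) == 3
assert fit_task_to_instance(100, [50, 0, 20, 99, 11, 1, 5, 99]) is None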
diff --git a/awx/main/tests/unit/models/test_inventory.py b/awx/main/tests/unit/models/test_inventory.py
index 26ef5e1fa9..04e20e2f03 100644
--- a/awx/main/tests/unit/models/test_inventory.py
+++ b/awx/main/tests/unit/models/test_inventory.py
@@ -25,15 +25,18 @@ def test_cancel(mocker):
iu.cancel()
parent_cancel.assert_called_with(is_chain=False, job_explanation=None)
-
+
def test__build_job_explanation():
iu = InventoryUpdate(id=3, name='I_am_an_Inventory_Update')
job_explanation = iu._build_job_explanation()
- assert job_explanation == 'Previous Task Canceled: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % \
- ('inventory_update', 'I_am_an_Inventory_Update', 3)
+ assert job_explanation == 'Previous Task Canceled: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % (
+ 'inventory_update',
+ 'I_am_an_Inventory_Update',
+ 3,
+ )
def test_valid_clean_insights_credential():
@@ -72,29 +75,27 @@ def test_invalid_kind_clean_insights_credential():
assert json.dumps(str(e.value)) == json.dumps(str([u'Assignment not allowed for Smart Inventory']))
-class TestControlledBySCM():
+class TestControlledBySCM:
def test_clean_source_path_valid(self):
- inv_src = InventorySource(source_path='/not_real/',
- source='scm')
+ inv_src = InventorySource(source_path='/not_real/', source='scm')
inv_src.clean_source_path()
- @pytest.mark.parametrize('source', [
- 'ec2',
- 'manual',
- ])
+ @pytest.mark.parametrize(
+ 'source',
+ [
+ 'ec2',
+ 'manual',
+ ],
+ )
def test_clean_source_path_invalid(self, source):
- inv_src = InventorySource(source_path='/not_real/',
- source=source)
-
+ inv_src = InventorySource(source_path='/not_real/', source=source)
+
with pytest.raises(ValidationError):
inv_src.clean_source_path()
def test_clean_update_on_launch_update_on_project_update(self):
- inv_src = InventorySource(update_on_project_update=True,
- update_on_launch=True,
- source='scm')
+ inv_src = InventorySource(update_on_project_update=True, update_on_launch=True, source='scm')
with pytest.raises(ValidationError):
inv_src.clean_update_on_launch()
-
diff --git a/awx/main/tests/unit/models/test_job_template_unit.py b/awx/main/tests/unit/models/test_job_template_unit.py
index aafae9ebc9..db7cdec4b6 100644
--- a/awx/main/tests/unit/models/test_job_template_unit.py
+++ b/awx/main/tests/unit/models/test_job_template_unit.py
@@ -7,30 +7,20 @@ from unittest import mock
def test_missing_project_error(job_template_factory):
- objects = job_template_factory(
- 'missing-project-jt',
- organization='org1',
- inventory='inventory1',
- persisted=False)
+ objects = job_template_factory('missing-project-jt', organization='org1', inventory='inventory1', persisted=False)
obj = objects.job_template
assert 'project' in obj.resources_needed_to_start
assert 'project' in obj.validation_errors
def test_inventory_need_to_start(job_template_factory):
- objects = job_template_factory(
- 'job-template-few-resources',
- project='project1',
- persisted=False)
+ objects = job_template_factory('job-template-few-resources', project='project1', persisted=False)
obj = objects.job_template
assert 'inventory' in obj.resources_needed_to_start
def test_inventory_contradictions(job_template_factory):
- objects = job_template_factory(
- 'job-template-paradox',
- project='project1',
- persisted=False)
+ objects = job_template_factory('job-template-paradox', project='project1', persisted=False)
obj = objects.job_template
obj.ask_inventory_on_launch = False
assert 'inventory' in obj.validation_errors
@@ -53,8 +43,8 @@ def test_job_template_survey_mixin(job_template_factory):
)
obj = objects.job_template
obj.survey_enabled = True
- obj.survey_spec = {'spec': [{'default':'my_default', 'type':'password', 'variable':'my_variable'}]}
- kwargs = obj._update_unified_job_kwargs({}, {'extra_vars': {'my_variable':'$encrypted$'}})
+ obj.survey_spec = {'spec': [{'default': 'my_default', 'type': 'password', 'variable': 'my_variable'}]}
+ kwargs = obj._update_unified_job_kwargs({}, {'extra_vars': {'my_variable': '$encrypted$'}})
assert kwargs['extra_vars'] == '{"my_variable": "my_default"}'
@@ -68,9 +58,10 @@ def test_job_template_survey_mixin_length(job_template_factory):
)
obj = objects.job_template
obj.survey_enabled = True
- obj.survey_spec = {'spec': [{'default':'my_default', 'type':'password', 'variable':'my_variable'},
- {'type':'password', 'variable':'my_other_variable'}]}
- kwargs = obj._update_unified_job_kwargs({}, {'extra_vars': {'my_variable':'$encrypted$'}})
+ obj.survey_spec = {
+ 'spec': [{'default': 'my_default', 'type': 'password', 'variable': 'my_variable'}, {'type': 'password', 'variable': 'my_other_variable'}]
+ }
+ kwargs = obj._update_unified_job_kwargs({}, {'extra_vars': {'my_variable': '$encrypted$'}})
assert kwargs['extra_vars'] == '{"my_variable": "my_default"}'
@@ -84,7 +75,7 @@ def test_job_template_survey_mixin_survey_runtime_has_highest_priority(job_templ
)
obj = objects.job_template
obj.survey_enabled = True
- obj.survey_spec = {'spec': [{'default':'foo', 'type':'password', 'variable':'my_variable'}]}
+ obj.survey_spec = {'spec': [{'default': 'foo', 'type': 'password', 'variable': 'my_variable'}]}
kwargs = obj._update_unified_job_kwargs({}, {'extra_vars': {'my_variable': 'bar'}})
assert kwargs['extra_vars'] == '{"my_variable": "bar"}'
diff --git a/awx/main/tests/unit/models/test_jobs.py b/awx/main/tests/unit/models/test_jobs.py
index f28691f500..d78a96305e 100644
--- a/awx/main/tests/unit/models/test_jobs.py
+++ b/awx/main/tests/unit/models/test_jobs.py
@@ -48,9 +48,14 @@ def test_start_job_fact_cache(hosts, job, inventory, tmpdir):
def test_fact_cache_with_invalid_path_traversal(job, inventory, tmpdir, mocker):
- job._get_inventory_hosts = mocker.Mock(return_value=[
- Host(name='../foo', ansible_facts={"a": 1, "b": 2},),
- ])
+ job._get_inventory_hosts = mocker.Mock(
+ return_value=[
+ Host(
+ name='../foo',
+ ansible_facts={"a": 1, "b": 2},
+ ),
+ ]
+ )
fact_cache = os.path.join(tmpdir, 'facts')
job.start_job_fact_cache(fact_cache, {}, 0)
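
test_fact_cache_with_invalid_path_traversal hands the fact cache a host literally named '../foo'; the property being protected is that a hostile host name can never write outside the cache directory. A hedged sketch of that kind of guard (illustrative, not AWX's implementation):

import os


def safe_fact_path(base_dir, host_name):
    # Resolve the candidate path and refuse anything that escapes base_dir.
    base = os.path.realpath(base_dir)
    candidate = os.path.realpath(os.path.join(base, host_name))
    if not candidate.startswith(base + os.sep):
        raise ValueError('host name escapes the fact cache directory')
    return candidate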
diff --git a/awx/main/tests/unit/models/test_label.py b/awx/main/tests/unit/models/test_label.py
index 94bd9e9d29..c9565bf55e 100644
--- a/awx/main/tests/unit/models/test_label.py
+++ b/awx/main/tests/unit/models/test_label.py
@@ -38,21 +38,22 @@ class TestLabelFilterMocked:
Label.objects.filter.assert_called_with(id=37, unifiedjob_labels__isnull=True, unifiedjobtemplate_labels__isnull=True)
mock_query_set.count.assert_called_with()
- @pytest.mark.parametrize("jt_count,j_count,expected", [
- (1, 0, True),
- (0, 1, True),
- (1, 1, False),
- ])
+ @pytest.mark.parametrize(
+ "jt_count,j_count,expected",
+ [
+ (1, 0, True),
+ (0, 1, True),
+ (1, 1, False),
+ ],
+ )
def test_is_candidate_for_detach(self, mocker, jt_count, j_count, expected):
mock_job_qs = mocker.MagicMock()
mock_job_qs.count = mocker.MagicMock(return_value=j_count)
- mocker.patch.object(UnifiedJob, 'objects', mocker.MagicMock(
- filter=mocker.MagicMock(return_value=mock_job_qs)))
+ mocker.patch.object(UnifiedJob, 'objects', mocker.MagicMock(filter=mocker.MagicMock(return_value=mock_job_qs)))
mock_jt_qs = mocker.MagicMock()
mock_jt_qs.count = mocker.MagicMock(return_value=jt_count)
- mocker.patch.object(UnifiedJobTemplate, 'objects', mocker.MagicMock(
- filter=mocker.MagicMock(return_value=mock_jt_qs)))
+ mocker.patch.object(UnifiedJobTemplate, 'objects', mocker.MagicMock(filter=mocker.MagicMock(return_value=mock_jt_qs)))
label = Label(id=37)
ret = label.is_candidate_for_detach()
diff --git a/awx/main/tests/unit/models/test_project.py b/awx/main/tests/unit/models/test_project.py
index 82e34339f8..ee4e86c08b 100644
--- a/awx/main/tests/unit/models/test_project.py
+++ b/awx/main/tests/unit/models/test_project.py
@@ -12,4 +12,3 @@ def test_clean_credential_insights():
proj.clean_credential()
assert json.dumps(str(e.value)) == json.dumps(str([u'Insights Credential is required for an Insights Project.']))
-
diff --git a/awx/main/tests/unit/models/test_survey_models.py b/awx/main/tests/unit/models/test_survey_models.py
index 6148e06e0c..81aa74d911 100644
--- a/awx/main/tests/unit/models/test_survey_models.py
+++ b/awx/main/tests/unit/models/test_survey_models.py
@@ -4,20 +4,12 @@ import pytest
from itertools import count
from awx.main.utils.encryption import encrypt_value
-from awx.main.models import (
- Job,
- JobTemplate,
- JobLaunchConfig,
- WorkflowJobTemplate,
- Project,
- Inventory
-)
+from awx.main.models import Job, JobTemplate, JobLaunchConfig, WorkflowJobTemplate, Project, Inventory
ENCRYPTED_SECRET = encrypt_value('secret')
class DistinctParametrize(object):
-
def __init__(self):
self._gen = count(0)
@@ -27,12 +19,8 @@ class DistinctParametrize(object):
@pytest.mark.survey
class SurveyVariableValidation:
-
def test_survey_answers_as_string(self, job_template_factory):
- objects = job_template_factory(
- 'job-template-with-survey',
- survey=[{'variable': 'var1', 'type': 'text'}],
- persisted=False)
+ objects = job_template_factory('job-template-with-survey', survey=[{'variable': 'var1', 'type': 'text'}], persisted=False)
jt = objects.job_template
user_extra_vars = json.dumps({'var1': 'asdf'})
accepted, ignored, errors = jt._accept_or_ignore_job_kwargs(extra_vars=user_extra_vars)
@@ -60,10 +48,10 @@ class SurveyVariableValidation:
"choices": "",
"variable": "a",
"question_name": "Whosyourdaddy",
- "type": "text"
+ "type": "text",
}
],
- "name": ""
+ "name": "",
}
obj.survey_enabled = True
accepted, rejected, errors = obj.accept_or_ignore_variables({"a": 5})
@@ -74,17 +62,23 @@ class SurveyVariableValidation:
@pytest.fixture
def job(mocker):
- ret = mocker.MagicMock(**{
- 'decrypted_extra_vars.return_value': '{\"secret_key\": \"my_password\"}',
- 'display_extra_vars.return_value': '{\"secret_key\": \"$encrypted$\"}',
- 'extra_vars_dict': {"secret_key": "my_password"},
- 'pk': 1, 'job_template.pk': 1, 'job_template.name': '',
- 'created_by.pk': 1, 'created_by.username': 'admin',
- 'launch_type': 'manual',
- 'verbosity': 1,
- 'awx_meta_vars.return_value': {},
- 'ansible_virtualenv_path': '',
- 'inventory.get_script_data.return_value': {}})
+ ret = mocker.MagicMock(
+ **{
+ 'decrypted_extra_vars.return_value': '{\"secret_key\": \"my_password\"}',
+ 'display_extra_vars.return_value': '{\"secret_key\": \"$encrypted$\"}',
+ 'extra_vars_dict': {"secret_key": "my_password"},
+ 'pk': 1,
+ 'job_template.pk': 1,
+ 'job_template.name': '',
+ 'created_by.pk': 1,
+ 'created_by.username': 'admin',
+ 'launch_type': 'manual',
+ 'verbosity': 1,
+ 'awx_meta_vars.return_value': {},
+ 'ansible_virtualenv_path': '',
+ 'inventory.get_script_data.return_value': {},
+ }
+ )
ret.project = mocker.MagicMock(scm_revision='asdf1234')
return ret
@@ -93,23 +87,16 @@ def job(mocker):
def job_with_survey():
return Job(
name="test-job-with-passwords",
- extra_vars=json.dumps({
- 'submitter_email': 'foobar@redhat.com',
- 'secret_key': '6kQngg3h8lgiSTvIEb21',
- 'SSN': '123-45-6789'}),
- survey_passwords={
- 'secret_key': '$encrypted$',
- 'SSN': '$encrypted$'})
+ extra_vars=json.dumps({'submitter_email': 'foobar@redhat.com', 'secret_key': '6kQngg3h8lgiSTvIEb21', 'SSN': '123-45-6789'}),
+ survey_passwords={'secret_key': '$encrypted$', 'SSN': '$encrypted$'},
+ )
@pytest.mark.survey
def test_job_survey_password_redaction(job_with_survey):
"""Tests the Job model's funciton to redact passwords from
extra_vars - used when displaying job information"""
- assert json.loads(job_with_survey.display_extra_vars()) == {
- 'submitter_email': 'foobar@redhat.com',
- 'secret_key': '$encrypted$',
- 'SSN': '$encrypted$'}
+ assert json.loads(job_with_survey.display_extra_vars()) == {'submitter_email': 'foobar@redhat.com', 'secret_key': '$encrypted$', 'SSN': '$encrypted$'}
@pytest.mark.survey
@@ -118,34 +105,19 @@ def test_survey_passwords_not_in_extra_vars():
not included when displaying job information"""
job = Job(
name="test-survey-not-in",
- extra_vars=json.dumps({
- 'submitter_email': 'foobar@redhat.com'}),
- survey_passwords={
- 'secret_key': '$encrypted$',
- 'SSN': '$encrypted$'})
+ extra_vars=json.dumps({'submitter_email': 'foobar@redhat.com'}),
+ survey_passwords={'secret_key': '$encrypted$', 'SSN': '$encrypted$'},
+ )
assert json.loads(job.display_extra_vars()) == {
'submitter_email': 'foobar@redhat.com',
}
def test_launch_config_has_unprompted_vars(survey_spec_factory):
- jt = JobTemplate(
- survey_enabled = True,
- survey_spec = survey_spec_factory(['question1', 'question2'])
- )
- unprompted_config = JobLaunchConfig(
- extra_data = {
- 'question1': 'foobar',
- 'question4': 'foobar'
- }
- )
+ jt = JobTemplate(survey_enabled=True, survey_spec=survey_spec_factory(['question1', 'question2']))
+ unprompted_config = JobLaunchConfig(extra_data={'question1': 'foobar', 'question4': 'foobar'})
assert unprompted_config.has_unprompted(jt)
- allowed_config = JobLaunchConfig(
- extra_data = {
- 'question1': 'foobar',
- 'question2': 'foobar'
- }
- )
+ allowed_config = JobLaunchConfig(extra_data={'question1': 'foobar', 'question2': 'foobar'})
assert not allowed_config.has_unprompted(jt)
@@ -172,32 +144,36 @@ def test_display_survey_spec_encrypts_default(survey_spec_factory):
@pytest.mark.survey
-@pytest.mark.parametrize("question_type,default,min,max,expect_use,expect_value", [
- ("text", "", 0, 0, True, ''), # default used
- ("text", "", 1, 0, False, 'N/A'), # value less than min length
- ("password", "", 1, 0, False, 'N/A'), # passwords behave the same as text
- ("multiplechoice", "", 0, 0, False, 'N/A'), # historical bug
- ("multiplechoice", "zeb", 0, 0, False, 'N/A'), # zeb not in choices
- ("multiplechoice", "coffee", 0, 0, True, 'coffee'),
- ("multiselect", None, 0, 0, False, 'N/A'), # NOTE: Behavior is arguable, value of [] may be prefered
- ("multiselect", "", 0, 0, False, 'N/A'),
- ("multiselect", ["zeb"], 0, 0, False, 'N/A'),
- ("multiselect", ["milk"], 0, 0, True, ["milk"]),
- ("multiselect", ["orange\nmilk"], 0, 0, False, 'N/A'), # historical bug
-])
-def test_optional_survey_question_defaults(
- survey_spec_factory, question_type, default, min, max, expect_use, expect_value):
- spec = survey_spec_factory([
- {
- "required": False,
- "default": default,
- "choices": "orange\nmilk\nchocolate\ncoffee",
- "variable": "c",
- "min": min,
- "max": max,
- "type": question_type
- },
- ])
+@pytest.mark.parametrize(
+ "question_type,default,min,max,expect_use,expect_value",
+ [
+ ("text", "", 0, 0, True, ''), # default used
+ ("text", "", 1, 0, False, 'N/A'), # value less than min length
+ ("password", "", 1, 0, False, 'N/A'), # passwords behave the same as text
+ ("multiplechoice", "", 0, 0, False, 'N/A'), # historical bug
+ ("multiplechoice", "zeb", 0, 0, False, 'N/A'), # zeb not in choices
+ ("multiplechoice", "coffee", 0, 0, True, 'coffee'),
+ ("multiselect", None, 0, 0, False, 'N/A'), # NOTE: Behavior is arguable, value of [] may be prefered
+ ("multiselect", "", 0, 0, False, 'N/A'),
+ ("multiselect", ["zeb"], 0, 0, False, 'N/A'),
+ ("multiselect", ["milk"], 0, 0, True, ["milk"]),
+ ("multiselect", ["orange\nmilk"], 0, 0, False, 'N/A'), # historical bug
+ ],
+)
+def test_optional_survey_question_defaults(survey_spec_factory, question_type, default, min, max, expect_use, expect_value):
+ spec = survey_spec_factory(
+ [
+ {
+ "required": False,
+ "default": default,
+ "choices": "orange\nmilk\nchocolate\ncoffee",
+ "variable": "c",
+ "min": min,
+ "max": max,
+ "type": question_type,
+ },
+ ]
+ )
jt = JobTemplate(name="test-jt", survey_spec=spec, survey_enabled=True)
defaulted_extra_vars = jt._update_unified_job_kwargs({}, {})
element = spec['spec'][0]
@@ -212,36 +188,36 @@ def test_optional_survey_question_defaults(
@pytest.mark.survey
-@pytest.mark.parametrize("question_type,default,maxlen,kwargs,expected", [
- ('text', None, 5, {}, {}),
- ('text', '', 5, {}, {'x': ''}),
- ('text', 'y', 5, {}, {'x': 'y'}),
- ('text', 'too-long', 5, {}, {}),
- ('password', None, 5, {}, {}),
- ('password', '', 5, {}, {'x': ''}),
- ('password', ENCRYPTED_SECRET, 5, {}, {}), # len(secret) == 6, invalid
- ('password', ENCRYPTED_SECRET, 10, {}, {'x': ENCRYPTED_SECRET}), # len(secret) < 10, valid
- ('password', None, 5, {'extra_vars': {'x': '$encrypted$'}}, {}),
- ('password', '', 5, {'extra_vars': {'x': '$encrypted$'}}, {'x': ''}),
- ('password', None, 5, {'extra_vars': {'x': 'y'}}, {'x': 'y'}),
- ('password', '', 5, {'extra_vars': {'x': 'y'}}, {'x': 'y'}),
- ('password', 'foo', 5, {'extra_vars': {'x': 'y'}}, {'x': 'y'}),
- ('password', None, 5, {'extra_vars': {'x': ''}}, {'x': ''}),
- ('password', '', 5, {'extra_vars': {'x': ''}}, {'x': ''}),
- ('password', 'foo', 5, {'extra_vars': {'x': ''}}, {'x': ''}),
- ('password', ENCRYPTED_SECRET, 5, {'extra_vars': {'x': '$encrypted$'}}, {}),
- ('password', ENCRYPTED_SECRET, 10, {'extra_vars': {'x': '$encrypted$'}}, {'x': ENCRYPTED_SECRET}),
-], ids=DistinctParametrize())
+@pytest.mark.parametrize(
+ "question_type,default,maxlen,kwargs,expected",
+ [
+ ('text', None, 5, {}, {}),
+ ('text', '', 5, {}, {'x': ''}),
+ ('text', 'y', 5, {}, {'x': 'y'}),
+ ('text', 'too-long', 5, {}, {}),
+ ('password', None, 5, {}, {}),
+ ('password', '', 5, {}, {'x': ''}),
+ ('password', ENCRYPTED_SECRET, 5, {}, {}), # len(secret) == 6, invalid
+ ('password', ENCRYPTED_SECRET, 10, {}, {'x': ENCRYPTED_SECRET}), # len(secret) < 10, valid
+ ('password', None, 5, {'extra_vars': {'x': '$encrypted$'}}, {}),
+ ('password', '', 5, {'extra_vars': {'x': '$encrypted$'}}, {'x': ''}),
+ ('password', None, 5, {'extra_vars': {'x': 'y'}}, {'x': 'y'}),
+ ('password', '', 5, {'extra_vars': {'x': 'y'}}, {'x': 'y'}),
+ ('password', 'foo', 5, {'extra_vars': {'x': 'y'}}, {'x': 'y'}),
+ ('password', None, 5, {'extra_vars': {'x': ''}}, {'x': ''}),
+ ('password', '', 5, {'extra_vars': {'x': ''}}, {'x': ''}),
+ ('password', 'foo', 5, {'extra_vars': {'x': ''}}, {'x': ''}),
+ ('password', ENCRYPTED_SECRET, 5, {'extra_vars': {'x': '$encrypted$'}}, {}),
+ ('password', ENCRYPTED_SECRET, 10, {'extra_vars': {'x': '$encrypted$'}}, {'x': ENCRYPTED_SECRET}),
+ ],
+ ids=DistinctParametrize(),
+)
def test_survey_encryption_defaults(survey_spec_factory, question_type, default, maxlen, kwargs, expected):
- spec = survey_spec_factory([
- {
- "required": True,
- "variable": "x",
- "min": 0,
- "max": maxlen,
- "type": question_type
- },
- ])
+ spec = survey_spec_factory(
+ [
+ {"required": True, "variable": "x", "min": 0, "max": maxlen, "type": question_type},
+ ]
+ )
if default is not None:
spec['spec'][0]['default'] = default
else:
@@ -258,12 +234,7 @@ class TestWorkflowSurveys:
spec = survey_spec_factory('var1')
spec['spec'][0]['default'] = 3
spec['spec'][0]['required'] = False
- wfjt = WorkflowJobTemplate(
- name="test-wfjt",
- survey_spec=spec,
- survey_enabled=True,
- extra_vars="var1: 5"
- )
+ wfjt = WorkflowJobTemplate(name="test-wfjt", survey_spec=spec, survey_enabled=True, extra_vars="var1: 5")
updated_extra_vars = wfjt._update_unified_job_kwargs({}, {})
assert 'extra_vars' in updated_extra_vars
assert json.loads(updated_extra_vars['extra_vars'])['var1'] == 3
@@ -275,29 +246,25 @@ class TestWorkflowSurveys:
spec['spec'][0]['required'] = False
spec['spec'][1]['required'] = True
spec['spec'][2]['required'] = False
- wfjt = WorkflowJobTemplate(
- name="test-wfjt",
- survey_spec=spec,
- survey_enabled=True,
- extra_vars="question2: hiworld"
- )
+ wfjt = WorkflowJobTemplate(name="test-wfjt", survey_spec=spec, survey_enabled=True, extra_vars="question2: hiworld")
assert wfjt.variables_needed_to_start == ['question2']
assert not wfjt.can_start_without_user_input()
@pytest.mark.django_db
-@pytest.mark.parametrize('provided_vars,valid', [
- ({'tmpl_var': 'bar'}, True), # same as template, not counted as prompts
- ({'tmpl_var': 'bar2'}, False), # different value from template, not okay
- ({'tmpl_var': 'bar', 'a': 2}, False), # extra key, not okay
- ({'tmpl_var': 'bar', False: 2}, False), # Falsy key
- ({'tmpl_var': 'bar', u'🐉': u'🐉'}, False), # dragons
-])
+@pytest.mark.parametrize(
+ 'provided_vars,valid',
+ [
+ ({'tmpl_var': 'bar'}, True), # same as template, not counted as prompts
+ ({'tmpl_var': 'bar2'}, False), # different value from template, not okay
+ ({'tmpl_var': 'bar', 'a': 2}, False), # extra key, not okay
+ ({'tmpl_var': 'bar', False: 2}, False), # Falsy key
+ ({'tmpl_var': 'bar', u'🐉': u'🐉'}, False), # dragons
+ ],
+)
class TestExtraVarsNoPrompt:
def process_vars_and_assert(self, tmpl, provided_vars, valid):
- prompted_fields, ignored_fields, errors = tmpl._accept_or_ignore_job_kwargs(
- extra_vars=provided_vars
- )
+ prompted_fields, ignored_fields, errors = tmpl._accept_or_ignore_job_kwargs(extra_vars=provided_vars)
if valid:
assert not ignored_fields
assert not errors
@@ -307,25 +274,12 @@ class TestExtraVarsNoPrompt:
def test_jt_extra_vars_counting(self, provided_vars, valid):
jt = JobTemplate(
- name='foo',
- extra_vars={'tmpl_var': 'bar'},
- project=Project(),
- project_id=42,
- playbook='helloworld.yml',
- inventory=Inventory(),
- inventory_id=42
- )
- prompted_fields, ignored_fields, errors = jt._accept_or_ignore_job_kwargs(
- extra_vars=provided_vars
+ name='foo', extra_vars={'tmpl_var': 'bar'}, project=Project(), project_id=42, playbook='helloworld.yml', inventory=Inventory(), inventory_id=42
)
+ prompted_fields, ignored_fields, errors = jt._accept_or_ignore_job_kwargs(extra_vars=provided_vars)
self.process_vars_and_assert(jt, provided_vars, valid)
def test_wfjt_extra_vars_counting(self, provided_vars, valid):
- wfjt = WorkflowJobTemplate(
- name='foo',
- extra_vars={'tmpl_var': 'bar'}
- )
- prompted_fields, ignored_fields, errors = wfjt._accept_or_ignore_job_kwargs(
- extra_vars=provided_vars
- )
+ wfjt = WorkflowJobTemplate(name='foo', extra_vars={'tmpl_var': 'bar'})
+ prompted_fields, ignored_fields, errors = wfjt._accept_or_ignore_job_kwargs(extra_vars=provided_vars)
self.process_vars_and_assert(wfjt, provided_vars, valid)
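
The reflowed job fixture in this file configures one MagicMock through dotted keyword names, where a key such as 'decrypted_extra_vars.return_value' sets up an attribute of a child mock in the same call. A standalone sketch of that stdlib behavior (the attribute names here are illustrative):

    from unittest import mock

    job = mock.MagicMock(
        **{
            'decrypted_extra_vars.return_value': '{"secret_key": "my_password"}',
            'created_by.username': 'admin',
        }
    )

    # The dotted keys configured the nested mocks in one call:
    assert job.decrypted_extra_vars() == '{"secret_key": "my_password"}'
    assert job.created_by.username == 'admin'
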
diff --git a/awx/main/tests/unit/models/test_system_jobs.py b/awx/main/tests/unit/models/test_system_jobs.py
index 2ed9204adb..5e2936d161 100644
--- a/awx/main/tests/unit/models/test_system_jobs.py
+++ b/awx/main/tests/unit/models/test_system_jobs.py
@@ -3,49 +3,48 @@ import pytest
from awx.main.models import SystemJobTemplate
-@pytest.mark.parametrize("extra_data", [
- '{ "days": 1 }',
- '{ "days": 100 }',
- '{ "days": 0 }',
- {"days": 0},
- {"days": 1},
- {"days": 13435},
-])
+@pytest.mark.parametrize(
+ "extra_data",
+ [
+ '{ "days": 1 }',
+ '{ "days": 100 }',
+ '{ "days": 0 }',
+ {"days": 0},
+ {"days": 1},
+ {"days": 13435},
+ ],
+)
def test_valid__clean_extra_data_system_jobs(extra_data):
- accepted, rejected, errors = SystemJobTemplate(
- job_type='cleanup_jobs'
- ).accept_or_ignore_variables(extra_data)
+ accepted, rejected, errors = SystemJobTemplate(job_type='cleanup_jobs').accept_or_ignore_variables(extra_data)
assert not rejected
assert not errors
-@pytest.mark.parametrize("extra_data", [
- '{ "days": 1.2 }',
- '{ "days": -1.2 }',
- '{ "days": -111 }',
- '{ "days": "-111" }',
- '{ "days": false }',
- '{ "days": "foobar" }',
- {"days": 1.2},
- {"days": -1.2},
- {"days": -111},
- {"days": "-111"},
- {"days": False},
- {"days": "foobar"},
-])
+@pytest.mark.parametrize(
+ "extra_data",
+ [
+ '{ "days": 1.2 }',
+ '{ "days": -1.2 }',
+ '{ "days": -111 }',
+ '{ "days": "-111" }',
+ '{ "days": false }',
+ '{ "days": "foobar" }',
+ {"days": 1.2},
+ {"days": -1.2},
+ {"days": -111},
+ {"days": "-111"},
+ {"days": False},
+ {"days": "foobar"},
+ ],
+)
def test_invalid__extra_data_system_jobs(extra_data):
- accepted, rejected, errors = SystemJobTemplate(
- job_type='cleanup_jobs'
- ).accept_or_ignore_variables(extra_data)
+ accepted, rejected, errors = SystemJobTemplate(job_type='cleanup_jobs').accept_or_ignore_variables(extra_data)
assert str(errors['extra_vars'][0]) == u'days must be a positive integer.'
def test_unallowed_system_job_data():
sjt = SystemJobTemplate(job_type='cleanup_jobs')
- accepted, ignored, errors = sjt.accept_or_ignore_variables({
- 'days': 34,
- 'foobar': 'baz'
- })
+ accepted, ignored, errors = sjt.accept_or_ignore_variables({'days': 34, 'foobar': 'baz'})
assert 'foobar' in ignored
assert 'days' in accepted
@@ -59,11 +58,7 @@ def test_reject_other_prommpts():
def test_reject_some_accept_some():
sjt = SystemJobTemplate(job_type='cleanup_jobs')
- accepted, ignored, errors = sjt._accept_or_ignore_job_kwargs(limit="", extra_vars={
- 'days': 34,
- 'foobar': 'baz'
- })
+ accepted, ignored, errors = sjt._accept_or_ignore_job_kwargs(limit="", extra_vars={'days': 34, 'foobar': 'baz'})
assert accepted == {"extra_vars": {"days": 34}}
assert ignored == {"limit": "", "extra_vars": {"foobar": "baz"}}
assert 'not allowed on launch' in errors['limit'][0]
-
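
Most of the churn in these test modules is @pytest.mark.parametrize being reflowed: once the decorator exceeds the line length, black puts the argument-name string, the case list, and any keyword such as ids= on separate lines, each ending in a trailing comma. A small self-contained example of the resulting shape (the test and its cases are illustrative, not from this repo):

    import pytest


    @pytest.mark.parametrize(
        "extra_data,valid",
        [
            ('{ "days": 1 }', True),
            ('{ "days": -1 }', False),
        ],
    )
    def test_days_validity(extra_data, valid):
        assert ('-' not in extra_data) is valid
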
diff --git a/awx/main/tests/unit/models/test_unified_job_unit.py b/awx/main/tests/unit/models/test_unified_job_unit.py
index a3f9123f37..c149953dc1 100644
--- a/awx/main/tests/unit/models/test_unified_job_unit.py
+++ b/awx/main/tests/unit/models/test_unified_job_unit.py
@@ -1,18 +1,7 @@
import pytest
from unittest import mock
-from awx.main.models import (
- UnifiedJob,
- UnifiedJobTemplate,
- WorkflowJob,
- WorkflowJobNode,
- WorkflowApprovalTemplate,
- Job,
- User,
- Project,
- JobTemplate,
- Inventory
-)
+from awx.main.models import UnifiedJob, UnifiedJobTemplate, WorkflowJob, WorkflowJobNode, WorkflowApprovalTemplate, Job, User, Project, JobTemplate, Inventory
def test_incorrectly_formatted_variables():
@@ -67,9 +56,9 @@ def test_cancel_job_explanation(unified_job):
def test_organization_copy_to_jobs():
- '''
+ """
All unified job types should infer their organization from their template organization
- '''
+ """
for cls in UnifiedJobTemplate.__subclasses__():
if cls is WorkflowApprovalTemplate:
continue # these do not track organization
@@ -77,9 +66,9 @@ def test_organization_copy_to_jobs():
def test_log_representation():
- '''
+ """
Common representation used inside of log messages
- '''
+ """
uj = UnifiedJob(status='running', id=4)
job = Job(status='running', id=4)
assert job.log_format == 'job 4 (running)'
@@ -87,20 +76,14 @@ def test_log_representation():
class TestMetaVars:
- '''
+ """
Corresponding functional test exists for cases with indirect relationships
- '''
+ """
def test_job_metavars(self):
maker = User(username='joe', pk=47, id=47)
inv = Inventory(name='example-inv', id=45)
- assert Job(
- name='fake-job',
- pk=42, id=42,
- launch_type='manual',
- created_by=maker,
- inventory=inv
- ).awx_meta_vars() == {
+ assert Job(name='fake-job', pk=42, id=42, launch_type='manual', created_by=maker, inventory=inv).awx_meta_vars() == {
'tower_job_id': 42,
'awx_job_id': 42,
'tower_job_launch_type': 'manual',
@@ -118,22 +101,17 @@ class TestMetaVars:
'tower_inventory_id': 45,
'awx_inventory_id': 45,
'tower_inventory_name': 'example-inv',
- 'awx_inventory_name': 'example-inv'
+ 'awx_inventory_name': 'example-inv',
}
def test_project_update_metavars(self):
data = Job(
name='fake-job',
- pk=40, id=40,
+ pk=40,
+ id=40,
launch_type='manual',
- project=Project(
- name='jobs-sync',
- scm_revision='12345444'
- ),
- job_template=JobTemplate(
- name='jobs-jt',
- id=92, pk=92
- )
+ project=Project(name='jobs-sync', scm_revision='12345444'),
+ job_template=JobTemplate(name='jobs-jt', id=92, pk=92),
).awx_meta_vars()
assert data['awx_project_revision'] == '12345444'
assert 'tower_job_template_id' in data
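
The docstring churn in this file (''' rewritten to """) is black normalizing string quoting to the PEP 257-preferred form; the text itself is untouched. Sketch:

    # Before: single-quoted docstring
    def log_format():
        '''Common representation used inside of log messages'''

    # After black: double-quoted, content unchanged
    def log_format():
        """Common representation used inside of log messages"""
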
diff --git a/awx/main/tests/unit/models/test_workflow_unit.py b/awx/main/tests/unit/models/test_workflow_unit.py
index 83ce1a58b1..f8bb1e9c84 100644
--- a/awx/main/tests/unit/models/test_workflow_unit.py
+++ b/awx/main/tests/unit/models/test_workflow_unit.py
@@ -2,26 +2,18 @@ import pytest
from awx.main.models.jobs import JobTemplate
from awx.main.models import Inventory, CredentialType, Credential, Project
-from awx.main.models.workflow import (
- WorkflowJobTemplate, WorkflowJobTemplateNode,
- WorkflowJob, WorkflowJobNode
-)
+from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobTemplateNode, WorkflowJob, WorkflowJobNode
from unittest import mock
@pytest.fixture
def credential():
ssh_type = CredentialType.defaults['ssh']()
- return Credential(
- id=43,
- name='example-cred',
- credential_type=ssh_type,
- inputs={'username': 'asdf', 'password': 'asdf'}
- )
+ return Credential(id=43, name='example-cred', credential_type=ssh_type, inputs={'username': 'asdf', 'password': 'asdf'})
-class TestWorkflowJobInheritNodesMixin():
- class TestCreateWorkflowJobNodes():
+class TestWorkflowJobInheritNodesMixin:
+ class TestCreateWorkflowJobNodes:
@pytest.fixture
def job_templates(self):
return [JobTemplate() for i in range(0, 10)]
@@ -39,7 +31,7 @@ class TestWorkflowJobInheritNodesMixin():
for job_template_node in job_template_nodes:
workflow_job_node_create.assert_any_call(workflow_job=workflow_job)
- class TestMapWorkflowJobNodes():
+ class TestMapWorkflowJobNodes:
@pytest.fixture
def job_template_nodes(self):
return [WorkflowJobTemplateNode(id=i) for i in range(0, 20)]
@@ -51,8 +43,7 @@ class TestWorkflowJobInheritNodesMixin():
def test__map_workflow_job_nodes(self, job_template_nodes, job_nodes, mocker):
mixin = WorkflowJob()
wj_node = WorkflowJobNode()
- mocker.patch('awx.main.models.workflow.WorkflowJobTemplateNode.create_workflow_job_node',
- return_value=wj_node)
+ mocker.patch('awx.main.models.workflow.WorkflowJobTemplateNode.create_workflow_job_node', return_value=wj_node)
node_ids_map = mixin._create_workflow_nodes(job_template_nodes, user=None)
assert len(node_ids_map) == len(job_template_nodes)
@@ -60,14 +51,13 @@ class TestWorkflowJobInheritNodesMixin():
for i, job_template_node in enumerate(job_template_nodes):
assert node_ids_map[job_template_node.id] == wj_node
- class TestInheritRelationship():
+ class TestInheritRelationship:
@pytest.fixture
def job_template_nodes(self, mocker):
nodes = [mocker.MagicMock(id=i, pk=i) for i in range(0, 10)]
for i in range(0, 9):
- nodes[i].success_nodes = mocker.MagicMock(
- all=mocker.MagicMock(return_value=[mocker.MagicMock(id=i + 1, pk=i + 1)]))
+ nodes[i].success_nodes = mocker.MagicMock(all=mocker.MagicMock(return_value=[mocker.MagicMock(id=i + 1, pk=i + 1)]))
nodes[i].always_nodes = mocker.MagicMock(all=mocker.MagicMock(return_value=[]))
nodes[i].failure_nodes = mocker.MagicMock(all=mocker.MagicMock(return_value=[]))
new_wj_node = mocker.MagicMock(success_nodes=mocker.MagicMock())
@@ -87,7 +77,6 @@ class TestWorkflowJobInheritNodesMixin():
_map[n.id] = n
return _map
-
def test__inherit_relationship(self, mocker, job_template_nodes, job_nodes, job_nodes_dict):
wj = WorkflowJob()
@@ -144,10 +133,7 @@ def job_node_with_prompts(job_node_no_prompts, mocker):
@pytest.fixture
def wfjt_node_no_prompts(workflow_job_template_unit, jt_ask):
- node = WorkflowJobTemplateNode(
- workflow_job_template=workflow_job_template_unit,
- unified_job_template=jt_ask
- )
+ node = WorkflowJobTemplateNode(workflow_job_template=workflow_job_template_unit, unified_job_template=jt_ask)
return node
@@ -178,14 +164,13 @@ class TestWorkflowJobCreate:
inventory=None,
unified_job_template=wfjt_node_no_prompts.unified_job_template,
workflow_job=workflow_job_unit,
- identifier=mocker.ANY)
+ identifier=mocker.ANY,
+ )
def test_create_with_prompts(self, wfjt_node_with_prompts, workflow_job_unit, credential, mocker):
mock_create = mocker.MagicMock()
with mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create):
- wfjt_node_with_prompts.create_workflow_job_node(
- workflow_job=workflow_job_unit
- )
+ wfjt_node_with_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
mock_create.assert_called_once_with(
all_parents_must_converge=False,
extra_data={},
@@ -194,7 +179,8 @@ class TestWorkflowJobCreate:
inventory=wfjt_node_with_prompts.inventory,
unified_job_template=wfjt_node_with_prompts.unified_job_template,
workflow_job=workflow_job_unit,
- identifier=mocker.ANY)
+ identifier=mocker.ANY,
+ )
@mock.patch('awx.main.models.workflow.WorkflowNodeBase.get_parent_nodes', lambda self: [])
@@ -203,6 +189,7 @@ class TestWorkflowJobNodeJobKWARGS:
Tests for building the keyword arguments that go into creating and
launching a new job that corresponds to a workflow node.
"""
+
kwargs_base = {'_eager_fields': {'launch_type': 'workflow'}}
def test_null_kwargs(self, job_node_no_prompts):
@@ -212,14 +199,11 @@ class TestWorkflowJobNodeJobKWARGS:
job_node_no_prompts.extra_data = {"b": 98}
workflow_job = job_node_no_prompts.workflow_job
workflow_job.extra_vars = '{"a": 84}'
- assert job_node_no_prompts.get_job_kwargs() == dict(
- extra_vars={'a': 84, 'b': 98}, **self.kwargs_base)
+ assert job_node_no_prompts.get_job_kwargs() == dict(extra_vars={'a': 84, 'b': 98}, **self.kwargs_base)
def test_char_prompts_and_res_node_prompts(self, job_node_with_prompts):
# TBD: properly handle multicred credential assignment
- expect_kwargs = dict(
- inventory=job_node_with_prompts.inventory,
- **example_prompts)
+ expect_kwargs = dict(inventory=job_node_with_prompts.inventory, **example_prompts)
expect_kwargs.update(self.kwargs_base)
assert job_node_with_prompts.get_job_kwargs() == expect_kwargs
@@ -227,8 +211,7 @@ class TestWorkflowJobNodeJobKWARGS:
# TBD: properly handle multicred credential assignment
job_node_with_prompts.unified_job_template.ask_inventory_on_launch = False
job_node_with_prompts.unified_job_template.ask_job_type_on_launch = False
- expect_kwargs = dict(inventory=job_node_with_prompts.inventory,
- **example_prompts)
+ expect_kwargs = dict(inventory=job_node_with_prompts.inventory, **example_prompts)
expect_kwargs.update(self.kwargs_base)
expect_kwargs.pop('inventory')
expect_kwargs.pop('job_type')
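
The collapsed assertions above build their expected kwargs by splatting a shared base mapping into dict(); the one-liner black produces is behaviorally identical to the old multi-line form. A quick sketch of the merge idiom:

    kwargs_base = {'_eager_fields': {'launch_type': 'workflow'}}

    # Keyword arguments and the splatted base dict merge into one mapping
    expect_kwargs = dict(extra_vars={'a': 84, 'b': 98}, **kwargs_base)
    assert expect_kwargs == {
        'extra_vars': {'a': 84, 'b': 98},
        '_eager_fields': {'launch_type': 'workflow'},
    }
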
diff --git a/awx/main/tests/unit/notifications/test_grafana.py b/awx/main/tests/unit/notifications/test_grafana.py
index e243238b84..ccf4241dbb 100644
--- a/awx/main/tests/unit/notifications/test_grafana.py
+++ b/awx/main/tests/unit/notifications/test_grafana.py
@@ -8,106 +8,178 @@ import awx.main.notifications.grafana_backend as grafana_backend
def test_send_messages():
with mock.patch('awx.main.notifications.grafana_backend.requests') as requests_mock:
requests_mock.post.return_value.status_code = 200
- m={}
+ m = {}
m['started'] = dt.datetime.utcfromtimestamp(60).isoformat()
m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat()
m['subject'] = "test subject"
backend = grafana_backend.GrafanaBackend("testapikey")
- message = EmailMessage(m['subject'],{"started":m['started'],"finished":m['finished']}, [], ['https://example.com', ])
- sent_messages = backend.send_messages([message, ])
+ message = EmailMessage(
+ m['subject'],
+ {"started": m['started'], "finished": m['finished']},
+ [],
+ [
+ 'https://example.com',
+ ],
+ )
+ sent_messages = backend.send_messages(
+ [
+ message,
+ ]
+ )
requests_mock.post.assert_called_once_with(
'https://example.com/api/annotations',
headers={'Content-Type': 'application/json', 'Authorization': 'Bearer testapikey'},
json={'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': None, 'time': 60000, 'dashboardId': None},
- verify=True)
+ verify=True,
+ )
assert sent_messages == 1
def test_send_messages_with_no_verify_ssl():
with mock.patch('awx.main.notifications.grafana_backend.requests') as requests_mock:
requests_mock.post.return_value.status_code = 200
- m={}
+ m = {}
m['started'] = dt.datetime.utcfromtimestamp(60).isoformat()
m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat()
m['subject'] = "test subject"
- backend = grafana_backend.GrafanaBackend("testapikey",grafana_no_verify_ssl=True)
- message = EmailMessage(m['subject'],{"started":m['started'],"finished":m['finished']}, [], ['https://example.com', ])
- sent_messages = backend.send_messages([message, ])
+ backend = grafana_backend.GrafanaBackend("testapikey", grafana_no_verify_ssl=True)
+ message = EmailMessage(
+ m['subject'],
+ {"started": m['started'], "finished": m['finished']},
+ [],
+ [
+ 'https://example.com',
+ ],
+ )
+ sent_messages = backend.send_messages(
+ [
+ message,
+ ]
+ )
requests_mock.post.assert_called_once_with(
'https://example.com/api/annotations',
headers={'Content-Type': 'application/json', 'Authorization': 'Bearer testapikey'},
- json={'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': None,'time': 60000, 'dashboardId': None},
- verify=False)
+ json={'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': None, 'time': 60000, 'dashboardId': None},
+ verify=False,
+ )
assert sent_messages == 1
def test_send_messages_with_dashboardid():
with mock.patch('awx.main.notifications.grafana_backend.requests') as requests_mock:
requests_mock.post.return_value.status_code = 200
- m={}
+ m = {}
m['started'] = dt.datetime.utcfromtimestamp(60).isoformat()
m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat()
m['subject'] = "test subject"
- backend = grafana_backend.GrafanaBackend("testapikey",dashboardId=42)
- message = EmailMessage(m['subject'],{"started":m['started'],"finished":m['finished']}, [], ['https://example.com', ])
- sent_messages = backend.send_messages([message, ])
+ backend = grafana_backend.GrafanaBackend("testapikey", dashboardId=42)
+ message = EmailMessage(
+ m['subject'],
+ {"started": m['started'], "finished": m['finished']},
+ [],
+ [
+ 'https://example.com',
+ ],
+ )
+ sent_messages = backend.send_messages(
+ [
+ message,
+ ]
+ )
requests_mock.post.assert_called_once_with(
'https://example.com/api/annotations',
headers={'Content-Type': 'application/json', 'Authorization': 'Bearer testapikey'},
json={'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': None, 'time': 60000, 'dashboardId': 42},
- verify=True)
+ verify=True,
+ )
assert sent_messages == 1
def test_send_messages_with_panelid():
with mock.patch('awx.main.notifications.grafana_backend.requests') as requests_mock:
requests_mock.post.return_value.status_code = 200
- m={}
+ m = {}
m['started'] = dt.datetime.utcfromtimestamp(60).isoformat()
m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat()
m['subject'] = "test subject"
- backend = grafana_backend.GrafanaBackend("testapikey",dashboardId=None,panelId=42)
- message = EmailMessage(m['subject'],{"started":m['started'],"finished":m['finished']}, [], ['https://example.com', ])
- sent_messages = backend.send_messages([message, ])
+ backend = grafana_backend.GrafanaBackend("testapikey", dashboardId=None, panelId=42)
+ message = EmailMessage(
+ m['subject'],
+ {"started": m['started'], "finished": m['finished']},
+ [],
+ [
+ 'https://example.com',
+ ],
+ )
+ sent_messages = backend.send_messages(
+ [
+ message,
+ ]
+ )
requests_mock.post.assert_called_once_with(
'https://example.com/api/annotations',
headers={'Content-Type': 'application/json', 'Authorization': 'Bearer testapikey'},
json={'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': 42, 'time': 60000, 'dashboardId': None},
- verify=True)
+ verify=True,
+ )
assert sent_messages == 1
def test_send_messages_with_bothids():
with mock.patch('awx.main.notifications.grafana_backend.requests') as requests_mock:
requests_mock.post.return_value.status_code = 200
- m={}
+ m = {}
m['started'] = dt.datetime.utcfromtimestamp(60).isoformat()
m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat()
m['subject'] = "test subject"
- backend = grafana_backend.GrafanaBackend("testapikey",dashboardId=42,panelId=42)
- message = EmailMessage(m['subject'],{"started":m['started'],"finished":m['finished']}, [], ['https://example.com', ])
- sent_messages = backend.send_messages([message, ])
+ backend = grafana_backend.GrafanaBackend("testapikey", dashboardId=42, panelId=42)
+ message = EmailMessage(
+ m['subject'],
+ {"started": m['started'], "finished": m['finished']},
+ [],
+ [
+ 'https://example.com',
+ ],
+ )
+ sent_messages = backend.send_messages(
+ [
+ message,
+ ]
+ )
requests_mock.post.assert_called_once_with(
'https://example.com/api/annotations',
headers={'Content-Type': 'application/json', 'Authorization': 'Bearer testapikey'},
json={'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': 42, 'time': 60000, 'dashboardId': 42},
- verify=True)
+ verify=True,
+ )
assert sent_messages == 1
def test_send_messages_with_tags():
with mock.patch('awx.main.notifications.grafana_backend.requests') as requests_mock:
requests_mock.post.return_value.status_code = 200
- m={}
+ m = {}
m['started'] = dt.datetime.utcfromtimestamp(60).isoformat()
m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat()
m['subject'] = "test subject"
- backend = grafana_backend.GrafanaBackend("testapikey",dashboardId=None,panelId=None,annotation_tags=["ansible"])
- message = EmailMessage(m['subject'],{"started":m['started'],"finished":m['finished']}, [], ['https://example.com', ])
- sent_messages = backend.send_messages([message, ])
+ backend = grafana_backend.GrafanaBackend("testapikey", dashboardId=None, panelId=None, annotation_tags=["ansible"])
+ message = EmailMessage(
+ m['subject'],
+ {"started": m['started'], "finished": m['finished']},
+ [],
+ [
+ 'https://example.com',
+ ],
+ )
+ sent_messages = backend.send_messages(
+ [
+ message,
+ ]
+ )
requests_mock.post.assert_called_once_with(
'https://example.com/api/annotations',
headers={'Content-Type': 'application/json', 'Authorization': 'Bearer testapikey'},
json={'tags': ['ansible'], 'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': None, 'time': 60000, 'dashboardId': None},
- verify=True)
+ verify=True,
+ )
assert sent_messages == 1
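
Every test in this file pays the same price: the stray trailing commas in single-element lists like ['https://example.com', ] trigger the magic trailing comma, so black explodes each one across several lines. Dropping the comma before formatting would have kept the calls compact:

    # As committed: the trailing comma forces the exploded layout
    recipients = [
        'https://example.com',
    ]

    # Without the trailing comma, black leaves it on one line
    recipients = ['https://example.com']
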
diff --git a/awx/main/tests/unit/notifications/test_rocketchat.py b/awx/main/tests/unit/notifications/test_rocketchat.py
index 6bf528a93e..8f00d19966 100644
--- a/awx/main/tests/unit/notifications/test_rocketchat.py
+++ b/awx/main/tests/unit/notifications/test_rocketchat.py
@@ -10,8 +10,19 @@ def test_send_messages():
with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
requests_mock.post.return_value.status_code = 201
backend = rocketchat_backend.RocketChatBackend()
- message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
- sent_messages = backend.send_messages([message, ])
+ message = EmailMessage(
+ 'test subject',
+ 'test body',
+ [],
+ [
+ 'http://example.com',
+ ],
+ )
+ sent_messages = backend.send_messages(
+ [
+ message,
+ ]
+ )
requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject"}', verify=True)
assert sent_messages == 1
@@ -20,8 +31,19 @@ def test_send_messages_with_username():
with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
requests_mock.post.return_value.status_code = 201
backend = rocketchat_backend.RocketChatBackend(rocketchat_username='testuser')
- message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
- sent_messages = backend.send_messages([message, ])
+ message = EmailMessage(
+ 'test subject',
+ 'test body',
+ [],
+ [
+ 'http://example.com',
+ ],
+ )
+ sent_messages = backend.send_messages(
+ [
+ message,
+ ]
+ )
calls = requests_mock.post.call_args_list
assert len(calls) == 1
@@ -37,8 +59,19 @@ def test_send_messages_with_icon_url():
with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
requests_mock.post.return_value.status_code = 201
backend = rocketchat_backend.RocketChatBackend(rocketchat_icon_url='http://example.com')
- message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
- sent_messages = backend.send_messages([message, ])
+ message = EmailMessage(
+ 'test subject',
+ 'test body',
+ [],
+ [
+ 'http://example.com',
+ ],
+ )
+ sent_messages = backend.send_messages(
+ [
+ message,
+ ]
+ )
calls = requests_mock.post.call_args_list
assert len(calls) == 1
@@ -54,7 +87,18 @@ def test_send_messages_with_no_verify_ssl():
with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
requests_mock.post.return_value.status_code = 201
backend = rocketchat_backend.RocketChatBackend(rocketchat_no_verify_ssl=True)
- message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
- sent_messages = backend.send_messages([message, ])
+ message = EmailMessage(
+ 'test subject',
+ 'test body',
+ [],
+ [
+ 'http://example.com',
+ ],
+ )
+ sent_messages = backend.send_messages(
+ [
+ message,
+ ]
+ )
requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject"}', verify=False)
assert sent_messages == 1
diff --git a/awx/main/tests/unit/scheduler/test_dag_simple.py b/awx/main/tests/unit/scheduler/test_dag_simple.py
index f5b8303a9a..4bb1418157 100644
--- a/awx/main/tests/unit/scheduler/test_dag_simple.py
+++ b/awx/main/tests/unit/scheduler/test_dag_simple.py
@@ -7,6 +7,7 @@ from awx.main.scheduler.dag_simple import SimpleDAG
def node_generator():
def fn():
return object()
+
return fn
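
The only change to this factory fixture is the blank line black inserts after a nested function body, before the surrounding code resumes. The resulting pattern, runnable on its own:

    def node_generator():
        def fn():
            return object()

        # the blank line above separates the nested def from the return
        return fn


    make_node = node_generator()
    assert make_node() is not make_node()
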
diff --git a/awx/main/tests/unit/scheduler/test_dag_workflow.py b/awx/main/tests/unit/scheduler/test_dag_workflow.py
index aaec50191a..18c3d193f7 100644
--- a/awx/main/tests/unit/scheduler/test_dag_workflow.py
+++ b/awx/main/tests/unit/scheduler/test_dag_workflow.py
@@ -8,7 +8,7 @@ from django.utils.encoding import smart_text
from awx.main.scheduler.dag_workflow import WorkflowDAG
-class Job():
+class Job:
def __init__(self, status='successful'):
self.status = status
@@ -30,6 +30,7 @@ def wf_node_generator(mocker):
wfn = WorkflowNode(id=pytest.count, unified_job_template=object(), **kwargs)
pytest.count += 1
return wfn
+
return fn
@@ -62,7 +63,7 @@ def workflow_dag_1(wf_node_generator):
return (g, nodes)
-class TestWorkflowDAG():
+class TestWorkflowDAG:
@pytest.fixture
def workflow_dag_root_children(self, wf_node_generator):
g = WorkflowDAG()
@@ -84,13 +85,12 @@ class TestWorkflowDAG():
g.add_edge(wf_root_nodes[i], n, 'label')
return (g, wf_root_nodes, wf_leaf_nodes)
-
def test_get_root_nodes(self, workflow_dag_root_children):
(g, wf_root_nodes, ignore) = workflow_dag_root_children
assert set([n.id for n in wf_root_nodes]) == set([n['node_object'].id for n in g.get_root_nodes()])
-class TestDNR():
+class TestDNR:
def test_mark_dnr_nodes(self, workflow_dag_1):
(g, nodes) = workflow_dag_1
@@ -134,7 +134,7 @@ class TestDNR():
assert nodes[3] == do_not_run_nodes[0]
-class TestAllWorkflowNodes():
+class TestAllWorkflowNodes:
# test workflow convergence is functioning as expected
@pytest.fixture
def simple_all_convergence(self, wf_node_generator):
@@ -388,7 +388,7 @@ class TestAllWorkflowNodes():
assert 0 == len(nodes_to_run), "All non-run nodes should be DNR and NOT candidates to run"
-class TestIsWorkflowDone():
+class TestIsWorkflowDone:
@pytest.fixture
def workflow_dag_2(self, workflow_dag_1):
(g, nodes) = workflow_dag_1
@@ -466,9 +466,15 @@ class TestIsWorkflowDone():
(g, nodes) = workflow_dag_failed
assert g.is_workflow_done() is True
- assert g.has_workflow_failed() == \
- (True, smart_text(_("No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)"
- " missing unified job template and error handling path [].").format(nodes[2].id, nodes[2].job.status)))
+ assert g.has_workflow_failed() == (
+ True,
+ smart_text(
+ _(
+ "No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)"
+ " missing unified job template and error handling path []."
+ ).format(nodes[2].id, nodes[2].job.status)
+ ),
+ )
def test_is_workflow_done_no_unified_job_tempalte_end(self, workflow_dag_failed):
(g, nodes) = workflow_dag_failed
@@ -476,9 +482,14 @@ class TestIsWorkflowDone():
nodes[2].unified_job_template = None
assert g.is_workflow_done() is True
- assert g.has_workflow_failed() == \
- (True, smart_text(_("No error handling path for workflow job node(s) []. Workflow job node(s) missing"
- " unified job template and error handling path [{}].").format(nodes[2].id)))
+ assert g.has_workflow_failed() == (
+ True,
+ smart_text(
+ _(
+ "No error handling path for workflow job node(s) []. Workflow job node(s) missing" " unified job template and error handling path [{}]."
+ ).format(nodes[2].id)
+ ),
+ )
def test_is_workflow_done_no_unified_job_tempalte_begin(self, workflow_dag_1):
(g, nodes) = workflow_dag_1
@@ -487,26 +498,43 @@ class TestIsWorkflowDone():
g.mark_dnr_nodes()
assert g.is_workflow_done() is True
- assert g.has_workflow_failed() == \
- (True, smart_text(_("No error handling path for workflow job node(s) []. Workflow job node(s) missing"
- " unified job template and error handling path [{}].").format(nodes[0].id)))
+ assert g.has_workflow_failed() == (
+ True,
+ smart_text(
+ _(
+ "No error handling path for workflow job node(s) []. Workflow job node(s) missing" " unified job template and error handling path [{}]."
+ ).format(nodes[0].id)
+ ),
+ )
def test_canceled_should_fail(self, workflow_dag_canceled):
(g, nodes) = workflow_dag_canceled
- assert g.has_workflow_failed() == \
- (True, smart_text(_("No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)"
- " missing unified job template and error handling path [].").format(nodes[0].id, nodes[0].job.status)))
+ assert g.has_workflow_failed() == (
+ True,
+ smart_text(
+ _(
+ "No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)"
+ " missing unified job template and error handling path []."
+ ).format(nodes[0].id, nodes[0].job.status)
+ ),
+ )
def test_failure_should_fail(self, workflow_dag_failure):
(g, nodes) = workflow_dag_failure
- assert g.has_workflow_failed() == \
- (True, smart_text(_("No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)"
- " missing unified job template and error handling path [].").format(nodes[0].id, nodes[0].job.status)))
+ assert g.has_workflow_failed() == (
+ True,
+ smart_text(
+ _(
+ "No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)"
+ " missing unified job template and error handling path []."
+ ).format(nodes[0].id, nodes[0].job.status)
+ ),
+ )
-class TestBFSNodesToRun():
+class TestBFSNodesToRun:
@pytest.fixture
def workflow_dag_canceled(self, wf_node_generator):
g = WorkflowDAG()
@@ -534,7 +562,7 @@ class TestBFSNodesToRun():
@pytest.mark.skip(reason="Run manually to re-generate doc images")
-class TestDocsExample():
+class TestDocsExample:
@pytest.fixture
def complex_dag(self, wf_node_generator):
g = WorkflowDAG()
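
The reflowed has_workflow_failed assertions above rely on implicit concatenation of adjacent string literals, which black preserves as written (even the odd same-line "...missing" " unified..." split survives). A sketch of the feature and its classic pitfall:

    # Adjacent literals are joined at compile time:
    msg = (
        "No error handling path for workflow job node(s) []. "
        "Workflow job node(s) missing unified job template."
    )
    assert "node(s) missing" in msg

    # Pitfall: a forgotten comma silently concatenates list items
    items = [
        "alpha",
        "beta"  # <- no comma here
        "gamma",
    ]
    assert items == ["alpha", "betagamma"]
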
diff --git a/awx/main/tests/unit/scheduler/test_kubernetes.py b/awx/main/tests/unit/scheduler/test_kubernetes.py
index 4121d0133b..dc794ab65e 100644
--- a/awx/main/tests/unit/scheduler/test_kubernetes.py
+++ b/awx/main/tests/unit/scheduler/test_kubernetes.py
@@ -21,12 +21,7 @@ def container_group():
@pytest.fixture
def job(container_group):
- return Job(pk=1,
- id=1,
- project=Project(),
- instance_group=container_group,
- inventory=Inventory(),
- job_template=JobTemplate(id=1, name='foo'))
+ return Job(pk=1, id=1, project=Project(), instance_group=container_group, inventory=Inventory(), job_template=JobTemplate(id=1, name='foo'))
def test_default_pod_spec(job):
diff --git a/awx/main/tests/unit/settings/test_defaults.py b/awx/main/tests/unit/settings/test_defaults.py
index 00e9418b57..b7d23a3b3e 100644
--- a/awx/main/tests/unit/settings/test_defaults.py
+++ b/awx/main/tests/unit/settings/test_defaults.py
@@ -4,9 +4,12 @@ from django.conf import settings
from datetime import timedelta
-@pytest.mark.parametrize("job_name,function_path", [
- ('tower_scheduler', 'awx.main.tasks.awx_periodic_scheduler'),
-])
+@pytest.mark.parametrize(
+ "job_name,function_path",
+ [
+ ('tower_scheduler', 'awx.main.tasks.awx_periodic_scheduler'),
+ ],
+)
def test_CELERYBEAT_SCHEDULE(mocker, job_name, function_path):
assert job_name in settings.CELERYBEAT_SCHEDULE
assert 'schedule' in settings.CELERYBEAT_SCHEDULE[job_name]
diff --git a/awx/main/tests/unit/test_access.py b/awx/main/tests/unit/test_access.py
index 49c2a54467..547af7b42c 100644
--- a/awx/main/tests/unit/test_access.py
+++ b/awx/main/tests/unit/test_access.py
@@ -5,14 +5,7 @@ from django.contrib.auth.models import User
from django.forms.models import model_to_dict
from rest_framework.exceptions import ParseError
-from awx.main.access import (
- BaseAccess,
- check_superuser,
- JobTemplateAccess,
- WorkflowJobTemplateAccess,
- SystemJobTemplateAccess,
- vars_are_encrypted
-)
+from awx.main.access import BaseAccess, check_superuser, JobTemplateAccess, WorkflowJobTemplateAccess, SystemJobTemplateAccess, vars_are_encrypted
from awx.main.models import (
Credential,
@@ -33,14 +26,12 @@ class TestRelatedFieldAccess:
@pytest.fixture
def resource_good(self, mocker):
good_role = mocker.MagicMock(__contains__=lambda self, user: True)
- return mocker.MagicMock(related=mocker.MagicMock(admin_role=good_role),
- admin_role=good_role)
+ return mocker.MagicMock(related=mocker.MagicMock(admin_role=good_role), admin_role=good_role)
@pytest.fixture
def resource_bad(self, mocker):
bad_role = mocker.MagicMock(__contains__=lambda self, user: False)
- return mocker.MagicMock(related=mocker.MagicMock(admin_role=bad_role),
- admin_role=bad_role)
+ return mocker.MagicMock(related=mocker.MagicMock(admin_role=bad_role), admin_role=bad_role)
@pytest.fixture
def access(self, user_unit):
@@ -61,10 +52,8 @@ class TestRelatedFieldAccess:
def test_new_mandatory_fail(self, access, mocker):
access.user.is_superuser = False
- assert not access.check_related(
- 'related', mocker.MagicMock, {}, mandatory=True)
- assert not access.check_related(
- 'related', mocker.MagicMock, {'resource': None}, mandatory=True)
+ assert not access.check_related('related', mocker.MagicMock, {}, mandatory=True)
+ assert not access.check_related('related', mocker.MagicMock, {'resource': None}, mandatory=True)
def test_existing_no_op(self, access, resource_bad, mocker):
"""
@@ -72,61 +61,54 @@ class TestRelatedFieldAccess:
lack of access to related field does not block action
"""
data = {'related': resource_bad.related}
- assert access.check_related(
- 'related', mocker.MagicMock, data, obj=resource_bad)
- assert access.check_related(
- 'related', mocker.MagicMock, {}, obj=resource_bad)
+ assert access.check_related('related', mocker.MagicMock, data, obj=resource_bad)
+ assert access.check_related('related', mocker.MagicMock, {}, obj=resource_bad)
def test_existing_required_access(self, access, resource_bad, mocker):
# no-op actions, but mandatory kwarg requires check to pass
- assert not access.check_related(
- 'related', mocker.MagicMock, {}, obj=resource_bad, mandatory=True)
- assert not access.check_related(
- 'related', mocker.MagicMock, {'related': resource_bad.related},
- obj=resource_bad, mandatory=True)
-
- def test_existing_no_access_to_current(
- self, access, resource_good, resource_bad, mocker):
+ assert not access.check_related('related', mocker.MagicMock, {}, obj=resource_bad, mandatory=True)
+ assert not access.check_related('related', mocker.MagicMock, {'related': resource_bad.related}, obj=resource_bad, mandatory=True)
+
+ def test_existing_no_access_to_current(self, access, resource_good, resource_bad, mocker):
"""
User gives a valid related resource (like organization), but does
not have access to _existing_ related resource, so deny action
"""
data = {'related': resource_good}
- assert not access.check_related(
- 'related', mocker.MagicMock, data, obj=resource_bad)
+ assert not access.check_related('related', mocker.MagicMock, data, obj=resource_bad)
- def test_existing_no_access_to_new(
- self, access, resource_good, resource_bad, mocker):
+ def test_existing_no_access_to_new(self, access, resource_good, resource_bad, mocker):
data = {'related': resource_bad}
- assert not access.check_related(
- 'related', mocker.MagicMock, data, obj=resource_good)
+ assert not access.check_related('related', mocker.MagicMock, data, obj=resource_good)
def test_existing_not_allowed_to_remove(self, access, resource_bad, mocker):
data = {'related': None}
- assert not access.check_related(
- 'related', mocker.MagicMock, data, obj=resource_bad)
+ assert not access.check_related('related', mocker.MagicMock, data, obj=resource_bad)
def test_existing_not_null_null(self, access, mocker):
resource = mocker.MagicMock(related=None)
data = {'related': None}
# Not changing anything by giving null when it is already-null
# important for PUT requests
- assert access.check_related(
- 'related', mocker.MagicMock, data, obj=resource, mandatory=True)
+ assert access.check_related('related', mocker.MagicMock, data, obj=resource, mandatory=True)
def test_encrypted_vars_detection():
- assert vars_are_encrypted({
- 'aaa': {'b': 'c'},
- 'alist': [],
- 'test_var_eight': '$encrypted$UTF8$AESCBC$Z0FBQUF...==',
- 'test_var_five': 'four',
- })
- assert not vars_are_encrypted({
- 'aaa': {'b': 'c'},
- 'alist': [],
- 'test_var_five': 'four',
- })
+ assert vars_are_encrypted(
+ {
+ 'aaa': {'b': 'c'},
+ 'alist': [],
+ 'test_var_eight': '$encrypted$UTF8$AESCBC$Z0FBQUF...==',
+ 'test_var_five': 'four',
+ }
+ )
+ assert not vars_are_encrypted(
+ {
+ 'aaa': {'b': 'c'},
+ 'alist': [],
+ 'test_var_five': 'four',
+ }
+ )
@pytest.fixture
@@ -145,9 +127,8 @@ def job_template_with_ids(job_template_factory):
proj = Project(id=14, pk=14, name='testproj')
jt_objects = job_template_factory(
- 'testJT', project=proj, inventory=inv, credential=credential,
- cloud_credential=cloud_cred, network_credential=net_cred,
- persisted=False)
+ 'testJT', project=proj, inventory=inv, credential=credential, cloud_credential=cloud_cred, network_credential=net_cred, persisted=False
+ )
jt = jt_objects.job_template
jt.organization = Organization(id=1, pk=1, name='fooOrg')
return jt
@@ -185,7 +166,7 @@ def test_change_jt_sensitive_data(job_template_with_ids, mocker, user_unit):
class RoleReturnsTrue(Role):
class Meta:
proxy = True
-
+
def __contains__(self, accessor):
return True
@@ -238,7 +219,6 @@ class TestWorkflowAccessMethods:
assert access.can_add({'organization': 1})
-
def test_user_capabilities_method():
"""Unit test to verify that the user_capabilities method will defer
to the appropriate sub-class methods of the access classes.
@@ -257,10 +237,7 @@ def test_user_capabilities_method():
foo_access = FooAccess(user)
foo = object()
foo_capabilities = foo_access.get_user_capabilities(foo, ['edit', 'copy'])
- assert foo_capabilities == {
- 'edit': 'bar',
- 'copy': 'foo'
- }
+ assert foo_capabilities == {'edit': 'bar', 'copy': 'foo'}
def test_system_job_template_can_start(mocker):
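
For orientation, the reformatted test_encrypted_vars_detection above exercises vars_are_encrypted, which reports whether any extra-var value still carries AWX's $encrypted$ marker. A hypothetical re-implementation, inferred only from the test's inputs and outputs (not from the actual awx.main.access code):

    def vars_are_encrypted_sketch(extra_vars):
        # True if any string value is an AWX encryption artifact
        return any(
            isinstance(value, str) and value.startswith('$encrypted$')
            for value in extra_vars.values()
        )

    assert vars_are_encrypted_sketch({'test_var_eight': '$encrypted$UTF8$AESCBC$Z0FBQUF...==', 'test_var_five': 'four'})
    assert not vars_are_encrypted_sketch({'aaa': {'b': 'c'}, 'alist': [], 'test_var_five': 'four'})
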
diff --git a/awx/main/tests/unit/test_capacity.py b/awx/main/tests/unit/test_capacity.py
index 1da05ec1f3..cbb4124b52 100644
--- a/awx/main/tests/unit/test_capacity.py
+++ b/awx/main/tests/unit/test_capacity.py
@@ -20,7 +20,6 @@ class Job(FakeObject):
@pytest.fixture
def sample_cluster():
def stand_up_cluster():
-
class Instances(FakeObject):
def add(self, *args):
for instance in args:
@@ -30,7 +29,6 @@ def sample_cluster():
return self.obj.instance_list
class InstanceGroup(FakeObject):
-
def __init__(self, **kwargs):
super(InstanceGroup, self).__init__(**kwargs)
self.instance_list = []
@@ -40,11 +38,9 @@ def sample_cluster():
mgr = Instances(obj=self)
return mgr
-
class Instance(FakeObject):
pass
-
ig_small = InstanceGroup(name='ig_small')
ig_large = InstanceGroup(name='ig_large')
tower = InstanceGroup(name='tower')
@@ -55,19 +51,14 @@ def sample_cluster():
ig_large.instances.add(i2, i3)
tower.instances.add(i2)
return [tower, ig_large, ig_small]
+
return stand_up_cluster
def test_committed_capacity(sample_cluster):
tower, ig_large, ig_small = sample_cluster()
- tasks = [
- Job(status='waiting', instance_group=tower),
- Job(status='waiting', instance_group=ig_large),
- Job(status='waiting', instance_group=ig_small)
- ]
- capacities = InstanceGroup.objects.capacity_values(
- qs=[tower, ig_large, ig_small], tasks=tasks, breakdown=True
- )
+ tasks = [Job(status='waiting', instance_group=tower), Job(status='waiting', instance_group=ig_large), Job(status='waiting', instance_group=ig_small)]
+ capacities = InstanceGroup.objects.capacity_values(qs=[tower, ig_large, ig_small], tasks=tasks, breakdown=True)
# Jobs submitted to either tower or ig_large must count toward both
assert capacities['tower']['committed_capacity'] == 43 * 2
assert capacities['ig_large']['committed_capacity'] == 43 * 2
@@ -76,14 +67,8 @@ def test_committed_capacity(sample_cluster):
def test_running_capacity(sample_cluster):
tower, ig_large, ig_small = sample_cluster()
- tasks = [
- Job(status='running', execution_node='i1'),
- Job(status='running', execution_node='i2'),
- Job(status='running', execution_node='i3')
- ]
- capacities = InstanceGroup.objects.capacity_values(
- qs=[tower, ig_large, ig_small], tasks=tasks, breakdown=True
- )
+ tasks = [Job(status='running', execution_node='i1'), Job(status='running', execution_node='i2'), Job(status='running', execution_node='i3')]
+ capacities = InstanceGroup.objects.capacity_values(qs=[tower, ig_large, ig_small], tasks=tasks, breakdown=True)
# Tower is only given 1 instance
assert capacities['tower']['running_capacity'] == 43
# Large IG has 2 instances
@@ -99,8 +84,7 @@ def test_offline_node_running(sample_cluster):
tower, ig_large, ig_small = sample_cluster()
ig_small.instance_list[0].capacity = 0
tasks = [Job(status='running', execution_node='i1', instance_group=ig_small)]
- capacities = InstanceGroup.objects.capacity_values(
- qs=[tower, ig_large, ig_small], tasks=tasks)
+ capacities = InstanceGroup.objects.capacity_values(qs=[tower, ig_large, ig_small], tasks=tasks)
assert capacities['ig_small']['consumed_capacity'] == 43
@@ -111,8 +95,7 @@ def test_offline_node_waiting(sample_cluster):
tower, ig_large, ig_small = sample_cluster()
ig_small.instance_list[0].capacity = 0
tasks = [Job(status='waiting', instance_group=ig_small)]
- capacities = InstanceGroup.objects.capacity_values(
- qs=[tower, ig_large, ig_small], tasks=tasks)
+ capacities = InstanceGroup.objects.capacity_values(qs=[tower, ig_large, ig_small], tasks=tasks)
assert capacities['ig_small']['consumed_capacity'] == 43
@@ -123,14 +106,8 @@ def test_RBAC_reduced_filter(sample_cluster):
Verify that this does not blow everything up.
"""
tower, ig_large, ig_small = sample_cluster()
- tasks = [
- Job(status='waiting', instance_group=tower),
- Job(status='waiting', instance_group=ig_large),
- Job(status='waiting', instance_group=ig_small)
- ]
- capacities = InstanceGroup.objects.capacity_values(
- qs=[tower], tasks=tasks, breakdown=True
- )
+ tasks = [Job(status='waiting', instance_group=tower), Job(status='waiting', instance_group=ig_large), Job(status='waiting', instance_group=ig_small)]
+ capacities = InstanceGroup.objects.capacity_values(qs=[tower], tasks=tasks, breakdown=True)
# Cross-links between groups not visible to current user,
# so a naive accounting of capacities is returned instead
assert capacities['tower']['committed_capacity'] == 43
diff --git a/awx/main/tests/unit/test_db.py b/awx/main/tests/unit/test_db.py
index d7ffe58456..ce0b8bbecc 100644
--- a/awx/main/tests/unit/test_db.py
+++ b/awx/main/tests/unit/test_db.py
@@ -10,15 +10,11 @@ import awx
from awx.main.db.profiled_pg.base import RecordedQueryLog
-QUERY = {
- 'sql': 'SELECT * FROM main_job',
- 'time': '.01'
-}
+QUERY = {'sql': 'SELECT * FROM main_job', 'time': '.01'}
EXPLAIN = 'Seq Scan on public.main_job (cost=0.00..1.18 rows=18 width=86)'
-class FakeDatabase():
-
+class FakeDatabase:
def __init__(self):
self._cursor = unittest.mock.Mock(spec_sec=['execute', 'fetchall'])
self._cursor.fetchall.return_value = [(EXPLAIN,)]
@@ -129,18 +125,16 @@ def test_sql_above_threshold(tmpdir):
args, kw = _call
assert args == ('EXPLAIN VERBOSE {}'.format(QUERY['sql']),)
- path = os.path.join(
- tmpdir,
- '{}.sqlite'.format(os.path.basename(sys.argv[0]))
- )
+ path = os.path.join(tmpdir, '{}.sqlite'.format(os.path.basename(sys.argv[0])))
assert os.path.exists(path)
# verify the results
def dict_factory(cursor, row):
d = {}
- for idx,col in enumerate(cursor.description):
+ for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d
+
cursor = sqlite3.connect(path)
cursor.row_factory = dict_factory
queries_logged = cursor.execute('SELECT * FROM queries').fetchall()
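
The dict_factory in this hunk only gained a space after its comma, but it is the standard sqlite3 row-factory pattern. A self-contained version for reference (the in-memory database and table are illustrative):

    import sqlite3

    def dict_factory(cursor, row):
        # Map each column name from cursor.description onto its value
        return {col[0]: row[idx] for idx, col in enumerate(cursor.description)}

    conn = sqlite3.connect(':memory:')
    conn.row_factory = dict_factory
    conn.execute('CREATE TABLE queries (sql TEXT, time REAL)')
    conn.execute("INSERT INTO queries VALUES ('SELECT 1', 0.01)")
    assert conn.execute('SELECT * FROM queries').fetchall() == [{'sql': 'SELECT 1', 'time': 0.01}]
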
diff --git a/awx/main/tests/unit/test_fields.py b/awx/main/tests/unit/test_fields.py
index 94d3eaab92..56c6a3befc 100644
--- a/awx/main/tests/unit/test_fields.py
+++ b/awx/main/tests/unit/test_fields.py
@@ -4,10 +4,7 @@ import pytest
from django.core.exceptions import ValidationError
from django.apps import apps
from django.db.models.fields.related import ForeignKey
-from django.db.models.fields.related_descriptors import (
- ReverseManyToOneDescriptor,
- ForwardManyToOneDescriptor
-)
+from django.db.models.fields.related_descriptors import ReverseManyToOneDescriptor, ForwardManyToOneDescriptor
from rest_framework.serializers import ValidationError as DRFValidationError
@@ -15,30 +12,28 @@ from awx.main.models import Credential, CredentialType, BaseModel
from awx.main.fields import JSONSchemaField, ImplicitRoleField, ImplicitRoleDescriptor
-@pytest.mark.parametrize('schema, given, message', [
- (
- { # imitates what the CredentialType injectors field is
- "additionalProperties": False,
- "type": "object",
- "properties": {
- "extra_vars": {
- "additionalProperties": False,
- "type": "object"
- }
- }
- },
- {'extra_vars': ['duck', 'horse']},
- "list provided in relative path ['extra_vars'], expected dict"
- ),
- (
- { # imitates what the CredentialType injectors field is
- "additionalProperties": False,
- "type": "object",
- },
- ['duck', 'horse'],
- "list provided, expected dict"
- ),
-])
+@pytest.mark.parametrize(
+ 'schema, given, message',
+ [
+ (
+ { # imitates what the CredentialType injectors field is
+ "additionalProperties": False,
+ "type": "object",
+ "properties": {"extra_vars": {"additionalProperties": False, "type": "object"}},
+ },
+ {'extra_vars': ['duck', 'horse']},
+ "list provided in relative path ['extra_vars'], expected dict",
+ ),
+ (
+ { # imitates what the CredentialType injectors field is
+ "additionalProperties": False,
+ "type": "object",
+ },
+ ['duck', 'horse'],
+ "list provided, expected dict",
+ ),
+ ],
+)
def test_custom_error_messages(schema, given, message):
instance = BaseModel()
@@ -54,43 +49,51 @@ def test_custom_error_messages(schema, given, message):
assert message == exc.value.error_list[0].message
-@pytest.mark.parametrize('input_, valid', [
- ({}, True),
- ({'fields': []}, True),
- ({'fields': {}}, False),
- ({'fields': 123}, False),
- ({'fields': [{'id': 'username', 'label': 'Username', 'foo': 'bar'}]}, False),
- ({'fields': [{'id': 'username', 'label': 'Username'}]}, True),
- ({'fields': [{'id': 'username', 'label': 'Username', 'type': 'string'}]}, True),
- ({'fields': [{'id': 'username', 'label': 'Username', 'help_text': 1}]}, False),
- ({'fields': [{'id': 'username', 'label': 'Username', 'help_text': 'Help Text'}]}, True), # noqa
- ({'fields': [{'id': 'username', 'label': 'Username'}, {'id': 'username', 'label': 'Username 2'}]}, False), # noqa
- ({'fields': [{'id': '$invalid$', 'label': 'Invalid', 'type': 'string'}]}, False), # noqa
- ({'fields': [{'id': 'password', 'label': 'Password', 'type': 'invalid-type'}]}, False),
- ({'fields': [{'id': 'ssh_key', 'label': 'SSH Key', 'type': 'string', 'format': 'ssh_private_key'}]}, True), # noqa
- ({'fields': [{'id': 'flag', 'label': 'Some Flag', 'type': 'boolean'}]}, True),
- ({'fields': [{'id': 'flag', 'label': 'Some Flag', 'type': 'boolean', 'choices': ['a', 'b']}]}, False),
- ({'fields': [{'id': 'flag', 'label': 'Some Flag', 'type': 'boolean', 'secret': True}]}, False),
- ({'fields': [{'id': 'certificate', 'label': 'Cert', 'multiline': True}]}, True),
- ({'fields': [{'id': 'certificate', 'label': 'Cert', 'multiline': True, 'type': 'boolean'}]}, False), # noqa
- ({'fields': [{'id': 'certificate', 'label': 'Cert', 'multiline': 'bad'}]}, False), # noqa
- ({'fields': [{'id': 'token', 'label': 'Token', 'secret': True}]}, True),
- ({'fields': [{'id': 'token', 'label': 'Token', 'secret': 'bad'}]}, False),
- ({'fields': [{'id': 'token', 'label': 'Token', 'ask_at_runtime': True}]}, True),
- ({'fields': [{'id': 'token', 'label': 'Token', 'ask_at_runtime': 'bad'}]}, False), # noqa
- ({'fields': [{'id': 'become_method', 'label': 'Become', 'choices': 'not-a-list'}]}, False), # noqa
- ({'fields': [{'id': 'become_method', 'label': 'Become', 'choices': []}]}, False),
- ({'fields': [{'id': 'become_method', 'label': 'Become', 'choices': ['su', 'sudo']}]}, True), # noqa
- ({'fields': [{'id': 'become_method', 'label': 'Become', 'choices': ['dup', 'dup']}]}, False), # noqa
- ({'fields': [{'id': 'tower', 'label': 'Reserved!', }]}, False), # noqa
-])
+@pytest.mark.parametrize(
+ 'input_, valid',
+ [
+ ({}, True),
+ ({'fields': []}, True),
+ ({'fields': {}}, False),
+ ({'fields': 123}, False),
+ ({'fields': [{'id': 'username', 'label': 'Username', 'foo': 'bar'}]}, False),
+ ({'fields': [{'id': 'username', 'label': 'Username'}]}, True),
+ ({'fields': [{'id': 'username', 'label': 'Username', 'type': 'string'}]}, True),
+ ({'fields': [{'id': 'username', 'label': 'Username', 'help_text': 1}]}, False),
+ ({'fields': [{'id': 'username', 'label': 'Username', 'help_text': 'Help Text'}]}, True), # noqa
+ ({'fields': [{'id': 'username', 'label': 'Username'}, {'id': 'username', 'label': 'Username 2'}]}, False), # noqa
+ ({'fields': [{'id': '$invalid$', 'label': 'Invalid', 'type': 'string'}]}, False), # noqa
+ ({'fields': [{'id': 'password', 'label': 'Password', 'type': 'invalid-type'}]}, False),
+ ({'fields': [{'id': 'ssh_key', 'label': 'SSH Key', 'type': 'string', 'format': 'ssh_private_key'}]}, True), # noqa
+ ({'fields': [{'id': 'flag', 'label': 'Some Flag', 'type': 'boolean'}]}, True),
+ ({'fields': [{'id': 'flag', 'label': 'Some Flag', 'type': 'boolean', 'choices': ['a', 'b']}]}, False),
+ ({'fields': [{'id': 'flag', 'label': 'Some Flag', 'type': 'boolean', 'secret': True}]}, False),
+ ({'fields': [{'id': 'certificate', 'label': 'Cert', 'multiline': True}]}, True),
+ ({'fields': [{'id': 'certificate', 'label': 'Cert', 'multiline': True, 'type': 'boolean'}]}, False), # noqa
+ ({'fields': [{'id': 'certificate', 'label': 'Cert', 'multiline': 'bad'}]}, False), # noqa
+ ({'fields': [{'id': 'token', 'label': 'Token', 'secret': True}]}, True),
+ ({'fields': [{'id': 'token', 'label': 'Token', 'secret': 'bad'}]}, False),
+ ({'fields': [{'id': 'token', 'label': 'Token', 'ask_at_runtime': True}]}, True),
+ ({'fields': [{'id': 'token', 'label': 'Token', 'ask_at_runtime': 'bad'}]}, False), # noqa
+ ({'fields': [{'id': 'become_method', 'label': 'Become', 'choices': 'not-a-list'}]}, False), # noqa
+ ({'fields': [{'id': 'become_method', 'label': 'Become', 'choices': []}]}, False),
+ ({'fields': [{'id': 'become_method', 'label': 'Become', 'choices': ['su', 'sudo']}]}, True), # noqa
+ ({'fields': [{'id': 'become_method', 'label': 'Become', 'choices': ['dup', 'dup']}]}, False), # noqa
+ (
+ {
+ 'fields': [
+ {
+ 'id': 'tower',
+ 'label': 'Reserved!',
+ }
+ ]
+ },
+ False,
+ ), # noqa
+ ],
+)
def test_cred_type_input_schema_validity(input_, valid):
- type_ = CredentialType(
- kind='cloud',
- name='SomeCloud',
- managed_by_tower=True,
- inputs=input_
- )
+ type_ = CredentialType(kind='cloud', name='SomeCloud', managed_by_tower=True, inputs=input_)
field = CredentialType._meta.get_field('inputs')
if valid is False:
with pytest.raises(ValidationError):
@@ -99,48 +102,51 @@ def test_cred_type_input_schema_validity(input_, valid):
field.clean(input_, type_)
-@pytest.mark.parametrize('injectors, valid', [
- ({}, True),
- ({'invalid-injector': {}}, False),
- ({'file': 123}, False),
- ({'file': {}}, True),
- # Uses credential inputs inside of unnamed file contents
- ({'file': {'template': '{{username}}'}}, True),
- # Uses named file
- ({'file': {'template.username': '{{username}}'}}, True),
- # Uses multiple named files
- ({'file': {'template.username': '{{username}}', 'template.password': '{{pass}}'}}, True),
- # Use of unnamed file mutually exclusive with use of named files
- ({'file': {'template': '{{username}}', 'template.password': '{{pass}}'}}, False),
- # References non-existent named file
- ({'env': {'FROM_FILE': "{{tower.filename.cert}}"}}, False),
- # References unnamed file, but a file was never defined
- ({'env': {'FROM_FILE': "{{tower.filename}}"}}, False),
- # Cannot reference tower namespace itself (what would this return??)
- ({'env': {'FROM_FILE': "{{tower}}"}}, False),
- # References filename of a named file
- ({'file': {'template.cert': '{{awx_secret}}'}, 'env': {'FROM_FILE': "{{tower.filename.cert}}"}}, True),
- # With named files, `tower.filename` is another namespace, so it cannot be referenced
- ({'file': {'template.cert': '{{awx_secret}}'}, 'env': {'FROM_FILE': "{{tower.filename}}"}}, False),
- # With an unnamed file, `tower.filename` is just the filename
- ({'file': {'template': '{{awx_secret}}'}, 'env': {'THE_FILENAME': "{{tower.filename}}"}}, True),
- ({'file': {'foo': 'bar'}}, False),
- ({'env': 123}, False),
- ({'env': {}}, True),
- ({'env': {'AWX_SECRET': '{{awx_secret}}'}}, True),
- ({'env': {'AWX_SECRET_99': '{{awx_secret}}'}}, True),
- ({'env': {'99': '{{awx_secret}}'}}, False),
- ({'env': {'AWX_SECRET=': '{{awx_secret}}'}}, False),
- ({'env': {'ANSIBLE_SETTING': '{{awx_secret}}'}}, False),
- ({'env': {'DRAGON': u'🐉'}}, False),
- ({'env': {u'🐉': 'DRAGON'}}, False),
- ({'extra_vars': 123}, False),
- ({'extra_vars': {}}, True),
- ({'extra_vars': {'hostname': '{{host}}'}}, True),
- ({'extra_vars': {'hostname_99': '{{host}}'}}, True),
- ({'extra_vars': {'99': '{{host}}'}}, False),
- ({'extra_vars': {'99=': '{{host}}'}}, False),
-])
+@pytest.mark.parametrize(
+ 'injectors, valid',
+ [
+ ({}, True),
+ ({'invalid-injector': {}}, False),
+ ({'file': 123}, False),
+ ({'file': {}}, True),
+ # Uses credential inputs inside of unnamed file contents
+ ({'file': {'template': '{{username}}'}}, True),
+ # Uses named file
+ ({'file': {'template.username': '{{username}}'}}, True),
+ # Uses multiple named files
+ ({'file': {'template.username': '{{username}}', 'template.password': '{{pass}}'}}, True),
+ # Use of unnamed file mutually exclusive with use of named files
+ ({'file': {'template': '{{username}}', 'template.password': '{{pass}}'}}, False),
+ # References non-existent named file
+ ({'env': {'FROM_FILE': "{{tower.filename.cert}}"}}, False),
+ # References unnamed file, but a file was never defined
+ ({'env': {'FROM_FILE': "{{tower.filename}}"}}, False),
+ # Cannot reference tower namespace itself (what would this return??)
+ ({'env': {'FROM_FILE': "{{tower}}"}}, False),
+ # References filename of a named file
+ ({'file': {'template.cert': '{{awx_secret}}'}, 'env': {'FROM_FILE': "{{tower.filename.cert}}"}}, True),
+ # With named files, `tower.filename` is another namespace, so it cannot be referenced
+ ({'file': {'template.cert': '{{awx_secret}}'}, 'env': {'FROM_FILE': "{{tower.filename}}"}}, False),
+ # With an unnamed file, `tower.filename` is just the filename
+ ({'file': {'template': '{{awx_secret}}'}, 'env': {'THE_FILENAME': "{{tower.filename}}"}}, True),
+ ({'file': {'foo': 'bar'}}, False),
+ ({'env': 123}, False),
+ ({'env': {}}, True),
+ ({'env': {'AWX_SECRET': '{{awx_secret}}'}}, True),
+ ({'env': {'AWX_SECRET_99': '{{awx_secret}}'}}, True),
+ ({'env': {'99': '{{awx_secret}}'}}, False),
+ ({'env': {'AWX_SECRET=': '{{awx_secret}}'}}, False),
+ ({'env': {'ANSIBLE_SETTING': '{{awx_secret}}'}}, False),
+ ({'env': {'DRAGON': u'🐉'}}, False),
+ ({'env': {u'🐉': 'DRAGON'}}, False),
+ ({'extra_vars': 123}, False),
+ ({'extra_vars': {}}, True),
+ ({'extra_vars': {'hostname': '{{host}}'}}, True),
+ ({'extra_vars': {'hostname_99': '{{host}}'}}, True),
+ ({'extra_vars': {'99': '{{host}}'}}, False),
+ ({'extra_vars': {'99=': '{{host}}'}}, False),
+ ],
+)
def test_cred_type_injectors_schema(injectors, valid):
type_ = CredentialType(
kind='cloud',
@@ -154,7 +160,7 @@ def test_cred_type_injectors_schema(injectors, valid):
{'id': 'host', 'type': 'string', 'label': '_'},
]
},
- injectors=injectors
+ injectors=injectors,
)
field = CredentialType._meta.get_field('injectors')
if valid is False:
@@ -164,38 +170,32 @@ def test_cred_type_injectors_schema(injectors, valid):
field.clean(injectors, type_)
-@pytest.mark.parametrize('inputs', [
- ['must-be-a-dict'],
- {'user': 'wrong-key'},
- {'username': 1},
- {'username': 1.5},
- {'username': ['a', 'b', 'c']},
- {'username': {'a': 'b'}},
- {'flag': 1},
- {'flag': 1.5},
- {'flag': ['a', 'b', 'c']},
- {'flag': {'a': 'b'}},
- {'flag': 'some-string'},
-])
+@pytest.mark.parametrize(
+ 'inputs',
+ [
+ ['must-be-a-dict'],
+ {'user': 'wrong-key'},
+ {'username': 1},
+ {'username': 1.5},
+ {'username': ['a', 'b', 'c']},
+ {'username': {'a': 'b'}},
+ {'flag': 1},
+ {'flag': 1.5},
+ {'flag': ['a', 'b', 'c']},
+ {'flag': {'a': 'b'}},
+ {'flag': 'some-string'},
+ ],
+)
def test_credential_creation_validation_failure(inputs):
type_ = CredentialType(
kind='cloud',
name='SomeCloud',
managed_by_tower=True,
inputs={
- 'fields': [{
- 'id': 'username',
- 'label': 'Username for SomeCloud',
- 'type': 'string'
- },{
- 'id': 'flag',
- 'label': 'Some Boolean Flag',
- 'type': 'boolean'
- }]
- }
+ 'fields': [{'id': 'username', 'label': 'Username for SomeCloud', 'type': 'string'}, {'id': 'flag', 'label': 'Some Boolean Flag', 'type': 'boolean'}]
+ },
)
- cred = Credential(credential_type=type_, name="Bob's Credential",
- inputs=inputs)
+ cred = Credential(credential_type=type_, name="Bob's Credential", inputs=inputs)
field = cred._meta.get_field('inputs')
with pytest.raises(Exception) as e:
@@ -239,11 +239,9 @@ def test_implicit_role_field_parents():
second_field = cls._meta.get_field(field_name)
second_field_descriptor = getattr(cls, field_name)
# all supported linkage types
- assert isinstance(second_field_descriptor, (
- ReverseManyToOneDescriptor, # not currently used
- ImplicitRoleDescriptor,
- ForwardManyToOneDescriptor
- ))
+ assert isinstance(
+ second_field_descriptor, (ReverseManyToOneDescriptor, ImplicitRoleDescriptor, ForwardManyToOneDescriptor) # not currently used
+ )
# only these links are supported
if field_attr:
if isinstance(second_field_descriptor, ReverseManyToOneDescriptor):
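A detail visible in the test_fields.py hunks above: black only collapses a bracketed construct that carries no trailing comma. A pre-existing trailing comma (black's "magic trailing comma", present since release 20.8b0) is taken as a signal to keep the construct exploded one element per line, which is why the {'id': 'tower', 'label': 'Reserved!',} entry stays multi-line while its neighbors are joined. A sketch, assuming a black version with that behavior:

import black

collapsed = black.format_str("x = [1, 2, 3\n]\n", mode=black.FileMode())
exploded = black.format_str("x = [1, 2, 3,\n]\n", mode=black.FileMode())
print(collapsed)  # 'x = [1, 2, 3]\n' -- no trailing comma, so the list is joined
print(exploded)   # one element per line -- the magic trailing comma keeps it exploded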
diff --git a/awx/main/tests/unit/test_redact.py b/awx/main/tests/unit/test_redact.py
index d240ccb9ce..c5585ff75c 100644
--- a/awx/main/tests/unit/test_redact.py
+++ b/awx/main/tests/unit/test_redact.py
@@ -33,9 +33,11 @@ TEST_CLEARTEXT = []
# Arguably, this is a regression test given the below data.
# regression data https://trello.com/c/cdUELgVY/
uri = URI(scheme="https", username="myusername", password="mypasswordwith%40", host="nonexistant.ansible.com/ansible.git/")
-TEST_CLEARTEXT.append({
- 'uri' : uri,
- 'text' : textwrap.dedent("""\
+TEST_CLEARTEXT.append(
+ {
+ 'uri': uri,
+ 'text': textwrap.dedent(
+ """\
PLAY [all] ********************************************************************
TASK: [delete project directory before update] ********************************
@@ -57,14 +59,19 @@ TEST_CLEARTEXT.append({
localhost : ok=0 changed=0 unreachable=0 failed=1
- """ % (uri.username, uri.password, str(uri), str(uri))),
- 'host_occurrences' : 2
-})
+ """
+ % (uri.username, uri.password, str(uri), str(uri))
+ ),
+ 'host_occurrences': 2,
+ }
+)
uri = URI(scheme="https", username="Dhh3U47nmC26xk9PKscV", password="PXPfWW8YzYrgS@E5NbQ2H@", host="github.ginger.com/theirrepo.git/info/refs")
-TEST_CLEARTEXT.append({
- 'uri' : uri,
- 'text' : textwrap.dedent("""\
+TEST_CLEARTEXT.append(
+ {
+ 'uri': uri,
+ 'text': textwrap.dedent(
+ """\
TASK: [update project using git] **
failed: [localhost] => {"cmd": "/usr/bin/git ls-remote https://REDACTED:********", "failed": true, "rc": 128}
stderr: error: Couldn't resolve host '@%s' while accessing %s
@@ -74,17 +81,23 @@ TEST_CLEARTEXT.append({
msg: error: Couldn't resolve host '@%s' while accessing %s
fatal: HTTP request failed
- """ % (uri.host, str(uri), uri.host, str(uri))),
- 'host_occurrences' : 4
-})
-
-
-@pytest.mark.parametrize('username, password, not_uri, expected', [
- ('', '', 'www.famfamfam.com](http://www.famfamfam.com/fijdlfd', 'www.famfamfam.com](http://www.famfamfam.com/fijdlfd'),
- ('', '', 'https://www.famfamfam.com](http://www.famfamfam.com/fijdlfd', '$encrypted$'),
- ('root', 'gigity', 'https://root@gigity@www.famfamfam.com](http://www.famfamfam.com/fijdlfd', '$encrypted$'),
- ('root', 'gigity@', 'https://root:gigity@@@www.famfamfam.com](http://www.famfamfam.com/fijdlfd', '$encrypted$'),
-])
+ """
+ % (uri.host, str(uri), uri.host, str(uri))
+ ),
+ 'host_occurrences': 4,
+ }
+)
+
+
+@pytest.mark.parametrize(
+ 'username, password, not_uri, expected',
+ [
+ ('', '', 'www.famfamfam.com](http://www.famfamfam.com/fijdlfd', 'www.famfamfam.com](http://www.famfamfam.com/fijdlfd'),
+ ('', '', 'https://www.famfamfam.com](http://www.famfamfam.com/fijdlfd', '$encrypted$'),
+ ('root', 'gigity', 'https://root@gigity@www.famfamfam.com](http://www.famfamfam.com/fijdlfd', '$encrypted$'),
+ ('root', 'gigity@', 'https://root:gigity@@@www.famfamfam.com](http://www.famfamfam.com/fijdlfd', '$encrypted$'),
+ ],
+)
# should redact sensitive usernames and passwords
def test_non_uri_redact(username, password, not_uri, expected):
redacted_str = UriCleaner.remove_sensitive(not_uri)
@@ -158,4 +171,3 @@ def test_large_string_performance():
length = 100000
redacted = UriCleaner.remove_sensitive('x' * length)
assert len(redacted) == length
-
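In the test_redact.py hunks above, black moves the % operands of the dedented heredoc out of the argument's closing parenthesis onto continuation lines of their own; the expression is untouched, only its layout changes. The layout also makes the evaluation order explicit: the triple-quoted string is %-interpolated first, and textwrap.dedent then strips the common leading whitespace. A standalone sketch (the host value is illustrative, echoing the test data):

import textwrap

text = textwrap.dedent(
    """\
    stderr: error: Couldn't resolve host '%s'
    """
    % ('nonexistant.ansible.com',)
)
print(text)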
diff --git a/awx/main/tests/unit/test_settings.py b/awx/main/tests/unit/test_settings.py
index d339262808..19b90099a1 100644
--- a/awx/main/tests/unit/test_settings.py
+++ b/awx/main/tests/unit/test_settings.py
@@ -6,4 +6,3 @@ def test_postprocess_auth_basic_enabled():
include('../../../settings/defaults.py', scope=locals())
assert 'awx.api.authentication.LoggedBasicAuthentication' in locals()['REST_FRAMEWORK']['DEFAULT_AUTHENTICATION_CLASSES']
-
diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py
index 3acdd7ead9..01e02b67d7 100644
--- a/awx/main/tests/unit/test_tasks.py
+++ b/awx/main/tests/unit/test_tasks.py
@@ -31,7 +31,7 @@ from awx.main.models import (
UnifiedJob,
User,
CustomInventoryScript,
- build_safe_env
+ build_safe_env,
)
from awx.main.models.credential import ManagedCredentialType
@@ -65,22 +65,21 @@ def patch_Job():
@pytest.fixture
def patch_Organization():
_credentials = []
- credentials_mock = mock.Mock(**{
- 'all': lambda: _credentials,
- 'add': _credentials.append,
- 'exists': lambda: len(_credentials) > 0,
- 'spec_set': ['all', 'add', 'exists'],
- })
+ credentials_mock = mock.Mock(
+ **{
+ 'all': lambda: _credentials,
+ 'add': _credentials.append,
+ 'exists': lambda: len(_credentials) > 0,
+ 'spec_set': ['all', 'add', 'exists'],
+ }
+ )
with mock.patch.object(Organization, 'galaxy_credentials', credentials_mock):
yield
@pytest.fixture
def job():
- return Job(
- pk=1, id=1,
- project=Project(local_path='/projects/_23_foo'),
- inventory=Inventory(), job_template=JobTemplate(id=1, name='foo'))
+ return Job(pk=1, id=1, project=Project(local_path='/projects/_23_foo'), inventory=Inventory(), job_template=JobTemplate(id=1, name='foo'))
@pytest.fixture
@@ -94,6 +93,7 @@ def update_model_wrapper(job):
for k, v in kwargs.items():
setattr(job, k, v)
return job
+
return fn
@@ -103,6 +103,7 @@ def adhoc_update_model_wrapper(adhoc_job):
for k, v in kwargs.items():
setattr(adhoc_job, k, v)
return adhoc_job
+
return fn
@@ -133,7 +134,7 @@ def test_send_notifications_list(mock_notifications_filter, mock_job_get, mocker
mock_notifications = [mocker.MagicMock(spec=Notification, subject="test", body={'hello': 'world'})]
mock_notifications_filter.return_value = mock_notifications
- tasks.send_notifications([1,2], job_id=1)
+ tasks.send_notifications([1, 2], job_id=1)
assert Notification.objects.filter.call_count == 1
assert mock_notifications[0].status == "successful"
assert mock_notifications[0].save.called
@@ -142,13 +143,16 @@ def test_send_notifications_list(mock_notifications_filter, mock_job_get, mocker
assert mock_job.notifications.add.called_with(*mock_notifications)
-@pytest.mark.parametrize("key,value", [
- ('REST_API_TOKEN', 'SECRET'),
- ('SECRET_KEY', 'SECRET'),
- ('VMWARE_PASSWORD', 'SECRET'),
- ('API_SECRET', 'SECRET'),
- ('ANSIBLE_GALAXY_SERVER_PRIMARY_GALAXY_TOKEN', 'SECRET'),
-])
+@pytest.mark.parametrize(
+ "key,value",
+ [
+ ('REST_API_TOKEN', 'SECRET'),
+ ('SECRET_KEY', 'SECRET'),
+ ('VMWARE_PASSWORD', 'SECRET'),
+ ('API_SECRET', 'SECRET'),
+ ('ANSIBLE_GALAXY_SERVER_PRIMARY_GALAXY_TOKEN', 'SECRET'),
+ ],
+)
def test_safe_env_filtering(key, value):
assert build_safe_env({key: value})[key] == tasks.HIDDEN_PASSWORD
@@ -158,9 +162,7 @@ def test_safe_env_returns_new_copy():
assert build_safe_env(env) is not env
-@pytest.mark.parametrize("source,expected", [
- (None, True), (False, False), (True, True)
-])
+@pytest.mark.parametrize("source,expected", [(None, True), (False, False), (True, True)])
def test_openstack_client_config_generation(mocker, source, expected, private_data_dir):
update = tasks.RunInventoryUpdate()
credential_type = CredentialType.defaults['openstack']()
@@ -169,23 +171,23 @@ def test_openstack_client_config_generation(mocker, source, expected, private_da
'username': 'demo',
'password': 'secrete',
'project': 'demo-project',
- 'domain': 'my-demo-domain'
+ 'domain': 'my-demo-domain',
}
if source is not None:
inputs['verify_ssl'] = source
credential = Credential(pk=1, credential_type=credential_type, inputs=inputs)
- inventory_update = mocker.Mock(**{
- 'source': 'openstack',
- 'source_vars_dict': {},
- 'get_cloud_credential': mocker.Mock(return_value=credential),
- 'get_extra_credentials': lambda x: [],
- 'ansible_virtualenv_path': '/var/lib/awx/venv/foo'
- })
- cloud_config = update.build_private_data(inventory_update, private_data_dir)
- cloud_credential = yaml.safe_load(
- cloud_config.get('credentials')[credential]
+ inventory_update = mocker.Mock(
+ **{
+ 'source': 'openstack',
+ 'source_vars_dict': {},
+ 'get_cloud_credential': mocker.Mock(return_value=credential),
+ 'get_extra_credentials': lambda x: [],
+ 'ansible_virtualenv_path': '/var/lib/awx/venv/foo',
+ }
)
+ cloud_config = update.build_private_data(inventory_update, private_data_dir)
+ cloud_credential = yaml.safe_load(cloud_config.get('credentials')[credential])
assert cloud_credential['clouds'] == {
'devstack': {
'auth': {
@@ -201,9 +203,7 @@ def test_openstack_client_config_generation(mocker, source, expected, private_da
}
-@pytest.mark.parametrize("source,expected", [
- (None, True), (False, False), (True, True)
-])
+@pytest.mark.parametrize("source,expected", [(None, True), (False, False), (True, True)])
def test_openstack_client_config_generation_with_project_domain_name(mocker, source, expected, private_data_dir):
update = tasks.RunInventoryUpdate()
credential_type = CredentialType.defaults['openstack']()
@@ -219,17 +219,17 @@ def test_openstack_client_config_generation_with_project_domain_name(mocker, sou
inputs['verify_ssl'] = source
credential = Credential(pk=1, credential_type=credential_type, inputs=inputs)
- inventory_update = mocker.Mock(**{
- 'source': 'openstack',
- 'source_vars_dict': {},
- 'get_cloud_credential': mocker.Mock(return_value=credential),
- 'get_extra_credentials': lambda x: [],
- 'ansible_virtualenv_path': '/var/lib/awx/venv/foo'
- })
- cloud_config = update.build_private_data(inventory_update, private_data_dir)
- cloud_credential = yaml.safe_load(
- cloud_config.get('credentials')[credential]
+ inventory_update = mocker.Mock(
+ **{
+ 'source': 'openstack',
+ 'source_vars_dict': {},
+ 'get_cloud_credential': mocker.Mock(return_value=credential),
+ 'get_extra_credentials': lambda x: [],
+ 'ansible_virtualenv_path': '/var/lib/awx/venv/foo',
+ }
)
+ cloud_config = update.build_private_data(inventory_update, private_data_dir)
+ cloud_credential = yaml.safe_load(cloud_config.get('credentials')[credential])
assert cloud_credential['clouds'] == {
'devstack': {
'auth': {
@@ -246,9 +246,7 @@ def test_openstack_client_config_generation_with_project_domain_name(mocker, sou
}
-@pytest.mark.parametrize("source,expected", [
- (None, True), (False, False), (True, True)
-])
+@pytest.mark.parametrize("source,expected", [(None, True), (False, False), (True, True)])
def test_openstack_client_config_generation_with_region(mocker, source, expected, private_data_dir):
update = tasks.RunInventoryUpdate()
credential_type = CredentialType.defaults['openstack']()
@@ -265,17 +263,17 @@ def test_openstack_client_config_generation_with_region(mocker, source, expected
inputs['verify_ssl'] = source
credential = Credential(pk=1, credential_type=credential_type, inputs=inputs)
- inventory_update = mocker.Mock(**{
- 'source': 'openstack',
- 'source_vars_dict': {},
- 'get_cloud_credential': mocker.Mock(return_value=credential),
- 'get_extra_credentials': lambda x: [],
- 'ansible_virtualenv_path': '/venv/foo'
- })
- cloud_config = update.build_private_data(inventory_update, private_data_dir)
- cloud_credential = yaml.safe_load(
- cloud_config.get('credentials')[credential]
+ inventory_update = mocker.Mock(
+ **{
+ 'source': 'openstack',
+ 'source_vars_dict': {},
+ 'get_cloud_credential': mocker.Mock(return_value=credential),
+ 'get_extra_credentials': lambda x: [],
+ 'ansible_virtualenv_path': '/venv/foo',
+ }
)
+ cloud_config = update.build_private_data(inventory_update, private_data_dir)
+ cloud_credential = yaml.safe_load(cloud_config.get('credentials')[credential])
assert cloud_credential['clouds'] == {
'devstack': {
'auth': {
@@ -293,9 +291,7 @@ def test_openstack_client_config_generation_with_region(mocker, source, expected
}
-@pytest.mark.parametrize("source,expected", [
- (False, False), (True, True)
-])
+@pytest.mark.parametrize("source,expected", [(False, False), (True, True)])
def test_openstack_client_config_generation_with_private_source_vars(mocker, source, expected, private_data_dir):
update = tasks.RunInventoryUpdate()
credential_type = CredentialType.defaults['openstack']()
@@ -309,27 +305,22 @@ def test_openstack_client_config_generation_with_private_source_vars(mocker, sou
}
credential = Credential(pk=1, credential_type=credential_type, inputs=inputs)
- inventory_update = mocker.Mock(**{
- 'source': 'openstack',
- 'source_vars_dict': {'private': source},
- 'get_cloud_credential': mocker.Mock(return_value=credential),
- 'get_extra_credentials': lambda x: [],
- 'ansible_virtualenv_path': '/var/lib/awx/venv/foo'
- })
- cloud_config = update.build_private_data(inventory_update, private_data_dir)
- cloud_credential = yaml.load(
- cloud_config.get('credentials')[credential], Loader=SafeLoader
+ inventory_update = mocker.Mock(
+ **{
+ 'source': 'openstack',
+ 'source_vars_dict': {'private': source},
+ 'get_cloud_credential': mocker.Mock(return_value=credential),
+ 'get_extra_credentials': lambda x: [],
+ 'ansible_virtualenv_path': '/var/lib/awx/venv/foo',
+ }
)
+ cloud_config = update.build_private_data(inventory_update, private_data_dir)
+ cloud_credential = yaml.load(cloud_config.get('credentials')[credential], Loader=SafeLoader)
assert cloud_credential['clouds'] == {
'devstack': {
- 'auth': {
- 'auth_url': 'https://keystone.openstack.example.org',
- 'password': 'secrete',
- 'project_name': 'demo-project',
- 'username': 'demo'
- },
+ 'auth': {'auth_url': 'https://keystone.openstack.example.org', 'password': 'secrete', 'project_name': 'demo-project', 'username': 'demo'},
'verify': True,
- 'private': expected
+ 'private': expected,
}
}
@@ -341,10 +332,7 @@ def pytest_generate_tests(metafunc):
funcarglist = metafunc.cls.parametrize.get(metafunc.function.__name__)
if funcarglist:
argnames = sorted(funcarglist[0])
- metafunc.parametrize(
- argnames,
- [[funcargs[name] for name in argnames] for funcargs in funcarglist]
- )
+ metafunc.parametrize(argnames, [[funcargs[name] for name in argnames] for funcargs in funcarglist])
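The pytest_generate_tests hook collapsed above is pytest's documented "parametrizing test methods through per-class configuration" idiom: each test class carries a parametrize dict mapping test names to lists of funcarg dicts, and the hook turns those into metafunc.parametrize calls. A condensed sketch of the idiom (the class and argument names here are illustrative, not from this file):

def pytest_generate_tests(metafunc):
    funcarglist = getattr(metafunc.cls, 'parametrize', {}).get(metafunc.function.__name__)
    if funcarglist:
        argnames = sorted(funcarglist[0])
        # one parametrized case per funcarg dict, values ordered to match argnames
        metafunc.parametrize(argnames, [[funcargs[name] for name in argnames] for funcargs in funcarglist])

class TestThing:
    parametrize = {'test_add': [dict(a=1, b=2, expected=3), dict(a=2, b=3, expected=5)]}

    def test_add(self, a, b, expected):
        assert a + b == expected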
def parse_extra_vars(args, private_data_dir):
@@ -363,7 +351,7 @@ class TestExtraVarSanitation(TestJobExecution):
# are deemed trustable, because they can only be added by users w/ enough
# privilege to add/modify a Job Template)
- UNSAFE = '{{ lookup(''pipe'',''ls -la'') }}'
+ UNSAFE = '{{ lookup(' 'pipe' ',' 'ls -la' ') }}'
def test_vars_unsafe_by_default(self, job, private_data_dir):
job.created_by = User(pk=123, username='angry-spud')
@@ -376,21 +364,33 @@ class TestExtraVarSanitation(TestJobExecution):
extra_vars = yaml.load(fd, Loader=SafeLoader)
# ensure that strings are marked as unsafe
- for unsafe in ['awx_job_template_name', 'tower_job_template_name',
- 'awx_user_name', 'tower_job_launch_type',
- 'awx_project_revision',
- 'tower_project_revision', 'tower_user_name',
- 'awx_job_launch_type',
- 'awx_inventory_name', 'tower_inventory_name']:
+ for unsafe in [
+ 'awx_job_template_name',
+ 'tower_job_template_name',
+ 'awx_user_name',
+ 'tower_job_launch_type',
+ 'awx_project_revision',
+ 'tower_project_revision',
+ 'tower_user_name',
+ 'awx_job_launch_type',
+ 'awx_inventory_name',
+ 'tower_inventory_name',
+ ]:
assert hasattr(extra_vars[unsafe], '__UNSAFE__')
# ensure that non-strings are marked as safe
- for safe in ['awx_job_template_id', 'awx_job_id', 'awx_user_id',
- 'tower_user_id', 'tower_job_template_id',
- 'tower_job_id', 'awx_inventory_id', 'tower_inventory_id']:
+ for safe in [
+ 'awx_job_template_id',
+ 'awx_job_id',
+ 'awx_user_id',
+ 'tower_user_id',
+ 'tower_job_template_id',
+ 'tower_job_id',
+ 'awx_inventory_id',
+ 'tower_inventory_id',
+ ]:
assert not hasattr(extra_vars[safe], '__UNSAFE__')
-
def test_launchtime_vars_unsafe(self, job, private_data_dir):
job.extra_vars = json.dumps({'msg': self.UNSAFE})
task = tasks.RunJob()
@@ -440,10 +440,7 @@ class TestExtraVarSanitation(TestJobExecution):
# JT defines `msg=SENSITIVE`, the job *should not* be able to do
# `other_var=SENSITIVE`
job.job_template.extra_vars = json.dumps({'msg': self.UNSAFE})
- job.extra_vars = json.dumps({
- 'msg': 'other-value',
- 'other_var': self.UNSAFE
- })
+ job.extra_vars = json.dumps({'msg': 'other-value', 'other_var': self.UNSAFE})
task = tasks.RunJob()
task.build_extra_vars_file(job, private_data_dir)
@@ -469,12 +466,9 @@ class TestExtraVarSanitation(TestJobExecution):
assert hasattr(extra_vars['msg'], '__UNSAFE__')
-class TestGenericRun():
-
+class TestGenericRun:
def test_generic_failure(self, patch_Job):
- job = Job(
- status='running', inventory=Inventory(),
- project=Project(local_path='/projects/_23_foo'))
+ job = Job(status='running', inventory=Inventory(), project=Project(local_path='/projects/_23_foo'))
job.websocket_emit_status = mock.Mock()
task = tasks.RunJob()
@@ -506,10 +500,7 @@ class TestGenericRun():
with pytest.raises(Exception):
task.run(1)
- for c in [
- mock.call(1, status='running', start_args=''),
- mock.call(1, status='canceled')
- ]:
+ for c in [mock.call(1, status='running', start_args=''), mock.call(1, status='canceled')]:
assert c in task.update_model.call_args_list
def test_event_count(self):
@@ -531,8 +522,9 @@ class TestGenericRun():
task.dispatcher.dispatch.assert_called_with({'event': 'EOF', 'final_counter': 17, 'job_id': 1, 'guid': None})
def test_save_job_metadata(self, job, update_model_wrapper):
- class MockMe():
+ class MockMe:
pass
+
task = tasks.RunJob()
task.instance = job
task.safe_env = {'secret_key': 'redacted_value'}
@@ -543,9 +535,9 @@ class TestGenericRun():
runner_config.env = {'switch': 'blade', 'foot': 'ball', 'secret_key': 'secret_value'}
task.status_handler({'status': 'starting'}, runner_config)
- task.update_model.assert_called_with(1, job_args=json.dumps({'foo': 'bar'}),
- job_cwd='/foobar', job_env={'switch': 'blade', 'foot': 'ball', 'secret_key': 'redacted_value'})
-
+ task.update_model.assert_called_with(
+ 1, job_args=json.dumps({'foo': 'bar'}), job_cwd='/foobar', job_env={'switch': 'blade', 'foot': 'ball', 'secret_key': 'redacted_value'}
+ )
@mock.patch('os.makedirs')
def test_build_params_resource_profiling(self, os_makedirs):
@@ -562,10 +554,7 @@ class TestGenericRun():
assert resource_profiling_params['resource_profiling_pid_poll_interval'] == '0.25'
assert resource_profiling_params['resource_profiling_results_dir'] == '/runner/artifacts/playbook_profiling'
-
- @pytest.mark.parametrize("scenario, profiling_enabled", [
- ('global_setting', True),
- ('default', False)])
+ @pytest.mark.parametrize("scenario, profiling_enabled", [('global_setting', True), ('default', False)])
def test_should_use_resource_profiling(self, scenario, profiling_enabled, settings):
job = Job(project=Project(), inventory=Inventory())
task = tasks.RunJob()
@@ -593,12 +582,8 @@ class TestGenericRun():
def test_survey_extra_vars(self):
job = Job()
- job.extra_vars = json.dumps({
- 'super_secret': encrypt_value('CLASSIFIED', pk=None)
- })
- job.survey_passwords = {
- 'super_secret': '$encrypted$'
- }
+ job.extra_vars = json.dumps({'super_secret': encrypt_value('CLASSIFIED', pk=None)})
+ job.survey_passwords = {'super_secret': '$encrypted$'}
task = tasks.RunJob()
task._write_extra_vars_file = mock.Mock()
@@ -622,7 +607,6 @@ class TestGenericRun():
@pytest.mark.django_db
class TestAdhocRun(TestJobExecution):
-
def test_options_jinja_usage(self, adhoc_job, adhoc_update_model_wrapper):
ExecutionEnvironment.objects.create(name='test EE', managed_by_tower=True)
@@ -697,15 +681,7 @@ class TestIsolatedExecution(TestJobExecution):
def test_with_ssh_credentials(self, job):
ssh = CredentialType.defaults['ssh']()
- credential = Credential(
- pk=1,
- credential_type=ssh,
- inputs = {
- 'username': 'bob',
- 'password': 'secret',
- 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY
- }
- )
+ credential = Credential(pk=1, credential_type=ssh, inputs={'username': 'bob', 'password': 'secret', 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY})
credential.inputs['password'] = encrypt_field(credential, 'password')
job.credentials.add(credential)
@@ -725,15 +701,26 @@ class TestIsolatedExecution(TestJobExecution):
with open(os.path.join(artifacts, filename), 'w') as f:
f.write(data)
return ('successful', 0)
+
self.run_pexpect.side_effect = _mock_job_artifacts
self.task.run(self.pk)
playbook_run = self.run_pexpect.call_args_list[0][0]
- assert ' '.join(playbook_run[0]).startswith(' '.join([
- 'ansible-playbook', 'run_isolated.yml', '-u', settings.AWX_ISOLATED_USERNAME,
- '-T', str(settings.AWX_ISOLATED_CONNECTION_TIMEOUT), '-i', self.ISOLATED_HOST + ',',
- '-e',
- ]))
+ assert ' '.join(playbook_run[0]).startswith(
+ ' '.join(
+ [
+ 'ansible-playbook',
+ 'run_isolated.yml',
+ '-u',
+ settings.AWX_ISOLATED_USERNAME,
+ '-T',
+ str(settings.AWX_ISOLATED_CONNECTION_TIMEOUT),
+ '-i',
+ self.ISOLATED_HOST + ',',
+ '-e',
+ ]
+ )
+ )
extra_vars = playbook_run[0][playbook_run[0].index('-e') + 1]
extra_vars = json.loads(extra_vars)
assert extra_vars['dest'] == '/tmp'
@@ -747,7 +734,9 @@ class TestIsolatedExecution(TestJobExecution):
credential = Credential(
pk=1,
credential_type=ssh,
- inputs = {'username': 'bob',}
+ inputs={
+ 'username': 'bob',
+ },
)
self.instance.credentials.add(credential)
@@ -760,12 +749,11 @@ class TestIsolatedExecution(TestJobExecution):
if not os.path.exists(artifacts):
os.makedirs(artifacts)
if 'run_isolated.yml' in args[0]:
- for filename, data in (
- ['daemon.log', 'ERROR IN RUN.PY'],
- ):
+ for filename, data in (['daemon.log', 'ERROR IN RUN.PY'],):
with open(os.path.join(artifacts, filename), 'w') as f:
f.write(data)
return ('successful', 0)
+
self.run_pexpect.side_effect = _mock_job_artifacts
with mock.patch('time.sleep'):
@@ -786,18 +774,17 @@ class TestJobCredentials(TestJobExecution):
creds = job._credentials
if credential_type__kind:
creds = [c for c in creds if c.credential_type.kind == credential_type__kind]
- return mock.Mock(
- __iter__ = lambda *args: iter(creds),
- first = lambda: creds[0] if len(creds) else None
- )
-
- credentials_mock = mock.Mock(**{
- 'all': lambda: job._credentials,
- 'add': job._credentials.append,
- 'filter.side_effect': _credentials_filter,
- 'prefetch_related': lambda _: credentials_mock,
- 'spec_set': ['all', 'add', 'filter', 'prefetch_related'],
- })
+ return mock.Mock(__iter__=lambda *args: iter(creds), first=lambda: creds[0] if len(creds) else None)
+
+ credentials_mock = mock.Mock(
+ **{
+ 'all': lambda: job._credentials,
+ 'add': job._credentials.append,
+ 'filter.side_effect': _credentials_filter,
+ 'prefetch_related': lambda _: credentials_mock,
+ 'spec_set': ['all', 'add', 'filter', 'prefetch_related'],
+ }
+ )
with mock.patch.object(UnifiedJob, 'credentials', credentials_mock):
yield job
@@ -808,6 +795,7 @@ class TestJobCredentials(TestJobExecution):
for k, v in kwargs.items():
setattr(job, k, v)
return job
+
return fn
parametrize = {
@@ -821,11 +809,7 @@ class TestJobCredentials(TestJobExecution):
def test_username_jinja_usage(self, job, private_data_dir):
task = tasks.RunJob()
ssh = CredentialType.defaults['ssh']()
- credential = Credential(
- pk=1,
- credential_type=ssh,
- inputs = {'username': '{{ ansible_ssh_pass }}'}
- )
+ credential = Credential(pk=1, credential_type=ssh, inputs={'username': '{{ ansible_ssh_pass }}'})
job.credentials.add(credential)
with pytest.raises(ValueError) as e:
task.build_args(job, private_data_dir, {})
@@ -836,11 +820,7 @@ class TestJobCredentials(TestJobExecution):
def test_become_jinja_usage(self, job, private_data_dir, flag):
task = tasks.RunJob()
ssh = CredentialType.defaults['ssh']()
- credential = Credential(
- pk=1,
- credential_type=ssh,
- inputs = {'username': 'joe', flag: '{{ ansible_ssh_pass }}'}
- )
+ credential = Credential(pk=1, credential_type=ssh, inputs={'username': 'joe', flag: '{{ ansible_ssh_pass }}'})
job.credentials.add(credential)
with pytest.raises(ValueError) as e:
@@ -851,11 +831,7 @@ class TestJobCredentials(TestJobExecution):
def test_ssh_passwords(self, job, private_data_dir, field, password_name, expected_flag):
task = tasks.RunJob()
ssh = CredentialType.defaults['ssh']()
- credential = Credential(
- pk=1,
- credential_type=ssh,
- inputs = {'username': 'bob', field: 'secret'}
- )
+ credential = Credential(pk=1, credential_type=ssh, inputs={'username': 'bob', field: 'secret'})
credential.inputs[field] = encrypt_field(credential, field)
job.credentials.add(credential)
@@ -872,11 +848,7 @@ class TestJobCredentials(TestJobExecution):
def test_net_ssh_key_unlock(self, job):
task = tasks.RunJob()
net = CredentialType.defaults['net']()
- credential = Credential(
- pk=1,
- credential_type=net,
- inputs = {'ssh_key_unlock': 'secret'}
- )
+ credential = Credential(pk=1, credential_type=net, inputs={'ssh_key_unlock': 'secret'})
credential.inputs['ssh_key_unlock'] = encrypt_field(credential, 'ssh_key_unlock')
job.credentials.add(credential)
@@ -890,11 +862,7 @@ class TestJobCredentials(TestJobExecution):
task = tasks.RunJob()
for i in range(3):
net = CredentialType.defaults['net']()
- credential = Credential(
- pk=i,
- credential_type=net,
- inputs = {'ssh_key_unlock': 'secret{}'.format(i)}
- )
+ credential = Credential(pk=i, credential_type=net, inputs={'ssh_key_unlock': 'secret{}'.format(i)})
credential.inputs['ssh_key_unlock'] = encrypt_field(credential, 'ssh_key_unlock')
job.credentials.add(credential)
@@ -907,19 +875,11 @@ class TestJobCredentials(TestJobExecution):
def test_prefer_ssh_over_net_ssh_key_unlock(self, job):
task = tasks.RunJob()
net = CredentialType.defaults['net']()
- net_credential = Credential(
- pk=1,
- credential_type=net,
- inputs = {'ssh_key_unlock': 'net_secret'}
- )
+ net_credential = Credential(pk=1, credential_type=net, inputs={'ssh_key_unlock': 'net_secret'})
net_credential.inputs['ssh_key_unlock'] = encrypt_field(net_credential, 'ssh_key_unlock')
ssh = CredentialType.defaults['ssh']()
- ssh_credential = Credential(
- pk=2,
- credential_type=ssh,
- inputs = {'ssh_key_unlock': 'ssh_secret'}
- )
+ ssh_credential = Credential(pk=2, credential_type=ssh, inputs={'ssh_key_unlock': 'ssh_secret'})
ssh_credential.inputs['ssh_key_unlock'] = encrypt_field(ssh_credential, 'ssh_key_unlock')
job.credentials.add(net_credential)
@@ -934,11 +894,7 @@ class TestJobCredentials(TestJobExecution):
def test_vault_password(self, private_data_dir, job):
task = tasks.RunJob()
vault = CredentialType.defaults['vault']()
- credential = Credential(
- pk=1,
- credential_type=vault,
- inputs={'vault_password': 'vault-me'}
- )
+ credential = Credential(pk=1, credential_type=vault, inputs={'vault_password': 'vault-me'})
credential.inputs['vault_password'] = encrypt_field(credential, 'vault_password')
job.credentials.add(credential)
@@ -947,17 +903,13 @@ class TestJobCredentials(TestJobExecution):
password_prompts = task.get_password_prompts(passwords)
expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords)
- assert expect_passwords['Vault password:\s*?$'] == 'vault-me' # noqa
+ assert expect_passwords['Vault password:\s*?$'] == 'vault-me' # noqa
assert '--ask-vault-pass' in ' '.join(args)
def test_vault_password_ask(self, private_data_dir, job):
task = tasks.RunJob()
vault = CredentialType.defaults['vault']()
- credential = Credential(
- pk=1,
- credential_type=vault,
- inputs={'vault_password': 'ASK'}
- )
+ credential = Credential(pk=1, credential_type=vault, inputs={'vault_password': 'ASK'})
credential.inputs['vault_password'] = encrypt_field(credential, 'vault_password')
job.credentials.add(credential)
@@ -966,18 +918,14 @@ class TestJobCredentials(TestJobExecution):
password_prompts = task.get_password_prompts(passwords)
expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords)
- assert expect_passwords['Vault password:\s*?$'] == 'provided-at-launch' # noqa
+ assert expect_passwords['Vault password:\s*?$'] == 'provided-at-launch' # noqa
assert '--ask-vault-pass' in ' '.join(args)
def test_multi_vault_password(self, private_data_dir, job):
task = tasks.RunJob()
vault = CredentialType.defaults['vault']()
for i, label in enumerate(['dev', 'prod', 'dotted.name']):
- credential = Credential(
- pk=i,
- credential_type=vault,
- inputs={'vault_password': 'pass@{}'.format(label), 'vault_id': label}
- )
+ credential = Credential(pk=i, credential_type=vault, inputs={'vault_password': 'pass@{}'.format(label), 'vault_id': label})
credential.inputs['vault_password'] = encrypt_field(credential, 'vault_password')
job.credentials.add(credential)
@@ -986,10 +934,7 @@ class TestJobCredentials(TestJobExecution):
password_prompts = task.get_password_prompts(passwords)
expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords)
- vault_passwords = dict(
- (k, v) for k, v in expect_passwords.items()
- if 'Vault' in k
- )
+ vault_passwords = dict((k, v) for k, v in expect_passwords.items() if 'Vault' in k)
assert vault_passwords['Vault password \(prod\):\\s*?$'] == 'pass@prod' # noqa
assert vault_passwords['Vault password \(dev\):\\s*?$'] == 'pass@dev' # noqa
assert vault_passwords['Vault password \(dotted.name\):\\s*?$'] == 'pass@dotted.name' # noqa
@@ -1003,11 +948,7 @@ class TestJobCredentials(TestJobExecution):
task = tasks.RunJob()
vault = CredentialType.defaults['vault']()
for i in range(2):
- credential = Credential(
- pk=i,
- credential_type=vault,
- inputs={'vault_password': 'some-pass', 'vault_id': 'conflict'}
- )
+ credential = Credential(pk=i, credential_type=vault, inputs={'vault_password': 'some-pass', 'vault_id': 'conflict'})
credential.inputs['vault_password'] = encrypt_field(credential, 'vault_password')
job.credentials.add(credential)
@@ -1020,25 +961,15 @@ class TestJobCredentials(TestJobExecution):
task = tasks.RunJob()
vault = CredentialType.defaults['vault']()
for i, label in enumerate(['dev', 'prod']):
- credential = Credential(
- pk=i,
- credential_type=vault,
- inputs={'vault_password': 'ASK', 'vault_id': label}
- )
+ credential = Credential(pk=i, credential_type=vault, inputs={'vault_password': 'ASK', 'vault_id': label})
credential.inputs['vault_password'] = encrypt_field(credential, 'vault_password')
job.credentials.add(credential)
- passwords = task.build_passwords(job, {
- 'vault_password.dev': 'provided-at-launch@dev',
- 'vault_password.prod': 'provided-at-launch@prod'
- })
+ passwords = task.build_passwords(job, {'vault_password.dev': 'provided-at-launch@dev', 'vault_password.prod': 'provided-at-launch@prod'})
args = task.build_args(job, private_data_dir, passwords)
password_prompts = task.get_password_prompts(passwords)
expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords)
- vault_passwords = dict(
- (k, v) for k, v in expect_passwords.items()
- if 'Vault' in k
- )
+ vault_passwords = dict((k, v) for k, v in expect_passwords.items() if 'Vault' in k)
assert vault_passwords['Vault password \(prod\):\\s*?$'] == 'provided-at-launch@prod' # noqa
assert vault_passwords['Vault password \(dev\):\\s*?$'] == 'provided-at-launch@dev' # noqa
assert vault_passwords['Vault password:\\s*?$'] == '' # noqa
@@ -1059,16 +990,14 @@ class TestJobCredentials(TestJobExecution):
credential = Credential(
pk=1,
credential_type=k8s,
- inputs = inputs,
+ inputs=inputs,
)
credential.inputs['bearer_token'] = encrypt_field(credential, 'bearer_token')
job.credentials.add(credential)
env = {}
safe_env = {}
- credential.credential_type.inject_credential(
- credential, env, safe_env, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
assert env['K8S_AUTH_HOST'] == 'https://example.org/'
assert env['K8S_AUTH_API_KEY'] == 'token123'
@@ -1085,19 +1014,13 @@ class TestJobCredentials(TestJobExecution):
def test_aws_cloud_credential(self, job, private_data_dir):
aws = CredentialType.defaults['aws']()
- credential = Credential(
- pk=1,
- credential_type=aws,
- inputs = {'username': 'bob', 'password': 'secret'}
- )
+ credential = Credential(pk=1, credential_type=aws, inputs={'username': 'bob', 'password': 'secret'})
credential.inputs['password'] = encrypt_field(credential, 'password')
job.credentials.add(credential)
env = {}
safe_env = {}
- credential.credential_type.inject_credential(
- credential, env, safe_env, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
assert env['AWS_ACCESS_KEY_ID'] == 'bob'
assert env['AWS_SECRET_ACCESS_KEY'] == 'secret'
@@ -1106,20 +1029,14 @@ class TestJobCredentials(TestJobExecution):
def test_aws_cloud_credential_with_sts_token(self, private_data_dir, job):
aws = CredentialType.defaults['aws']()
- credential = Credential(
- pk=1,
- credential_type=aws,
- inputs = {'username': 'bob', 'password': 'secret', 'security_token': 'token'}
- )
+ credential = Credential(pk=1, credential_type=aws, inputs={'username': 'bob', 'password': 'secret', 'security_token': 'token'})
for key in ('password', 'security_token'):
credential.inputs[key] = encrypt_field(credential, key)
job.credentials.add(credential)
env = {}
safe_env = {}
- credential.credential_type.inject_credential(
- credential, env, safe_env, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
assert env['AWS_ACCESS_KEY_ID'] == 'bob'
assert env['AWS_SECRET_ACCESS_KEY'] == 'secret'
@@ -1128,23 +1045,13 @@ class TestJobCredentials(TestJobExecution):
def test_gce_credentials(self, private_data_dir, job):
gce = CredentialType.defaults['gce']()
- credential = Credential(
- pk=1,
- credential_type=gce,
- inputs = {
- 'username': 'bob',
- 'project': 'some-project',
- 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY
- }
- )
+ credential = Credential(pk=1, credential_type=gce, inputs={'username': 'bob', 'project': 'some-project', 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY})
credential.inputs['ssh_key_data'] = encrypt_field(credential, 'ssh_key_data')
job.credentials.add(credential)
env = {}
safe_env = {}
- credential.credential_type.inject_credential(
- credential, env, safe_env, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
runner_path = env['GCE_CREDENTIALS_FILE_PATH']
local_path = os.path.join(private_data_dir, os.path.basename(runner_path))
json_data = json.load(open(local_path, 'rb'))
@@ -1156,23 +1063,14 @@ class TestJobCredentials(TestJobExecution):
def test_azure_rm_with_tenant(self, private_data_dir, job):
azure = CredentialType.defaults['azure_rm']()
credential = Credential(
- pk=1,
- credential_type=azure,
- inputs = {
- 'client': 'some-client',
- 'secret': 'some-secret',
- 'tenant': 'some-tenant',
- 'subscription': 'some-subscription'
- }
+ pk=1, credential_type=azure, inputs={'client': 'some-client', 'secret': 'some-secret', 'tenant': 'some-tenant', 'subscription': 'some-subscription'}
)
credential.inputs['secret'] = encrypt_field(credential, 'secret')
job.credentials.add(credential)
env = {}
safe_env = {}
- credential.credential_type.inject_credential(
- credential, env, safe_env, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
assert env['AZURE_CLIENT_ID'] == 'some-client'
assert env['AZURE_SECRET'] == 'some-secret'
@@ -1183,23 +1081,14 @@ class TestJobCredentials(TestJobExecution):
def test_azure_rm_with_password(self, private_data_dir, job):
azure = CredentialType.defaults['azure_rm']()
credential = Credential(
- pk=1,
- credential_type=azure,
- inputs = {
- 'subscription': 'some-subscription',
- 'username': 'bob',
- 'password': 'secret',
- 'cloud_environment': 'foobar'
- }
+ pk=1, credential_type=azure, inputs={'subscription': 'some-subscription', 'username': 'bob', 'password': 'secret', 'cloud_environment': 'foobar'}
)
credential.inputs['password'] = encrypt_field(credential, 'password')
job.credentials.add(credential)
env = {}
safe_env = {}
- credential.credential_type.inject_credential(
- credential, env, safe_env, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
assert env['AZURE_SUBSCRIPTION_ID'] == 'some-subscription'
assert env['AZURE_AD_USER'] == 'bob'
@@ -1209,19 +1098,13 @@ class TestJobCredentials(TestJobExecution):
def test_vmware_credentials(self, private_data_dir, job):
vmware = CredentialType.defaults['vmware']()
- credential = Credential(
- pk=1,
- credential_type=vmware,
- inputs = {'username': 'bob', 'password': 'secret', 'host': 'https://example.org'}
- )
+ credential = Credential(pk=1, credential_type=vmware, inputs={'username': 'bob', 'password': 'secret', 'host': 'https://example.org'})
credential.inputs['password'] = encrypt_field(credential, 'password')
job.credentials.add(credential)
env = {}
safe_env = {}
- credential.credential_type.inject_credential(
- credential, env, safe_env, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
assert env['VMWARE_USER'] == 'bob'
assert env['VMWARE_PASSWORD'] == 'secret'
@@ -1232,40 +1115,31 @@ class TestJobCredentials(TestJobExecution):
task = tasks.RunJob()
openstack = CredentialType.defaults['openstack']()
credential = Credential(
- pk=1,
- credential_type=openstack,
- inputs = {
- 'username': 'bob',
- 'password': 'secret',
- 'project': 'tenant-name',
- 'host': 'https://keystone.example.org'
- }
+ pk=1, credential_type=openstack, inputs={'username': 'bob', 'password': 'secret', 'project': 'tenant-name', 'host': 'https://keystone.example.org'}
)
credential.inputs['password'] = encrypt_field(credential, 'password')
job.credentials.add(credential)
private_data_files = task.build_private_data_files(job, private_data_dir)
env = task.build_env(job, private_data_dir, private_data_files=private_data_files)
- credential.credential_type.inject_credential(
- credential, env, {}, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
# convert container path to host machine path
- config_loc = os.path.join(
- private_data_dir, os.path.basename(env['OS_CLIENT_CONFIG_FILE'])
- )
+ config_loc = os.path.join(private_data_dir, os.path.basename(env['OS_CLIENT_CONFIG_FILE']))
shade_config = open(config_loc, 'r').read()
- assert shade_config == '\n'.join([
- 'clouds:',
- ' devstack:',
- ' auth:',
- ' auth_url: https://keystone.example.org',
- ' password: secret',
- ' project_name: tenant-name',
- ' username: bob',
- ' verify: true',
- ''
- ])
+ assert shade_config == '\n'.join(
+ [
+ 'clouds:',
+ ' devstack:',
+ ' auth:',
+ ' auth_url: https://keystone.example.org',
+ ' password: secret',
+ ' project_name: tenant-name',
+ ' username: bob',
+ ' verify: true',
+ '',
+ ]
+ )
@pytest.mark.parametrize("ca_file", [None, '/path/to/some/file'])
def test_rhv_credentials(self, private_data_dir, job, ca_file):
@@ -1277,19 +1151,13 @@ class TestJobCredentials(TestJobExecution):
}
if ca_file:
inputs['ca_file'] = ca_file
- credential = Credential(
- pk=1,
- credential_type=rhv,
- inputs=inputs
- )
+ credential = Credential(pk=1, credential_type=rhv, inputs=inputs)
credential.inputs['password'] = encrypt_field(credential, 'password')
job.credentials.add(credential)
env = {}
safe_env = {}
- credential.credential_type.inject_credential(
- credential, env, safe_env, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
config = configparser.ConfigParser()
config.read(os.path.join(private_data_dir, os.path.basename(env['OVIRT_INI_PATH'])))
@@ -1302,20 +1170,18 @@ class TestJobCredentials(TestJobExecution):
with pytest.raises(configparser.NoOptionError):
config.get('ovirt', 'ovirt_ca_file')
- @pytest.mark.parametrize('authorize, expected_authorize', [
- [True, '1'],
- [False, '0'],
- [None, '0'],
- ])
+ @pytest.mark.parametrize(
+ 'authorize, expected_authorize',
+ [
+ [True, '1'],
+ [False, '0'],
+ [None, '0'],
+ ],
+ )
def test_net_credentials(self, authorize, expected_authorize, job, private_data_dir):
task = tasks.RunJob()
net = CredentialType.defaults['net']()
- inputs = {
- 'username': 'bob',
- 'password': 'secret',
- 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY,
- 'authorize_password': 'authorizeme'
- }
+ inputs = {'username': 'bob', 'password': 'secret', 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY, 'authorize_password': 'authorizeme'}
if authorize is not None:
inputs['authorize'] = authorize
credential = Credential(pk=1, credential_type=net, inputs=inputs)
@@ -1326,9 +1192,7 @@ class TestJobCredentials(TestJobExecution):
private_data_files = task.build_private_data_files(job, private_data_dir)
env = task.build_env(job, private_data_dir, private_data_files=private_data_files)
safe_env = build_safe_env(env)
- credential.credential_type.inject_credential(
- credential, env, safe_env, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
assert env['ANSIBLE_NET_USERNAME'] == 'bob'
assert env['ANSIBLE_NET_PASSWORD'] == 'secret'
@@ -1343,58 +1207,26 @@ class TestJobCredentials(TestJobExecution):
kind='cloud',
name='SomeCloud',
managed_by_tower=False,
- inputs={
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'string'
- }]
- },
- injectors={
- 'env': {
- 'MY_CLOUD_API_TOKEN': '{{api_token.foo()}}'
- }
- }
- )
- credential = Credential(
- pk=1,
- credential_type=some_cloud,
- inputs = {'api_token': 'ABC123'}
+ inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
+ injectors={'env': {'MY_CLOUD_API_TOKEN': '{{api_token.foo()}}'}},
)
+ credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'})
with pytest.raises(jinja2.exceptions.UndefinedError):
- credential.credential_type.inject_credential(
- credential, {}, {}, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, {}, {}, [], private_data_dir)
def test_custom_environment_injectors(self, private_data_dir):
some_cloud = CredentialType(
kind='cloud',
name='SomeCloud',
managed_by_tower=False,
- inputs={
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'string'
- }]
- },
- injectors={
- 'env': {
- 'MY_CLOUD_API_TOKEN': '{{api_token}}'
- }
- }
- )
- credential = Credential(
- pk=1,
- credential_type=some_cloud,
- inputs = {'api_token': 'ABC123'}
+ inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
+ injectors={'env': {'MY_CLOUD_API_TOKEN': '{{api_token}}'}},
)
+ credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'})
env = {}
- credential.credential_type.inject_credential(
- credential, env, {}, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
assert env['MY_CLOUD_API_TOKEN'] == 'ABC123'
@@ -1403,29 +1235,13 @@ class TestJobCredentials(TestJobExecution):
kind='cloud',
name='SomeCloud',
managed_by_tower=False,
- inputs={
- 'fields': [{
- 'id': 'turbo_button',
- 'label': 'Turbo Button',
- 'type': 'boolean'
- }]
- },
- injectors={
- 'env': {
- 'TURBO_BUTTON': '{{turbo_button}}'
- }
- }
- )
- credential = Credential(
- pk=1,
- credential_type=some_cloud,
- inputs={'turbo_button': True}
+ inputs={'fields': [{'id': 'turbo_button', 'label': 'Turbo Button', 'type': 'boolean'}]},
+ injectors={'env': {'TURBO_BUTTON': '{{turbo_button}}'}},
)
+ credential = Credential(pk=1, credential_type=some_cloud, inputs={'turbo_button': True})
env = {}
- credential.credential_type.inject_credential(
- credential, env, {}, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
assert env['TURBO_BUTTON'] == str(True)
@@ -1435,24 +1251,10 @@ class TestJobCredentials(TestJobExecution):
kind='cloud',
name='SomeCloud',
managed_by_tower=False,
- inputs={
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'string'
- }]
- },
- injectors={
- 'env': {
- 'JOB_ID': 'reserved'
- }
- }
- )
- credential = Credential(
- pk=1,
- credential_type=some_cloud,
- inputs = {'api_token': 'ABC123'}
+ inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
+ injectors={'env': {'JOB_ID': 'reserved'}},
)
+ credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'})
job.credentials.add(credential)
env = task.build_env(job, private_data_dir)
@@ -1464,32 +1266,15 @@ class TestJobCredentials(TestJobExecution):
kind='cloud',
name='SomeCloud',
managed_by_tower=False,
- inputs={
- 'fields': [{
- 'id': 'password',
- 'label': 'Password',
- 'type': 'string',
- 'secret': True
- }]
- },
- injectors={
- 'env': {
- 'MY_CLOUD_PRIVATE_VAR': '{{password}}'
- }
- }
- )
- credential = Credential(
- pk=1,
- credential_type=some_cloud,
- inputs = {'password': 'SUPER-SECRET-123'}
+ inputs={'fields': [{'id': 'password', 'label': 'Password', 'type': 'string', 'secret': True}]},
+ injectors={'env': {'MY_CLOUD_PRIVATE_VAR': '{{password}}'}},
)
+ credential = Credential(pk=1, credential_type=some_cloud, inputs={'password': 'SUPER-SECRET-123'})
credential.inputs['password'] = encrypt_field(credential, 'password')
env = {}
safe_env = {}
- credential.credential_type.inject_credential(
- credential, env, safe_env, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
assert env['MY_CLOUD_PRIVATE_VAR'] == 'SUPER-SECRET-123'
assert 'SUPER-SECRET-123' not in safe_env.values()
@@ -1501,30 +1286,14 @@ class TestJobCredentials(TestJobExecution):
kind='cloud',
name='SomeCloud',
managed_by_tower=False,
- inputs={
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'string'
- }]
- },
- injectors={
- 'extra_vars': {
- 'api_token': '{{api_token}}'
- }
- }
- )
- credential = Credential(
- pk=1,
- credential_type=some_cloud,
- inputs = {'api_token': 'ABC123'}
+ inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
+ injectors={'extra_vars': {'api_token': '{{api_token}}'}},
)
+ credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'})
job.credentials.add(credential)
args = task.build_args(job, private_data_dir, {})
- credential.credential_type.inject_credential(
- credential, {}, {}, args, private_data_dir
- )
+ credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir)
extra_vars = parse_extra_vars(args, private_data_dir)
assert extra_vars["api_token"] == "ABC123"
@@ -1536,30 +1305,14 @@ class TestJobCredentials(TestJobExecution):
kind='cloud',
name='SomeCloud',
managed_by_tower=False,
- inputs={
- 'fields': [{
- 'id': 'turbo_button',
- 'label': 'Turbo Button',
- 'type': 'boolean'
- }]
- },
- injectors={
- 'extra_vars': {
- 'turbo_button': '{{turbo_button}}'
- }
- }
- )
- credential = Credential(
- pk=1,
- credential_type=some_cloud,
- inputs={'turbo_button': True}
+ inputs={'fields': [{'id': 'turbo_button', 'label': 'Turbo Button', 'type': 'boolean'}]},
+ injectors={'extra_vars': {'turbo_button': '{{turbo_button}}'}},
)
+ credential = Credential(pk=1, credential_type=some_cloud, inputs={'turbo_button': True})
job.credentials.add(credential)
args = task.build_args(job, private_data_dir, {})
- credential.credential_type.inject_credential(
- credential, {}, {}, args, private_data_dir
- )
+ credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir)
extra_vars = parse_extra_vars(args, private_data_dir)
assert extra_vars["turbo_button"] == "True"
@@ -1571,30 +1324,14 @@ class TestJobCredentials(TestJobExecution):
kind='cloud',
name='SomeCloud',
managed_by_tower=False,
- inputs={
- 'fields': [{
- 'id': 'turbo_button',
- 'label': 'Turbo Button',
- 'type': 'boolean'
- }]
- },
- injectors={
- 'extra_vars': {
- 'turbo_button': '{% if turbo_button %}FAST!{% else %}SLOW!{% endif %}'
- }
- }
- )
- credential = Credential(
- pk=1,
- credential_type=some_cloud,
- inputs={'turbo_button': True}
+ inputs={'fields': [{'id': 'turbo_button', 'label': 'Turbo Button', 'type': 'boolean'}]},
+ injectors={'extra_vars': {'turbo_button': '{% if turbo_button %}FAST!{% else %}SLOW!{% endif %}'}},
)
+ credential = Credential(pk=1, credential_type=some_cloud, inputs={'turbo_button': True})
job.credentials.add(credential)
args = task.build_args(job, private_data_dir, {})
- credential.credential_type.inject_credential(
- credential, {}, {}, args, private_data_dir
- )
+ credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir)
extra_vars = parse_extra_vars(args, private_data_dir)
assert extra_vars["turbo_button"] == "FAST!"
@@ -1608,32 +1345,15 @@ class TestJobCredentials(TestJobExecution):
kind='cloud',
name='SomeCloud',
managed_by_tower=False,
- inputs={
- 'fields': [{
- 'id': 'password',
- 'label': 'Password',
- 'type': 'string',
- 'secret': True
- }]
- },
- injectors={
- 'extra_vars': {
- 'password': '{{password}}'
- }
- }
- )
- credential = Credential(
- pk=1,
- credential_type=some_cloud,
- inputs = {'password': 'SUPER-SECRET-123'}
+ inputs={'fields': [{'id': 'password', 'label': 'Password', 'type': 'string', 'secret': True}]},
+ injectors={'extra_vars': {'password': '{{password}}'}},
)
+ credential = Credential(pk=1, credential_type=some_cloud, inputs={'password': 'SUPER-SECRET-123'})
credential.inputs['password'] = encrypt_field(credential, 'password')
job.credentials.add(credential)
args = task.build_args(job, private_data_dir, {})
- credential.credential_type.inject_credential(
- credential, {}, {}, args, private_data_dir
- )
+ credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir)
extra_vars = parse_extra_vars(args, private_data_dir)
assert extra_vars["password"] == "SUPER-SECRET-123"
@@ -1643,32 +1363,13 @@ class TestJobCredentials(TestJobExecution):
kind='cloud',
name='SomeCloud',
managed_by_tower=False,
- inputs={
- 'fields': [{
- 'id': 'api_token',
- 'label': 'API Token',
- 'type': 'string'
- }]
- },
- injectors={
- 'file': {
- 'template': '[mycloud]\n{{api_token}}'
- },
- 'env': {
- 'MY_CLOUD_INI_FILE': '{{tower.filename}}'
- }
- }
- )
- credential = Credential(
- pk=1,
- credential_type=some_cloud,
- inputs = {'api_token': 'ABC123'}
+ inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
+ injectors={'file': {'template': '[mycloud]\n{{api_token}}'}, 'env': {'MY_CLOUD_INI_FILE': '{{tower.filename}}'}},
)
+ credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'})
env = {}
- credential.credential_type.inject_credential(
- credential, env, {}, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
path = os.path.join(private_data_dir, os.path.basename(env['MY_CLOUD_INI_FILE']))
assert open(path, 'r').read() == '[mycloud]\nABC123'
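# Recap sketch (illustration only, not part of this commit): the injector
# contract exercised by the tests above can be summarized with a hypothetical
# credential type -- 'env' injectors template environment variables,
# 'extra_vars' injectors template Ansible variables, and 'file' injectors
# write templated files whose paths are exposed as {{tower.filename}}:
#
#   CredentialType(
#       kind='cloud',
#       name='Example',
#       managed_by_tower=False,
#       inputs={'fields': [{'id': 'token', 'label': 'Token', 'type': 'string'}]},
#       injectors={
#           'env': {'EXAMPLE_TOKEN': '{{token}}'},
#           'extra_vars': {'token': '{{token}}'},
#           'file': {'template': '[example]\n{{token}}'},
#       },
#   )
#
# 'Example', 'token', and EXAMPLE_TOKEN are made-up names; every template
# value is rendered from the credential's inputs.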
@@ -1680,10 +1381,7 @@ class TestJobCredentials(TestJobExecution):
name='SomeCloud',
managed_by_tower=False,
inputs={'fields': []},
- injectors={
- 'file': {'template': value},
- 'env': {'MY_CLOUD_INI_FILE': '{{tower.filename}}'}
- }
+ injectors={'file': {'template': value}, 'env': {'MY_CLOUD_INI_FILE': '{{tower.filename}}'}},
)
credential = Credential(
pk=1,
@@ -1691,9 +1389,7 @@ class TestJobCredentials(TestJobExecution):
)
env = {}
- credential.credential_type.inject_credential(
- credential, env, {}, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
path = os.path.join(private_data_dir, os.path.basename(env['MY_CLOUD_INI_FILE']))
assert open(path, 'r').read() == value
@@ -1703,38 +1399,16 @@ class TestJobCredentials(TestJobExecution):
kind='cloud',
name='SomeCloud',
managed_by_tower=False,
- inputs={
- 'fields': [{
- 'id': 'cert',
- 'label': 'Certificate',
- 'type': 'string'
- }, {
- 'id': 'key',
- 'label': 'Key',
- 'type': 'string'
- }]
- },
+ inputs={'fields': [{'id': 'cert', 'label': 'Certificate', 'type': 'string'}, {'id': 'key', 'label': 'Key', 'type': 'string'}]},
injectors={
- 'file': {
- 'template.cert': '[mycert]\n{{cert}}',
- 'template.key': '[mykey]\n{{key}}'
- },
- 'env': {
- 'MY_CERT_INI_FILE': '{{tower.filename.cert}}',
- 'MY_KEY_INI_FILE': '{{tower.filename.key}}'
- }
- }
- )
- credential = Credential(
- pk=1,
- credential_type=some_cloud,
- inputs = {'cert': 'CERT123', 'key': 'KEY123'}
+ 'file': {'template.cert': '[mycert]\n{{cert}}', 'template.key': '[mykey]\n{{key}}'},
+ 'env': {'MY_CERT_INI_FILE': '{{tower.filename.cert}}', 'MY_KEY_INI_FILE': '{{tower.filename.key}}'},
+ },
)
+ credential = Credential(pk=1, credential_type=some_cloud, inputs={'cert': 'CERT123', 'key': 'KEY123'})
env = {}
- credential.credential_type.inject_credential(
- credential, env, {}, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
cert_path = os.path.join(private_data_dir, os.path.basename(env['MY_CERT_INI_FILE']))
key_path = os.path.join(private_data_dir, os.path.basename(env['MY_KEY_INI_FILE']))
@@ -1743,36 +1417,18 @@ class TestJobCredentials(TestJobExecution):
def test_multi_cloud(self, private_data_dir):
gce = CredentialType.defaults['gce']()
- gce_credential = Credential(
- pk=1,
- credential_type=gce,
- inputs = {
- 'username': 'bob',
- 'project': 'some-project',
- 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY
- }
- )
+ gce_credential = Credential(pk=1, credential_type=gce, inputs={'username': 'bob', 'project': 'some-project', 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY})
gce_credential.inputs['ssh_key_data'] = encrypt_field(gce_credential, 'ssh_key_data')
azure_rm = CredentialType.defaults['azure_rm']()
- azure_rm_credential = Credential(
- pk=2,
- credential_type=azure_rm,
- inputs = {
- 'subscription': 'some-subscription',
- 'username': 'bob',
- 'password': 'secret'
- }
- )
+ azure_rm_credential = Credential(pk=2, credential_type=azure_rm, inputs={'subscription': 'some-subscription', 'username': 'bob', 'password': 'secret'})
azure_rm_credential.inputs['secret'] = ''
azure_rm_credential.inputs['secret'] = encrypt_field(azure_rm_credential, 'secret')
env = {}
safe_env = {}
for credential in [gce_credential, azure_rm_credential]:
- credential.credential_type.inject_credential(
- credential, env, safe_env, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
assert env['AZURE_SUBSCRIPTION_ID'] == 'some-subscription'
assert env['AZURE_AD_USER'] == 'bob'
@@ -1797,7 +1453,6 @@ class TestJobCredentials(TestJobExecution):
@pytest.mark.usefixtures("patch_Organization")
class TestProjectUpdateGalaxyCredentials(TestJobExecution):
-
@pytest.fixture
def project_update(self):
org = Organization(pk=1)
@@ -1823,7 +1478,6 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution):
assert 'ANSIBLE_GALAXY_IGNORE' not in env
def test_galaxy_credentials_empty(self, private_data_dir, project_update):
-
class RunProjectUpdate(tasks.RunProjectUpdate):
__vars__ = {}
@@ -1849,9 +1503,13 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution):
self.__vars__ = extra_vars
credential_type = CredentialType.defaults['galaxy_api_token']()
- public_galaxy = Credential(pk=1, credential_type=credential_type, inputs={
- 'url': 'https://galaxy.ansible.com/',
- })
+ public_galaxy = Credential(
+ pk=1,
+ credential_type=credential_type,
+ inputs={
+ 'url': 'https://galaxy.ansible.com/',
+ },
+ )
project_update.project.organization.galaxy_credentials.add(public_galaxy)
task = RunProjectUpdate()
env = task.build_env(project_update, private_data_dir)
@@ -1861,32 +1519,34 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution):
assert task.__vars__['roles_enabled'] is True
assert task.__vars__['collections_enabled'] is True
- assert sorted([
- (k, v) for k, v in env.items()
- if k.startswith('ANSIBLE_GALAXY')
- ]) == [
+ assert sorted([(k, v) for k, v in env.items() if k.startswith('ANSIBLE_GALAXY')]) == [
('ANSIBLE_GALAXY_SERVER_LIST', 'server0'),
('ANSIBLE_GALAXY_SERVER_SERVER0_URL', 'https://galaxy.ansible.com/'),
]
def test_multiple_galaxy_endpoints(self, private_data_dir, project_update):
credential_type = CredentialType.defaults['galaxy_api_token']()
- public_galaxy = Credential(pk=1, credential_type=credential_type, inputs={
- 'url': 'https://galaxy.ansible.com/',
- })
- rh = Credential(pk=2, credential_type=credential_type, inputs={
- 'url': 'https://cloud.redhat.com/api/automation-hub/',
- 'auth_url': 'https://sso.redhat.com/example/openid-connect/token/',
- 'token': 'secret123'
- })
+ public_galaxy = Credential(
+ pk=1,
+ credential_type=credential_type,
+ inputs={
+ 'url': 'https://galaxy.ansible.com/',
+ },
+ )
+ rh = Credential(
+ pk=2,
+ credential_type=credential_type,
+ inputs={
+ 'url': 'https://cloud.redhat.com/api/automation-hub/',
+ 'auth_url': 'https://sso.redhat.com/example/openid-connect/token/',
+ 'token': 'secret123',
+ },
+ )
project_update.project.organization.galaxy_credentials.add(public_galaxy)
project_update.project.organization.galaxy_credentials.add(rh)
task = tasks.RunProjectUpdate()
env = task.build_env(project_update, private_data_dir)
- assert sorted([
- (k, v) for k, v in env.items()
- if k.startswith('ANSIBLE_GALAXY')
- ]) == [
+ assert sorted([(k, v) for k, v in env.items() if k.startswith('ANSIBLE_GALAXY')]) == [
('ANSIBLE_GALAXY_SERVER_LIST', 'server0,server1'),
('ANSIBLE_GALAXY_SERVER_SERVER0_URL', 'https://galaxy.ansible.com/'),
('ANSIBLE_GALAXY_SERVER_SERVER1_AUTH_URL', 'https://sso.redhat.com/example/openid-connect/token/'), # noqa
@@ -1921,21 +1581,15 @@ class TestProjectUpdateCredentials(TestJobExecution):
dict(scm_type='git'),
dict(scm_type='svn'),
dict(scm_type='archive'),
- ]
+ ],
}
def test_username_and_password_auth(self, project_update, scm_type):
task = tasks.RunProjectUpdate()
ssh = CredentialType.defaults['ssh']()
project_update.scm_type = scm_type
- project_update.credential = Credential(
- pk=1,
- credential_type=ssh,
- inputs = {'username': 'bob', 'password': 'secret'}
- )
- project_update.credential.inputs['password'] = encrypt_field(
- project_update.credential, 'password'
- )
+ project_update.credential = Credential(pk=1, credential_type=ssh, inputs={'username': 'bob', 'password': 'secret'})
+ project_update.credential.inputs['password'] = encrypt_field(project_update.credential, 'password')
passwords = task.build_passwords(project_update, {})
password_prompts = task.get_password_prompts(passwords)
@@ -1948,17 +1602,8 @@ class TestProjectUpdateCredentials(TestJobExecution):
task = tasks.RunProjectUpdate()
ssh = CredentialType.defaults['ssh']()
project_update.scm_type = scm_type
- project_update.credential = Credential(
- pk=1,
- credential_type=ssh,
- inputs = {
- 'username': 'bob',
- 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY
- }
- )
- project_update.credential.inputs['ssh_key_data'] = encrypt_field(
- project_update.credential, 'ssh_key_data'
- )
+ project_update.credential = Credential(pk=1, credential_type=ssh, inputs={'username': 'bob', 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY})
+ project_update.credential.inputs['ssh_key_data'] = encrypt_field(project_update.credential, 'ssh_key_data')
passwords = task.build_passwords(project_update, {})
password_prompts = task.get_password_prompts(passwords)
@@ -1978,13 +1623,7 @@ class TestProjectUpdateCredentials(TestJobExecution):
class TestInventoryUpdateCredentials(TestJobExecution):
@pytest.fixture
def inventory_update(self):
- return InventoryUpdate(
- pk=1,
- inventory_source=InventorySource(
- pk=1,
- inventory=Inventory(pk=1)
- )
- )
+ return InventoryUpdate(pk=1, inventory_source=InventorySource(pk=1, inventory=Inventory(pk=1)))
def test_source_without_credential(self, mocker, inventory_update, private_data_dir):
task = tasks.RunInventoryUpdate()
@@ -2003,7 +1642,7 @@ class TestInventoryUpdateCredentials(TestJobExecution):
task = tasks.RunInventoryUpdate()
inventory_update.source = 'custom'
inventory_update.source_vars = '{"FOO": "BAR"}'
- inventory_update.source_script= CustomInventoryScript(script='#!/bin/sh\necho "Hello, World!"')
+ inventory_update.source_script = CustomInventoryScript(script='#!/bin/sh\necho "Hello, World!"')
if with_credential:
azure_rm = CredentialType.defaults['azure_rm']()
@@ -2012,14 +1651,15 @@ class TestInventoryUpdateCredentials(TestJobExecution):
cred = Credential(
pk=1,
credential_type=azure_rm,
- inputs = {
+ inputs={
'client': 'some-client',
'secret': 'some-secret',
'tenant': 'some-tenant',
'subscription': 'some-subscription',
- }
+ },
)
return [cred]
+
inventory_update.get_extra_credentials = get_creds
else:
inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
@@ -2031,9 +1671,7 @@ class TestInventoryUpdateCredentials(TestJobExecution):
credentials = task.build_credentials_list(inventory_update)
for credential in credentials:
if credential:
- credential.credential_type.inject_credential(
- credential, env, {}, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
assert '-i' in ' '.join(args)
script = args[args.index('-i') + 1]
@@ -2053,13 +1691,10 @@ class TestInventoryUpdateCredentials(TestJobExecution):
inventory_update.source = 'ec2'
def get_cred():
- cred = Credential(
- pk=1,
- credential_type=aws,
- inputs = {'username': 'bob', 'password': 'secret'}
- )
+ cred = Credential(pk=1, credential_type=aws, inputs={'username': 'bob', 'password': 'secret'})
cred.inputs['password'] = encrypt_field(cred, 'password')
return cred
+
inventory_update.get_cloud_credential = get_cred
inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
@@ -2079,13 +1714,10 @@ class TestInventoryUpdateCredentials(TestJobExecution):
inventory_update.source = 'vmware'
def get_cred():
- cred = Credential(
- pk=1,
- credential_type=vmware,
- inputs = {'username': 'bob', 'password': 'secret', 'host': 'https://example.org'}
- )
+ cred = Credential(pk=1, credential_type=vmware, inputs={'username': 'bob', 'password': 'secret', 'host': 'https://example.org'})
cred.inputs['password'] = encrypt_field(cred, 'password')
return cred
+
inventory_update.get_cloud_credential = get_cred
inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
@@ -2096,9 +1728,7 @@ class TestInventoryUpdateCredentials(TestJobExecution):
credentials = task.build_credentials_list(inventory_update)
for credential in credentials:
if credential:
- credential.credential_type.inject_credential(
- credential, env, safe_env, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
env["VMWARE_USER"] == "bob",
env["VMWARE_PASSWORD"] == "secret",
@@ -2114,15 +1744,16 @@ class TestInventoryUpdateCredentials(TestJobExecution):
cred = Credential(
pk=1,
credential_type=azure_rm,
- inputs = {
+ inputs={
'client': 'some-client',
'secret': 'some-secret',
'tenant': 'some-tenant',
'subscription': 'some-subscription',
- 'cloud_environment': 'foobar'
- }
+ 'cloud_environment': 'foobar',
+ },
)
return cred
+
inventory_update.get_cloud_credential = get_cred
inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
@@ -2148,14 +1779,10 @@ class TestInventoryUpdateCredentials(TestJobExecution):
cred = Credential(
pk=1,
credential_type=azure_rm,
- inputs = {
- 'subscription': 'some-subscription',
- 'username': 'bob',
- 'password': 'secret',
- 'cloud_environment': 'foobar'
- }
+ inputs={'subscription': 'some-subscription', 'username': 'bob', 'password': 'secret', 'cloud_environment': 'foobar'},
)
return cred
+
inventory_update.get_cloud_credential = get_cred
inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
@@ -2177,19 +1804,10 @@ class TestInventoryUpdateCredentials(TestJobExecution):
inventory_update.source = 'gce'
def get_cred():
- cred = Credential(
- pk=1,
- credential_type=gce,
- inputs = {
- 'username': 'bob',
- 'project': 'some-project',
- 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY
- }
- )
- cred.inputs['ssh_key_data'] = encrypt_field(
- cred, 'ssh_key_data'
- )
+ cred = Credential(pk=1, credential_type=gce, inputs={'username': 'bob', 'project': 'some-project', 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY})
+ cred.inputs['ssh_key_data'] = encrypt_field(cred, 'ssh_key_data')
return cred
+
inventory_update.get_cloud_credential = get_cred
inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
@@ -2200,9 +1818,7 @@ class TestInventoryUpdateCredentials(TestJobExecution):
credentials = task.build_credentials_list(inventory_update)
for credential in credentials:
if credential:
- credential.credential_type.inject_credential(
- credential, env, safe_env, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
assert env['GCE_ZONE'] == expected_gce_zone
json_data = json.load(open(env['GCE_CREDENTIALS_FILE_PATH'], 'rb'))
@@ -2220,18 +1836,12 @@ class TestInventoryUpdateCredentials(TestJobExecution):
cred = Credential(
pk=1,
credential_type=openstack,
- inputs = {
- 'username': 'bob',
- 'password': 'secret',
- 'project': 'tenant-name',
- 'host': 'https://keystone.example.org'
- }
+ inputs={'username': 'bob', 'password': 'secret', 'project': 'tenant-name', 'host': 'https://keystone.example.org'},
)
cred.inputs['ssh_key_data'] = ''
- cred.inputs['ssh_key_data'] = encrypt_field(
- cred, 'ssh_key_data'
- )
+ cred.inputs['ssh_key_data'] = encrypt_field(cred, 'ssh_key_data')
return cred
+
inventory_update.get_cloud_credential = get_cred
inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
@@ -2240,16 +1850,21 @@ class TestInventoryUpdateCredentials(TestJobExecution):
path = os.path.join(private_data_dir, os.path.basename(env['OS_CLIENT_CONFIG_FILE']))
shade_config = open(path, 'r').read()
- assert '\n'.join([
- 'clouds:',
- ' devstack:',
- ' auth:',
- ' auth_url: https://keystone.example.org',
- ' password: secret',
- ' project_name: tenant-name',
- ' username: bob',
- ''
- ]) in shade_config
+ assert (
+ '\n'.join(
+ [
+ 'clouds:',
+ ' devstack:',
+ ' auth:',
+ ' auth_url: https://keystone.example.org',
+ ' password: secret',
+ ' project_name: tenant-name',
+ ' username: bob',
+ '',
+ ]
+ )
+ in shade_config
+ )
def test_satellite6_source(self, inventory_update, private_data_dir, mocker):
task = tasks.RunInventoryUpdate()
@@ -2257,19 +1872,10 @@ class TestInventoryUpdateCredentials(TestJobExecution):
inventory_update.source = 'satellite6'
def get_cred():
- cred = Credential(
- pk=1,
- credential_type=satellite6,
- inputs = {
- 'username': 'bob',
- 'password': 'secret',
- 'host': 'https://example.org'
- }
- )
- cred.inputs['password'] = encrypt_field(
- cred, 'password'
- )
+ cred = Credential(pk=1, credential_type=satellite6, inputs={'username': 'bob', 'password': 'secret', 'host': 'https://example.org'})
+ cred.inputs['password'] = encrypt_field(cred, 'password')
return cred
+
inventory_update.get_cloud_credential = get_cred
inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
@@ -2287,17 +1893,13 @@ class TestInventoryUpdateCredentials(TestJobExecution):
task = tasks.RunInventoryUpdate()
tower = CredentialType.defaults['tower']()
inventory_update.source = 'tower'
- inputs = {
- 'host': 'https://tower.example.org',
- 'username': 'bob',
- 'password': 'secret',
- 'verify_ssl': verify
- }
+ inputs = {'host': 'https://tower.example.org', 'username': 'bob', 'password': 'secret', 'verify_ssl': verify}
def get_cred():
- cred = Credential(pk=1, credential_type=tower, inputs = inputs)
+ cred = Credential(pk=1, credential_type=tower, inputs=inputs)
cred.inputs['password'] = encrypt_field(cred, 'password')
return cred
+
inventory_update.get_cloud_credential = get_cred
inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
@@ -2325,9 +1927,10 @@ class TestInventoryUpdateCredentials(TestJobExecution):
}
def get_cred():
- cred = Credential(pk=1, credential_type=tower, inputs = inputs)
+ cred = Credential(pk=1, credential_type=tower, inputs=inputs)
cred.inputs['password'] = encrypt_field(cred, 'password')
return cred
+
inventory_update.get_cloud_credential = get_cred
inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
@@ -2336,9 +1939,7 @@ class TestInventoryUpdateCredentials(TestJobExecution):
credentials = task.build_credentials_list(inventory_update)
for credential in credentials:
if credential:
- credential.credential_type.inject_credential(
- credential, env, safe_env, [], private_data_dir
- )
+ credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
assert env['TOWER_VERIFY_SSL'] == 'False'
@@ -2351,12 +1952,13 @@ class TestInventoryUpdateCredentials(TestJobExecution):
cred = Credential(
pk=1,
credential_type=gce,
- inputs = {
+ inputs={
'username': 'bob',
'project': 'some-project',
- }
+ },
)
return cred
+
inventory_update.get_cloud_credential = get_cred
inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
settings.AWX_TASK_ENV = {'FOO': 'BAR'}
@@ -2426,9 +2028,7 @@ def test_aquire_lock_acquisition_fail_logged(fcntl_lockf, logging_getLogger, os_
assert logger.err.called_with("I/O error({0}) while trying to aquire lock on file [{1}]: {2}".format(3, 'this_file_does_not_exist', 'dummy message'))
-@pytest.mark.parametrize('injector_cls', [
- cls for cls in ManagedCredentialType.registry.values() if cls.injectors
-])
+@pytest.mark.parametrize('injector_cls', [cls for cls in ManagedCredentialType.registry.values() if cls.injectors])
def test_managed_injector_redaction(injector_cls):
"""See awx.main.models.inventory.PluginFileInjector._get_shared_env
The ordering within awx.main.tasks.BaseTask and contract with build_env
diff --git a/awx/main/tests/unit/test_validators.py b/awx/main/tests/unit/test_validators.py
index a44f78b53c..24c45ca4ef 100644
--- a/awx/main/tests/unit/test_validators.py
+++ b/awx/main/tests/unit/test_validators.py
@@ -131,29 +131,35 @@ def test_cert_with_key():
assert not pem_objects[1]['key_enc']
-@pytest.mark.parametrize("var_str", [
- '{"a": "b"}',
- '---\na: b\nc: d',
- '',
- '""',
-])
+@pytest.mark.parametrize(
+ "var_str",
+ [
+ '{"a": "b"}',
+ '---\na: b\nc: d',
+ '',
+ '""',
+ ],
+)
def test_valid_vars(var_str):
vars_validate_or_raise(var_str)
-@pytest.mark.parametrize("var_str", [
- '["a": "b"]',
- '["a", "b"]',
- "('a=4', 'c=5')",
- '"',
- "''",
- "5",
- "6.74",
- "hello",
- "OrderedDict([('a', 'b')])",
- "True",
- "False",
-])
+@pytest.mark.parametrize(
+ "var_str",
+ [
+ '["a": "b"]',
+ '["a", "b"]',
+ "('a=4', 'c=5')",
+ '"',
+ "''",
+ "5",
+ "6.74",
+ "hello",
+ "OrderedDict([('a', 'b')])",
+ "True",
+ "False",
+ ],
+)
def test_invalid_vars(var_str):
with pytest.raises(RestValidationError):
vars_validate_or_raise(var_str)
diff --git a/awx/main/tests/unit/test_views.py b/awx/main/tests/unit/test_views.py
index b9a96d4344..82f81ff968 100644
--- a/awx/main/tests/unit/test_views.py
+++ b/awx/main/tests/unit/test_views.py
@@ -57,12 +57,11 @@ def test_views_have_search_fields(all_views):
views_missing_search.append(view)
if views_missing_search:
- raise Exception('{} views do not have search fields defined:\n{}'.format(
- len(views_missing_search),
- '\n'.join([
- v.__class__.__name__ + ' (model: {})'.format(getattr(v, 'model', type(None)).__name__)
- for v in views_missing_search
- ]))
+ raise Exception(
+ '{} views do not have search fields defined:\n{}'.format(
+ len(views_missing_search),
+ '\n'.join([v.__class__.__name__ + ' (model: {})'.format(getattr(v, 'model', type(None)).__name__) for v in views_missing_search]),
+ )
)
@@ -87,7 +86,6 @@ def test_global_creation_always_possible(all_views):
creatable_view = View
if not creatable or not global_view:
continue
- assert 'POST' in global_view().allowed_methods, (
- 'Resource {} should be creatable in global list view {}. '
- 'Can be created now in {}'.format(model, global_view, creatable_view)
+ assert 'POST' in global_view().allowed_methods, 'Resource {} should be creatable in global list view {}. ' 'Can be created now in {}'.format(
+ model, global_view, creatable_view
)
diff --git a/awx/main/tests/unit/utils/test_common.py b/awx/main/tests/unit/utils/test_common.py
index b86798ec51..98aaefea2c 100644
--- a/awx/main/tests/unit/utils/test_common.py
+++ b/awx/main/tests/unit/utils/test_common.py
@@ -13,26 +13,18 @@ from rest_framework.exceptions import ParseError
from awx.main.utils import common
-from awx.main.models import (
- Job,
- AdHocCommand,
- InventoryUpdate,
- ProjectUpdate,
- SystemJob,
- WorkflowJob,
- Inventory,
- JobTemplate,
- UnifiedJobTemplate,
- UnifiedJob
-)
+from awx.main.models import Job, AdHocCommand, InventoryUpdate, ProjectUpdate, SystemJob, WorkflowJob, Inventory, JobTemplate, UnifiedJobTemplate, UnifiedJob
-@pytest.mark.parametrize('input_, output', [
- ({"foo": "bar"}, {"foo": "bar"}),
- ('{"foo": "bar"}', {"foo": "bar"}),
- ('---\nfoo: bar', {"foo": "bar"}),
- (4399, {}),
-])
+@pytest.mark.parametrize(
+ 'input_, output',
+ [
+ ({"foo": "bar"}, {"foo": "bar"}),
+ ('{"foo": "bar"}', {"foo": "bar"}),
+ ('---\nfoo: bar', {"foo": "bar"}),
+ (4399, {}),
+ ],
+)
def test_parse_yaml_or_json(input_, output):
assert common.parse_yaml_or_json(input_) == output
@@ -48,7 +40,6 @@ def test_recursive_vars_not_allowed():
class TestParserExceptions:
-
@staticmethod
def json_error(data):
try:
@@ -103,7 +94,7 @@ TEST_MODELS = [
(UnifiedJob, 'unified_job'),
(Inventory, 'inventory'),
(JobTemplate, 'job_template'),
- (UnifiedJobTemplate, 'unified_job_template')
+ (UnifiedJobTemplate, 'unified_job_template'),
]
@@ -120,9 +111,7 @@ def test_get_model_for_invalid_type():
@pytest.mark.django_db
-@pytest.mark.parametrize("model_type,model_class", [
- (name, cls) for cls, name in TEST_MODELS
-])
+@pytest.mark.parametrize("model_type,model_class", [(name, cls) for cls, name in TEST_MODELS])
def test_get_model_for_valid_type(model_type, model_class):
assert common.get_model_for_type(model_type) == model_class
@@ -130,6 +119,7 @@ def test_get_model_for_valid_type(model_type, model_class):
@pytest.fixture
def memoized_function(mocker, mock_cache):
with mock.patch('awx.main.utils.common.get_memoize_cache', return_value=mock_cache):
+
@common.memoize(track_function=True)
def myfunction(key, value):
if key not in myfunction.calls:
@@ -141,6 +131,7 @@ def memoized_function(mocker, mock_cache):
return value
else:
return '%s called %s times' % (value, myfunction.calls[key])
+
myfunction.calls = dict()
return myfunction
@@ -178,16 +169,14 @@ def test_memoize_delete(memoized_function, mock_cache):
def test_memoize_parameter_error():
with pytest.raises(common.IllegalArgumentError):
+
@common.memoize(cache_key='foo', track_function=True)
def fn():
return
def test_extract_ansible_vars():
- my_dict = {
- "foobar": "baz",
- "ansible_connetion_setting": "1928"
- }
+ my_dict = {"foobar": "baz", "ansible_connetion_setting": "1928"}
redacted, var_list = common.extract_ansible_vars(json.dumps(my_dict))
assert var_list == set(['ansible_connection_setting'])
assert redacted == {"foobar": "baz"}
diff --git a/awx/main/tests/unit/utils/test_encryption.py b/awx/main/tests/unit/utils/test_encryption.py
index bba8c3ca71..58afad0ef2 100644
--- a/awx/main/tests/unit/utils/test_encryption.py
+++ b/awx/main/tests/unit/utils/test_encryption.py
@@ -58,9 +58,10 @@ def test_decrypt_field_with_undefined_attr_raises_expected_exception():
class TestSurveyReversibilityValue:
- '''
+ """
Tests to enforce the contract with survey password question encrypted values
- '''
+ """
+
_key = encryption.get_encryption_key('value', None)
def test_encrypt_empty_string(self):
diff --git a/awx/main/tests/unit/utils/test_filters.py b/awx/main/tests/unit/utils/test_filters.py
index 76effe8284..52e37ab893 100644
--- a/awx/main/tests/unit/utils/test_filters.py
+++ b/awx/main/tests/unit/utils/test_filters.py
@@ -1,4 +1,3 @@
-
# Python
import pytest
from unittest import mock
@@ -11,18 +10,20 @@ from awx.main.models import Host
from django.db.models import Q
-
-@pytest.mark.parametrize('params, logger_name, expected', [
- # skip all records if enabled_flag = False
- ({'enabled_flag': False}, 'awx.main', False),
- # skip all records if the host is undefined
- ({'enabled_flag': True}, 'awx.main', False),
- # skip all records if underlying logger is used by handlers themselves
- ({'enabled_flag': True}, 'awx.main.utils.handlers', False),
- ({'enabled_flag': True, 'enabled_loggers': ['awx']}, 'awx.main', True),
- ({'enabled_flag': True, 'enabled_loggers': ['abc']}, 'awx.analytics.xyz', False),
- ({'enabled_flag': True, 'enabled_loggers': ['xyz']}, 'awx.analytics.xyz', True),
-])
+@pytest.mark.parametrize(
+ 'params, logger_name, expected',
+ [
+ # skip all records if enabled_flag = False
+ ({'enabled_flag': False}, 'awx.main', False),
+ # skip all records if the host is undefined
+ ({'enabled_flag': True}, 'awx.main', False),
+ # skip all records if underlying logger is used by handlers themselves
+ ({'enabled_flag': True}, 'awx.main.utils.handlers', False),
+ ({'enabled_flag': True, 'enabled_loggers': ['awx']}, 'awx.main', True),
+ ({'enabled_flag': True, 'enabled_loggers': ['abc']}, 'awx.analytics.xyz', False),
+ ({'enabled_flag': True, 'enabled_loggers': ['xyz']}, 'awx.analytics.xyz', True),
+ ],
+)
def test_base_logging_handler_skip_log(params, logger_name, expected, dummy_log_record):
filter = ExternalLoggerEnabled(**params)
dummy_log_record.name = logger_name
@@ -30,7 +31,6 @@ def test_base_logging_handler_skip_log(params, logger_name, expected, dummy_log_
class Field(object):
-
def __init__(self, name, related_model=None, __prevent_search__=None):
self.name = name
self.related_model = related_model
@@ -38,11 +38,8 @@ class Field(object):
class Meta(object):
-
def __init__(self, fields):
- self._fields = {
- f.name: f for f in fields
- }
+ self._fields = {f.name: f for f in fields}
self.object_name = 'Host'
self.fields_map = {}
self.fields = self._fields.values()
@@ -59,164 +56,197 @@ class mockObjects:
class mockUser:
def __init__(self):
print("Host user created")
- self._meta = Meta(fields=[
- Field(name='password', __prevent_search__=True)
- ])
+ self._meta = Meta(fields=[Field(name='password', __prevent_search__=True)])
class mockHost:
def __init__(self):
print("Host mock created")
self.objects = mockObjects()
- fields = [
- Field(name='name'),
- Field(name='description'),
- Field(name='created_by', related_model=mockUser())
- ]
+ fields = [Field(name='name'), Field(name='description'), Field(name='created_by', related_model=mockUser())]
self._meta = Meta(fields=fields)
@mock.patch('awx.main.utils.filters.get_model', return_value=mockHost())
-class TestSmartFilterQueryFromString():
- @mock.patch(
- 'awx.api.filters.get_fields_from_path',
- lambda model, path: ([model], path) # disable field filtering, because a__b isn't a real Host field
+class TestSmartFilterQueryFromString:
+ @mock.patch('awx.api.filters.get_fields_from_path', lambda model, path: ([model], path)) # disable field filtering, because a__b isn't a real Host field
+ @pytest.mark.parametrize(
+ "filter_string,q_expected",
+ [
+ ('facts__facts__blank=""', Q(**{u"facts__facts__blank": u""})),
+ ('"facts__facts__ space "="f"', Q(**{u"facts__facts__ space ": u"f"})),
+ ('"facts__facts__ e "=no_quotes_here', Q(**{u"facts__facts__ e ": u"no_quotes_here"})),
+ ('a__b__c=3', Q(**{u"a__b__c": 3})),
+ ('a__b__c=3.14', Q(**{u"a__b__c": 3.14})),
+ ('a__b__c=true', Q(**{u"a__b__c": True})),
+ ('a__b__c=false', Q(**{u"a__b__c": False})),
+ ('a__b__c=null', Q(**{u"a__b__c": None})),
+ ('ansible_facts__a="true"', Q(**{u"ansible_facts__contains": {u"a": u"true"}})),
+ ('ansible_facts__a__exact="true"', Q(**{u"ansible_facts__contains": {u"a": u"true"}})),
+ # ('"a__b\"__c"="true"', Q(**{u"a__b\"__c": "true"})),
+ # ('a__b\"__c="true"', Q(**{u"a__b\"__c": "true"})),
+ ],
)
- @pytest.mark.parametrize("filter_string,q_expected", [
- ('facts__facts__blank=""', Q(**{u"facts__facts__blank": u""})),
- ('"facts__facts__ space "="f"', Q(**{u"facts__facts__ space ": u"f"})),
- ('"facts__facts__ e "=no_quotes_here', Q(**{u"facts__facts__ e ": u"no_quotes_here"})),
- ('a__b__c=3', Q(**{u"a__b__c": 3})),
- ('a__b__c=3.14', Q(**{u"a__b__c": 3.14})),
- ('a__b__c=true', Q(**{u"a__b__c": True})),
- ('a__b__c=false', Q(**{u"a__b__c": False})),
- ('a__b__c=null', Q(**{u"a__b__c": None})),
- ('ansible_facts__a="true"', Q(**{u"ansible_facts__contains": {u"a": u"true"}})),
- ('ansible_facts__a__exact="true"', Q(**{u"ansible_facts__contains": {u"a": u"true"}})),
- #('"a__b\"__c"="true"', Q(**{u"a__b\"__c": "true"})),
- #('a__b\"__c="true"', Q(**{u"a__b\"__c": "true"})),
- ])
def test_query_generated(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string)
assert str(q) == str(q_expected)
- @pytest.mark.parametrize("filter_string", [
- 'ansible_facts__facts__facts__blank='
- 'ansible_facts__a__b__c__ space =ggg',
- ])
+ @pytest.mark.parametrize(
+ "filter_string",
+ [
+            'ansible_facts__facts__facts__blank=',
+            'ansible_facts__a__b__c__ space =ggg',
+ ],
+ )
def test_invalid_filter_strings(self, mock_get_host_model, filter_string):
with pytest.raises(RuntimeError) as e:
SmartFilter.query_from_string(filter_string)
assert str(e.value) == u"Invalid query " + filter_string
- @pytest.mark.parametrize("filter_string", [
- 'created_by__password__icontains=pbkdf2'
- 'search=foo or created_by__password__icontains=pbkdf2',
- 'created_by__password__icontains=pbkdf2 or search=foo',
- ])
+ @pytest.mark.parametrize(
+ "filter_string",
+ [
+            'created_by__password__icontains=pbkdf2',
+            'search=foo or created_by__password__icontains=pbkdf2',
+ 'created_by__password__icontains=pbkdf2 or search=foo',
+ ],
+ )
def test_forbidden_filter_string(self, mock_get_host_model, filter_string):
with pytest.raises(Exception) as e:
SmartFilter.query_from_string(filter_string)
"Filtering on password is not allowed." in str(e)
- @pytest.mark.parametrize("filter_string,q_expected", [
- (u'(a=abc\u1F5E3def)', Q(**{u"a": u"abc\u1F5E3def"})),
- (u'(ansible_facts__a=abc\u1F5E3def)', Q(**{u"ansible_facts__contains": {u"a": u"abc\u1F5E3def"}})),
- ])
+ @pytest.mark.parametrize(
+ "filter_string,q_expected",
+ [
+ (u'(a=abc\u1F5E3def)', Q(**{u"a": u"abc\u1F5E3def"})),
+ (u'(ansible_facts__a=abc\u1F5E3def)', Q(**{u"ansible_facts__contains": {u"a": u"abc\u1F5E3def"}})),
+ ],
+ )
def test_unicode(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string)
assert str(q) == str(q_expected)
- @pytest.mark.parametrize("filter_string,q_expected", [
- ('(a=b)', Q(**{u"a": u"b"})),
- ('a=b and c=d', Q(**{u"a": u"b"}) & Q(**{u"c": u"d"})),
- ('(a=b and c=d)', Q(**{u"a": u"b"}) & Q(**{u"c": u"d"})),
- ('a=b or c=d', Q(**{u"a": u"b"}) | Q(**{u"c": u"d"})),
- ('(a=b and c=d) or (e=f)', (Q(**{u"a": u"b"}) & Q(**{u"c": u"d"})) | (Q(**{u"e": u"f"}))),
- (
- 'a=b or a=d or a=e or a=z and b=h and b=i and b=j and b=k',
- Q(**{u"a": u"b"}) | Q(**{u"a": u"d"}) | Q(**{u"a": u"e"}) | Q(**{u"a": u"z"}) &
- Q(**{u"b": u"h"}) & Q(**{u"b": u"i"}) & Q(**{u"b": u"j"}) & Q(**{u"b": u"k"})
- )
- ])
+ @pytest.mark.parametrize(
+ "filter_string,q_expected",
+ [
+ ('(a=b)', Q(**{u"a": u"b"})),
+ ('a=b and c=d', Q(**{u"a": u"b"}) & Q(**{u"c": u"d"})),
+ ('(a=b and c=d)', Q(**{u"a": u"b"}) & Q(**{u"c": u"d"})),
+ ('a=b or c=d', Q(**{u"a": u"b"}) | Q(**{u"c": u"d"})),
+ ('(a=b and c=d) or (e=f)', (Q(**{u"a": u"b"}) & Q(**{u"c": u"d"})) | (Q(**{u"e": u"f"}))),
+ (
+ 'a=b or a=d or a=e or a=z and b=h and b=i and b=j and b=k',
+ Q(**{u"a": u"b"})
+ | Q(**{u"a": u"d"})
+ | Q(**{u"a": u"e"})
+ | Q(**{u"a": u"z"}) & Q(**{u"b": u"h"}) & Q(**{u"b": u"i"}) & Q(**{u"b": u"j"}) & Q(**{u"b": u"k"}),
+ ),
+ ],
+ )
def test_boolean_parenthesis(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string)
assert str(q) == str(q_expected)
- @pytest.mark.parametrize("filter_string,q_expected", [
- ('ansible_facts__a__b__c[]=3', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [3]}}}})),
- ('ansible_facts__a__b__c[]=3.14', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [3.14]}}}})),
- ('ansible_facts__a__b__c[]=true', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [True]}}}})),
- ('ansible_facts__a__b__c[]=false', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [False]}}}})),
- ('ansible_facts__a__b__c[]="true"', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [u"true"]}}}})),
- ('ansible_facts__a__b__c[]="hello world"', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [u"hello world"]}}}})),
- ('ansible_facts__a__b__c[]__d[]="foobar"', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [{u"d": [u"foobar"]}]}}}})),
- ('ansible_facts__a__b__c[]__d="foobar"', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [{u"d": u"foobar"}]}}}})),
- ('ansible_facts__a__b__c[]__d__e="foobar"', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [{u"d": {u"e": u"foobar"}}]}}}})),
- ('ansible_facts__a__b__c[]__d__e[]="foobar"', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [{u"d": {u"e": [u"foobar"]}}]}}}})),
- ('ansible_facts__a__b__c[]__d__e__f[]="foobar"', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [{u"d": {u"e": {u"f": [u"foobar"]}}}]}}}})),
- (
- '(ansible_facts__a__b__c[]__d__e__f[]="foobar") and (ansible_facts__a__b__c[]__d__e[]="foobar")',
- Q(**{ u"ansible_facts__contains": {u"a": {u"b": {u"c": [{u"d": {u"e": {u"f": [u"foobar"]}}}]}}}}) &
- Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [{u"d": {u"e": [u"foobar"]}}]}}}})),
- #('"a__b\"__c"="true"', Q(**{u"a__b\"__c": "true"})),
- #('a__b\"__c="true"', Q(**{u"a__b\"__c": "true"})),
- ])
+ @pytest.mark.parametrize(
+ "filter_string,q_expected",
+ [
+ ('ansible_facts__a__b__c[]=3', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [3]}}}})),
+ ('ansible_facts__a__b__c[]=3.14', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [3.14]}}}})),
+ ('ansible_facts__a__b__c[]=true', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [True]}}}})),
+ ('ansible_facts__a__b__c[]=false', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [False]}}}})),
+ ('ansible_facts__a__b__c[]="true"', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [u"true"]}}}})),
+ ('ansible_facts__a__b__c[]="hello world"', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [u"hello world"]}}}})),
+ ('ansible_facts__a__b__c[]__d[]="foobar"', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [{u"d": [u"foobar"]}]}}}})),
+ ('ansible_facts__a__b__c[]__d="foobar"', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [{u"d": u"foobar"}]}}}})),
+ ('ansible_facts__a__b__c[]__d__e="foobar"', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [{u"d": {u"e": u"foobar"}}]}}}})),
+ ('ansible_facts__a__b__c[]__d__e[]="foobar"', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [{u"d": {u"e": [u"foobar"]}}]}}}})),
+ ('ansible_facts__a__b__c[]__d__e__f[]="foobar"', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [{u"d": {u"e": {u"f": [u"foobar"]}}}]}}}})),
+ (
+ '(ansible_facts__a__b__c[]__d__e__f[]="foobar") and (ansible_facts__a__b__c[]__d__e[]="foobar")',
+ Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [{u"d": {u"e": {u"f": [u"foobar"]}}}]}}}})
+ & Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [{u"d": {u"e": [u"foobar"]}}]}}}}),
+ ),
+ # ('"a__b\"__c"="true"', Q(**{u"a__b\"__c": "true"})),
+ # ('a__b\"__c="true"', Q(**{u"a__b\"__c": "true"})),
+ ],
+ )
def test_contains_query_generated(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string)
assert str(q) == str(q_expected)
- @pytest.mark.parametrize("filter_string,q_expected", [
- #('a__b__c[]="true"', Q(**{u"a__b__c__contains": u"\"true\""})),
- ('ansible_facts__a="true"', Q(**{u"ansible_facts__contains": {u"a": u"true"}})),
- #('"a__b\"__c"="true"', Q(**{u"a__b\"__c": "true"})),
- #('a__b\"__c="true"', Q(**{u"a__b\"__c": "true"})),
- ])
+ @pytest.mark.parametrize(
+ "filter_string,q_expected",
+ [
+ # ('a__b__c[]="true"', Q(**{u"a__b__c__contains": u"\"true\""})),
+ ('ansible_facts__a="true"', Q(**{u"ansible_facts__contains": {u"a": u"true"}})),
+ # ('"a__b\"__c"="true"', Q(**{u"a__b\"__c": "true"})),
+ # ('a__b\"__c="true"', Q(**{u"a__b\"__c": "true"})),
+ ],
+ )
def test_contains_query_generated_unicode(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string)
assert str(q) == str(q_expected)
- @pytest.mark.parametrize("filter_string,q_expected", [
- ('ansible_facts__a=null', Q(**{u"ansible_facts__contains": {u"a": None}})),
- ('ansible_facts__c="null"', Q(**{u"ansible_facts__contains": {u"c": u"\"null\""}})),
- ])
+ @pytest.mark.parametrize(
+ "filter_string,q_expected",
+ [
+ ('ansible_facts__a=null', Q(**{u"ansible_facts__contains": {u"a": None}})),
+ ('ansible_facts__c="null"', Q(**{u"ansible_facts__contains": {u"c": u"\"null\""}})),
+ ],
+ )
def test_contains_query_generated_null(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string)
assert str(q) == str(q_expected)
-
- @pytest.mark.parametrize("filter_string,q_expected", [
- ('group__search=foo', Q(Q(**{u"group__name__icontains": u"foo"}) | Q(**{u"group__description__icontains": u"foo"}))),
- ('search=foo and group__search=foo', Q(
- Q(**{u"name__icontains": u"foo"}) | Q(**{ u"description__icontains": u"foo"}),
- Q(**{u"group__name__icontains": u"foo"}) | Q(**{u"group__description__icontains": u"foo"}))),
- ('search=foo or ansible_facts__a=null',
- Q(Q(**{u"name__icontains": u"foo"}) | Q(**{u"description__icontains": u"foo"})) |
- Q(**{u"ansible_facts__contains": {u"a": None}})),
- ])
+ @pytest.mark.parametrize(
+ "filter_string,q_expected",
+ [
+ ('group__search=foo', Q(Q(**{u"group__name__icontains": u"foo"}) | Q(**{u"group__description__icontains": u"foo"}))),
+ (
+ 'search=foo and group__search=foo',
+ Q(
+ Q(**{u"name__icontains": u"foo"}) | Q(**{u"description__icontains": u"foo"}),
+ Q(**{u"group__name__icontains": u"foo"}) | Q(**{u"group__description__icontains": u"foo"}),
+ ),
+ ),
+ (
+ 'search=foo or ansible_facts__a=null',
+ Q(Q(**{u"name__icontains": u"foo"}) | Q(**{u"description__icontains": u"foo"})) | Q(**{u"ansible_facts__contains": {u"a": None}}),
+ ),
+ ],
+ )
def test_search_related_fields(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string)
assert str(q) == str(q_expected)
-class TestSmartFilterQueryFromStringNoDB():
- @pytest.mark.parametrize("filter_string,q_expected", [
- ('ansible_facts__a="true" and ansible_facts__b="true" and ansible_facts__c="true"',
- (Q(**{u"ansible_facts__contains": {u"a": u"true"}}) &
- Q(**{u"ansible_facts__contains": {u"b": u"true"}}) &
- Q(**{u"ansible_facts__contains": {u"c": u"true"}}))),
- ('ansible_facts__a="true" or ansible_facts__b="true" or ansible_facts__c="true"',
- (Q(**{u"ansible_facts__contains": {u"a": u"true"}}) |
- Q(**{u"ansible_facts__contains": {u"b": u"true"}}) |
- Q(**{u"ansible_facts__contains": {u"c": u"true"}}))),
- ('search=foo',
- Q(Q(**{ u"description__icontains": u"foo"}) | Q(**{u"name__icontains": u"foo"}))),
- ('search=foo and ansible_facts__a="null"',
- Q(Q(**{u"description__icontains": u"foo"}) | Q(**{u"name__icontains": u"foo"})) &
- Q(**{u"ansible_facts__contains": {u"a": u"\"null\""}})),
- ('name=foo or name=bar and name=foobar',
- Q(name="foo") | Q(name="bar") & Q(name="foobar"))
- ])
+class TestSmartFilterQueryFromStringNoDB:
+ @pytest.mark.parametrize(
+ "filter_string,q_expected",
+ [
+ (
+ 'ansible_facts__a="true" and ansible_facts__b="true" and ansible_facts__c="true"',
+ (
+ Q(**{u"ansible_facts__contains": {u"a": u"true"}})
+ & Q(**{u"ansible_facts__contains": {u"b": u"true"}})
+ & Q(**{u"ansible_facts__contains": {u"c": u"true"}})
+ ),
+ ),
+ (
+ 'ansible_facts__a="true" or ansible_facts__b="true" or ansible_facts__c="true"',
+ (
+ Q(**{u"ansible_facts__contains": {u"a": u"true"}})
+ | Q(**{u"ansible_facts__contains": {u"b": u"true"}})
+ | Q(**{u"ansible_facts__contains": {u"c": u"true"}})
+ ),
+ ),
+ ('search=foo', Q(Q(**{u"description__icontains": u"foo"}) | Q(**{u"name__icontains": u"foo"}))),
+ (
+ 'search=foo and ansible_facts__a="null"',
+ Q(Q(**{u"description__icontains": u"foo"}) | Q(**{u"name__icontains": u"foo"})) & Q(**{u"ansible_facts__contains": {u"a": u"\"null\""}}),
+ ),
+ ('name=foo or name=bar and name=foobar', Q(name="foo") | Q(name="bar") & Q(name="foobar")),
+ ],
+ )
def test_does_not_invoke_db(self, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string)
assert str(q.query) == str(Host.objects.filter(q_expected).query)
diff --git a/awx/main/tests/unit/utils/test_formatters.py b/awx/main/tests/unit/utils/test_formatters.py
index 6358c1b931..e655827731 100644
--- a/awx/main/tests/unit/utils/test_formatters.py
+++ b/awx/main/tests/unit/utils/test_formatters.py
@@ -1,4 +1,3 @@
-
from awx.main.models import Job, JobEvent
from awx.main.utils.formatters import LogstashFormatter
@@ -9,8 +8,7 @@ def test_log_from_job_event_object():
event = JobEvent(job_id=job.id)
formatter = LogstashFormatter()
- data_for_log = formatter.reformat_data_for_log(
- dict(python_objects=dict(job_event=event)), kind='job_events')
+ data_for_log = formatter.reformat_data_for_log(dict(python_objects=dict(job_event=event)), kind='job_events')
# Check entire body of data for any exceptions from getattr on event object
for fd in data_for_log:
diff --git a/awx/main/tests/unit/utils/test_insights.py b/awx/main/tests/unit/utils/test_insights.py
index 1eee79ce78..b5dbe63d19 100644
--- a/awx/main/tests/unit/utils/test_insights.py
+++ b/awx/main/tests/unit/utils/test_insights.py
@@ -7,8 +7,7 @@ from awx.main.tests.data.insights import TEST_INSIGHTS_HOSTS, TEST_INSIGHTS_PLAN
def test_filter_insights_api_response():
- actual = filter_insights_api_response(
- TEST_INSIGHTS_HOSTS['results'][0], TEST_INSIGHTS_PLANS, TEST_INSIGHTS_REMEDIATIONS)
+ actual = filter_insights_api_response(TEST_INSIGHTS_HOSTS['results'][0], TEST_INSIGHTS_PLANS, TEST_INSIGHTS_REMEDIATIONS)
assert actual['last_check_in'] == '2019-03-19T21:59:09.213151-04:00'
assert len(actual['reports']) == 5
@@ -17,11 +16,11 @@ def test_filter_insights_api_response():
rule = actual['reports'][0]['rule']
assert rule['severity'] == 'WARN'
- assert rule['description'] == (
- "Kernel vulnerable to side-channel attacks in modern microprocessors (CVE-2017-5715/Spectre)")
+ assert rule['description'] == ("Kernel vulnerable to side-channel attacks in modern microprocessors (CVE-2017-5715/Spectre)")
assert rule['category'] == 'Security'
assert rule['summary'] == (
"A vulnerability was discovered in modern microprocessors supported by the kernel,"
" whereby an unprivileged attacker can use this flaw to bypass restrictions to gain read"
" access to privileged memory.\nThe issue was reported as [CVE-2017-5715 / Spectre]"
- "(https://access.redhat.com/security/cve/CVE-2017-5715).\n")
+ "(https://access.redhat.com/security/cve/CVE-2017-5715).\n"
+ )
diff --git a/awx/main/tests/unit/utils/test_mem_inventory.py b/awx/main/tests/unit/utils/test_mem_inventory.py
index 078d303323..e81c6da8da 100644
--- a/awx/main/tests/unit/utils/test_mem_inventory.py
+++ b/awx/main/tests/unit/utils/test_mem_inventory.py
@@ -1,8 +1,5 @@
# AWX utils
-from awx.main.utils.mem_inventory import (
- MemInventory,
- mem_data_to_dict, dict_to_mem_data
-)
+from awx.main.utils.mem_inventory import MemInventory, mem_data_to_dict, dict_to_mem_data
import pytest
import json
@@ -24,18 +21,10 @@ def memory_inventory():
def JSON_of_inv():
# Implemented as fixture because it may be changed inside of tests
return {
- "_meta": {
- "hostvars": {
- "group_host": {},
- "my_host": {"foo": "bar"}
- }
- },
- "all": {"children": ["my_group", "ungrouped"]},
- "my_group": {
- "hosts": ["group_host"],
- "vars": {"foobar": "barfoo"}
- },
- "ungrouped": {"hosts": ["my_host"]}
+ "_meta": {"hostvars": {"group_host": {}, "my_host": {"foo": "bar"}}},
+ "all": {"children": ["my_group", "ungrouped"]},
+ "my_group": {"hosts": ["group_host"], "vars": {"foobar": "barfoo"}},
+ "ungrouped": {"hosts": ["my_host"]},
}
@@ -66,6 +55,7 @@ def JSON_with_lists():
# MemObject basic operations tests
+
@pytest.mark.inventory_import
def test_inventory_create_all_group():
inventory = MemInventory()
@@ -97,6 +87,7 @@ def test_ungrouped_mechanics():
# MemObject --> JSON tests
+
@pytest.mark.inventory_import
def test_convert_memory_to_JSON_with_vars(memory_inventory):
data = mem_data_to_dict(memory_inventory)
@@ -109,6 +100,7 @@ def test_convert_memory_to_JSON_with_vars(memory_inventory):
# JSON --> MemObject tests
+
@pytest.mark.inventory_import
def test_convert_JSON_to_memory_with_vars(JSON_of_inv):
inventory = dict_to_mem_data(JSON_of_inv)
diff --git a/awx/main/tests/unit/utils/test_reload.py b/awx/main/tests/unit/utils/test_reload.py
index 525a90e6aa..5f8c7b95e3 100644
--- a/awx/main/tests/unit/utils/test_reload.py
+++ b/awx/main/tests/unit/utils/test_reload.py
@@ -10,6 +10,12 @@ def test_produce_supervisor_command(mocker):
with mocker.patch.object(reload.subprocess, 'Popen', Popen_mock):
reload.supervisor_service_command("restart")
reload.subprocess.Popen.assert_called_once_with(
- ['supervisorctl', 'restart', 'tower-processes:*',],
- stderr=-1, stdin=-1, stdout=-1)
-
+ [
+ 'supervisorctl',
+ 'restart',
+ 'tower-processes:*',
+ ],
+ stderr=-1,
+ stdin=-1,
+ stdout=-1,
+ )
diff --git a/awx/main/tests/unit/utils/test_safe_yaml.py b/awx/main/tests/unit/utils/test_safe_yaml.py
index 8e8dd933aa..596cf708d7 100644
--- a/awx/main/tests/unit/utils/test_safe_yaml.py
+++ b/awx/main/tests/unit/utils/test_safe_yaml.py
@@ -63,11 +63,7 @@ def test_kv_unsafe_deep_nesting():
def test_kv_unsafe_multiple():
- assert safe_dump({'a': 'b', 'c': 'd'}) == '\n'.join([
- "!unsafe 'a': !unsafe 'b'",
- "!unsafe 'c': !unsafe 'd'",
- ""
- ])
+ assert safe_dump({'a': 'b', 'c': 'd'}) == '\n'.join(["!unsafe 'a': !unsafe 'b'", "!unsafe 'c': !unsafe 'd'", ""])
def test_safe_marking():
@@ -75,11 +71,7 @@ def test_safe_marking():
def test_safe_marking_mixed():
- assert safe_dump({'a': 'b', 'c': 'd'}, safe_dict={'a': 'b'}) == '\n'.join([
- "a: b",
- "!unsafe 'c': !unsafe 'd'",
- ""
- ])
+ assert safe_dump({'a': 'b', 'c': 'd'}, safe_dict={'a': 'b'}) == '\n'.join(["a: b", "!unsafe 'c': !unsafe 'd'", ""])
def test_safe_marking_deep_nesting():
diff --git a/awx/main/utils/__init__.py b/awx/main/utils/__init__.py
index 8c24401d5e..e635e44f62 100644
--- a/awx/main/utils/__init__.py
+++ b/awx/main/utils/__init__.py
@@ -4,6 +4,10 @@
# AWX
from awx.main.utils.common import * # noqa
from awx.main.utils.encryption import ( # noqa
- get_encryption_key, encrypt_field, decrypt_field, encrypt_value,
- decrypt_value, encrypt_dict,
+ get_encryption_key,
+ encrypt_field,
+ decrypt_field,
+ encrypt_value,
+ decrypt_value,
+ encrypt_dict,
)
diff --git a/awx/main/utils/ansible.py b/awx/main/utils/ansible.py
index 18011504b9..577aeb3fd6 100644
--- a/awx/main/utils/ansible.py
+++ b/awx/main/utils/ansible.py
@@ -49,12 +49,7 @@ def could_be_playbook(project_path, dir_path, filename):
# show up.
matched = False
try:
- for n, line in enumerate(codecs.open(
- playbook_path,
- 'r',
- encoding='utf-8',
- errors='ignore'
- )):
+ for n, line in enumerate(codecs.open(playbook_path, 'r', encoding='utf-8', errors='ignore')):
if valid_playbook_re.match(line):
matched = True
break
@@ -89,12 +84,7 @@ def could_be_inventory(project_path, dir_path, filename):
# Ansible inventory mainly
try:
# only read through first 10 lines for performance
- with codecs.open(
- inventory_path,
- 'r',
- encoding='utf-8',
- errors='ignore'
- ) as inv_file:
+ with codecs.open(inventory_path, 'r', encoding='utf-8', errors='ignore') as inv_file:
for line in islice(inv_file, 10):
if not valid_inventory_re.match(line):
return None
diff --git a/awx/main/utils/common.py b/awx/main/utils/common.py
index ad90d5e4ec..9ad45d68c6 100644
--- a/awx/main/utils/common.py
+++ b/awx/main/utils/common.py
@@ -23,10 +23,7 @@ from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist
from django.utils.translation import ugettext_lazy as _
from django.utils.functional import cached_property
from django.db.models.fields.related import ForeignObjectRel, ManyToManyField
-from django.db.models.fields.related_descriptors import (
- ForwardManyToOneDescriptor,
- ManyToManyDescriptor
-)
+from django.db.models.fields.related_descriptors import ForwardManyToOneDescriptor, ManyToManyDescriptor
from django.db.models.query import QuerySet
from django.db.models import Q
@@ -42,30 +39,65 @@ from awx.conf.license import get_license
logger = logging.getLogger('awx.main.utils')
__all__ = [
- 'get_object_or_400', 'camelcase_to_underscore', 'underscore_to_camelcase', 'memoize',
- 'memoize_delete', 'get_ansible_version', 'get_licenser', 'get_awx_http_client_headers',
- 'get_awx_version', 'update_scm_url', 'get_type_for_model', 'get_model_for_type',
- 'copy_model_by_class', 'copy_m2m_relationships',
- 'prefetch_page_capabilities', 'to_python_boolean', 'ignore_inventory_computed_fields',
- 'ignore_inventory_group_removal', '_inventory_updates', 'get_pk_from_dict', 'getattrd',
- 'getattr_dne', 'NoDefaultProvided', 'get_current_apps', 'set_current_apps',
- 'extract_ansible_vars', 'get_search_fields', 'get_system_task_capacity',
- 'get_cpu_capacity', 'get_mem_capacity', 'wrap_args_with_proot', 'build_proot_temp_dir',
- 'check_proot_installed', 'model_to_dict', 'NullablePromptPseudoField',
- 'model_instance_diff', 'parse_yaml_or_json', 'RequireDebugTrueOrTest',
- 'has_model_field_prefetched', 'set_environ', 'IllegalArgumentError',
- 'get_custom_venv_choices', 'get_external_account', 'task_manager_bulk_reschedule',
- 'schedule_task_manager', 'classproperty', 'create_temporary_fifo', 'truncate_stdout',
- 'deepmerge'
+ 'get_object_or_400',
+ 'camelcase_to_underscore',
+ 'underscore_to_camelcase',
+ 'memoize',
+ 'memoize_delete',
+ 'get_ansible_version',
+ 'get_licenser',
+ 'get_awx_http_client_headers',
+ 'get_awx_version',
+ 'update_scm_url',
+ 'get_type_for_model',
+ 'get_model_for_type',
+ 'copy_model_by_class',
+ 'copy_m2m_relationships',
+ 'prefetch_page_capabilities',
+ 'to_python_boolean',
+ 'ignore_inventory_computed_fields',
+ 'ignore_inventory_group_removal',
+ '_inventory_updates',
+ 'get_pk_from_dict',
+ 'getattrd',
+ 'getattr_dne',
+ 'NoDefaultProvided',
+ 'get_current_apps',
+ 'set_current_apps',
+ 'extract_ansible_vars',
+ 'get_search_fields',
+ 'get_system_task_capacity',
+ 'get_cpu_capacity',
+ 'get_mem_capacity',
+ 'wrap_args_with_proot',
+ 'build_proot_temp_dir',
+ 'check_proot_installed',
+ 'model_to_dict',
+ 'NullablePromptPseudoField',
+ 'model_instance_diff',
+ 'parse_yaml_or_json',
+ 'RequireDebugTrueOrTest',
+ 'has_model_field_prefetched',
+ 'set_environ',
+ 'IllegalArgumentError',
+ 'get_custom_venv_choices',
+ 'get_external_account',
+ 'task_manager_bulk_reschedule',
+ 'schedule_task_manager',
+ 'classproperty',
+ 'create_temporary_fifo',
+ 'truncate_stdout',
+ 'deepmerge',
]
def get_object_or_400(klass, *args, **kwargs):
- '''
+ """
Return a single object from the given model or queryset based on the query
params, otherwise raise an exception that will return in a 400 response.
- '''
+ """
from django.shortcuts import _get_queryset
+
queryset = _get_queryset(klass)
try:
return queryset.get(*args, **kwargs)
@@ -88,28 +120,28 @@ def to_python_boolean(value, allow_none=False):
def camelcase_to_underscore(s):
- '''
+ """
Convert CamelCase names to lowercase_with_underscore.
- '''
+ """
s = re.sub(r'(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))', '_\\1', s)
return s.lower().strip('_')
def underscore_to_camelcase(s):
- '''
+ """
Convert lowercase_with_underscore names to CamelCase.
- '''
+ """
return ''.join(x.capitalize() or '_' for x in s.split('_'))
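These two helpers are inverses for simple names, which is what get_type_for_model further down relies on:

    camelcase_to_underscore('JobTemplate')   # -> 'job_template'
    underscore_to_camelcase('job_template')  # -> 'JobTemplate'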
-
class RequireDebugTrueOrTest(logging.Filter):
- '''
+ """
Logging filter to output when in DEBUG mode or running tests.
- '''
+ """
def filter(self, record):
from django.conf import settings
+
return settings.DEBUG or settings.IS_TESTING()
@@ -119,13 +151,14 @@ class IllegalArgumentError(ValueError):
def get_memoize_cache():
from django.core.cache import cache
+
return cache
def memoize(ttl=60, cache_key=None, track_function=False, cache=None):
- '''
+ """
Decorator to wrap a function and cache its result.
- '''
+ """
if cache_key and track_function:
raise IllegalArgumentError("Can not specify cache_key when track_function is True")
cache = cache or get_memoize_cache()
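A minimal sketch of a TTL memoizer in the same spirit, assuming a Django-style cache object with get()/set(key, value, timeout); the real decorator above also supports cache_key and track_function:

    import functools

    def sketch_memoize(ttl=60, cache=None):
        def decorator(fn):
            @functools.wraps(fn)
            def wrapper(*args, **kwargs):
                key = '%s-%r-%r' % (fn.__name__, args, sorted(kwargs.items()))
                value = cache.get(key)           # None is treated as a miss here
                if value is None:
                    value = fn(*args, **kwargs)
                    cache.set(key, value, ttl)   # expire after ttl seconds
                return value
            return wrapper
        return decorator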
@@ -164,13 +197,12 @@ def memoize_delete(function_name):
@memoize()
def get_ansible_version():
- '''
+ """
Return Ansible version installed.
Ansible path needs to be provided to account for custom virtual environments
- '''
+ """
try:
- proc = subprocess.Popen(['ansible', '--version'],
- stdout=subprocess.PIPE)
+ proc = subprocess.Popen(['ansible', '--version'], stdout=subprocess.PIPE)
result = smart_str(proc.communicate()[0])
return result.split('\n')[0].replace('ansible', '').strip()
except Exception:
@@ -178,12 +210,14 @@ def get_ansible_version():
def get_awx_version():
- '''
+ """
Return AWX version as reported by setuptools.
- '''
+ """
from awx import __version__
+
try:
import pkg_resources
+
return pkg_resources.require('awx')[0].version
except Exception:
return __version__
@@ -193,17 +227,14 @@ def get_awx_http_client_headers():
license = get_license().get('license_type', 'UNLICENSED')
headers = {
'Content-Type': 'application/json',
- 'User-Agent': '{} {} ({})'.format(
- 'AWX' if license == 'open' else 'Red Hat Ansible Tower',
- get_awx_version(),
- license
- )
+ 'User-Agent': '{} {} ({})'.format('AWX' if license == 'open' else 'Red Hat Ansible Tower', get_awx_version(), license),
}
return headers
def get_licenser(*args, **kwargs):
from awx.main.utils.licensing import Licenser, OpenLicense
+
try:
if os.path.exists('/var/lib/awx/.tower_version'):
return Licenser(*args, **kwargs)
@@ -213,14 +244,13 @@ def get_licenser(*args, **kwargs):
raise ValueError(_('Error importing Tower License: %s') % e)
-def update_scm_url(scm_type, url, username=True, password=True,
- check_special_cases=True, scp_format=False):
- '''
+def update_scm_url(scm_type, url, username=True, password=True, check_special_cases=True, scp_format=False):
+ """
Update the given SCM URL to add/replace/remove the username/password. When
username/password is True, preserve existing username/password, when
False (None, '', etc.), remove any existing username/password, otherwise
replace username/password. Also validates the given URL.
- '''
+ """
# Handle all of the URL formats supported by the SCM systems:
# git: https://www.kernel.org/pub/software/scm/git/docs/git-clone.html#URLS
# svn: http://svnbook.red-bean.com/en/1.7/svn-book.html#svn.advanced.reposurls
@@ -246,9 +276,9 @@ def update_scm_url(scm_type, url, username=True, password=True,
if hostpath.count(':') > 1:
raise ValueError(_('Invalid %s URL') % scm_type)
host, path = hostpath.split(':', 1)
- #if not path.startswith('/') and not path.startswith('~/'):
+ # if not path.startswith('/') and not path.startswith('~/'):
# path = '~/%s' % path
- #if path.startswith('/'):
+ # if path.startswith('/'):
# path = path.lstrip('/')
hostpath = '/'.join([host, path])
modified_url = '@'.join(filter(None, [userpass, hostpath]))
@@ -297,18 +327,17 @@ def update_scm_url(scm_type, url, username=True, password=True,
if scm_type == 'git' and parts.scheme.endswith('ssh') and parts.hostname in special_git_hosts and netloc_username != 'git':
raise ValueError(_('Username must be "git" for SSH access to %s.') % parts.hostname)
if scm_type == 'git' and parts.scheme.endswith('ssh') and parts.hostname in special_git_hosts and netloc_password:
- #raise ValueError('Password not allowed for SSH access to %s.' % parts.hostname)
+ # raise ValueError('Password not allowed for SSH access to %s.' % parts.hostname)
netloc_password = ''
if netloc_username and parts.scheme != 'file' and scm_type not in ("insights", "archive"):
- netloc = u':'.join([urllib.parse.quote(x,safe='') for x in (netloc_username, netloc_password) if x])
+ netloc = u':'.join([urllib.parse.quote(x, safe='') for x in (netloc_username, netloc_password) if x])
else:
netloc = u''
netloc = u'@'.join(filter(None, [netloc, parts.hostname]))
if parts.port:
netloc = u':'.join([netloc, str(parts.port)])
- new_url = urllib.parse.urlunsplit([parts.scheme, netloc, parts.path,
- parts.query, parts.fragment])
+ new_url = urllib.parse.urlunsplit([parts.scheme, netloc, parts.path, parts.query, parts.fragment])
if scp_format and parts.scheme == 'git+ssh':
new_url = new_url.replace('git+ssh://', '', 1).replace('/', ':', 1)
return new_url
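The docstring semantics (preserve on True, strip on falsy, replace otherwise) reduce to a netloc rewrite; a stripped-down sketch of just that core, without any of the per-SCM validation above:

    import urllib.parse

    def sketch_set_credentials(url, username=None, password=None):
        # Replace (or drop) user:pass in the netloc; everything else is preserved.
        parts = urllib.parse.urlsplit(url)
        netloc = ':'.join(urllib.parse.quote(x, safe='') for x in (username, password) if x)
        netloc = '@'.join(filter(None, [netloc, parts.hostname or '']))
        if parts.port:
            netloc = ':'.join([netloc, str(parts.port)])
        return urllib.parse.urlunsplit([parts.scheme, netloc, parts.path, parts.query, parts.fragment])

    # sketch_set_credentials('https://user:old@example.com/repo.git', 'bot', 's3cret')
    # -> 'https://bot:s3cret@example.com/repo.git'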
@@ -322,11 +351,7 @@ def get_allowed_fields(obj, serializer_mapping):
else:
allowed_fields = [x.name for x in obj._meta.fields]
- ACTIVITY_STREAM_FIELD_EXCLUSIONS = {
- 'user': ['last_login'],
- 'oauth2accesstoken': ['last_used'],
- 'oauth2application': ['client_secret']
- }
+ ACTIVITY_STREAM_FIELD_EXCLUSIONS = {'user': ['last_login'], 'oauth2accesstoken': ['last_used'], 'oauth2application': ['client_secret']}
model_name = obj._meta.model_name
fields_excluded = ACTIVITY_STREAM_FIELD_EXCLUSIONS.get(model_name, [])
# see definition of from_db for CredentialType
@@ -347,10 +372,7 @@ def _convert_model_field_for_display(obj, field_name, password_fields=None):
return '<missing {}>-{}'.format(obj._meta.verbose_name, getattr(obj, '{}_id'.format(field_name)))
if password_fields is None:
password_fields = set(getattr(type(obj), 'PASSWORD_FIELDS', [])) | set(['password'])
- if field_name in password_fields or (
- isinstance(field_val, str) and
- field_val.startswith('$encrypted$')
- ):
+ if field_name in password_fields or (isinstance(field_val, str) and field_val.startswith('$encrypted$')):
return u'hidden'
if hasattr(obj, 'display_%s' % field_name):
field_val = getattr(obj, 'display_%s' % field_name)()
@@ -373,9 +395,9 @@ def model_instance_diff(old, new, serializer_mapping=None):
"""
from django.db.models import Model
- if not(old is None or isinstance(old, Model)):
+ if not (old is None or isinstance(old, Model)):
raise TypeError('The supplied old instance is not a valid model instance.')
- if not(new is None or isinstance(new, Model)):
+ if not (new is None or isinstance(new, Model)):
raise TypeError('The supplied new instance is not a valid model instance.')
old_password_fields = set(getattr(type(old), 'PASSWORD_FIELDS', [])) | set(['password'])
new_password_fields = set(getattr(type(new), 'PASSWORD_FIELDS', [])) | set(['password'])
@@ -417,6 +439,7 @@ class CharPromptDescriptor:
"""Class used for identifying nullable launch config fields from class
ex. Schedule.limit
"""
+
def __init__(self, field):
self.field = field
@@ -426,6 +449,7 @@ class NullablePromptPseudoField:
Interface for pseudo-property stored in `char_prompts` dict
Used in LaunchTimeConfig and submodels, defined here to avoid circular imports
"""
+
def __init__(self, field_name):
self.field_name = field_name
@@ -447,10 +471,10 @@ class NullablePromptPseudoField:
def copy_model_by_class(obj1, Class2, fields, kwargs):
- '''
+ """
Creates a new unsaved object of type Class2 using the fields from obj1
values in kwargs can override obj1
- '''
+ """
create_kwargs = {}
for field_name in fields:
descriptor = getattr(Class2, field_name)
@@ -500,11 +524,11 @@ def copy_model_by_class(obj1, Class2, fields, kwargs):
def copy_m2m_relationships(obj1, obj2, fields, kwargs=None):
- '''
+ """
In-place operation.
Given two saved objects, copies related objects from obj1
to obj2 to field of same name, if field occurs in `fields`
- '''
+ """
for field_name in fields:
if hasattr(obj1, field_name):
try:
@@ -526,17 +550,17 @@ def copy_m2m_relationships(obj1, obj2, fields, kwargs=None):
def get_type_for_model(model):
- '''
+ """
Return type name for a given model class.
- '''
+ """
opts = model._meta.concrete_model._meta
return camelcase_to_underscore(opts.object_name)
def get_model_for_type(type_name):
- '''
+ """
Return model class for a given type name.
- '''
+ """
model_str = underscore_to_camelcase(type_name)
if model_str == 'User':
use_app = 'auth'
@@ -546,7 +570,7 @@ def get_model_for_type(type_name):
def prefetch_page_capabilities(model, page, prefetch_list, user):
- '''
+ """
Given a `page` list of objects, a nested dictionary of user_capabilities
are returned by id, ex.
{
@@ -565,7 +589,7 @@ def prefetch_page_capabilities(model, page, prefetch_list, user):
prefetch_list = [{'copy': ['inventory.admin', 'project.admin']}]
--> prefetch logical combination of admin permission to inventory AND
project, put into cache dictionary as "copy"
- '''
+ """
page_ids = [obj.id for obj in page]
mapping = {}
for obj in page:
@@ -592,9 +616,9 @@ def prefetch_page_capabilities(model, page, prefetch_list, user):
parent_model = model
for subpath in role_path.split('.')[:-1]:
parent_model = parent_model._meta.get_field(subpath).related_model
- filter_args.append(Q(
- Q(**{'%s__pk__in' % res_path: parent_model.accessible_pk_qs(user, '%s_role' % role_type)}) |
- Q(**{'%s__isnull' % res_path: True})))
+ filter_args.append(
+ Q(Q(**{'%s__pk__in' % res_path: parent_model.accessible_pk_qs(user, '%s_role' % role_type)}) | Q(**{'%s__isnull' % res_path: True}))
+ )
else:
role_type = role_path
filter_args.append(Q(**{'pk__in': model.accessible_pk_qs(user, '%s_role' % role_type)}))
@@ -625,19 +649,16 @@ def validate_vars_type(vars_obj):
data_type = vars_type.__name__
else:
data_type = str(vars_type)
- raise AssertionError(
- _('Input type `{data_type}` is not a dictionary').format(
- data_type=data_type)
- )
+ raise AssertionError(_('Input type `{data_type}` is not a dictionary').format(data_type=data_type))
def parse_yaml_or_json(vars_str, silent_failure=True):
- '''
+ """
Attempt to parse a string of variables.
First, with JSON parser, if that fails, then with PyYAML.
If both attempts fail, return an empty dictionary if `silent_failure`
is True, re-raise combination error if `silent_failure` if False.
- '''
+ """
if isinstance(vars_str, dict):
return vars_str
elif isinstance(vars_str, str) and vars_str == '""':
@@ -658,21 +679,19 @@ def parse_yaml_or_json(vars_str, silent_failure=True):
try:
json.dumps(vars_dict)
except (ValueError, TypeError, AssertionError) as json_err2:
- raise ParseError(_(
- 'Variables not compatible with JSON standard (error: {json_error})').format(
- json_error=str(json_err2)))
+ raise ParseError(_('Variables not compatible with JSON standard (error: {json_error})').format(json_error=str(json_err2)))
except (yaml.YAMLError, TypeError, AttributeError, AssertionError) as yaml_err:
if silent_failure:
return {}
- raise ParseError(_(
- 'Cannot parse as JSON (error: {json_error}) or '
- 'YAML (error: {yaml_error}).').format(
- json_error=str(json_err), yaml_error=str(yaml_err)))
+ raise ParseError(
+ _('Cannot parse as JSON (error: {json_error}) or ' 'YAML (error: {yaml_error}).').format(json_error=str(json_err), yaml_error=str(yaml_err))
+ )
return vars_dict
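The ladder above is JSON first, YAML second, with a combined error when both fail; compactly (omitting silent_failure and the JSON round-trip check):

    import json
    import yaml

    def sketch_parse_vars(vars_str):
        try:
            return json.loads(vars_str)
        except ValueError as json_err:
            try:
                return yaml.safe_load(vars_str)
            except yaml.YAMLError as yaml_err:
                raise ValueError('Cannot parse as JSON (%s) or YAML (%s)' % (json_err, yaml_err))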
def get_cpu_capacity():
from django.conf import settings
+
settings_forkcpu = getattr(settings, 'SYSTEM_TASK_FORKS_CPU', None)
env_forkcpu = os.getenv('SYSTEM_TASK_FORKS_CPU', None)
@@ -697,6 +716,7 @@ def get_cpu_capacity():
def get_mem_capacity():
from django.conf import settings
+
settings_forkmem = getattr(settings, 'SYSTEM_TASK_FORKS_MEM', None)
env_forkmem = os.getenv('SYSTEM_TASK_FORKS_MEM', None)
@@ -720,10 +740,11 @@ def get_mem_capacity():
def get_system_task_capacity(scale=Decimal(1.0), cpu_capacity=None, mem_capacity=None):
- '''
+ """
Measure system memory and use it as a baseline for determining the system's capacity
- '''
+ """
from django.conf import settings
+
settings_forks = getattr(settings, 'SYSTEM_TASK_FORKS_CAPACITY', None)
env_forks = os.getenv('SYSTEM_TASK_FORKS_CAPACITY', None)
@@ -749,9 +770,9 @@ _task_manager = threading.local()
@contextlib.contextmanager
def ignore_inventory_computed_fields():
- '''
+ """
Context manager to ignore updating inventory computed fields.
- '''
+ """
try:
previous_value = getattr(_inventory_updates, 'is_updating', False)
_inventory_updates.is_updating = True
@@ -763,14 +784,14 @@ def ignore_inventory_computed_fields():
def _schedule_task_manager():
from awx.main.scheduler.tasks import run_task_manager
from django.db import connection
+
# runs right away if not in transaction
connection.on_commit(lambda: run_task_manager.delay())
@contextlib.contextmanager
def task_manager_bulk_reschedule():
- """Context manager to avoid submitting task multiple times.
- """
+ """Context manager to avoid submitting task multiple times."""
try:
previous_flag = getattr(_task_manager, 'bulk_reschedule', False)
previous_value = getattr(_task_manager, 'needs_scheduling', False)
@@ -793,9 +814,9 @@ def schedule_task_manager():
@contextlib.contextmanager
def ignore_inventory_group_removal():
- '''
+ """
Context manager to ignore moving groups/hosts when group is deleted.
- '''
+ """
try:
previous_value = getattr(_inventory_updates, 'is_removing', False)
_inventory_updates.is_removing = True
@@ -806,12 +827,12 @@ def ignore_inventory_group_removal():
@contextlib.contextmanager
def set_environ(**environ):
- '''
+ """
Temporarily set the process environment variables.
>>> with set_environ(FOO='BAR'):
... assert os.environ['FOO'] == 'BAR'
- '''
+ """
old_environ = os.environ.copy()
try:
os.environ.update(environ)
@@ -823,14 +844,14 @@ def set_environ(**environ):
@memoize()
def check_proot_installed():
- '''
+ """
Check that proot is installed.
- '''
+ """
from django.conf import settings
+
cmd = [getattr(settings, 'AWX_PROOT_CMD', 'bwrap'), '--version']
try:
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
proc.communicate()
return bool(proc.returncode == 0)
except (OSError, ValueError) as e:
@@ -840,17 +861,18 @@ def check_proot_installed():
def build_proot_temp_dir():
- '''
+ """
Create a temporary directory for proot to use.
- '''
+ """
from django.conf import settings
+
path = tempfile.mkdtemp(prefix='awx_proot_', dir=settings.AWX_PROOT_BASE_PATH)
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
return path
def wrap_args_with_proot(args, cwd, **kwargs):
- '''
+ """
Wrap existing command line with proot to restrict access to:
- AWX_PROOT_BASE_PATH (generally, /tmp) (except for own /tmp files)
For non-isolated nodes:
@@ -858,14 +880,14 @@ def wrap_args_with_proot(args, cwd, **kwargs):
- /var/lib/awx (except for current project)
- /var/log/tower
- /var/log/supervisor
- '''
+ """
from django.conf import settings
+
cwd = os.path.realpath(cwd)
new_args = [getattr(settings, 'AWX_PROOT_CMD', 'bwrap'), '--unshare-pid', '--dev-bind', '/', '/', '--proc', '/proc']
hide_paths = [settings.AWX_PROOT_BASE_PATH]
if not kwargs.get('isolated'):
- hide_paths.extend(['/etc/tower', '/var/lib/awx', '/var/log', '/etc/ssh',
- settings.PROJECTS_ROOT, settings.JOBOUTPUT_ROOT])
+ hide_paths.extend(['/etc/tower', '/var/lib/awx', '/var/log', '/etc/ssh', settings.PROJECTS_ROOT, settings.JOBOUTPUT_ROOT])
hide_paths.extend(getattr(settings, 'AWX_PROOT_HIDE_PATHS', None) or [])
for path in sorted(set(hide_paths)):
if not os.path.exists(path):
@@ -878,18 +900,14 @@ def wrap_args_with_proot(args, cwd, **kwargs):
handle, new_path = tempfile.mkstemp(dir=kwargs['proot_temp_dir'])
os.close(handle)
os.chmod(new_path, stat.S_IRUSR | stat.S_IWUSR)
- new_args.extend(['--bind', '%s' %(new_path,), '%s' % (path,)])
+ new_args.extend(['--bind', '%s' % (new_path,), '%s' % (path,)])
if kwargs.get('isolated'):
show_paths = [kwargs['private_data_dir']]
elif 'private_data_dir' in kwargs:
show_paths = [cwd, kwargs['private_data_dir']]
else:
show_paths = [cwd]
- for venv in (
- settings.ANSIBLE_VENV_PATH,
- settings.AWX_VENV_PATH,
- kwargs.get('proot_custom_virtualenv')
- ):
+ for venv in (settings.ANSIBLE_VENV_PATH, settings.AWX_VENV_PATH, kwargs.get('proot_custom_virtualenv')):
if venv:
new_args.extend(['--ro-bind', venv, venv])
show_paths.extend(getattr(settings, 'AWX_PROOT_SHOW_PATHS', None) or [])
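Putting the flags above together, the wrapper produces an argv of roughly this shape (paths hypothetical):

    # --bind masks a sensitive path with an empty temp file;
    # --ro-bind re-exposes a virtualenv read-only.
    new_args = [
        'bwrap', '--unshare-pid', '--dev-bind', '/', '/', '--proc', '/proc',
        '--bind', '/tmp/awx_proot_xyz/tmpabc', '/etc/tower',
        '--ro-bind', '/var/lib/awx/venv/ansible', '/var/lib/awx/venv/ansible',
    ] + ['ansible-playbook', 'site.yml']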
@@ -913,9 +931,9 @@ def wrap_args_with_proot(args, cwd, **kwargs):
def get_pk_from_dict(_dict, key):
- '''
+ """
Helper for obtaining a pk from user data dict or None if not present.
- '''
+ """
try:
val = _dict[key]
if isinstance(val, object) and hasattr(val, 'id'):
@@ -966,6 +984,7 @@ def get_current_apps():
def get_custom_venv_choices(custom_paths=None):
from django.conf import settings
+
custom_paths = custom_paths or settings.CUSTOM_VENV_PATHS
all_venv_paths = [settings.BASE_VENV_PATH] + custom_paths
custom_venv_choices = []
@@ -973,13 +992,15 @@ def get_custom_venv_choices(custom_paths=None):
for custom_venv_path in all_venv_paths:
try:
if os.path.exists(custom_venv_path):
- custom_venv_choices.extend([
- os.path.join(custom_venv_path, x, '')
- for x in os.listdir(custom_venv_path)
- if x != 'awx' and
- os.path.isdir(os.path.join(custom_venv_path, x)) and
- os.path.exists(os.path.join(custom_venv_path, x, 'bin', 'activate'))
- ])
+ custom_venv_choices.extend(
+ [
+ os.path.join(custom_venv_path, x, '')
+ for x in os.listdir(custom_venv_path)
+ if x != 'awx'
+ and os.path.isdir(os.path.join(custom_venv_path, x))
+ and os.path.exists(os.path.join(custom_venv_path, x, 'bin', 'activate'))
+ ]
+ )
except Exception:
logger.exception("Encountered an error while discovering custom virtual environments.")
return custom_venv_choices
@@ -1002,20 +1023,19 @@ def extract_ansible_vars(extra_vars):
def get_search_fields(model):
fields = []
for field in model._meta.fields:
- if field.name in ('username', 'first_name', 'last_name', 'email',
- 'name', 'description'):
+ if field.name in ('username', 'first_name', 'last_name', 'email', 'name', 'description'):
fields.append(field.name)
return fields
def has_model_field_prefetched(model_obj, field_name):
# NOTE: Update this function if django internal implementation changes.
- return getattr(getattr(model_obj, field_name, None),
- 'prefetch_cache_name', '') in getattr(model_obj, '_prefetched_objects_cache', {})
+ return getattr(getattr(model_obj, field_name, None), 'prefetch_cache_name', '') in getattr(model_obj, '_prefetched_objects_cache', {})
def get_external_account(user):
from django.conf import settings
+
account_type = None
if getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
try:
@@ -1023,20 +1043,20 @@ def get_external_account(user):
account_type = "ldap"
except AttributeError:
pass
- if (getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None) or
- getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None) or
- getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None) or
- getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None) or
- getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)) and user.social_auth.all():
+ if (
+ getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None)
+ or getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None)
+ or getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None)
+ or getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None)
+ or getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)
+ ) and user.social_auth.all():
account_type = "social"
- if (getattr(settings, 'RADIUS_SERVER', None) or
- getattr(settings, 'TACACSPLUS_HOST', None)) and user.enterprise_auth.all():
+ if (getattr(settings, 'RADIUS_SERVER', None) or getattr(settings, 'TACACSPLUS_HOST', None)) and user.enterprise_auth.all():
account_type = "enterprise"
return account_type
class classproperty:
-
def __init__(self, fget=None, fset=None, fdel=None, doc=None):
self.fget = fget
self.fset = fset
@@ -1058,10 +1078,7 @@ def create_temporary_fifo(data):
path = os.path.join(tempfile.mkdtemp(), next(tempfile._get_candidate_names()))
os.mkfifo(path, stat.S_IRUSR | stat.S_IWUSR)
- threading.Thread(
- target=lambda p, d: open(p, 'wb').write(d),
- args=(path, data)
- ).start()
+ threading.Thread(target=lambda p, d: open(p, 'wb').write(d), args=(path, data)).start()
return path
@@ -1071,7 +1088,7 @@ def truncate_stdout(stdout, size):
if size <= 0 or len(stdout) <= size:
return stdout
- stdout = stdout[:(size - 1)] + u'\u2026'
+ stdout = stdout[: (size - 1)] + u'\u2026'
set_count, reset_count = 0, 0
for m in ANSI_SGR_PATTERN.finditer(stdout):
if m.group() == u'\u001b[0m':
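The reformatted slice keeps the original behavior: retain size - 1 characters and append a Unicode ellipsis, e.g.:

    truncate_stdout(u'hello world', 6)  # -> u'hello\u2026' (6 characters total)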
@@ -1092,8 +1109,7 @@ def deepmerge(a, b):
{'first': {'all_rows': {'fail': 'cat', 'number': '5', 'pass': 'dog'}}}
"""
if isinstance(a, dict) and isinstance(b, dict):
- return dict([(k, deepmerge(a.get(k), b.get(k)))
- for k in set(a.keys()).union(b.keys())])
+ return dict([(k, deepmerge(a.get(k), b.get(k))) for k in set(a.keys()).union(b.keys())])
elif b is None:
return a
else:
diff --git a/awx/main/utils/db.py b/awx/main/utils/db.py
index f91f2d7b65..5574d4ea91 100644
--- a/awx/main/utils/db.py
+++ b/awx/main/utils/db.py
@@ -7,10 +7,14 @@ from itertools import chain
def get_all_field_names(model):
# Implements compatibility with _meta.get_all_field_names
# See: https://docs.djangoproject.com/en/1.11/ref/models/meta/#migrating-from-the-old-api
- return list(set(chain.from_iterable(
- (field.name, field.attname) if hasattr(field, 'attname') else (field.name,)
- for field in model._meta.get_fields()
- # For complete backwards compatibility, you may want to exclude
- # GenericForeignKey from the results.
- if not (field.many_to_one and field.related_model is None)
- )))
+ return list(
+ set(
+ chain.from_iterable(
+ (field.name, field.attname) if hasattr(field, 'attname') else (field.name,)
+ for field in model._meta.get_fields()
+ # For complete backwards compatibility, you may want to exclude
+ # GenericForeignKey from the results.
+ if not (field.many_to_one and field.related_model is None)
+ )
+ )
+ )
diff --git a/awx/main/utils/deletion.py b/awx/main/utils/deletion.py
index 8fc78d540f..d17bc0b710 100644
--- a/awx/main/utils/deletion.py
+++ b/awx/main/utils/deletion.py
@@ -1,6 +1,8 @@
from django.contrib.contenttypes.models import ContentType
from django.db.models.deletion import (
- DO_NOTHING, Collector, get_candidate_relations_to_delete,
+ DO_NOTHING,
+ Collector,
+ get_candidate_relations_to_delete,
)
from collections import Counter, OrderedDict
from django.db import transaction
@@ -12,17 +14,18 @@ def bulk_related_objects(field, objs, using):
"""
Return all objects related to ``objs`` via this ``GenericRelation``.
"""
- return field.remote_field.model._base_manager.db_manager(using).filter(**{
- "%s__pk" % field.content_type_field_name: ContentType.objects.db_manager(using).get_for_model(
- field.model, for_concrete_model=field.for_concrete_model).pk,
- "%s__in" % field.object_id_field_name: list(objs.values_list('pk', flat=True))
- })
+ return field.remote_field.model._base_manager.db_manager(using).filter(
+ **{
+ "%s__pk"
+ % field.content_type_field_name: ContentType.objects.db_manager(using).get_for_model(field.model, for_concrete_model=field.for_concrete_model).pk,
+ "%s__in" % field.object_id_field_name: list(objs.values_list('pk', flat=True)),
+ }
+ )
def pre_delete(qs):
# taken from .delete method in django.db.models.query.py
- assert qs.query.can_filter(), \
- "Cannot use 'limit' or 'offset' with delete."
+ assert qs.query.can_filter(), "Cannot use 'limit' or 'offset' with delete."
if qs._fields is not None:
raise TypeError("Cannot call delete() after .values() or .values_list()")
@@ -42,7 +45,6 @@ def pre_delete(qs):
class AWXCollector(Collector):
-
def add(self, objs, source=None, nullable=False, reverse_dependency=False):
"""
Add 'objs' to the collection of objects to be deleted. If the call is
@@ -62,8 +64,7 @@ class AWXCollector(Collector):
if source is not None and not nullable:
if reverse_dependency:
source, model = model, source
- self.dependencies.setdefault(
- source._meta.concrete_model, set()).add(model._meta.concrete_model)
+ self.dependencies.setdefault(source._meta.concrete_model, set()).add(model._meta.concrete_model)
return objs
def add_field_update(self, field, value, objs):
@@ -78,8 +79,7 @@ class AWXCollector(Collector):
self.field_updates[model].setdefault((field, value), [])
self.field_updates[model][(field, value)].append(objs)
- def collect(self, objs, source=None, nullable=False, collect_related=True,
- source_attr=None, reverse_dependency=False, keep_parents=False):
+ def collect(self, objs, source=None, nullable=False, collect_related=True, source_attr=None, reverse_dependency=False, keep_parents=False):
"""
Add 'objs' to the collection of objects to be deleted as well as all
parent instances. 'objs' must be a homogeneous iterable collection of
@@ -104,8 +104,7 @@ class AWXCollector(Collector):
if self.can_fast_delete(objs):
self.fast_deletes.append(objs)
return
- new_objs = self.add(objs, source, nullable,
- reverse_dependency=reverse_dependency)
+ new_objs = self.add(objs, source, nullable, reverse_dependency=reverse_dependency)
if not new_objs.exists():
return
@@ -117,10 +116,8 @@ class AWXCollector(Collector):
concrete_model = model._meta.concrete_model
for ptr in concrete_model._meta.parents.keys():
if ptr:
- parent_objs = ptr.objects.filter(pk__in = new_objs.values_list('pk', flat=True))
- self.collect(parent_objs, source=model,
- collect_related=False,
- reverse_dependency=True)
+ parent_objs = ptr.objects.filter(pk__in=new_objs.values_list('pk', flat=True))
+ self.collect(parent_objs, source=model, collect_related=False, reverse_dependency=True)
if collect_related:
parents = model._meta.parents
for related in get_candidate_relations_to_delete(model._meta):
@@ -161,8 +158,7 @@ class AWXCollector(Collector):
for (field, value), instances in instances_for_fieldvalues.items():
for inst in instances:
query = sql.UpdateQuery(model)
- query.update_batch(inst.values_list('pk', flat=True),
- {field.name: value}, self.using)
+ query.update_batch(inst.values_list('pk', flat=True), {field.name: value}, self.using)
# fast deletes
for qs in self.fast_deletes:
count = qs._raw_delete(using=self.using)
diff --git a/awx/main/utils/encryption.py b/awx/main/utils/encryption.py
index 3725243a8e..4272e3e07f 100644
--- a/awx/main/utils/encryption.py
+++ b/awx/main/utils/encryption.py
@@ -10,27 +10,23 @@ from cryptography.hazmat.backends import default_backend
from django.utils.encoding import smart_str, smart_bytes
-__all__ = ['get_encryption_key',
- 'encrypt_field', 'decrypt_field',
- 'encrypt_value', 'decrypt_value',
- 'encrypt_dict']
+__all__ = ['get_encryption_key', 'encrypt_field', 'decrypt_field', 'encrypt_value', 'decrypt_value', 'encrypt_dict']
logger = logging.getLogger('awx.main.utils.encryption')
class Fernet256(Fernet):
- '''Not techincally Fernet, but uses the base of the Fernet spec and uses AES-256-CBC
+ """Not techincally Fernet, but uses the base of the Fernet spec and uses AES-256-CBC
instead of AES-128-CBC. All other functionality remain identical.
- '''
+ """
+
def __init__(self, key, backend=None):
if backend is None:
backend = default_backend()
key = base64.urlsafe_b64decode(key)
if len(key) != 64:
- raise ValueError(
- "Fernet key must be 64 url-safe base64-encoded bytes."
- )
+ raise ValueError("Fernet key must be 64 url-safe base64-encoded bytes.")
self._signing_key = key[:32]
self._encryption_key = key[32:]
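Per the constructor above, a key is 64 url-safe base64-encoded bytes split into 32-byte signing and encryption halves; a usage sketch assuming the standard encrypt/decrypt API inherited from cryptography's Fernet:

    import base64
    import os

    key = base64.urlsafe_b64encode(os.urandom(64))  # 64 raw bytes, not the usual 32
    f = Fernet256(key)
    token = f.encrypt(b'secret-value')
    assert f.decrypt(token) == b'secret-value'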
@@ -38,15 +34,16 @@ class Fernet256(Fernet):
def get_encryption_key(field_name, pk=None, secret_key=None):
- '''
+ """
Generate key for encrypted password based on field name,
``settings.SECRET_KEY``, and instance pk (if available).
:param pk: (optional) the primary key of the model object;
can be omitted in situations where you're encrypting a setting
that is not database-persistent (like a read-only setting)
- '''
+ """
from django.conf import settings
+
h = hashlib.sha512()
h.update(smart_bytes(secret_key or settings.SECRET_KEY))
if pk is not None:
@@ -100,9 +97,9 @@ def encrypt_field(instance, field_name, ask=False, subfield=None, secret_key=Non
# 2. Decrypting them using the *old* SECRET_KEY
# 3. Storing newly encrypted values using the *newly generated* SECRET_KEY
#
- '''
+ """
Return content of the given instance and field name encrypted.
- '''
+ """
try:
value = instance.inputs[field_name]
except (TypeError, AttributeError):
@@ -117,11 +114,7 @@ def encrypt_field(instance, field_name, ask=False, subfield=None, secret_key=Non
value = smart_str(value)
if not value or value.startswith('$encrypted$') or (ask and value == 'ASK'):
return value
- key = get_encryption_key(
- field_name,
- getattr(instance, 'pk', None),
- secret_key=secret_key
- )
+ key = get_encryption_key(field_name, getattr(instance, 'pk', None), secret_key=secret_key)
f = Fernet256(key)
encrypted = f.encrypt(smart_bytes(value))
b64data = smart_str(base64.b64encode(encrypted))
@@ -130,11 +123,11 @@ def encrypt_field(instance, field_name, ask=False, subfield=None, secret_key=Non
def decrypt_value(encryption_key, value):
- raw_data = value[len('$encrypted$'):]
+ raw_data = value[len('$encrypted$') :]
# If the encrypted string contains a UTF8 marker, discard it
utf8 = raw_data.startswith('UTF8$')
if utf8:
- raw_data = raw_data[len('UTF8$'):]
+ raw_data = raw_data[len('UTF8$') :]
algo, b64data = raw_data.split('$', 1)
if algo != 'AESCBC':
raise ValueError('unsupported algorithm: %s' % algo)
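The slicing above implies the stored envelope format, roughly $encrypted$[UTF8$]AESCBC$<base64>; walking it by hand (payload hypothetical):

    value = '$encrypted$UTF8$AESCBC$aGVsbG8='   # illustrative payload
    raw = value[len('$encrypted$'):]
    if raw.startswith('UTF8$'):
        raw = raw[len('UTF8$'):]
    algo, b64data = raw.split('$', 1)
    assert algo == 'AESCBC'                     # only supported algorithm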
@@ -145,9 +138,9 @@ def decrypt_value(encryption_key, value):
def decrypt_field(instance, field_name, subfield=None, secret_key=None):
- '''
+ """
Return content of the given instance and field name decrypted.
- '''
+ """
try:
value = instance.inputs[field_name]
except (TypeError, AttributeError):
@@ -160,11 +153,7 @@ def decrypt_field(instance, field_name, subfield=None, secret_key=None):
value = smart_str(value)
if not value or not value.startswith('$encrypted$'):
return value
- key = get_encryption_key(
- field_name,
- getattr(instance, 'pk', None),
- secret_key=secret_key
- )
+ key = get_encryption_key(field_name, getattr(instance, 'pk', None), secret_key=secret_key)
try:
return smart_str(decrypt_value(key, value))
@@ -176,16 +165,16 @@ def decrypt_field(instance, field_name, subfield=None, secret_key=None):
instance.__class__.__name__,
getattr(instance, 'pk', None),
field_name,
- exc_info=True
+ exc_info=True,
)
raise
def encrypt_dict(data, fields):
- '''
+ """
Encrypts all of the dictionary values in `data` under the keys in `fields`
in-place operation on `data`
- '''
+ """
encrypt_fields = set(data.keys()).intersection(fields)
for key in encrypt_fields:
data[key] = encrypt_value(data[key])
diff --git a/awx/main/utils/external_logging.py b/awx/main/utils/external_logging.py
index 3c281719ff..96acc371ea 100644
--- a/awx/main/utils/external_logging.py
+++ b/awx/main/utils/external_logging.py
@@ -26,15 +26,17 @@ def construct_rsyslog_conf_template(settings=settings):
max_bytes = settings.MAX_EVENT_RES_DATA
if settings.LOG_AGGREGATOR_RSYSLOGD_DEBUG:
parts.append('$DebugLevel 2')
- parts.extend([
- '$WorkDirectory /var/lib/awx/rsyslog',
- f'$MaxMessageSize {max_bytes}',
- '$IncludeConfig /var/lib/awx/rsyslog/conf.d/*.conf',
- f'main_queue(queue.spoolDirectory="{spool_directory}" queue.maxdiskspace="{max_disk_space}g" queue.type="Disk" queue.filename="awx-external-logger-backlog")', # noqa
- 'module(load="imuxsock" SysSock.Use="off")',
- 'input(type="imuxsock" Socket="' + settings.LOGGING['handlers']['external_logger']['address'] + '" unlink="on" RateLimit.Burst="0")',
- 'template(name="awx" type="string" string="%rawmsg-after-pri%")',
- ])
+ parts.extend(
+ [
+ '$WorkDirectory /var/lib/awx/rsyslog',
+ f'$MaxMessageSize {max_bytes}',
+ '$IncludeConfig /var/lib/awx/rsyslog/conf.d/*.conf',
+ f'main_queue(queue.spoolDirectory="{spool_directory}" queue.maxdiskspace="{max_disk_space}g" queue.type="Disk" queue.filename="awx-external-logger-backlog")', # noqa
+ 'module(load="imuxsock" SysSock.Use="off")',
+ 'input(type="imuxsock" Socket="' + settings.LOGGING['handlers']['external_logger']['address'] + '" unlink="on" RateLimit.Burst="0")',
+ 'template(name="awx" type="string" string="%rawmsg-after-pri%")',
+ ]
+ )
def escape_quotes(x):
return x.replace('"', '\\"')
@@ -43,7 +45,7 @@ def construct_rsyslog_conf_template(settings=settings):
parts.append('action(type="omfile" file="/dev/null")') # rsyslog needs *at least* one valid action to start
tmpl = '\n'.join(parts)
return tmpl
-
+
if protocol.startswith('http'):
scheme = 'https'
# urlparse requires '//' to be provided if scheme is not specified
@@ -75,7 +77,7 @@ def construct_rsyslog_conf_template(settings=settings):
f'skipverifyhost="{skip_verify}"',
'action.resumeRetryCount="-1"',
'template="awx"',
- f'action.resumeInterval="{timeout}"'
+ f'action.resumeInterval="{timeout}"',
]
if error_log_file:
params.append(f'errorfile="{error_log_file}"')
diff --git a/awx/main/utils/filters.py b/awx/main/utils/filters.py
index 279a6a30e4..002ab957bd 100644
--- a/awx/main/utils/filters.py
+++ b/awx/main/utils/filters.py
@@ -107,7 +107,6 @@ class ExternalLoggerEnabled(Filter):
class DynamicLevelFilter(Filter):
-
def filter(self, record):
"""Filters out logs that have a level below the threshold defined
by the database setting LOG_AGGREGATOR_LEVEL
@@ -132,10 +131,10 @@ def string_to_type(t):
elif t == u'false':
return False
- if re.search(r'^[-+]?[0-9]+$',t):
+ if re.search(r'^[-+]?[0-9]+$', t):
return int(t)
- if re.search(r'^[-+]?[0-9]+\.[0-9]+$',t):
+ if re.search(r'^[-+]?[0-9]+\.[0-9]+$', t):
return float(t)
return t
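The coercion order above (booleans, then int, then float, then passthrough) gives:

    string_to_type(u'true')    # -> True
    string_to_type(u'-42')     # -> -42
    string_to_type(u'3.14')    # -> 3.14
    string_to_type(u'3.14.0')  # -> u'3.14.0' (matches neither regex, returned as-is)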
@@ -158,12 +157,13 @@ class SmartFilter(object):
search_kwargs = self._expand_search(k, v)
if search_kwargs:
kwargs.update(search_kwargs)
- q = reduce(lambda x, y: x | y, [models.Q(**{u'%s__icontains' % _k:_v}) for _k, _v in kwargs.items()])
+ q = reduce(lambda x, y: x | y, [models.Q(**{u'%s__icontains' % _k: _v}) for _k, _v in kwargs.items()])
self.result = Host.objects.filter(q)
else:
# detect loops and restrict access to sensitive fields
# this import is intentional here to avoid a circular import
from awx.api.filters import FieldLookupBackend
+
FieldLookupBackend().get_field_from_lookup(Host, k)
kwargs[k] = v
self.result = Host.objects.filter(**kwargs)
@@ -186,8 +186,10 @@ class SmartFilter(object):
accomplished using an allowed list or introspecting the
relationship referred to, to see if it's a jsonb type.
'''
+
def _json_path_to_contains(self, k, v):
from awx.main.fields import JSONBField # avoid a circular import
+
if not k.startswith(SmartFilter.SEARCHABLE_RELATIONSHIP):
v = self.strip_quotes_traditional_logic(v)
return (k, v)
@@ -198,14 +200,9 @@ class SmartFilter(object):
if match == '__exact':
# appending __exact is basically a no-op, because that's
# what the query means if you leave it off
- k = k[:-len(match)]
+ k = k[: -len(match)]
else:
- logger.error(
- 'host_filter:{} does not support searching with {}'.format(
- SmartFilter.SEARCHABLE_RELATIONSHIP,
- match
- )
- )
+ logger.error('host_filter:{} does not support searching with {}'.format(SmartFilter.SEARCHABLE_RELATIONSHIP, match))
# Strip off leading relationship key
if k.startswith(SmartFilter.SEARCHABLE_RELATIONSHIP + '__'):
@@ -270,7 +267,7 @@ class SmartFilter(object):
# ="something"
if t_len > (v_offset + 2) and t[v_offset] == "\"" and t[v_offset + 2] == "\"":
v = u'"' + str(t[v_offset + 1]) + u'"'
- #v = t[v_offset + 1]
+ # v = t[v_offset + 1]
# empty ""
elif t_len > (v_offset + 1):
v = u""
@@ -305,42 +302,38 @@ class SmartFilter(object):
search_kwargs[k] = v
return search_kwargs
-
class BoolBinOp(object):
def __init__(self, t):
self.result = None
i = 2
while i < len(t[0]):
- '''
+ """
Do NOT observe self.result. It will cause the sql query to be executed.
We do not want that. We only want to build the query.
- '''
+ """
if isinstance(self.result, type(None)):
self.result = t[0][0].result
right = t[0][i].result
self.result = self.execute_logic(self.result, right)
i += 2
-
class BoolAnd(BoolBinOp):
def execute_logic(self, left, right):
return left & right
-
class BoolOr(BoolBinOp):
def execute_logic(self, left, right):
return left | right
-
@classmethod
def query_from_string(cls, filter_string):
- '''
+ """
TODO:
* handle values with " via: a.b.c.d="hello\"world"
* handle keys with " via: a.\"b.c="yeah"
* handle key with __ in it
- '''
+ """
filter_string_raw = filter_string
filter_string = str(filter_string)
@@ -351,13 +344,16 @@ class SmartFilter(object):
atom_quoted = Literal('"') + Optional(atom_inside_quotes) + Literal('"')
EQUAL = Literal('=')
- grammar = ((atom_quoted | atom) + EQUAL + Optional((atom_quoted | atom)))
+ grammar = (atom_quoted | atom) + EQUAL + Optional((atom_quoted | atom))
grammar.setParseAction(cls.BoolOperand)
- boolExpr = infixNotation(grammar, [
- ("and", 2, opAssoc.LEFT, cls.BoolAnd),
- ("or", 2, opAssoc.LEFT, cls.BoolOr),
- ])
+ boolExpr = infixNotation(
+ grammar,
+ [
+ ("and", 2, opAssoc.LEFT, cls.BoolAnd),
+ ("or", 2, opAssoc.LEFT, cls.BoolOr),
+ ],
+ )
try:
res = boolExpr.parseString('(' + filter_string + ')')
@@ -370,9 +366,7 @@ class SmartFilter(object):
raise RuntimeError("Parsing the filter_string %s went terribly wrong" % filter_string)
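A self-contained pyparsing sketch of the same infixNotation precedence parse, with a much simpler term grammar than SmartFilter's:

    from pyparsing import Literal, Word, alphanums, infixNotation, opAssoc

    term = Word(alphanums + '_') + Literal('=') + Word(alphanums + '_')
    expr = infixNotation(term, [
        ("and", 2, opAssoc.LEFT),
        ("or", 2, opAssoc.LEFT),
    ])
    print(expr.parseString('(name=web1 or name=web2) and group=prod'))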
-
class DefaultCorrelationId(CorrelationId):
-
def filter(self, record):
guid = GuidMiddleware.get_guid() or '-'
if MODE == 'development':
diff --git a/awx/main/utils/formatters.py b/awx/main/utils/formatters.py
index 8ebd1fb0c4..ebc873c657 100644
--- a/awx/main/utils/formatters.py
+++ b/awx/main/utils/formatters.py
@@ -25,9 +25,10 @@ class JobLifeCycleFormatter(json_log_formatter.JSONFormatter):
class TimeFormatter(logging.Formatter):
- '''
+ """
Custom log formatter used for inventory imports
- '''
+ """
+
def __init__(self, start_time=None, **kwargs):
if start_time is None:
self.job_start = now()
@@ -81,10 +82,31 @@ class LogstashFormatterBase(logging.Formatter):
# The list contains all the attributes listed in
# http://docs.python.org/library/logging.html#logrecord-attributes
skip_list = (
- 'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
- 'funcName', 'id', 'levelname', 'levelno', 'lineno', 'module',
- 'msecs', 'msecs', 'message', 'msg', 'name', 'pathname', 'process',
- 'processName', 'relativeCreated', 'thread', 'threadName', 'extra')
+ 'args',
+ 'asctime',
+ 'created',
+ 'exc_info',
+ 'exc_text',
+ 'filename',
+ 'funcName',
+ 'id',
+ 'levelname',
+ 'levelno',
+ 'lineno',
+ 'module',
+ 'msecs',
+ 'msecs',
+ 'message',
+ 'msg',
+ 'name',
+ 'pathname',
+ 'process',
+ 'processName',
+ 'relativeCreated',
+ 'thread',
+ 'threadName',
+ 'extra',
+ )
easy_types = (str, bool, dict, float, int, list, type(None))
@@ -119,25 +141,21 @@ class LogstashFormatterBase(logging.Formatter):
class LogstashFormatter(LogstashFormatterBase):
-
def __init__(self, *args, **kwargs):
self.cluster_host_id = settings.CLUSTER_HOST_ID
self.tower_uuid = None
- uuid = (
- getattr(settings, 'LOG_AGGREGATOR_TOWER_UUID', None) or
- getattr(settings, 'INSTALL_UUID', None)
- )
+ uuid = getattr(settings, 'LOG_AGGREGATOR_TOWER_UUID', None) or getattr(settings, 'INSTALL_UUID', None)
if uuid:
self.tower_uuid = uuid
super(LogstashFormatter, self).__init__(*args, **kwargs)
def reformat_data_for_log(self, raw_data, kind=None):
- '''
+ """
Process dictionaries from various contexts (job events, activity stream
changes, etc.) to give meaningful information
Output a dictionary which will be passed in logstash or syslog format
to the logging receiver
- '''
+ """
if kind == 'activity_stream':
try:
raw_data['changes'] = json.loads(raw_data.get('changes', '{}'))
@@ -191,6 +209,7 @@ class LogstashFormatter(LogstashFormatterBase):
data_for_log['host_name'] = raw_data.get('host_name')
data_for_log['job_id'] = raw_data.get('job_id')
elif kind == 'performance':
+
def convert_to_type(t, val):
if t is float:
val = val[:-1] if val.endswith('s') else val
@@ -216,7 +235,7 @@ class LogstashFormatter(LogstashFormatterBase):
(float, 'X-API-Time'), # may end with an 's' "0.33s"
(float, 'X-API-Total-Time'),
(int, 'X-API-Query-Count'),
- (float, 'X-API-Query-Time'), # may also end with an 's'
+ (float, 'X-API-Query-Time'), # may also end with an 's'
(str, 'X-API-Node'),
]
data_for_log['x_api'] = {k: convert_to_type(t, response[k]) for (t, k) in headers if k in response}
@@ -236,7 +255,7 @@ class LogstashFormatter(LogstashFormatterBase):
def get_extra_fields(self, record):
fields = super(LogstashFormatter, self).get_extra_fields(record)
if record.name.startswith('awx.analytics'):
- log_kind = record.name[len('awx.analytics.'):]
+ log_kind = record.name[len('awx.analytics.') :]
fields = self.reformat_data_for_log(fields, kind=log_kind)
# General AWX metadata
fields['cluster_host_id'] = self.cluster_host_id
@@ -252,7 +271,6 @@ class LogstashFormatter(LogstashFormatterBase):
'@timestamp': stamp,
'message': record.getMessage(),
'host': self.host,
-
# Extra Fields
'level': record.levelname,
'logger_name': record.name,
diff --git a/awx/main/utils/handlers.py b/awx/main/utils/handlers.py
index 9ce1afabbe..19deb234b5 100644
--- a/awx/main/utils/handlers.py
+++ b/awx/main/utils/handlers.py
@@ -56,8 +56,7 @@ class SpecialInventoryHandler(logging.Handler):
as opposed to ansible-runner
"""
- def __init__(self, event_handler, cancel_callback, job_timeout, verbosity,
- start_time=None, counter=0, initial_line=0, **kwargs):
+ def __init__(self, event_handler, cancel_callback, job_timeout, verbosity, start_time=None, counter=0, initial_line=0, **kwargs):
self.event_handler = event_handler
self.cancel_callback = cancel_callback
self.job_timeout = job_timeout
@@ -89,12 +88,7 @@ class SpecialInventoryHandler(logging.Handler):
msg = self.format(record)
n_lines = len(msg.strip().split('\n')) # don't count line breaks at boundary of text
dispatch_data = dict(
- created=now().isoformat(),
- event='verbose',
- counter=self.counter,
- stdout=msg,
- start_line=self._current_line,
- end_line=self._current_line + n_lines
+ created=now().isoformat(), event='verbose', counter=self.counter, stdout=msg, start_line=self._current_line, end_line=self._current_line + n_lines
)
self._current_line += n_lines
@@ -120,10 +114,7 @@ if settings.COLOR_LOGS is True:
def format(self, record):
message = logging.StreamHandler.format(self, record)
- return '\n'.join([
- self.colorize(line, record)
- for line in message.splitlines()
- ])
+ return '\n'.join([self.colorize(line, record) for line in message.splitlines()])
level_map = {
logging.DEBUG: (None, 'green', True),
@@ -132,6 +123,7 @@ if settings.COLOR_LOGS is True:
logging.ERROR: (None, 'red', True),
logging.CRITICAL: (None, 'red', True),
}
+
except ImportError:
# logutils is only used for colored logs in the dev environment
pass
diff --git a/awx/main/utils/insights.py b/awx/main/utils/insights.py
index 67bb1e5f25..67c5a6e097 100644
--- a/awx/main/utils/insights.py
+++ b/awx/main/utils/insights.py
@@ -16,12 +16,7 @@
def filter_insights_api_response(platform_info, reports, remediations):
- severity_mapping = {
- 1: 'INFO',
- 2: 'WARN',
- 3: 'ERROR',
- 4: 'CRITICAL'
- }
+ severity_mapping = {1: 'INFO', 2: 'WARN', 3: 'ERROR', 4: 'CRITICAL'}
new_json = {
'platform_id': platform_info['id'],
@@ -29,10 +24,7 @@ def filter_insights_api_response(platform_info, reports, remediations):
'reports': [],
}
for rep in reports:
- new_report = {
- 'rule': {},
- 'maintenance_actions': remediations
- }
+ new_report = {'rule': {}, 'maintenance_actions': remediations}
rule = rep.get('rule') or {}
for k in ['description', 'summary']:
if k in rule:
diff --git a/awx/main/utils/licensing.py b/awx/main/utils/licensing.py
index 9b248536b5..c972905cb9 100644
--- a/awx/main/utils/licensing.py
+++ b/awx/main/utils/licensing.py
@@ -104,7 +104,7 @@ class Licenser(object):
license_date=0,
license_type="UNLICENSED",
product_name="Red Hat Ansible Automation Platform",
- valid_key=False
+ valid_key=False,
)
def __init__(self, **kwargs):
@@ -128,11 +128,9 @@ class Licenser(object):
else:
self._unset_attrs()
-
def _unset_attrs(self):
self._attrs = self.UNLICENSED_DATA.copy()
-
def license_from_manifest(self, manifest):
def is_appropriate_manifest_sub(sub):
if sub['pool']['activeSubscription'] is False:
@@ -162,12 +160,12 @@ class Licenser(object):
license = dict()
for sub in manifest:
if not is_appropriate_manifest_sub(sub):
- logger.warning("Subscription %s (%s) in manifest is not active or for another product" %
- (sub['pool']['productName'], sub['pool']['productId']))
+ logger.warning("Subscription %s (%s) in manifest is not active or for another product" % (sub['pool']['productName'], sub['pool']['productId']))
continue
if not _can_aggregate(sub, license):
- logger.warning("Subscription %s (%s) in manifest does not match other manifest subscriptions" %
- (sub['pool']['productName'], sub['pool']['productId']))
+ logger.warning(
+ "Subscription %s (%s) in manifest does not match other manifest subscriptions" % (sub['pool']['productName'], sub['pool']['productId'])
+ )
continue
license.setdefault('sku', sub['pool']['productId'])
@@ -179,7 +177,7 @@ class Licenser(object):
license.setdefault('satellite', False)
# Use the nearest end date
endDate = parse_date(sub['endDate'])
- currentEndDateStr = license.get('license_date', '4102462800') # 2100-01-01
+ currentEndDateStr = license.get('license_date', '4102462800') # 2100-01-01
currentEndDate = datetime.fromtimestamp(int(currentEndDateStr), timezone.utc)
if endDate < currentEndDate:
license['license_date'] = endDate.strftime('%s')
@@ -193,7 +191,6 @@ class Licenser(object):
settings.LICENSE = self._attrs
return self._attrs
-
def update(self, **kwargs):
# Update attributes of the current license.
if 'instance_count' in kwargs:
@@ -202,7 +199,6 @@ class Licenser(object):
kwargs['license_date'] = int(kwargs['license_date'])
self._attrs.update(kwargs)
-
def validate_rh(self, user, pw):
try:
host = 'https://' + str(self.config.get("server", "hostname"))
@@ -211,7 +207,7 @@ class Licenser(object):
host = None
if not host:
host = getattr(settings, 'REDHAT_CANDLEPIN_HOST', None)
-
+
if not user:
raise ValueError('subscriptions_username is required')
@@ -226,36 +222,25 @@ class Licenser(object):
return self.generate_license_options_from_entitlements(json)
return []
-
def get_rhsm_subs(self, host, user, pw):
verify = getattr(settings, 'REDHAT_CANDLEPIN_VERIFY', True)
json = []
try:
- subs = requests.get(
- '/'.join([host, 'subscription/users/{}/owners'.format(user)]),
- verify=verify,
- auth=(user, pw)
- )
+ subs = requests.get('/'.join([host, 'subscription/users/{}/owners'.format(user)]), verify=verify, auth=(user, pw))
except requests.exceptions.ConnectionError as error:
raise error
except OSError as error:
- raise OSError('Unable to open certificate bundle {}. Check that Ansible Tower is running on Red Hat Enterprise Linux.'.format(verify)) from error # noqa
+ raise OSError(
+ 'Unable to open certificate bundle {}. Check that Ansible Tower is running on Red Hat Enterprise Linux.'.format(verify)
+ ) from error # noqa
subs.raise_for_status()
for sub in subs.json():
- resp = requests.get(
- '/'.join([
- host,
- 'subscription/owners/{}/pools/?match=*tower*'.format(sub['key'])
- ]),
- verify=verify,
- auth=(user, pw)
- )
+ resp = requests.get('/'.join([host, 'subscription/owners/{}/pools/?match=*tower*'.format(sub['key'])]), verify=verify, auth=(user, pw))
resp.raise_for_status()
json.extend(resp.json())
return json
-
def get_satellite_subs(self, host, user, pw):
port = None
try:
@@ -268,25 +253,20 @@ class Licenser(object):
host = ':'.join([host, port])
json = []
try:
- orgs = requests.get(
- '/'.join([host, 'katello/api/organizations']),
- verify=verify,
- auth=(user, pw)
- )
+ orgs = requests.get('/'.join([host, 'katello/api/organizations']), verify=verify, auth=(user, pw))
except requests.exceptions.ConnectionError as error:
raise error
except OSError as error:
- raise OSError('Unable to open certificate bundle {}. Check that Ansible Tower is running on Red Hat Enterprise Linux.'.format(verify)) from error # noqa
+ raise OSError(
+ 'Unable to open certificate bundle {}. Check that Ansible Tower is running on Red Hat Enterprise Linux.'.format(verify)
+ ) from error # noqa
orgs.raise_for_status()
-
+
for org in orgs.json()['results']:
resp = requests.get(
- '/'.join([
- host,
- '/katello/api/organizations/{}/subscriptions/?search=Red Hat Ansible Automation'.format(org['id'])
- ]),
+ '/'.join([host, '/katello/api/organizations/{}/subscriptions/?search=Red Hat Ansible Automation'.format(org['id'])]),
verify=verify,
- auth=(user, pw)
+ auth=(user, pw),
)
resp.raise_for_status()
results = resp.json()['results']
@@ -307,13 +287,11 @@ class Licenser(object):
json.append(license)
return json
-
def is_appropriate_sat_sub(self, sub):
if 'Red Hat Ansible Automation' not in sub['subscription_name']:
return False
return True
-
def is_appropriate_sub(self, sub):
if sub['activeSubscription'] is False:
return False
@@ -323,9 +301,9 @@ class Licenser(object):
return True
return False
-
def generate_license_options_from_entitlements(self, json):
from dateutil.parser import parse
+
ValidSub = collections.namedtuple('ValidSub', 'sku name support_level end_date trial quantity pool_id satellite')
valid_subs = []
for sub in json:
@@ -363,9 +341,7 @@ class Licenser(object):
if attr.get('name') == 'support_level':
support_level = attr.get('value')
- valid_subs.append(ValidSub(
- sku, sub['productName'], support_level, end_date, trial, quantity, pool_id, satellite
- ))
+ valid_subs.append(ValidSub(sku, sub['productName'], support_level, end_date, trial, quantity, pool_id, satellite))
if valid_subs:
licenses = []
@@ -378,40 +354,27 @@ class Licenser(object):
if sub.trial:
license._attrs['trial'] = True
license._attrs['license_type'] = 'trial'
- license._attrs['instance_count'] = min(
- MAX_INSTANCES, license._attrs['instance_count']
- )
+ license._attrs['instance_count'] = min(MAX_INSTANCES, license._attrs['instance_count'])
human_instances = license._attrs['instance_count']
if human_instances == MAX_INSTANCES:
human_instances = 'Unlimited'
- subscription_name = re.sub(
- r' \([\d]+ Managed Nodes',
- ' ({} Managed Nodes'.format(human_instances),
- sub.name
- )
+ subscription_name = re.sub(r' \([\d]+ Managed Nodes', ' ({} Managed Nodes'.format(human_instances), sub.name)
license._attrs['subscription_name'] = subscription_name
license._attrs['satellite'] = satellite
license._attrs['valid_key'] = True
- license.update(
- license_date=int(sub.end_date.strftime('%s'))
- )
- license.update(
- pool_id=sub.pool_id
- )
+ license.update(license_date=int(sub.end_date.strftime('%s')))
+ license.update(pool_id=sub.pool_id)
licenses.append(license._attrs.copy())
return licenses
- raise ValueError(
- 'No valid Red Hat Ansible Automation subscription could be found for this account.' # noqa
- )
-
+ raise ValueError('No valid Red Hat Ansible Automation subscription could be found for this account.') # noqa
def validate(self):
# Return license attributes with additional validation info.
attrs = copy.deepcopy(self._attrs)
type = attrs.get('license_type', 'none')
- if (type == 'UNLICENSED' or False):
+ if type == 'UNLICENSED' or False:
attrs.update(dict(valid_key=False, compliant=False))
return attrs
attrs['valid_key'] = True
@@ -422,7 +385,7 @@ class Licenser(object):
current_instances = 0
instance_count = int(attrs.get('instance_count', 0))
attrs['current_instances'] = current_instances
- free_instances = (instance_count - current_instances)
+ free_instances = instance_count - current_instances
attrs['free_instances'] = max(0, free_instances)
license_date = int(attrs.get('license_date', 0) or 0)
diff --git a/awx/main/utils/mem_inventory.py b/awx/main/utils/mem_inventory.py
index 7f72b3b396..7e6e458cb8 100644
--- a/awx/main/utils/mem_inventory.py
+++ b/awx/main/utils/mem_inventory.py
@@ -12,8 +12,7 @@ from collections import OrderedDict
logger = logging.getLogger('awx.main.commands.inventory_import')
-__all__ = ['MemHost', 'MemGroup', 'MemInventory',
- 'mem_data_to_dict', 'dict_to_mem_data']
+__all__ = ['MemHost', 'MemGroup', 'MemInventory', 'mem_data_to_dict', 'dict_to_mem_data']
ipv6_port_re = re.compile(r'^\[([A-Fa-f0-9:]{3,})\]:(\d+?)$')
@@ -23,9 +22,9 @@ ipv6_port_re = re.compile(r'^\[([A-Fa-f0-9:]{3,})\]:(\d+?)$')
class MemObject(object):
- '''
+ """
Common code shared between in-memory groups and hosts.
- '''
+ """
def __init__(self, name):
assert name, 'no name'
@@ -33,9 +32,9 @@ class MemObject(object):
class MemGroup(MemObject):
- '''
+ """
In-memory representation of an inventory group.
- '''
+ """
def __init__(self, name):
super(MemGroup, self).__init__(name)
@@ -75,7 +74,7 @@ class MemGroup(MemObject):
logger.debug('Dumping tree for group "%s":', self.name)
logger.debug('- Vars: %r', self.variables)
for h in self.hosts:
- logger.debug('- Host: %s, %r', h.name, h.variables)
+ logger.debug('- Host: %s, %r', h.name, h.variables)
for g in self.children:
logger.debug('- Child: %s', g.name)
logger.debug('----')
@@ -85,9 +84,9 @@ class MemGroup(MemObject):
class MemHost(MemObject):
- '''
+ """
In-memory representation of an inventory host.
- '''
+ """
def __init__(self, name, port=None):
super(MemHost, self).__init__(name)
@@ -104,9 +103,10 @@ class MemHost(MemObject):
class MemInventory(object):
- '''
+ """
Common functions for an inventory loader from a given source.
- '''
+ """
+
def __init__(self, all_group=None, group_filter_re=None, host_filter_re=None):
if all_group:
assert isinstance(all_group, MemGroup), '{} is not MemGroup instance'.format(all_group)
@@ -122,10 +122,10 @@ class MemInventory(object):
return host
def get_host(self, name):
- '''
+ """
Return a MemHost instance from host name, creating if needed. If name
contains brackets, they will NOT be interpreted as a host pattern.
- '''
+ """
m = ipv6_port_re.match(name)
if m:
host_name = m.groups()[0]
@@ -135,8 +135,7 @@ class MemInventory(object):
try:
port = int(name.split(':')[1])
except (ValueError, UnicodeDecodeError):
- logger.warning(u'Invalid port "%s" for host "%s"',
- name.split(':')[1], host_name)
+ logger.warning(u'Invalid port "%s" for host "%s"', name.split(':')[1], host_name)
port = None
else:
host_name = name
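What ipv6_port_re (defined near the top of this file) accepts, versus the str.split(':') fallback:

    m = ipv6_port_re.match('[2001:db8::1]:2222')
    m.groups()                             # -> ('2001:db8::1', '2222')
    ipv6_port_re.match('example.com:22')   # -> None; handled by the split() branch above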
@@ -155,9 +154,9 @@ class MemInventory(object):
return group
def get_group(self, name, all_group=None, child=False):
- '''
+ """
Return a MemGroup instance from group name, creating if needed.
- '''
+ """
all_group = all_group or self.all_group
if name in ['all', 'ungrouped']:
return all_group
@@ -182,13 +181,14 @@ class MemInventory(object):
# Conversion utilities
+
def mem_data_to_dict(inventory):
- '''
+ """
Given an in-memory construct of an inventory, returns a dictionary that
follows Ansible guidelines on the structure of dynamic inventory sources.
May be replaced by removing in-memory constructs within this file later.
- '''
+ """
all_group = inventory.all_group
inventory_data = OrderedDict([])
# Save hostvars to _meta
@@ -225,18 +225,18 @@ def mem_data_to_dict(inventory):
def dict_to_mem_data(data, inventory=None):
- '''
+ """
In-place operation on `inventory`; adds contents from `data` to the
in-memory representation of the inventory.
May be destructive on `data`.
- '''
+ """
assert isinstance(data, dict), 'Expected dict, received {}'.format(type(data))
if inventory is None:
inventory = MemInventory()
_meta = data.pop('_meta', {})
- for k,v in data.items():
+ for k, v in data.items():
group = inventory.get_group(k)
if not group:
continue
@@ -253,9 +253,7 @@ def dict_to_mem_data(data, inventory=None):
if isinstance(hv, dict):
host.variables.update(hv)
else:
- logger.warning('Expected dict of vars for '
- 'host "%s", got %s instead',
- hk, str(type(hv)))
+ logger.warning('Expected dict of vars for ' 'host "%s", got %s instead', hk, str(type(hv)))
group.add_host(host)
elif isinstance(hosts, (list, tuple)):
for hk in hosts:
@@ -264,17 +262,13 @@ def dict_to_mem_data(data, inventory=None):
continue
group.add_host(host)
else:
- logger.warning('Expected dict or list of "hosts" for '
- 'group "%s", got %s instead', k,
- str(type(hosts)))
+ logger.warning('Expected dict or list of "hosts" for ' 'group "%s", got %s instead', k, str(type(hosts)))
# Process group variables.
vars = v.get('vars', {})
if isinstance(vars, dict):
group.variables.update(vars)
else:
- logger.warning('Expected dict of vars for '
- 'group "%s", got %s instead',
- k, str(type(vars)))
+ logger.warning('Expected dict of vars for ' 'group "%s", got %s instead', k, str(type(vars)))
# Process child groups.
children = v.get('children', [])
if isinstance(children, (list, tuple)):
@@ -283,9 +277,7 @@ def dict_to_mem_data(data, inventory=None):
if child and c != 'ungrouped':
group.add_child_group(child)
else:
- logger.warning('Expected list of children for '
- 'group "%s", got %s instead',
- k, str(type(children)))
+ logger.warning('Expected list of children for ' 'group "%s", got %s instead', k, str(type(children)))
# Load host names from a list.
elif isinstance(v, (list, tuple)):
@@ -296,20 +288,17 @@ def dict_to_mem_data(data, inventory=None):
group.add_host(host)
else:
logger.warning('')
- logger.warning('Expected dict or list for group "%s", '
- 'got %s instead', k, str(type(v)))
+ logger.warning('Expected dict or list for group "%s", ' 'got %s instead', k, str(type(v)))
if k not in ['all', 'ungrouped']:
inventory.all_group.add_child_group(group)
if _meta:
- for k,v in inventory.all_group.all_hosts.items():
+ for k, v in inventory.all_group.all_hosts.items():
meta_hostvars = _meta['hostvars'].get(k, {})
if isinstance(meta_hostvars, dict):
v.variables.update(meta_hostvars)
else:
- logger.warning('Expected dict of vars for '
- 'host "%s", got %s instead',
- k, str(type(meta_hostvars)))
+ logger.warning('Expected dict of vars for ' 'host "%s", got %s instead', k, str(type(meta_hostvars)))
return inventory
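
A standalone check of the `[ipv6]:port` pattern that `get_host` relies on above; the sample address is an assumption:

    import re

    ipv6_port_re = re.compile(r'^\[([A-Fa-f0-9:]{3,})\]:(\d+?)$')

    m = ipv6_port_re.match('[fd00::1]:2222')
    assert m.groups() == ('fd00::1', '2222')
    # a plain 'host:22' does not match and falls through to the name.split(':') branch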
diff --git a/awx/main/utils/named_url_graph.py b/awx/main/utils/named_url_graph.py
index f1b72e0aef..9d2c0a27c9 100644
--- a/awx/main/utils/named_url_graph.py
+++ b/awx/main/utils/named_url_graph.py
@@ -1,6 +1,7 @@
# Python
import urllib.parse
from collections import deque
+
# Django
from django.db import models
from django.conf import settings
@@ -16,13 +17,10 @@ for c in ';/?:@=&[]':
FK_NAME = 0
NEXT_NODE = 1
-NAME_EXCEPTIONS = {
- "custom_inventory_scripts": "inventory_scripts"
-}
+NAME_EXCEPTIONS = {"custom_inventory_scripts": "inventory_scripts"}
class GraphNode(object):
-
def __init__(self, model, fields, adj_list):
self.model = model
self.found = False
@@ -50,10 +48,7 @@ class GraphNode(object):
current_fk_name = ''
while stack:
if stack[-1].counter == 0:
- named_url_component = NAMED_URL_RES_INNER_DILIMITER.join(
- ["<%s>" % (current_fk_name + field)
- for field in stack[-1].fields]
- )
+ named_url_component = NAMED_URL_RES_INNER_DILIMITER.join(["<%s>" % (current_fk_name + field) for field in stack[-1].fields])
named_url_components.append(named_url_component)
if stack[-1].counter >= len(stack[-1].adj_list):
stack[-1].counter = 0
@@ -73,16 +68,15 @@ class GraphNode(object):
return ret
def _encode_uri(self, text):
- '''
+ """
Performance assured: http://stackoverflow.com/a/27086669
- '''
+ """
for c in URL_PATH_RESERVED_CHARSET:
if not isinstance(text, str):
text = str(text) # needed for WFJT node creation, identifier temporarily UUID4 type
if c in text:
text = text.replace(c, URL_PATH_RESERVED_CHARSET[c])
- text = text.replace(NAMED_URL_RES_INNER_DILIMITER,
- '[%s]' % NAMED_URL_RES_INNER_DILIMITER)
+ text = text.replace(NAMED_URL_RES_INNER_DILIMITER, '[%s]' % NAMED_URL_RES_INNER_DILIMITER)
return text
def generate_named_url(self, obj):
@@ -91,8 +85,7 @@ class GraphNode(object):
stack = [self]
while stack:
if stack[-1].counter == 0:
- named_url_item = [self._encode_uri(getattr(stack[-1].obj, field, ''))
- for field in stack[-1].fields]
+ named_url_item = [self._encode_uri(getattr(stack[-1].obj, field, '')) for field in stack[-1].fields]
named_url.append(NAMED_URL_RES_INNER_DILIMITER.join(named_url_item))
if stack[-1].counter >= len(stack[-1].adj_list):
stack[-1].counter = 0
@@ -109,7 +102,6 @@ class GraphNode(object):
named_url.append('')
return NAMED_URL_RES_DILIMITER.join(named_url)
-
def _process_top_node(self, named_url_names, kwargs, prefixes, stack, idx):
if stack[-1].counter == 0:
if idx >= len(named_url_names):
@@ -146,16 +138,13 @@ class GraphNode(object):
def populate_named_url_query_kwargs(self, kwargs, named_url, ignore_digits=True):
if ignore_digits and named_url.isdigit() and int(named_url) > 0:
return False
- named_url = named_url.replace('[%s]' % NAMED_URL_RES_INNER_DILIMITER,
- NAMED_URL_RES_DILIMITER_ENCODE)
+ named_url = named_url.replace('[%s]' % NAMED_URL_RES_INNER_DILIMITER, NAMED_URL_RES_DILIMITER_ENCODE)
named_url_names = named_url.split(NAMED_URL_RES_DILIMITER)
prefixes = []
stack = [self]
idx = 0
while stack:
- idx, is_valid = self._process_top_node(
- named_url_names, kwargs, prefixes, stack, idx
- )
+ idx, is_valid = self._process_top_node(named_url_names, kwargs, prefixes, stack, idx)
if not is_valid:
return False
return idx == len(named_url_names)
@@ -192,10 +181,12 @@ def _get_all_unique_togethers(model):
soft_uts = getattr(model_to_backtrack, 'SOFT_UNIQUE_TOGETHER', [])
ret.extend(soft_uts)
for parent_class in model_to_backtrack.__bases__:
- if issubclass(parent_class, models.Model) and\
- hasattr(parent_class, '_meta') and\
- hasattr(parent_class._meta, 'unique_together') and\
- isinstance(parent_class._meta.unique_together, tuple):
+ if (
+ issubclass(parent_class, models.Model)
+ and hasattr(parent_class, '_meta')
+ and hasattr(parent_class._meta, 'unique_together')
+ and isinstance(parent_class._meta.unique_together, tuple)
+ ):
queue.append(parent_class)
ret.sort(key=lambda x: len(x))
return tuple(ret)
@@ -261,18 +252,11 @@ def _dfs(configuration, model, graph, dead_ends, new_deadends, parents):
next_model = model._meta.get_field(fk_name).related_model
if issubclass(next_model, ContentType):
continue
- if next_model not in configuration or\
- next_model in dead_ends or\
- next_model in new_deadends or\
- next_model in parents:
+ if next_model not in configuration or next_model in dead_ends or next_model in new_deadends or next_model in parents:
new_deadends.add(model)
parents.remove(model)
return False
- if next_model not in graph and\
- not _dfs(
- configuration, next_model, graph,
- dead_ends, new_deadends, parents
- ):
+ if next_model not in graph and not _dfs(configuration, next_model, graph, dead_ends, new_deadends, parents):
new_deadends.add(model)
parents.remove(model)
return False
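
A hedged sketch of the substitution `_encode_uri` performs; the real `URL_PATH_RESERVED_CHARSET` mapping is built earlier in named_url_graph.py from the reserved characters `;/?:@=&[]`, so the percent-encodings below are illustrative assumptions:

    # illustrative subset; the module builds the full mapping itself
    URL_PATH_RESERVED_CHARSET = {';': '%3B', '/': '%2F', '?': '%3F'}

    def encode_uri(text):
        text = str(text)  # WFJT node identifiers may temporarily be UUID4 objects
        for c, repl in URL_PATH_RESERVED_CHARSET.items():
            text = text.replace(c, repl)
        return text

    # encode_uri('a/b?c') == 'a%2Fb%3Fc'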
diff --git a/awx/main/utils/polymorphic.py b/awx/main/utils/polymorphic.py
index 9fc9844b69..28f8a11187 100644
--- a/awx/main/utils/polymorphic.py
+++ b/awx/main/utils/polymorphic.py
@@ -1,4 +1,3 @@
-
from django.contrib.contenttypes.models import ContentType
from django.db import models
diff --git a/awx/main/utils/profiling.py b/awx/main/utils/profiling.py
index c550175d7b..218927e97f 100644
--- a/awx/main/utils/profiling.py
+++ b/awx/main/utils/profiling.py
@@ -64,16 +64,18 @@ def timing(name, *init_args, **init_kwargs):
res = func(*args, **kwargs)
timing.stop()
return res
+
return wrapper_profile
+
return decorator_profile
class AWXProfiler(AWXProfileBase):
def __init__(self, name, dest='/var/log/tower/profile', dot_enabled=True):
- '''
+ """
Try to do as little as possible in init. Instead, do the init
only when the profiling is started.
- '''
+ """
super().__init__(name, dest)
self.started = False
self.dot_enabled = dot_enabled
@@ -101,11 +103,7 @@ class AWXProfiler(AWXProfileBase):
dot_filepath = os.path.join(self.dest, f"{filename_base}.dot")
pstats.Stats(self.prof).dump_stats(raw_filepath)
- generate_dot([
- '-n', '2.5', '-f', 'pstats', '-o',
- dot_filepath,
- raw_filepath
- ])
+ generate_dot(['-n', '2.5', '-f', 'pstats', '-o', dot_filepath, raw_filepath])
os.remove(raw_filepath)
with open(pstats_filepath, 'w') as f:
@@ -113,7 +111,6 @@ class AWXProfiler(AWXProfileBase):
pstats.Stats(self.prof, stream=f).sort_stats('cumulative').print_stats()
return pstats_filepath
-
def start(self):
self.prof = cProfile.Profile()
self.pid = os.getpid()
@@ -146,6 +143,7 @@ def profile(name, *init_args, **init_kwargs):
res = func(*args, **kwargs)
prof.stop()
return res
+
return wrapper_profile
- return decorator_profile
+ return decorator_profile
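
The `timing` and `profile` decorators above are standard decorator factories; a minimal sketch of the same shape, with a stand-in timer in place of the AWX profiler classes:

    import functools
    import time

    def timing(name):
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                start = time.monotonic()  # stand-in for the timer's start()
                result = func(*args, **kwargs)
                elapsed = time.monotonic() - start  # stand-in for stop()
                print(f'{name}: {elapsed:.3f}s')
                return result
            return wrapper
        return decorator

    @timing('example')
    def work():
        time.sleep(0.1)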
diff --git a/awx/main/utils/reload.py b/awx/main/utils/reload.py
index 37c7c48cfe..6651fcf44d 100644
--- a/awx/main/utils/reload.py
+++ b/awx/main/utils/reload.py
@@ -11,10 +11,10 @@ logger = logging.getLogger('awx.main.utils.reload')
def supervisor_service_command(command, service='*', communicate=True):
- '''
+ """
example use pattern of supervisorctl:
# supervisorctl restart tower-processes:receiver tower-processes:factcacher
- '''
+ """
args = ['supervisorctl']
supervisor_config_path = os.getenv('SUPERVISOR_WEB_CONFIG_PATH', None)
@@ -23,18 +23,18 @@ def supervisor_service_command(command, service='*', communicate=True):
args.extend([command, ':'.join(['tower-processes', service])])
logger.debug('Issuing command to {} services, args={}'.format(command, args))
- supervisor_process = subprocess.Popen(args, stdin=subprocess.PIPE,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ supervisor_process = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if communicate:
restart_stdout, restart_err = supervisor_process.communicate()
restart_code = supervisor_process.returncode
if restart_code or restart_err:
- logger.error('supervisorctl {} {} errored with exit code `{}`, stdout:\n{}stderr:\n{}'.format(
- command, service, restart_code, restart_stdout.strip(), restart_err.strip()))
- else:
- logger.debug(
- 'supervisorctl {} {} succeeded'.format(command, service)
+ logger.error(
+ 'supervisorctl {} {} errored with exit code `{}`, stdout:\n{}stderr:\n{}'.format(
+ command, service, restart_code, restart_stdout.strip(), restart_err.strip()
+ )
)
+ else:
+ logger.debug('supervisorctl {} {} succeeded'.format(command, service))
else:
logger.info('Submitted supervisorctl {} command, not waiting for result'.format(command))
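
For reference, the argument list the reformatted `Popen` call receives; the `-c` config flag is an assumption based on the elided middle of the function, while the rest mirrors the hunk:

    import os

    def build_args(command, service='*'):
        args = ['supervisorctl']
        supervisor_config_path = os.getenv('SUPERVISOR_WEB_CONFIG_PATH', None)
        if supervisor_config_path:
            args.extend(['-c', supervisor_config_path])  # assumed flag usage
        args.extend([command, ':'.join(['tower-processes', service])])
        return args

    # build_args('restart') == ['supervisorctl', 'restart', 'tower-processes:*']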
diff --git a/awx/main/utils/safe_yaml.py b/awx/main/utils/safe_yaml.py
index 7e4a5b1496..abf21e3428 100644
--- a/awx/main/utils/safe_yaml.py
+++ b/awx/main/utils/safe_yaml.py
@@ -6,7 +6,6 @@ __all__ = ['safe_dump', 'SafeLoader']
class SafeStringDumper(yaml.SafeDumper):
-
def represent_data(self, value):
if isinstance(value, str):
return self.represent_scalar('!unsafe', value)
@@ -14,18 +13,15 @@ class SafeStringDumper(yaml.SafeDumper):
class SafeLoader(yaml.Loader):
-
def construct_yaml_unsafe(self, node):
class UnsafeText(str):
__UNSAFE__ = True
+
node = UnsafeText(self.construct_scalar(node))
return node
-SafeLoader.add_constructor(
- u'!unsafe',
- SafeLoader.construct_yaml_unsafe
-)
+SafeLoader.add_constructor(u'!unsafe', SafeLoader.construct_yaml_unsafe)
def safe_dump(x, safe_dict=None):
@@ -41,7 +37,7 @@ def safe_dump(x, safe_dict=None):
resulting YAML. Anything _not_ in this dict will automatically be
`!unsafe`.
- safe_dump({'a': 'b', 'c': 'd'}) ->
+ safe_dump({'a': 'b', 'c': 'd'}) ->
!unsafe 'a': !unsafe 'b'
!unsafe 'c': !unsafe 'd'
@@ -59,12 +55,14 @@ def safe_dump(x, safe_dict=None):
dumper = yaml.SafeDumper
if k not in safe_dict or safe_dict.get(k) != v:
dumper = SafeStringDumper
- yamls.append(yaml.dump_all(
- [{k: v}],
- None,
- Dumper=dumper,
- default_flow_style=False,
- ))
+ yamls.append(
+ yaml.dump_all(
+ [{k: v}],
+ None,
+ Dumper=dumper,
+ default_flow_style=False,
+ )
+ )
return ''.join(yamls)
else:
return yaml.dump_all([x], None, Dumper=SafeStringDumper, default_flow_style=False)
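
A quick usage sketch of `safe_dump` as documented above; anything absent from `safe_dict` comes back tagged `!unsafe`:

    from awx.main.utils.safe_yaml import safe_dump

    print(safe_dump({'a': 'b'}, safe_dict={}))
    # !unsafe 'a': !unsafe 'b'

    print(safe_dump({'a': 'b'}, safe_dict={'a': 'b'}))
    # a: b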
diff --git a/awx/main/validators.py b/awx/main/validators.py
index 879be056e5..3c26922c37 100644
--- a/awx/main/validators.py
+++ b/awx/main/validators.py
@@ -48,10 +48,8 @@ def validate_pem(data, min_keys=0, max_keys=None, min_certs=0, max_certs=None):
# Build regular expressions for matching each object in the PEM file.
pem_obj_re = re.compile(
- r'^(?P<dashes>-{4,}) *BEGIN (?P<type>[A-Z ]+?) *(?P=dashes)' +
- r'\s*(?P<data>.+?)\s*' +
- r'(?P=dashes) *END (?P=type) *(?P=dashes)' +
- r'(?P<next>.*?)$', re.DOTALL
+ r'^(?P<dashes>-{4,}) *BEGIN (?P<type>[A-Z ]+?) *(?P=dashes)' + r'\s*(?P<data>.+?)\s*' + r'(?P=dashes) *END (?P=type) *(?P=dashes)' + r'(?P<next>.*?)$',
+ re.DOTALL,
)
pem_obj_header_re = re.compile(r'^(.+?):\s*?(.+?)(\\??)$')
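
A standalone check of the collapsed PEM regex; the pattern and flags are copied verbatim from the hunk, the sample object is an assumption:

    import re

    pem_obj_re = re.compile(
        r'^(?P<dashes>-{4,}) *BEGIN (?P<type>[A-Z ]+?) *(?P=dashes)' + r'\s*(?P<data>.+?)\s*' + r'(?P=dashes) *END (?P=type) *(?P=dashes)' + r'(?P<next>.*?)$',
        re.DOTALL,
    )

    sample = '-----BEGIN CERTIFICATE-----\nMIIB...\n-----END CERTIFICATE-----\n'
    m = pem_obj_re.match(sample)
    assert m is not None and m.group('type') == 'CERTIFICATE'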
diff --git a/awx/main/views.py b/awx/main/views.py
index 887d44318c..bb6c43b6bf 100644
--- a/awx/main/views.py
+++ b/awx/main/views.py
@@ -47,9 +47,11 @@ def handle_error(request, status=404, **kwargs):
# browsable error page for browser clients or a simple JSON response for any
# other clients.
if request.path.startswith('/api/'):
+
class APIException(exceptions.APIException):
status_code = status
default_detail = kwargs['content']
+
api_error_view = ApiErrorView.as_view(exception_class=APIException)
response = api_error_view(request)
if hasattr(response, 'render'):
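
Defining the exception class inside the branch, as above, binds the request's status code and message at call time; a minimal sketch of the same pattern without DRF:

    def make_api_exception(status, detail):
        # a throwaway class carrying this request's status, as in handle_error above
        class APIException(Exception):
            status_code = status
            default_detail = detail
        return APIException

    exc_class = make_api_exception(404, 'Not found.')
    assert exc_class.status_code == 404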
diff --git a/awx/main/wsbroadcast.py b/awx/main/wsbroadcast.py
index a97baf45f4..e2ee9fc431 100644
--- a/awx/main/wsbroadcast.py
+++ b/awx/main/wsbroadcast.py
@@ -32,11 +32,13 @@ def unwrap_broadcast_msg(payload: dict):
def get_broadcast_hosts():
Instance = apps.get_model('main', 'Instance')
- instances = Instance.objects.filter(rampart_groups__controller__isnull=True) \
- .exclude(hostname=Instance.objects.me().hostname) \
- .order_by('hostname') \
- .values('hostname', 'ip_address') \
- .distinct()
+ instances = (
+ Instance.objects.filter(rampart_groups__controller__isnull=True)
+ .exclude(hostname=Instance.objects.me().hostname)
+ .order_by('hostname')
+ .values('hostname', 'ip_address')
+ .distinct()
+ )
return {i['hostname']: i['ip_address'] or i['hostname'] for i in instances}
@@ -45,16 +47,18 @@ def get_local_host():
return Instance.objects.me().hostname
-class WebsocketTask():
- def __init__(self,
- name,
- event_loop,
- stats: BroadcastWebsocketStats,
- remote_host: str,
- remote_port: int = settings.BROADCAST_WEBSOCKET_PORT,
- protocol: str = settings.BROADCAST_WEBSOCKET_PROTOCOL,
- verify_ssl: bool = settings.BROADCAST_WEBSOCKET_VERIFY_CERT,
- endpoint: str = 'broadcast'):
+class WebsocketTask:
+ def __init__(
+ self,
+ name,
+ event_loop,
+ stats: BroadcastWebsocketStats,
+ remote_host: str,
+ remote_port: int = settings.BROADCAST_WEBSOCKET_PORT,
+ protocol: str = settings.BROADCAST_WEBSOCKET_PROTOCOL,
+ verify_ssl: bool = settings.BROADCAST_WEBSOCKET_VERIFY_CERT,
+ endpoint: str = 'broadcast',
+ ):
self.name = name
self.event_loop = event_loop
self.stats = stats
@@ -69,7 +73,8 @@ class WebsocketTask():
raise RuntimeError("Implement me")
async def connect(self, attempt):
- from awx.main.consumers import WebsocketSecretAuthHelper # noqa
+ from awx.main.consumers import WebsocketSecretAuthHelper # noqa
+
logger.debug(f"Connection from {self.name} to {self.remote_host} attempt number {attempt}.")
'''
@@ -91,8 +96,7 @@ class WebsocketTask():
secret_val = WebsocketSecretAuthHelper.construct_secret()
try:
- async with aiohttp.ClientSession(headers={'secret': secret_val},
- timeout=timeout) as session:
+ async with aiohttp.ClientSession(headers={'secret': secret_val}, timeout=timeout) as session:
async with session.ws_connect(uri, ssl=self.verify_ssl, heartbeat=20) as websocket:
logger.info(f"Connection from {self.name} to {self.remote_host} established.")
self.stats.record_connection_established()
@@ -171,8 +175,7 @@ class BroadcastWebsocketManager(object):
remote_addresses = {k: v.remote_host for k, v in self.broadcast_tasks.items()}
for hostname, address in known_hosts.items():
- if hostname in self.broadcast_tasks and \
- address != remote_addresses[hostname]:
+ if hostname in self.broadcast_tasks and address != remote_addresses[hostname]:
deleted_remote_hosts.add(hostname)
new_remote_hosts.add(hostname)
@@ -188,10 +191,7 @@ class BroadcastWebsocketManager(object):
for h in new_remote_hosts:
stats = self.stats_mgr.new_remote_host_stats(h)
- broadcast_task = BroadcastWebsocketTask(name=self.local_hostname,
- event_loop=self.event_loop,
- stats=stats,
- remote_host=known_hosts[h])
+ broadcast_task = BroadcastWebsocketTask(name=self.local_hostname, event_loop=self.event_loop, stats=stats, remote_host=known_hosts[h])
broadcast_task.start()
self.broadcast_tasks[h] = broadcast_task
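
The reformatted queryset above ends in a hostname-to-address map that falls back to the hostname when `ip_address` is empty; a sketch of that fallback with made-up rows:

    instances = [
        {'hostname': 'awx-1', 'ip_address': '10.0.0.1'},
        {'hostname': 'awx-2', 'ip_address': ''},  # no address recorded
    ]
    hosts = {i['hostname']: i['ip_address'] or i['hostname'] for i in instances}
    # {'awx-1': '10.0.0.1', 'awx-2': 'awx-2'}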
diff --git a/awx/playbooks/action_plugins/insights.py b/awx/playbooks/action_plugins/insights.py
index 6a82dd95da..2b3ba5fd3b 100644
--- a/awx/playbooks/action_plugins/insights.py
+++ b/awx/playbooks/action_plugins/insights.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import os
@@ -10,7 +11,6 @@ from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
-
def save_playbook(self, proj_path, remediation, content):
name = remediation.get('name', None) or 'insights-remediation'
name = re.sub(r'[^\w\s-]', '', name).strip().lower()
@@ -50,11 +50,7 @@ class ActionModule(ActionBase):
session.auth = requests.auth.HTTPBasicAuth(username, password)
headers = {
'Content-Type': 'application/json',
- 'User-Agent': '{} {} ({})'.format(
- 'AWX' if license == 'open' else 'Red Hat Ansible Tower',
- awx_version,
- license
- )
+ 'User-Agent': '{} {} ({})'.format('AWX' if license == 'open' else 'Red Hat Ansible Tower', awx_version, license),
}
url = '/api/remediations/v1/remediations'
while url:
@@ -62,9 +58,8 @@ class ActionModule(ActionBase):
if res.status_code != 200:
result['failed'] = True
- result['msg'] = (
- 'Expected {} to return a status code of 200 but returned status '
- 'code "{}" instead with content "{}".'.format(url, res.status_code, res.content)
+ result['msg'] = 'Expected {} to return a status code of 200 but returned status ' 'code "{}" instead with content "{}".'.format(
+ url, res.status_code, res.content
)
return result
@@ -86,17 +81,14 @@ class ActionModule(ActionBase):
url = res.json()['links']['next'] # will be None if we're on the last page
for item in res.json()['data']:
- playbook_url = '{}/api/remediations/v1/remediations/{}/playbook'.format(
- insights_url, item['id'])
+ playbook_url = '{}/api/remediations/v1/remediations/{}/playbook'.format(insights_url, item['id'])
res = session.get(playbook_url, timeout=120)
if res.status_code == 204:
continue
elif res.status_code != 200:
result['failed'] = True
- result['msg'] = (
- 'Expected {} to return a status code of 200 but returned status '
- 'code "{}" instead with content "{}".'.format(
- playbook_url, res.status_code, res.content)
+ result['msg'] = 'Expected {} to return a status code of 200 but returned status ' 'code "{}" instead with content "{}".'.format(
+ playbook_url, res.status_code, res.content
)
return result
self.save_playbook(proj_path, item, res.content)
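
The loop these hunks touch pages through the remediations API via `links.next`; a hedged sketch of the same pattern, with the URL joining assumed and the error handling reduced to an exception (the plugin builds a result dict instead):

    import requests

    def fetch_remediations(session: requests.Session, insights_url: str):
        url = '/api/remediations/v1/remediations'
        items = []
        while url:
            res = session.get(insights_url + url, timeout=120)
            if res.status_code != 200:
                raise RuntimeError(f'{url} returned {res.status_code}')
            body = res.json()
            items.extend(body['data'])
            url = body['links']['next']  # None on the last page, ending the loop
        return items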
diff --git a/awx/playbooks/action_plugins/project_archive.py b/awx/playbooks/action_plugins/project_archive.py
index d5dff804a6..accc74ecbb 100644
--- a/awx/playbooks/action_plugins/project_archive.py
+++ b/awx/playbooks/action_plugins/project_archive.py
@@ -45,9 +45,7 @@ class ActionModule(ActionBase):
# Most well formed archives contain a single root directory, typically named
# project-name-1.0.0. The project contents should be inside that directory.
start_index = 0
- root_contents = set(
- [filename.split(os.path.sep)[0] for filename in get_filenames()]
- )
+ root_contents = set([filename.split(os.path.sep)[0] for filename in get_filenames()])
if len(root_contents) == 1:
start_index = len(list(root_contents)[0]) + 1
diff --git a/awx/playbooks/library/insights.py b/awx/playbooks/library/insights.py
index 7d0759d656..39d54b4e88 100644
--- a/awx/playbooks/library/insights.py
+++ b/awx/playbooks/library/insights.py
@@ -1,6 +1,4 @@
-ANSIBLE_METADATA = {'metadata_version': '1.0',
- 'status': ['stableinterface'],
- 'supported_by': 'tower'}
+ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['stableinterface'], 'supported_by': 'tower'}
DOCUMENTATION = '''
diff --git a/awx/playbooks/library/project_archive.py b/awx/playbooks/library/project_archive.py
index 4a046e354d..001e2eec04 100644
--- a/awx/playbooks/library/project_archive.py
+++ b/awx/playbooks/library/project_archive.py
@@ -1,8 +1,4 @@
-ANSIBLE_METADATA = {
- "metadata_version": "1.0",
- "status": ["stableinterface"],
- "supported_by": "community",
-}
+ANSIBLE_METADATA = {"metadata_version": "1.0", "status": ["stableinterface"], "supported_by": "community"}
DOCUMENTATION = """
diff --git a/awx/plugins/isolated/awx_capacity.py b/awx/plugins/isolated/awx_capacity.py
index 212025544a..2f33a8ffad 100644
--- a/awx/plugins/isolated/awx_capacity.py
+++ b/awx/plugins/isolated/awx_capacity.py
@@ -46,17 +46,12 @@ def get_mem_capacity():
def main():
- module = AnsibleModule(
- argument_spec = dict()
- )
+ module = AnsibleModule(argument_spec=dict())
ar = module.get_bin_path('ansible-runner', required=True)
try:
- version = subprocess.check_output(
- [ar, '--version'],
- stderr=subprocess.STDOUT
- ).strip()
+ version = subprocess.check_output([ar, '--version'], stderr=subprocess.STDOUT).strip()
except subprocess.CalledProcessError as e:
module.fail_json(msg=to_text(e))
return
@@ -65,15 +60,13 @@ def main():
mem, capacity_mem = get_mem_capacity()
# Module never results in a change
- module.exit_json(changed=False, capacity_cpu=capacity_cpu,
- capacity_mem=capacity_mem, version=version,
- ansible_facts=dict(
- awx_cpu=cpu,
- awx_mem=mem,
- awx_capacity_cpu=capacity_cpu,
- awx_capacity_mem=capacity_mem,
- awx_capacity_version=version
- ))
+ module.exit_json(
+ changed=False,
+ capacity_cpu=capacity_cpu,
+ capacity_mem=capacity_mem,
+ version=version,
+ ansible_facts=dict(awx_cpu=cpu, awx_mem=mem, awx_capacity_cpu=capacity_cpu, awx_capacity_mem=capacity_mem, awx_capacity_version=version),
+ )
if __name__ == '__main__':
diff --git a/awx/plugins/isolated/awx_isolated_cleanup.py b/awx/plugins/isolated/awx_isolated_cleanup.py
index b52939df25..7f58a1f74a 100644
--- a/awx/plugins/isolated/awx_isolated_cleanup.py
+++ b/awx/plugins/isolated/awx_isolated_cleanup.py
@@ -26,9 +26,7 @@ import subprocess
def main():
- module = AnsibleModule(
- argument_spec = dict()
- )
+ module = AnsibleModule(argument_spec=dict())
changed = False
paths_removed = set([])
@@ -38,9 +36,7 @@ def main():
# this datetime, then it will be deleted because its job has finished
job_cutoff = datetime.datetime.now() - datetime.timedelta(hours=1)
- for search_pattern in [
- '/tmp/awx_[0-9]*_*', '/tmp/ansible_runner_pi_*',
- ]:
+ for search_pattern in ['/tmp/awx_[0-9]*_*', '/tmp/ansible_runner_pi_*']:
for path in glob.iglob(search_pattern):
st = os.stat(path)
modtime = datetime.datetime.fromtimestamp(st.st_mtime)
diff --git a/awx/plugins/isolated/mkfifo.py b/awx/plugins/isolated/mkfifo.py
index 7f9be68b0d..45741c2ad3 100755
--- a/awx/plugins/isolated/mkfifo.py
+++ b/awx/plugins/isolated/mkfifo.py
@@ -11,13 +11,7 @@ from ansible.module_utils.basic import AnsibleModule
def main():
- module = AnsibleModule(
- argument_spec={
- 'path': {'required': True, 'type': 'str'},
- 'content': {'required': True, 'type': 'str'}
- },
- supports_check_mode=False
- )
+ module = AnsibleModule(argument_spec={'path': {'required': True, 'type': 'str'}, 'content': {'required': True, 'type': 'str'}}, supports_check_mode=False)
path = module.params['path']
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py
index 502afa5b18..194d363a6d 100644
--- a/awx/settings/defaults.py
+++ b/awx/settings/defaults.py
@@ -22,6 +22,7 @@ BASE_DIR = os.path.dirname(os.path.dirname(__file__))
def is_testing(argv=None):
import sys
+
'''Return True if running django or py.test unit tests.'''
if 'PYTEST_CURRENT_TEST' in os.environ.keys():
return True
@@ -39,6 +40,7 @@ def IS_TESTING(argv=None):
if "pytest" in sys.modules:
from unittest import mock
+
with mock.patch('__main__.__builtins__.dir', return_value=[]):
import ldap
else:
@@ -54,7 +56,7 @@ DATABASES = {
'ATOMIC_REQUESTS': True,
'TEST': {
# Test database cannot be :memory: for inventory tests.
- 'NAME': os.path.join(BASE_DIR, 'awx_test.sqlite3'),
+ 'NAME': os.path.join(BASE_DIR, 'awx_test.sqlite3')
},
}
}
@@ -103,10 +105,7 @@ USE_L10N = True
USE_TZ = True
-STATICFILES_DIRS = (
- os.path.join(BASE_DIR, 'ui_next', 'build', 'static'),
- os.path.join(BASE_DIR, 'static'),
-)
+STATICFILES_DIRS = (os.path.join(BASE_DIR, 'ui_next', 'build', 'static'), os.path.join(BASE_DIR, 'static'))
# Absolute filesystem path to the directory where static file are collected via
# the collectstatic command.
@@ -147,9 +146,7 @@ LOG_ROOT = '/var/log/tower/'
SCHEDULE_METADATA_LOCATION = os.path.join(BASE_DIR, '.tower_cycle')
# Django gettext files path: locale/<lang-code>/LC_MESSAGES/django.po, django.mo
-LOCALE_PATHS = (
- os.path.join(BASE_DIR, 'locale'),
-)
+LOCALE_PATHS = (os.path.join(BASE_DIR, 'locale'),)
# Graph of resources that can have named-url
NAMED_URL_GRAPH = {}
@@ -215,7 +212,7 @@ JOB_EVENT_WORKERS = 4
# The number of seconds to buffer callback receiver bulk
# writes in memory before flushing via JobEvent.objects.bulk_create()
-JOB_EVENT_BUFFER_SECONDS = .1
+JOB_EVENT_BUFFER_SECONDS = 0.1
# The interval at which callback receiver statistics should be
# recorded
@@ -256,7 +253,7 @@ TEMPLATES = [
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'debug': DEBUG,
- 'context_processors': [# NOQA
+ 'context_processors': [ # NOQA
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.request',
@@ -269,19 +266,13 @@ TEMPLATES = [
'social_django.context_processors.backends',
'social_django.context_processors.login_redirect',
],
- 'loaders': [(
- 'django.template.loaders.cached.Loader',
- ('django.template.loaders.filesystem.Loader',
- 'django.template.loaders.app_directories.Loader',),
- )],
+ 'loaders': [
+ ('django.template.loaders.cached.Loader', ('django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader'))
+ ],
'builtins': ['awx.main.templatetags.swagger'],
},
- 'DIRS': [
- os.path.join(BASE_DIR, 'templates'),
- os.path.join(BASE_DIR, 'ui_next', 'build'),
- os.path.join(BASE_DIR, 'ui_next', 'public')
- ],
- },
+ 'DIRS': [os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR, 'ui_next', 'build'), os.path.join(BASE_DIR, 'ui_next', 'public')],
+ }
]
ROOT_URLCONF = 'awx.urls'
@@ -310,7 +301,7 @@ INSTALLED_APPS = [
'awx.ui',
'awx.ui_next',
'awx.sso',
- 'solo'
+ 'solo',
]
INTERNAL_IPS = ('127.0.0.1',)
@@ -324,22 +315,15 @@ REST_FRAMEWORK = {
'awx.api.authentication.SessionAuthentication',
'awx.api.authentication.LoggedBasicAuthentication',
),
- 'DEFAULT_PERMISSION_CLASSES': (
- 'awx.api.permissions.ModelAccessPermission',
- ),
+ 'DEFAULT_PERMISSION_CLASSES': ('awx.api.permissions.ModelAccessPermission',),
'DEFAULT_FILTER_BACKENDS': (
'awx.api.filters.TypeFilterBackend',
'awx.api.filters.FieldLookupBackend',
'rest_framework.filters.SearchFilter',
'awx.api.filters.OrderByBackend',
),
- 'DEFAULT_PARSER_CLASSES': (
- 'awx.api.parsers.JSONParser',
- ),
- 'DEFAULT_RENDERER_CLASSES': (
- 'awx.api.renderers.DefaultJSONRenderer',
- 'awx.api.renderers.BrowsableAPIRenderer',
- ),
+ 'DEFAULT_PARSER_CLASSES': ('awx.api.parsers.JSONParser',),
+ 'DEFAULT_RENDERER_CLASSES': ('awx.api.renderers.DefaultJSONRenderer', 'awx.api.renderers.BrowsableAPIRenderer'),
'DEFAULT_METADATA_CLASS': 'awx.api.metadata.Metadata',
'EXCEPTION_HANDLER': 'awx.api.views.api_exception_handler',
'VIEW_DESCRIPTION_FUNCTION': 'awx.api.generics.get_view_description',
@@ -378,9 +362,7 @@ OAUTH2_PROVIDER_APPLICATION_MODEL = 'main.OAuth2Application'
OAUTH2_PROVIDER_ACCESS_TOKEN_MODEL = 'main.OAuth2AccessToken'
OAUTH2_PROVIDER_REFRESH_TOKEN_MODEL = 'oauth2_provider.RefreshToken'
-OAUTH2_PROVIDER = {'ACCESS_TOKEN_EXPIRE_SECONDS': 31536000000,
- 'AUTHORIZATION_CODE_EXPIRE_SECONDS': 600,
- 'REFRESH_TOKEN_EXPIRE_SECONDS': 2628000}
+OAUTH2_PROVIDER = {'ACCESS_TOKEN_EXPIRE_SECONDS': 31536000000, 'AUTHORIZATION_CODE_EXPIRE_SECONDS': 600, 'REFRESH_TOKEN_EXPIRE_SECONDS': 2628000}
ALLOW_OAUTH2_FOR_EXTERNAL_USERS = False
# LDAP server (default to None to skip using LDAP authentication).
@@ -390,10 +372,7 @@ AUTH_LDAP_SERVER_URI = None
# Disable LDAP referrals by default (to prevent certain LDAP queries from
# hanging with AD).
# Note: This setting may be overridden by database settings.
-AUTH_LDAP_CONNECTION_OPTIONS = {
- ldap.OPT_REFERRALS: 0,
- ldap.OPT_NETWORK_TIMEOUT: 30
-}
+AUTH_LDAP_CONNECTION_OPTIONS = {ldap.OPT_REFERRALS: 0, ldap.OPT_NETWORK_TIMEOUT: 30}
# Radius server settings (default to empty string to skip using Radius auth).
# Note: These settings may be overridden by database settings.
@@ -443,41 +422,17 @@ os.environ.setdefault('DJANGO_LIVE_TEST_SERVER_ADDRESS', 'localhost:9013-9199')
BROKER_URL = 'unix:///var/run/redis/redis.sock'
CELERYBEAT_SCHEDULE = {
- 'tower_scheduler': {
- 'task': 'awx.main.tasks.awx_periodic_scheduler',
- 'schedule': timedelta(seconds=30),
- 'options': {'expires': 20,}
- },
- 'cluster_heartbeat': {
- 'task': 'awx.main.tasks.cluster_node_heartbeat',
- 'schedule': timedelta(seconds=60),
- 'options': {'expires': 50,}
- },
- 'gather_analytics': {
- 'task': 'awx.main.tasks.gather_analytics',
- 'schedule': timedelta(minutes=5)
- },
- 'task_manager': {
- 'task': 'awx.main.scheduler.tasks.run_task_manager',
- 'schedule': timedelta(seconds=20),
- 'options': {'expires': 20}
- },
- 'k8s_reaper': {
- 'task': 'awx.main.tasks.awx_k8s_reaper',
- 'schedule': timedelta(seconds=60),
- 'options': {'expires': 50,}
- },
+ 'tower_scheduler': {'task': 'awx.main.tasks.awx_periodic_scheduler', 'schedule': timedelta(seconds=30), 'options': {'expires': 20}},
+ 'cluster_heartbeat': {'task': 'awx.main.tasks.cluster_node_heartbeat', 'schedule': timedelta(seconds=60), 'options': {'expires': 50}},
+ 'gather_analytics': {'task': 'awx.main.tasks.gather_analytics', 'schedule': timedelta(minutes=5)},
+ 'task_manager': {'task': 'awx.main.scheduler.tasks.run_task_manager', 'schedule': timedelta(seconds=20), 'options': {'expires': 20}},
+ 'k8s_reaper': {'task': 'awx.main.tasks.awx_k8s_reaper', 'schedule': timedelta(seconds=60), 'options': {'expires': 50}},
# 'isolated_heartbeat': set up at the end of production.py and development.py
}
# Django Caching Configuration
DJANGO_REDIS_IGNORE_EXCEPTIONS = True
-CACHES = {
- 'default': {
- 'BACKEND': 'django_redis.cache.RedisCache',
- 'LOCATION': 'unix:/var/run/redis/redis.sock?db=1'
- },
-}
+CACHES = {'default': {'BACKEND': 'django_redis.cache.RedisCache', 'LOCATION': 'unix:/var/run/redis/redis.sock?db=1'}}
# Social Auth configuration.
SOCIAL_AUTH_STRATEGY = 'social_django.strategy.DjangoStrategy'
@@ -499,10 +454,7 @@ _SOCIAL_AUTH_PIPELINE_BASE = (
'social_core.pipeline.user.user_details',
'awx.sso.pipeline.prevent_inactive_login',
)
-SOCIAL_AUTH_PIPELINE = _SOCIAL_AUTH_PIPELINE_BASE + (
- 'awx.sso.pipeline.update_user_orgs',
- 'awx.sso.pipeline.update_user_teams',
-)
+SOCIAL_AUTH_PIPELINE = _SOCIAL_AUTH_PIPELINE_BASE + ('awx.sso.pipeline.update_user_orgs', 'awx.sso.pipeline.update_user_teams')
SOCIAL_AUTH_SAML_PIPELINE = _SOCIAL_AUTH_PIPELINE_BASE + (
'awx.sso.pipeline.update_user_orgs_by_saml_attr',
'awx.sso.pipeline.update_user_teams_by_saml_attr',
@@ -518,7 +470,7 @@ SOCIAL_AUTH_INACTIVE_USER_URL = '/sso/inactive/'
SOCIAL_AUTH_RAISE_EXCEPTIONS = False
SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = False
-#SOCIAL_AUTH_SLUGIFY_USERNAMES = True
+# SOCIAL_AUTH_SLUGIFY_USERNAMES = True
SOCIAL_AUTH_CLEAN_USERNAMES = True
SOCIAL_AUTH_SANITIZE_REDIRECTS = True
@@ -764,18 +716,18 @@ SATELLITE6_INSTANCE_ID_VAR = 'foreman_id'
# ---------------------
# ----- Custom -----
# ---------------------
-#CUSTOM_ENABLED_VAR =
-#CUSTOM_ENABLED_VALUE =
+# CUSTOM_ENABLED_VAR =
+# CUSTOM_ENABLED_VALUE =
CUSTOM_EXCLUDE_EMPTY_GROUPS = False
-#CUSTOM_INSTANCE_ID_VAR =
+# CUSTOM_INSTANCE_ID_VAR =
# ---------------------
# ----- SCM -----
# ---------------------
-#SCM_ENABLED_VAR =
-#SCM_ENABLED_VALUE =
+# SCM_ENABLED_VAR =
+# SCM_ENABLED_VALUE =
SCM_EXCLUDE_EMPTY_GROUPS = False
-#SCM_INSTANCE_ID_VAR =
+# SCM_INSTANCE_ID_VAR =
# ---------------------
# -- Activity Stream --
@@ -797,7 +749,7 @@ TOWER_URL_BASE = "https://towerhost"
INSIGHTS_URL_BASE = "https://example.org"
INSIGHTS_AGENT_MIME = 'application/example'
# See https://github.com/ansible/awx-facts-playbooks
-INSIGHTS_SYSTEM_ID_FILE='/etc/redhat-access-insights/machine-id'
+INSIGHTS_SYSTEM_ID_FILE = '/etc/redhat-access-insights/machine-id'
TOWER_SETTINGS_MANIFEST = {}
@@ -819,14 +771,7 @@ CHANNEL_LAYER_RECEIVE_MAX_RETRY = 10
ASGI_APPLICATION = "awx.main.routing.application"
CHANNEL_LAYERS = {
- "default": {
- "BACKEND": "channels_redis.core.RedisChannelLayer",
- "CONFIG": {
- "hosts": [BROKER_URL],
- "capacity": 10000,
- "group_expiry": 157784760, # 5 years
- },
- },
+ "default": {"BACKEND": "channels_redis.core.RedisChannelLayer", "CONFIG": {"hosts": [BROKER_URL], "capacity": 10000, "group_expiry": 157784760}} # 5 years
}
# Logging configuration.
@@ -834,63 +779,25 @@ LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
- 'require_debug_false': {
- '()': 'django.utils.log.RequireDebugFalse',
- },
- 'require_debug_true': {
- '()': 'django.utils.log.RequireDebugTrue',
- },
- 'require_debug_true_or_test': {
- '()': 'awx.main.utils.RequireDebugTrueOrTest',
- },
- 'external_log_enabled': {
- '()': 'awx.main.utils.filters.ExternalLoggerEnabled'
- },
- 'dynamic_level_filter': {
- '()': 'awx.main.utils.filters.DynamicLevelFilter'
- },
- 'guid': {
- '()': 'awx.main.utils.filters.DefaultCorrelationId'
- },
+ 'require_debug_false': {'()': 'django.utils.log.RequireDebugFalse'},
+ 'require_debug_true': {'()': 'django.utils.log.RequireDebugTrue'},
+ 'require_debug_true_or_test': {'()': 'awx.main.utils.RequireDebugTrueOrTest'},
+ 'external_log_enabled': {'()': 'awx.main.utils.filters.ExternalLoggerEnabled'},
+ 'dynamic_level_filter': {'()': 'awx.main.utils.filters.DynamicLevelFilter'},
+ 'guid': {'()': 'awx.main.utils.filters.DefaultCorrelationId'},
},
'formatters': {
- 'simple': {
- 'format': '%(asctime)s %(levelname)-8s [%(guid)s] %(name)s %(message)s',
- },
- 'json': {
- '()': 'awx.main.utils.formatters.LogstashFormatter'
- },
- 'timed_import': {
- '()': 'awx.main.utils.formatters.TimeFormatter',
- 'format': '%(relativeSeconds)9.3f %(levelname)-8s %(message)s'
- },
- 'dispatcher': {
- 'format': '%(asctime)s %(levelname)-8s [%(guid)s] %(name)s PID:%(process)d %(message)s',
- },
- 'job_lifecycle': {
- '()': 'awx.main.utils.formatters.JobLifeCycleFormatter',
- },
+ 'simple': {'format': '%(asctime)s %(levelname)-8s [%(guid)s] %(name)s %(message)s'},
+ 'json': {'()': 'awx.main.utils.formatters.LogstashFormatter'},
+ 'timed_import': {'()': 'awx.main.utils.formatters.TimeFormatter', 'format': '%(relativeSeconds)9.3f %(levelname)-8s %(message)s'},
+ 'dispatcher': {'format': '%(asctime)s %(levelname)-8s [%(guid)s] %(name)s PID:%(process)d %(message)s'},
+ 'job_lifecycle': {'()': 'awx.main.utils.formatters.JobLifeCycleFormatter'},
},
'handlers': {
- 'console': {
- '()': 'logging.StreamHandler',
- 'level': 'DEBUG',
- 'filters': ['require_debug_true_or_test', 'guid'],
- 'formatter': 'simple',
- },
- 'null': {
- 'class': 'logging.NullHandler',
- },
- 'file': {
- 'class': 'logging.NullHandler',
- 'formatter': 'simple',
- },
- 'syslog': {
- 'level': 'WARNING',
- 'filters': ['require_debug_false'],
- 'class': 'logging.NullHandler',
- 'formatter': 'simple',
- },
+ 'console': {'()': 'logging.StreamHandler', 'level': 'DEBUG', 'filters': ['require_debug_true_or_test', 'guid'], 'formatter': 'simple'},
+ 'null': {'class': 'logging.NullHandler'},
+ 'file': {'class': 'logging.NullHandler', 'formatter': 'simple'},
+ 'syslog': {'level': 'WARNING', 'filters': ['require_debug_false'], 'class': 'logging.NullHandler', 'formatter': 'simple'},
'external_logger': {
'class': 'awx.main.utils.handlers.RSysLogHandler',
'formatter': 'json',
@@ -902,194 +809,103 @@ LOGGING = {
'class': 'logging.handlers.WatchedFileHandler',
'filters': ['require_debug_false', 'dynamic_level_filter', 'guid'],
'filename': os.path.join(LOG_ROOT, 'tower.log'),
- 'formatter':'simple',
+ 'formatter': 'simple',
},
'callback_receiver': {
# don't define a level here, it's set by settings.LOG_AGGREGATOR_LEVEL
'class': 'logging.handlers.WatchedFileHandler',
'filters': ['require_debug_false', 'dynamic_level_filter', 'guid'],
'filename': os.path.join(LOG_ROOT, 'callback_receiver.log'),
- 'formatter':'simple',
+ 'formatter': 'simple',
},
'dispatcher': {
# don't define a level here, it's set by settings.LOG_AGGREGATOR_LEVEL
'class': 'logging.handlers.WatchedFileHandler',
'filters': ['require_debug_false', 'dynamic_level_filter', 'guid'],
'filename': os.path.join(LOG_ROOT, 'dispatcher.log'),
- 'formatter':'dispatcher',
+ 'formatter': 'dispatcher',
},
'wsbroadcast': {
# don't define a level here, it's set by settings.LOG_AGGREGATOR_LEVEL
'class': 'logging.handlers.WatchedFileHandler',
'filters': ['require_debug_false', 'dynamic_level_filter', 'guid'],
'filename': os.path.join(LOG_ROOT, 'wsbroadcast.log'),
- 'formatter':'simple',
- },
- 'celery.beat': {
- 'class':'logging.StreamHandler',
- 'level': 'ERROR'
- }, # don't log every celerybeat wakeup
- 'inventory_import': {
- 'level': 'DEBUG',
- 'class':'logging.StreamHandler',
- 'formatter': 'timed_import',
+ 'formatter': 'simple',
},
+ 'celery.beat': {'class': 'logging.StreamHandler', 'level': 'ERROR'}, # don't log every celerybeat wakeup
+ 'inventory_import': {'level': 'DEBUG', 'class': 'logging.StreamHandler', 'formatter': 'timed_import'},
'task_system': {
# don't define a level here, it's set by settings.LOG_AGGREGATOR_LEVEL
'class': 'logging.handlers.WatchedFileHandler',
'filters': ['require_debug_false', 'dynamic_level_filter', 'guid'],
'filename': os.path.join(LOG_ROOT, 'task_system.log'),
- 'formatter':'simple',
+ 'formatter': 'simple',
},
'management_playbooks': {
'level': 'DEBUG',
- 'class':'logging.handlers.WatchedFileHandler',
+ 'class': 'logging.handlers.WatchedFileHandler',
'filters': ['require_debug_false'],
'filename': os.path.join(LOG_ROOT, 'management_playbooks.log'),
- 'formatter':'simple',
+ 'formatter': 'simple',
},
'system_tracking_migrations': {
'level': 'WARNING',
- 'class':'logging.handlers.WatchedFileHandler',
+ 'class': 'logging.handlers.WatchedFileHandler',
'filters': ['require_debug_false'],
'filename': os.path.join(LOG_ROOT, 'tower_system_tracking_migrations.log'),
- 'formatter':'simple',
+ 'formatter': 'simple',
},
'rbac_migrations': {
'level': 'WARNING',
- 'class':'logging.handlers.WatchedFileHandler',
+ 'class': 'logging.handlers.WatchedFileHandler',
'filters': ['require_debug_false'],
'filename': os.path.join(LOG_ROOT, 'tower_rbac_migrations.log'),
- 'formatter':'simple',
+ 'formatter': 'simple',
},
'isolated_manager': {
'level': 'WARNING',
- 'class':'logging.handlers.WatchedFileHandler',
+ 'class': 'logging.handlers.WatchedFileHandler',
'filename': os.path.join(LOG_ROOT, 'isolated_manager.log'),
- 'formatter':'simple',
+ 'formatter': 'simple',
},
'job_lifecycle': {
'level': 'DEBUG',
- 'class':'logging.handlers.WatchedFileHandler',
+ 'class': 'logging.handlers.WatchedFileHandler',
'filename': os.path.join(LOG_ROOT, 'job_lifecycle.log'),
'formatter': 'job_lifecycle',
},
},
'loggers': {
- 'django': {
- 'handlers': ['console'],
- },
- 'django.request': {
- 'handlers': ['console', 'file', 'tower_warnings'],
- 'level': 'WARNING',
- },
- 'daphne': {
- 'handlers': ['console', 'file', 'tower_warnings'],
- 'level': 'INFO',
- },
- 'rest_framework.request': {
- 'handlers': ['console', 'file', 'tower_warnings'],
- 'level': 'WARNING',
- 'propagate': False,
- },
- 'py.warnings': {
- 'handlers': ['console'],
- },
- 'awx': {
- 'handlers': ['console', 'file', 'tower_warnings', 'external_logger'],
- 'level': 'DEBUG',
- },
- 'awx.conf': {
- 'handlers': ['null'],
- 'level': 'WARNING',
- },
- 'awx.conf.settings': {
- 'handlers': ['null'],
- 'level': 'WARNING',
- },
- 'awx.main': {
- 'handlers': ['null']
- },
- 'awx.main.commands.run_callback_receiver': {
- 'handlers': ['callback_receiver'], # level handled by dynamic_level_filter
- },
- 'awx.main.dispatch': {
- 'handlers': ['dispatcher'],
- },
- 'awx.main.consumers': {
- 'handlers': ['console', 'file', 'tower_warnings'],
- 'level': 'INFO',
- },
- 'awx.main.wsbroadcast': {
- 'handlers': ['wsbroadcast'],
- },
- 'awx.isolated.manager': {
- 'level': 'WARNING',
- 'handlers': ['console', 'file', 'isolated_manager'],
- 'propagate': True
- },
- 'awx.isolated.manager.playbooks': {
- 'handlers': ['management_playbooks'],
- 'propagate': False
- },
- 'awx.main.commands.inventory_import': {
- 'handlers': ['inventory_import'],
- 'propagate': False
- },
- 'awx.main.tasks': {
- 'handlers': ['task_system', 'external_logger'],
- 'propagate': False
- },
- 'awx.main.analytics': {
- 'handlers': ['task_system', 'external_logger'],
- 'level': 'INFO',
- 'propagate': False
- },
- 'awx.main.scheduler': {
- 'handlers': ['task_system', 'external_logger'],
- 'propagate': False
- },
- 'awx.main.access': {
- 'level': 'INFO', # very verbose debug-level logs
- },
- 'awx.main.signals': {
- 'level': 'INFO', # very verbose debug-level logs
- },
- 'awx.api.permissions': {
- 'level': 'INFO', # very verbose debug-level logs
- },
- 'awx.analytics': {
- 'handlers': ['external_logger'],
- 'level': 'INFO',
- 'propagate': False
- },
- 'awx.analytics.performance': {
- 'handlers': ['console', 'file', 'tower_warnings', 'external_logger'],
- 'level': 'DEBUG',
- 'propagate': False
- },
- 'awx.analytics.job_lifecycle': {
- 'handlers': ['console', 'job_lifecycle'],
- 'level': 'DEBUG',
- 'propagate': False
- },
- 'django_auth_ldap': {
- 'handlers': ['console', 'file', 'tower_warnings'],
- 'level': 'DEBUG',
- },
- 'social': {
- 'handlers': ['console', 'file', 'tower_warnings'],
- 'level': 'DEBUG',
- },
- 'system_tracking_migrations': {
- 'handlers': ['console', 'file', 'tower_warnings'],
- 'level': 'DEBUG',
- },
- 'rbac_migrations': {
- 'handlers': ['console', 'file', 'tower_warnings'],
- 'level': 'DEBUG',
- },
- }
+ 'django': {'handlers': ['console']},
+ 'django.request': {'handlers': ['console', 'file', 'tower_warnings'], 'level': 'WARNING'},
+ 'daphne': {'handlers': ['console', 'file', 'tower_warnings'], 'level': 'INFO'},
+ 'rest_framework.request': {'handlers': ['console', 'file', 'tower_warnings'], 'level': 'WARNING', 'propagate': False},
+ 'py.warnings': {'handlers': ['console']},
+ 'awx': {'handlers': ['console', 'file', 'tower_warnings', 'external_logger'], 'level': 'DEBUG'},
+ 'awx.conf': {'handlers': ['null'], 'level': 'WARNING'},
+ 'awx.conf.settings': {'handlers': ['null'], 'level': 'WARNING'},
+ 'awx.main': {'handlers': ['null']},
+ 'awx.main.commands.run_callback_receiver': {'handlers': ['callback_receiver']}, # level handled by dynamic_level_filter
+ 'awx.main.dispatch': {'handlers': ['dispatcher']},
+ 'awx.main.consumers': {'handlers': ['console', 'file', 'tower_warnings'], 'level': 'INFO'},
+ 'awx.main.wsbroadcast': {'handlers': ['wsbroadcast']},
+ 'awx.isolated.manager': {'level': 'WARNING', 'handlers': ['console', 'file', 'isolated_manager'], 'propagate': True},
+ 'awx.isolated.manager.playbooks': {'handlers': ['management_playbooks'], 'propagate': False},
+ 'awx.main.commands.inventory_import': {'handlers': ['inventory_import'], 'propagate': False},
+ 'awx.main.tasks': {'handlers': ['task_system', 'external_logger'], 'propagate': False},
+ 'awx.main.analytics': {'handlers': ['task_system', 'external_logger'], 'level': 'INFO', 'propagate': False},
+ 'awx.main.scheduler': {'handlers': ['task_system', 'external_logger'], 'propagate': False},
+ 'awx.main.access': {'level': 'INFO'}, # very verbose debug-level logs
+ 'awx.main.signals': {'level': 'INFO'}, # very verbose debug-level logs
+ 'awx.api.permissions': {'level': 'INFO'}, # very verbose debug-level logs
+ 'awx.analytics': {'handlers': ['external_logger'], 'level': 'INFO', 'propagate': False},
+ 'awx.analytics.performance': {'handlers': ['console', 'file', 'tower_warnings', 'external_logger'], 'level': 'DEBUG', 'propagate': False},
+ 'awx.analytics.job_lifecycle': {'handlers': ['console', 'job_lifecycle'], 'level': 'DEBUG', 'propagate': False},
+ 'django_auth_ldap': {'handlers': ['console', 'file', 'tower_warnings'], 'level': 'DEBUG'},
+ 'social': {'handlers': ['console', 'file', 'tower_warnings'], 'level': 'DEBUG'},
+ 'system_tracking_migrations': {'handlers': ['console', 'file', 'tower_warnings'], 'level': 'DEBUG'},
+ 'rbac_migrations': {'handlers': ['console', 'file', 'tower_warnings'], 'level': 'DEBUG'},
+ },
}
# Apply coloring to messages logged to the console
@@ -1164,6 +980,4 @@ BROADCAST_WEBSOCKET_NEW_INSTANCE_POLL_RATE_SECONDS = 10
# How often websocket process will generate stats
BROADCAST_WEBSOCKET_STATS_POLL_RATE_SECONDS = 5
-DJANGO_GUID = {
- 'GUID_HEADER_NAME': 'X-API-Request-Id',
-}
+DJANGO_GUID = {'GUID_HEADER_NAME': 'X-API-Request-Id'}
diff --git a/awx/settings/development.py b/awx/settings/development.py
index 21b5bb6b4a..e2a42fef67 100644
--- a/awx/settings/development.py
+++ b/awx/settings/development.py
@@ -23,13 +23,7 @@ from .defaults import * # NOQA
# awx-manage shell_plus --notebook
-NOTEBOOK_ARGUMENTS = [
- '--NotebookApp.token=',
- '--ip', '0.0.0.0',
- '--port', '8888',
- '--allow-root',
- '--no-browser',
-]
+NOTEBOOK_ARGUMENTS = ['--NotebookApp.token=', '--ip', '0.0.0.0', '--port', '8888', '--allow-root', '--no-browser']
# print SQL queries in shell_plus
SHELL_PLUS_PRINT_SQL = False
@@ -63,11 +57,8 @@ SESSION_COOKIE_SECURE = False
CSRF_COOKIE_SECURE = False
# Override django.template.loaders.cached.Loader in defaults.py
-template = next((tpl_backend for tpl_backend in TEMPLATES if tpl_backend['NAME'] == 'default'), None) # noqa
-template['OPTIONS']['loaders'] = (
- 'django.template.loaders.filesystem.Loader',
- 'django.template.loaders.app_directories.Loader',
-)
+template = next((tpl_backend for tpl_backend in TEMPLATES if tpl_backend['NAME'] == 'default'), None) # noqa
+template['OPTIONS']['loaders'] = ('django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader')
CALLBACK_QUEUE = "callback_tasks"
@@ -89,43 +80,13 @@ AWX_ISOLATED_PERIODIC_CHECK = 30
PENDO_TRACKING_STATE = "off"
INSIGHTS_TRACKING_STATE = False
-# Use Django-Jenkins if installed. Only run tests for awx.main app.
-try:
- import django_jenkins
- INSTALLED_APPS += [django_jenkins.__name__,] # noqa
- PROJECT_APPS = ('awx.main.tests', 'awx.api.tests',)
-except ImportError:
- pass
-
-if 'django_jenkins' in INSTALLED_APPS:
- JENKINS_TASKS = (
- # 'django_jenkins.tasks.run_pylint',
- # 'django_jenkins.tasks.run_flake8',
- # The following are not needed when including run_flake8
- # 'django_jenkins.tasks.run_pep8',
- # 'django_jenkins.tasks.run_pyflakes',
- # The following are handled by various grunt tasks and no longer required
- # 'django_jenkins.tasks.run_jshint',
- # 'django_jenkins.tasks.run_csslint',
- )
- PEP8_RCFILE = "setup.cfg"
- PYLINT_RCFILE = ".pylintrc"
-
-
# debug toolbar and swagger assume that requirements/requirements_dev.txt are installed
-INSTALLED_APPS += [ # NOQA
- 'rest_framework_swagger',
- 'debug_toolbar',
-]
+INSTALLED_APPS += ['rest_framework_swagger', 'debug_toolbar'] # NOQA
-MIDDLEWARE = [
- 'debug_toolbar.middleware.DebugToolbarMiddleware',
-] + MIDDLEWARE # NOQA
+MIDDLEWARE = ['debug_toolbar.middleware.DebugToolbarMiddleware'] + MIDDLEWARE # NOQA
-DEBUG_TOOLBAR_CONFIG = {
- 'ENABLE_STACKTRACES' : True,
-}
+DEBUG_TOOLBAR_CONFIG = {'ENABLE_STACKTRACES': True}
# Configure a default UUID for development only.
SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
@@ -167,35 +128,32 @@ except ImportError:
# commented out, Django will create the test_awx-dev database in PostgreSQL to
# run unit tests.
if "pytest" in sys.modules:
- CACHES = {
- 'default': {
- 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
- 'LOCATION': 'unique-{}'.format(str(uuid.uuid4())),
- },
- }
+ CACHES = {'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'unique-{}'.format(str(uuid.uuid4()))}}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
- 'NAME': os.path.join(BASE_DIR, 'awx.sqlite3'), # noqa
+ 'NAME': os.path.join(BASE_DIR, 'awx.sqlite3'), # noqa
'TEST': {
# Test database cannot be :memory: for inventory tests.
- 'NAME': os.path.join(BASE_DIR, 'awx_test.sqlite3'), # noqa
+ 'NAME': os.path.join(BASE_DIR, 'awx_test.sqlite3') # noqa
},
}
}
-CELERYBEAT_SCHEDULE.update({ # noqa
- 'isolated_heartbeat': {
- 'task': 'awx.main.tasks.awx_isolated_heartbeat',
- 'schedule': timedelta(seconds=AWX_ISOLATED_PERIODIC_CHECK), # noqa
- 'options': {'expires': AWX_ISOLATED_PERIODIC_CHECK * 2}, # noqa
+CELERYBEAT_SCHEDULE.update(
+ { # noqa
+ 'isolated_heartbeat': {
+ 'task': 'awx.main.tasks.awx_isolated_heartbeat',
+ 'schedule': timedelta(seconds=AWX_ISOLATED_PERIODIC_CHECK), # noqa
+ 'options': {'expires': AWX_ISOLATED_PERIODIC_CHECK * 2}, # noqa
+ }
}
-})
+)
CLUSTER_HOST_ID = socket.gethostname()
AWX_CALLBACK_PROFILE = True
-if 'sqlite3' not in DATABASES['default']['ENGINE']: # noqa
- DATABASES['default'].setdefault('OPTIONS', dict()).setdefault('application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63]) # noqa
+if 'sqlite3' not in DATABASES['default']['ENGINE']: # noqa
+ DATABASES['default'].setdefault('OPTIONS', dict()).setdefault('application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63]) # noqa
diff --git a/awx/settings/development_quiet.py b/awx/settings/development_quiet.py
index 924ae22072..c47e78b69d 100644
--- a/awx/settings/development_quiet.py
+++ b/awx/settings/development_quiet.py
@@ -13,4 +13,3 @@ from development import * # NOQA
DEBUG = False
TEMPLATE_DEBUG = DEBUG
SQL_DEBUG = DEBUG
-
diff --git a/awx/settings/production.py b/awx/settings/production.py
index 02681265e6..f5a1bd7a7f 100644
--- a/awx/settings/production.py
+++ b/awx/settings/production.py
@@ -57,8 +57,7 @@ settings_files = os.path.join(settings_dir, '*.py')
# Load remaining settings from the global settings file specified in the
# environment, defaulting to /etc/tower/settings.py.
-settings_file = os.environ.get('AWX_SETTINGS_FILE',
- '/etc/tower/settings.py')
+settings_file = os.environ.get('AWX_SETTINGS_FILE', '/etc/tower/settings.py')
# Attempt to load settings from /etc/tower/settings.py first, followed by
# /etc/tower/conf.d/*.py.
@@ -69,8 +68,9 @@ except ImportError:
sys.exit(1)
except IOError:
from django.core.exceptions import ImproperlyConfigured
+
included_file = locals().get('__included_file__', '')
- if (not included_file or included_file == settings_file):
+ if not included_file or included_file == settings_file:
# The import doesn't always give permission denied, so try to open the
# settings file directly.
try:
@@ -91,12 +91,14 @@ except IOError:
# The below runs AFTER all of the custom settings are imported.
-CELERYBEAT_SCHEDULE.update({ # noqa
- 'isolated_heartbeat': {
- 'task': 'awx.main.tasks.awx_isolated_heartbeat',
- 'schedule': timedelta(seconds=AWX_ISOLATED_PERIODIC_CHECK), # noqa
- 'options': {'expires': AWX_ISOLATED_PERIODIC_CHECK * 2}, # noqa
+CELERYBEAT_SCHEDULE.update(
+ { # noqa
+ 'isolated_heartbeat': {
+ 'task': 'awx.main.tasks.awx_isolated_heartbeat',
+ 'schedule': timedelta(seconds=AWX_ISOLATED_PERIODIC_CHECK), # noqa
+ 'options': {'expires': AWX_ISOLATED_PERIODIC_CHECK * 2}, # noqa
+ }
}
-})
+)
-DATABASES['default'].setdefault('OPTIONS', dict()).setdefault('application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63]) # noqa
+DATABASES['default'].setdefault('OPTIONS', dict()).setdefault('application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63]) # noqa
diff --git a/awx/sso/backends.py b/awx/sso/backends.py
index b09cbf9699..ae7918b359 100644
--- a/awx/sso/backends.py
+++ b/awx/sso/backends.py
@@ -40,11 +40,7 @@ logger = logging.getLogger('awx.sso.backends')
class LDAPSettings(BaseLDAPSettings):
- defaults = dict(list(BaseLDAPSettings.defaults.items()) + list({
- 'ORGANIZATION_MAP': {},
- 'TEAM_MAP': {},
- 'GROUP_TYPE_PARAMS': {},
- }.items()))
+ defaults = dict(list(BaseLDAPSettings.defaults.items()) + list({'ORGANIZATION_MAP': {}, 'TEAM_MAP': {}, 'GROUP_TYPE_PARAMS': {}}.items()))
def __init__(self, prefix='AUTH_LDAP_', defaults={}):
super(LDAPSettings, self).__init__(prefix, defaults)
@@ -72,9 +68,9 @@ class LDAPSettings(BaseLDAPSettings):
class LDAPBackend(BaseLDAPBackend):
- '''
+ """
Custom LDAP backend for AWX.
- '''
+ """
settings_prefix = 'AUTH_LDAP_'
@@ -117,14 +113,9 @@ class LDAPBackend(BaseLDAPBackend):
pass
try:
- for setting_name, type_ in [
- ('GROUP_SEARCH', 'LDAPSearch'),
- ('GROUP_TYPE', 'LDAPGroupType'),
- ]:
+ for setting_name, type_ in [('GROUP_SEARCH', 'LDAPSearch'), ('GROUP_TYPE', 'LDAPGroupType')]:
if getattr(self.settings, setting_name) is None:
- raise ImproperlyConfigured(
- "{} must be an {} instance.".format(setting_name, type_)
- )
+ raise ImproperlyConfigured("{} must be an {} instance.".format(setting_name, type_))
return super(LDAPBackend, self).authenticate(request, username, password)
except Exception:
logger.exception("Encountered an error authenticating to LDAP")
@@ -184,8 +175,7 @@ def _get_or_set_enterprise_user(username, password, provider):
except User.DoesNotExist:
user = User(username=username)
enterprise_auth = _decorate_enterprise_user(user, provider)
- logger.debug("Created enterprise user %s via %s backend." %
- (username, enterprise_auth.get_provider_display()))
+ logger.debug("Created enterprise user %s via %s backend." % (username, enterprise_auth.get_provider_display()))
created = True
if created or user.is_in_enterprise_category(provider):
return user
@@ -193,9 +183,9 @@ def _get_or_set_enterprise_user(username, password, provider):
class RADIUSBackend(BaseRADIUSBackend):
- '''
+ """
Custom Radius backend to verify license status
- '''
+ """
def authenticate(self, request, username, password):
if not django_settings.RADIUS_SERVER:
@@ -214,9 +204,9 @@ class RADIUSBackend(BaseRADIUSBackend):
class TACACSPlusBackend(object):
- '''
+ """
Custom TACACS+ auth backend for AWX
- '''
+ """
def authenticate(self, request, username, password):
if not django_settings.TACACSPLUS_HOST:
@@ -228,10 +218,7 @@ class TACACSPlusBackend(object):
django_settings.TACACSPLUS_PORT,
django_settings.TACACSPLUS_SECRET,
timeout=django_settings.TACACSPLUS_SESSION_TIMEOUT,
- ).authenticate(
- username, password,
- authen_type=tacacs_plus.TAC_PLUS_AUTHEN_TYPES[django_settings.TACACSPLUS_AUTH_PROTOCOL],
- )
+ ).authenticate(username, password, authen_type=tacacs_plus.TAC_PLUS_AUTHEN_TYPES[django_settings.TACACSPLUS_AUTH_PROTOCOL])
except Exception as e:
logger.exception("TACACS+ Authentication Error: %s" % str(e))
return None
@@ -248,9 +235,9 @@ class TACACSPlusBackend(object):
class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider):
- '''
+ """
Custom Identity Provider to map attributes to what we expect.
- '''
+ """
def get_user_permanent_id(self, attributes):
uid = attributes[self.conf.get('attr_user_permanent_id', OID_USERID)]
@@ -270,26 +257,37 @@ class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider):
if isinstance(value, (list, tuple)):
value = value[0]
if conf_key in ('attr_first_name', 'attr_last_name', 'attr_username', 'attr_email') and value is None:
- logger.warn("Could not map user detail '%s' from SAML attribute '%s'; "
- "update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.",
- conf_key[5:], key, self.name, conf_key)
+ logger.warn(
+ "Could not map user detail '%s' from SAML attribute '%s'; " "update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.",
+ conf_key[5:],
+ key,
+ self.name,
+ conf_key,
+ )
return str(value) if value is not None else value
class SAMLAuth(BaseSAMLAuth):
- '''
+ """
Custom SAMLAuth backend to verify license status
- '''
+ """
def get_idp(self, idp_name):
idp_config = self.setting('ENABLED_IDPS')[idp_name]
return TowerSAMLIdentityProvider(idp_name, **idp_config)
def authenticate(self, request, *args, **kwargs):
- if not all([django_settings.SOCIAL_AUTH_SAML_SP_ENTITY_ID, django_settings.SOCIAL_AUTH_SAML_SP_PUBLIC_CERT,
- django_settings.SOCIAL_AUTH_SAML_SP_PRIVATE_KEY, django_settings.SOCIAL_AUTH_SAML_ORG_INFO,
- django_settings.SOCIAL_AUTH_SAML_TECHNICAL_CONTACT, django_settings.SOCIAL_AUTH_SAML_SUPPORT_CONTACT,
- django_settings.SOCIAL_AUTH_SAML_ENABLED_IDPS]):
+ if not all(
+ [
+ django_settings.SOCIAL_AUTH_SAML_SP_ENTITY_ID,
+ django_settings.SOCIAL_AUTH_SAML_SP_PUBLIC_CERT,
+ django_settings.SOCIAL_AUTH_SAML_SP_PRIVATE_KEY,
+ django_settings.SOCIAL_AUTH_SAML_ORG_INFO,
+ django_settings.SOCIAL_AUTH_SAML_TECHNICAL_CONTACT,
+ django_settings.SOCIAL_AUTH_SAML_SUPPORT_CONTACT,
+ django_settings.SOCIAL_AUTH_SAML_ENABLED_IDPS,
+ ]
+ ):
return None
user = super(SAMLAuth, self).authenticate(request, *args, **kwargs)
# Comes from https://github.com/omab/python-social-auth/blob/v0.2.21/social/backends/base.py#L91
@@ -300,18 +298,25 @@ class SAMLAuth(BaseSAMLAuth):
return user
def get_user(self, user_id):
- if not all([django_settings.SOCIAL_AUTH_SAML_SP_ENTITY_ID, django_settings.SOCIAL_AUTH_SAML_SP_PUBLIC_CERT,
- django_settings.SOCIAL_AUTH_SAML_SP_PRIVATE_KEY, django_settings.SOCIAL_AUTH_SAML_ORG_INFO,
- django_settings.SOCIAL_AUTH_SAML_TECHNICAL_CONTACT, django_settings.SOCIAL_AUTH_SAML_SUPPORT_CONTACT,
- django_settings.SOCIAL_AUTH_SAML_ENABLED_IDPS]):
+ if not all(
+ [
+ django_settings.SOCIAL_AUTH_SAML_SP_ENTITY_ID,
+ django_settings.SOCIAL_AUTH_SAML_SP_PUBLIC_CERT,
+ django_settings.SOCIAL_AUTH_SAML_SP_PRIVATE_KEY,
+ django_settings.SOCIAL_AUTH_SAML_ORG_INFO,
+ django_settings.SOCIAL_AUTH_SAML_TECHNICAL_CONTACT,
+ django_settings.SOCIAL_AUTH_SAML_SUPPORT_CONTACT,
+ django_settings.SOCIAL_AUTH_SAML_ENABLED_IDPS,
+ ]
+ ):
return None
return super(SAMLAuth, self).get_user(user_id)
def _update_m2m_from_groups(user, ldap_user, related, opts, remove=True):
- '''
+ """
Helper function to update an m2m relationship based on LDAP group membership.
- '''
+ """
should_add = False
if opts is None:
return
@@ -337,11 +342,12 @@ def _update_m2m_from_groups(user, ldap_user, related, opts, remove=True):
@receiver(populate_user, dispatch_uid='populate-ldap-user')
def on_populate_user(sender, **kwargs):
- '''
+ """
Handle signal from LDAP backend to populate the user object. Update user
organization/team memberships according to their LDAP groups.
- '''
+ """
from awx.main.models import Organization, Team
+
user = kwargs['user']
ldap_user = kwargs['ldap_user']
backend = ldap_user.backend
@@ -356,9 +362,7 @@ def on_populate_user(sender, **kwargs):
field_len = len(getattr(user, field))
if field_len > max_len:
setattr(user, field, getattr(user, field)[:max_len])
- logger.warn(
- 'LDAP user {} has {} > max {} characters'.format(user.username, field, max_len)
- )
+ logger.warn('LDAP user {} has {} > max {} characters'.format(user.username, field, max_len))
# Update organization membership based on group memberships.
org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {})
@@ -367,16 +371,13 @@ def on_populate_user(sender, **kwargs):
remove = bool(org_opts.get('remove', True))
admins_opts = org_opts.get('admins', None)
remove_admins = bool(org_opts.get('remove_admins', remove))
- _update_m2m_from_groups(user, ldap_user, org.admin_role.members, admins_opts,
- remove_admins)
+ _update_m2m_from_groups(user, ldap_user, org.admin_role.members, admins_opts, remove_admins)
auditors_opts = org_opts.get('auditors', None)
remove_auditors = bool(org_opts.get('remove_auditors', remove))
- _update_m2m_from_groups(user, ldap_user, org.auditor_role.members, auditors_opts,
- remove_auditors)
+ _update_m2m_from_groups(user, ldap_user, org.auditor_role.members, auditors_opts, remove_auditors)
users_opts = org_opts.get('users', None)
remove_users = bool(org_opts.get('remove_users', remove))
- _update_m2m_from_groups(user, ldap_user, org.member_role.members, users_opts,
- remove_users)
+ _update_m2m_from_groups(user, ldap_user, org.member_role.members, users_opts, remove_users)
# Update team membership based on group memberships.
team_map = getattr(backend.settings, 'TEAM_MAP', {})
@@ -387,8 +388,7 @@ def on_populate_user(sender, **kwargs):
team, created = Team.objects.get_or_create(name=team_name, organization=org)
users_opts = team_opts.get('users', None)
remove = bool(team_opts.get('remove', True))
- _update_m2m_from_groups(user, ldap_user, team.member_role.members, users_opts,
- remove)
+ _update_m2m_from_groups(user, ldap_user, team.member_role.members, users_opts, remove)
# Update user profile to store LDAP DN.
user.save()
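
A minimal sketch (hypothetical organization name; group DNs and option keys taken from the placeholders elsewhere in this diff, not part of this hunk) of the ORGANIZATION_MAP shape that the reformatted on_populate_user handler above consumes; 'admins'/'auditors'/'users' select LDAP groups and the remove* flags control pruning via _update_m2m_from_groups:

    # Hypothetical settings value; keys match what on_populate_user reads above.
    AUTH_LDAP_ORGANIZATION_MAP = {
        'Example Org': {
            'admins': 'CN=Domain Admins,CN=Users,DC=example,DC=com',
            'users': ['CN=Domain Users,CN=Users,DC=example,DC=com'],
            'remove_users': True,   # prune membership when the LDAP groups no longer match
            'remove_admins': True,
        },
    }
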
diff --git a/awx/sso/conf.py b/awx/sso/conf.py
index 8f21eb4d0a..f160cc565c 100644
--- a/awx/sso/conf.py
+++ b/awx/sso/conf.py
@@ -13,23 +13,33 @@ from rest_framework import serializers
# Tower
from awx.conf import register, register_validate, fields
from awx.sso.fields import (
- AuthenticationBackendsField, LDAPConnectionOptionsField, LDAPDNField,
- LDAPDNWithUserField, LDAPGroupTypeField, LDAPGroupTypeParamsField,
- LDAPOrganizationMapField, LDAPSearchField, LDAPSearchUnionField,
- LDAPServerURIField, LDAPTeamMapField, LDAPUserAttrMapField,
- LDAPUserFlagsField, SAMLContactField, SAMLEnabledIdPsField,
- SAMLOrgAttrField, SAMLOrgInfoField, SAMLSecurityField, SAMLTeamAttrField,
- SocialOrganizationMapField, SocialTeamMapField,
+ AuthenticationBackendsField,
+ LDAPConnectionOptionsField,
+ LDAPDNField,
+ LDAPDNWithUserField,
+ LDAPGroupTypeField,
+ LDAPGroupTypeParamsField,
+ LDAPOrganizationMapField,
+ LDAPSearchField,
+ LDAPSearchUnionField,
+ LDAPServerURIField,
+ LDAPTeamMapField,
+ LDAPUserAttrMapField,
+ LDAPUserFlagsField,
+ SAMLContactField,
+ SAMLEnabledIdPsField,
+ SAMLOrgAttrField,
+ SAMLOrgInfoField,
+ SAMLSecurityField,
+ SAMLTeamAttrField,
+ SocialOrganizationMapField,
+ SocialTeamMapField,
)
from awx.main.validators import validate_private_key, validate_certificate
-from awx.sso.validators import ( # noqa
- validate_ldap_bind_dn,
- validate_tacacsplus_disallow_nonascii,
-)
+from awx.sso.validators import validate_ldap_bind_dn, validate_tacacsplus_disallow_nonascii # noqa
class SocialAuthCallbackURL(object):
-
def __init__(self, provider):
self.provider = provider
@@ -38,49 +48,48 @@ class SocialAuthCallbackURL(object):
return urlparse.urljoin(settings.TOWER_URL_BASE, path)
-SOCIAL_AUTH_ORGANIZATION_MAP_HELP_TEXT = _('''\
+SOCIAL_AUTH_ORGANIZATION_MAP_HELP_TEXT = _(
+ '''\
Mapping to organization admins/users from social auth accounts. This setting
controls which users are placed into which Tower organizations based on their
username and email address. Configuration details are available in the Ansible
Tower documentation.\
-''')
+'''
+)
# FIXME: /regex/gim (flags)
-SOCIAL_AUTH_ORGANIZATION_MAP_PLACEHOLDER = collections.OrderedDict([
- ('Default', collections.OrderedDict([
- ('users', True),
- ])),
- ('Test Org', collections.OrderedDict([
- ('admins', ['admin@example.com']),
- ('auditors', ['auditor@example.com']),
- ('users', True),
- ])),
- ('Test Org 2', collections.OrderedDict([
- ('admins', ['admin@example.com', r'/^tower-[^@]+*?@.*$/']),
- ('remove_admins', True),
- ('users', r'/^[^@].*?@example\.com$/i'),
- ('remove_users', True),
- ])),
-])
-
-SOCIAL_AUTH_TEAM_MAP_HELP_TEXT = _('''\
+SOCIAL_AUTH_ORGANIZATION_MAP_PLACEHOLDER = collections.OrderedDict(
+ [
+ ('Default', collections.OrderedDict([('users', True)])),
+ ('Test Org', collections.OrderedDict([('admins', ['admin@example.com']), ('auditors', ['auditor@example.com']), ('users', True)])),
+ (
+ 'Test Org 2',
+ collections.OrderedDict(
+ [
+ ('admins', ['admin@example.com', r'/^tower-[^@]+?@.*$/']),
+ ('remove_admins', True),
+ ('users', r'/^[^@].*?@example\.com$/i'),
+ ('remove_users', True),
+ ]
+ ),
+ ),
+ ]
+)
+
+SOCIAL_AUTH_TEAM_MAP_HELP_TEXT = _(
+ '''\
Mapping of team members (users) from social auth accounts. Configuration
details are available in Tower documentation.\
-''')
-
-SOCIAL_AUTH_TEAM_MAP_PLACEHOLDER = collections.OrderedDict([
- ('My Team', collections.OrderedDict([
- ('organization', 'Test Org'),
- ('users', [r'/^[^@]+?@test\.example\.com$/']),
- ('remove', True),
- ])),
- ('Other Team', collections.OrderedDict([
- ('organization', 'Test Org 2'),
- ('users', r'/^[^@]+?@test2\.example\.com$/i'),
- ('remove', False),
- ])),
-])
+'''
+)
+
+SOCIAL_AUTH_TEAM_MAP_PLACEHOLDER = collections.OrderedDict(
+ [
+ ('My Team', collections.OrderedDict([('organization', 'Test Org'), ('users', [r'/^[^@]+?@test\.example\.com$/']), ('remove', True)])),
+ ('Other Team', collections.OrderedDict([('organization', 'Test Org 2'), ('users', r'/^[^@]+?@test2\.example\.com$/i'), ('remove', False)])),
+ ]
+)
###############################################################################
# AUTHENTICATION BACKENDS DYNAMIC SETTING
@@ -90,8 +99,7 @@ register(
'AUTHENTICATION_BACKENDS',
field_class=AuthenticationBackendsField,
label=_('Authentication Backends'),
- help_text=_('List of authentication backends that are enabled based on '
- 'license features and other authentication settings.'),
+ help_text=_('List of authentication backends that are enabled based on license features and other authentication settings.'),
read_only=True,
depends_on=AuthenticationBackendsField.get_all_required_settings(),
category=_('Authentication'),
@@ -128,10 +136,12 @@ register(
allow_null=True,
default=None,
label=_('Social Auth User Fields'),
- help_text=_('When set to an empty list `[]`, this setting prevents new user '
- 'accounts from being created. Only users who have previously '
- 'logged in using social auth or have a user account with a '
- 'matching email address will be able to login.'),
+ help_text=_(
+ 'When set to an empty list `[]`, this setting prevents new user '
+ 'accounts from being created. Only users who have previously '
+ 'logged in using social auth or have a user account with a '
+ 'matching email address will be able to login.'
+ ),
category=_('Authentication'),
category_slug='authentication',
placeholder=['username', 'email'],
@@ -151,10 +161,12 @@ def _register_ldap(append=None):
allow_blank=True,
default='',
label=_('LDAP Server URI'),
- help_text=_('URI to connect to LDAP server, such as "ldap://ldap.example.com:389" '
- '(non-SSL) or "ldaps://ldap.example.com:636" (SSL). Multiple LDAP '
- 'servers may be specified by separating with spaces or commas. LDAP '
- 'authentication is disabled if this parameter is empty.'),
+ help_text=_(
+ 'URI to connect to LDAP server, such as "ldap://ldap.example.com:389" '
+ '(non-SSL) or "ldaps://ldap.example.com:636" (SSL). Multiple LDAP '
+ 'servers may be specified by separating with spaces or commas. LDAP '
+ 'authentication is disabled if this parameter is empty.'
+ ),
category=_('LDAP'),
category_slug='ldap',
placeholder='ldaps://ldap.example.com:636',
@@ -167,9 +179,11 @@ def _register_ldap(append=None):
default='',
validators=[validate_ldap_bind_dn],
label=_('LDAP Bind DN'),
- help_text=_('DN (Distinguished Name) of user to bind for all search queries. This'
- ' is the system user account we will use to login to query LDAP for other'
- ' user information. Refer to the Ansible Tower documentation for example syntax.'),
+ help_text=_(
+ 'DN (Distinguished Name) of user to bind for all search queries. This'
+ ' is the system user account we will use to login to query LDAP for other'
+ ' user information. Refer to the Ansible Tower documentation for example syntax.'
+ ),
category=_('LDAP'),
category_slug='ldap',
)
@@ -201,18 +215,17 @@ def _register_ldap(append=None):
field_class=LDAPConnectionOptionsField,
default={'OPT_REFERRALS': 0, 'OPT_NETWORK_TIMEOUT': 30},
label=_('LDAP Connection Options'),
- help_text=_('Additional options to set for the LDAP connection. LDAP '
- 'referrals are disabled by default (to prevent certain LDAP '
- 'queries from hanging with AD). Option names should be strings '
- '(e.g. "OPT_REFERRALS"). Refer to '
- 'https://www.python-ldap.org/doc/html/ldap.html#options for '
- 'possible options and values that can be set.'),
+ help_text=_(
+ 'Additional options to set for the LDAP connection. LDAP '
+ 'referrals are disabled by default (to prevent certain LDAP '
+ 'queries from hanging with AD). Option names should be strings '
+ '(e.g. "OPT_REFERRALS"). Refer to '
+ 'https://www.python-ldap.org/doc/html/ldap.html#options for '
+ 'possible options and values that can be set.'
+ ),
category=_('LDAP'),
category_slug='ldap',
- placeholder=collections.OrderedDict([
- ('OPT_REFERRALS', 0),
- ('OPT_NETWORK_TIMEOUT', 30)
- ]),
+ placeholder=collections.OrderedDict([('OPT_REFERRALS', 0), ('OPT_NETWORK_TIMEOUT', 30)]),
)
register(
@@ -220,19 +233,17 @@ def _register_ldap(append=None):
field_class=LDAPSearchUnionField,
default=[],
label=_('LDAP User Search'),
- help_text=_('LDAP search query to find users. Any user that matches the given '
- 'pattern will be able to login to Tower. The user should also be '
- 'mapped into a Tower organization (as defined in the '
- 'AUTH_LDAP_ORGANIZATION_MAP setting). If multiple search queries '
- 'need to be supported use of "LDAPUnion" is possible. See '
- 'Tower documentation for details.'),
+ help_text=_(
+ 'LDAP search query to find users. Any user that matches the given '
+ 'pattern will be able to login to Tower. The user should also be '
+ 'mapped into a Tower organization (as defined in the '
+ 'AUTH_LDAP_ORGANIZATION_MAP setting). If multiple search queries '
+ 'need to be supported, use of "LDAPSearchUnion" is possible. See '
+ 'Tower documentation for details.'
+ ),
category=_('LDAP'),
category_slug='ldap',
- placeholder=(
- 'OU=Users,DC=example,DC=com',
- 'SCOPE_SUBTREE',
- '(sAMAccountName=%(user)s)',
- ),
+ placeholder=('OU=Users,DC=example,DC=com', 'SCOPE_SUBTREE', '(sAMAccountName=%(user)s)'),
)
register(
@@ -242,11 +253,13 @@ def _register_ldap(append=None):
allow_null=True,
default=None,
label=_('LDAP User DN Template'),
- help_text=_('Alternative to user search, if user DNs are all of the same '
- 'format. This approach is more efficient for user lookups than '
- 'searching if it is usable in your organizational environment. If '
- 'this setting has a value it will be used instead of '
- 'AUTH_LDAP_USER_SEARCH.'),
+ help_text=_(
+ 'Alternative to user search, if user DNs are all of the same '
+ 'format. This approach is more efficient for user lookups than '
+ 'searching if it is usable in your organizational environment. If '
+ 'this setting has a value it will be used instead of '
+ 'AUTH_LDAP_USER_SEARCH.'
+ ),
category=_('LDAP'),
category_slug='ldap',
placeholder='uid=%(user)s,OU=Users,DC=example,DC=com',
@@ -257,17 +270,15 @@ def _register_ldap(append=None):
field_class=LDAPUserAttrMapField,
default={},
label=_('LDAP User Attribute Map'),
- help_text=_('Mapping of LDAP user schema to Tower API user attributes. The default'
- ' setting is valid for ActiveDirectory but users with other LDAP'
- ' configurations may need to change the values. Refer to the Ansible'
- ' Tower documentation for additional details.'),
+ help_text=_(
+ 'Mapping of LDAP user schema to Tower API user attributes. The default'
+ ' setting is valid for ActiveDirectory but users with other LDAP'
+ ' configurations may need to change the values. Refer to the Ansible'
+ ' Tower documentation for additional details.'
+ ),
category=_('LDAP'),
category_slug='ldap',
- placeholder=collections.OrderedDict([
- ('first_name', 'givenName'),
- ('last_name', 'sn'),
- ('email', 'mail'),
- ]),
+ placeholder=collections.OrderedDict([('first_name', 'givenName'), ('last_name', 'sn'), ('email', 'mail')]),
)
register(
@@ -275,25 +286,25 @@ def _register_ldap(append=None):
field_class=LDAPSearchField,
default=[],
label=_('LDAP Group Search'),
- help_text=_('Users are mapped to organizations based on their membership in LDAP'
- ' groups. This setting defines the LDAP search query to find groups. '
- 'Unlike the user search, group search does not support LDAPSearchUnion.'),
+ help_text=_(
+ 'Users are mapped to organizations based on their membership in LDAP'
+ ' groups. This setting defines the LDAP search query to find groups. '
+ 'Unlike the user search, group search does not support LDAPSearchUnion.'
+ ),
category=_('LDAP'),
category_slug='ldap',
- placeholder=(
- 'DC=example,DC=com',
- 'SCOPE_SUBTREE',
- '(objectClass=group)',
- ),
+ placeholder=('DC=example,DC=com', 'SCOPE_SUBTREE', '(objectClass=group)'),
)
register(
'AUTH_LDAP{}_GROUP_TYPE'.format(append_str),
field_class=LDAPGroupTypeField,
label=_('LDAP Group Type'),
- help_text=_('The group type may need to be changed based on the type of the '
- 'LDAP server. Values are listed at: '
- 'https://django-auth-ldap.readthedocs.io/en/stable/groups.html#types-of-groups'),
+ help_text=_(
+ 'The group type may need to be changed based on the type of the '
+ 'LDAP server. Values are listed at: '
+ 'https://django-auth-ldap.readthedocs.io/en/stable/groups.html#types-of-groups'
+ ),
category=_('LDAP'),
category_slug='ldap',
default='MemberDNGroupType',
@@ -307,15 +318,8 @@ def _register_ldap(append=None):
help_text=_('Key value parameters to send to the chosen group type init method.'),
category=_('LDAP'),
category_slug='ldap',
- default=collections.OrderedDict([
- ('member_attr', 'member'),
- ('name_attr', 'cn'),
- ]),
- placeholder=collections.OrderedDict([
- ('ldap_group_user_attr', 'legacyuid'),
- ('member_attr', 'member'),
- ('name_attr', 'cn'),
- ]),
+ default=collections.OrderedDict([('member_attr', 'member'), ('name_attr', 'cn')]),
+ placeholder=collections.OrderedDict([('ldap_group_user_attr', 'legacyuid'), ('member_attr', 'member'), ('name_attr', 'cn')]),
depends_on=['AUTH_LDAP{}_GROUP_TYPE'.format(append_str)],
)
@@ -326,10 +330,12 @@ def _register_ldap(append=None):
allow_null=True,
default=None,
label=_('LDAP Require Group'),
- help_text=_('Group DN required to login. If specified, user must be a member '
- 'of this group to login via LDAP. If not set, everyone in LDAP '
- 'that matches the user search will be able to login via Tower. '
- 'Only one require group is supported.'),
+ help_text=_(
+ 'Group DN required to login. If specified, user must be a member '
+ 'of this group to login via LDAP. If not set, everyone in LDAP '
+ 'that matches the user search will be able to login via Tower. '
+ 'Only one require group is supported.'
+ ),
category=_('LDAP'),
category_slug='ldap',
placeholder='CN=Tower Users,OU=Users,DC=example,DC=com',
@@ -342,9 +348,9 @@ def _register_ldap(append=None):
allow_null=True,
default=None,
label=_('LDAP Deny Group'),
- help_text=_('Group DN denied from login. If specified, user will not be '
- 'allowed to login if a member of this group. Only one deny group '
- 'is supported.'),
+ help_text=_(
+ 'Group DN denied from login. If specified, user will not be allowed to login if a member of this group. Only one deny group is supported.'
+ ),
category=_('LDAP'),
category_slug='ldap',
placeholder='CN=Disabled Users,OU=Users,DC=example,DC=com',
@@ -355,15 +361,16 @@ def _register_ldap(append=None):
field_class=LDAPUserFlagsField,
default={},
label=_('LDAP User Flags By Group'),
- help_text=_('Retrieve users from a given group. At this time, superuser and system'
- ' auditors are the only groups supported. Refer to the Ansible Tower'
- ' documentation for more detail.'),
+ help_text=_(
+ 'Retrieve users from a given group. At this time, superuser and system'
+ ' auditors are the only groups supported. Refer to the Ansible Tower'
+ ' documentation for more detail.'
+ ),
category=_('LDAP'),
category_slug='ldap',
- placeholder=collections.OrderedDict([
- ('is_superuser', 'CN=Domain Admins,CN=Users,DC=example,DC=com'),
- ('is_system_auditor', 'CN=Domain Auditors,CN=Users,DC=example,DC=com'),
- ]),
+ placeholder=collections.OrderedDict(
+ [('is_superuser', 'CN=Domain Admins,CN=Users,DC=example,DC=com'), ('is_system_auditor', 'CN=Domain Auditors,CN=Users,DC=example,DC=com')]
+ ),
)
register(
@@ -371,27 +378,36 @@ def _register_ldap(append=None):
field_class=LDAPOrganizationMapField,
default={},
label=_('LDAP Organization Map'),
- help_text=_('Mapping between organization admins/users and LDAP groups. This '
- 'controls which users are placed into which Tower organizations '
- 'relative to their LDAP group memberships. Configuration details '
- 'are available in the Ansible Tower documentation.'),
+ help_text=_(
+ 'Mapping between organization admins/users and LDAP groups. This '
+ 'controls which users are placed into which Tower organizations '
+ 'relative to their LDAP group memberships. Configuration details '
+ 'are available in the Ansible Tower documentation.'
+ ),
category=_('LDAP'),
category_slug='ldap',
- placeholder=collections.OrderedDict([
- ('Test Org', collections.OrderedDict([
- ('admins', 'CN=Domain Admins,CN=Users,DC=example,DC=com'),
- ('auditors', 'CN=Domain Auditors,CN=Users,DC=example,DC=com'),
- ('users', ['CN=Domain Users,CN=Users,DC=example,DC=com']),
- ('remove_users', True),
- ('remove_admins', True),
- ])),
- ('Test Org 2', collections.OrderedDict([
- ('admins', 'CN=Administrators,CN=Builtin,DC=example,DC=com'),
- ('users', True),
- ('remove_users', True),
- ('remove_admins', True),
- ])),
- ]),
+ placeholder=collections.OrderedDict(
+ [
+ (
+ 'Test Org',
+ collections.OrderedDict(
+ [
+ ('admins', 'CN=Domain Admins,CN=Users,DC=example,DC=com'),
+ ('auditors', 'CN=Domain Auditors,CN=Users,DC=example,DC=com'),
+ ('users', ['CN=Domain Users,CN=Users,DC=example,DC=com']),
+ ('remove_users', True),
+ ('remove_admins', True),
+ ]
+ ),
+ ),
+ (
+ 'Test Org 2',
+ collections.OrderedDict(
+ [('admins', 'CN=Administrators,CN=Builtin,DC=example,DC=com'), ('users', True), ('remove_users', True), ('remove_admins', True)]
+ ),
+ ),
+ ]
+ ),
)
register(
@@ -399,22 +415,21 @@ def _register_ldap(append=None):
field_class=LDAPTeamMapField,
default={},
label=_('LDAP Team Map'),
- help_text=_('Mapping between team members (users) and LDAP groups. Configuration'
- ' details are available in the Ansible Tower documentation.'),
+ help_text=_('Mapping between team members (users) and LDAP groups. Configuration details are available in the Ansible Tower documentation.'),
category=_('LDAP'),
category_slug='ldap',
- placeholder=collections.OrderedDict([
- ('My Team', collections.OrderedDict([
- ('organization', 'Test Org'),
- ('users', ['CN=Domain Users,CN=Users,DC=example,DC=com']),
- ('remove', True),
- ])),
- ('Other Team', collections.OrderedDict([
- ('organization', 'Test Org 2'),
- ('users', 'CN=Other Users,CN=Users,DC=example,DC=com'),
- ('remove', False),
- ])),
- ]),
+ placeholder=collections.OrderedDict(
+ [
+ (
+ 'My Team',
+ collections.OrderedDict([('organization', 'Test Org'), ('users', ['CN=Domain Users,CN=Users,DC=example,DC=com']), ('remove', True)]),
+ ),
+ (
+ 'Other Team',
+ collections.OrderedDict([('organization', 'Test Org 2'), ('users', 'CN=Other Users,CN=Users,DC=example,DC=com'), ('remove', False)]),
+ ),
+ ]
+ ),
)
@@ -435,8 +450,7 @@ register(
allow_blank=True,
default='',
label=_('RADIUS Server'),
- help_text=_('Hostname/IP of RADIUS server. RADIUS authentication is '
- 'disabled if this setting is empty.'),
+ help_text=_('Hostname/IP of RADIUS server. RADIUS authentication is disabled if this setting is empty.'),
category=_('RADIUS'),
category_slug='radius',
placeholder='radius.example.com',
@@ -539,9 +553,11 @@ register(
read_only=True,
default=SocialAuthCallbackURL('google-oauth2'),
label=_('Google OAuth2 Callback URL'),
- help_text=_('Provide this URL as the callback URL for your application as part '
- 'of your registration process. Refer to the Ansible Tower '
- 'documentation for more detail.'),
+ help_text=_(
+ 'Provide this URL as the callback URL for your application as part '
+ 'of your registration process. Refer to the Ansible Tower '
+ 'documentation for more detail.'
+ ),
category=_('Google OAuth2'),
category_slug='google-oauth2',
depends_on=['TOWER_URL_BASE'],
@@ -577,8 +593,7 @@ register(
field_class=fields.StringListField,
default=[],
label=_('Google OAuth2 Allowed Domains'),
- help_text=_('Update this setting to restrict the domains who are allowed to '
- 'login using Google OAuth2.'),
+ help_text=_('Update this setting to restrict the domains that are allowed to login using Google OAuth2.'),
category=_('Google OAuth2'),
category_slug='google-oauth2',
placeholder=['example.com'],
@@ -589,10 +604,12 @@ register(
field_class=fields.DictField,
default={},
label=_('Google OAuth2 Extra Arguments'),
- help_text=_('Extra arguments for Google OAuth2 login. You can restrict it to'
- ' only allow a single domain to authenticate, even if the user is'
- ' logged in with multple Google accounts. Refer to the Ansible Tower'
- ' documentation for more detail.'),
+ help_text=_(
+ 'Extra arguments for Google OAuth2 login. You can restrict it to'
+ ' only allow a single domain to authenticate, even if the user is'
+ ' logged in with multiple Google accounts. Refer to the Ansible Tower'
+ ' documentation for more detail.'
+ ),
category=_('Google OAuth2'),
category_slug='google-oauth2',
placeholder={'hd': 'example.com'},
@@ -632,9 +649,11 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github'),
label=_('GitHub OAuth2 Callback URL'),
- help_text=_('Provide this URL as the callback URL for your application as part '
- 'of your registration process. Refer to the Ansible Tower '
- 'documentation for more detail.'),
+ help_text=_(
+ 'Provide this URL as the callback URL for your application as part '
+ 'of your registration process. Refer to the Ansible Tower '
+ 'documentation for more detail.'
+ ),
category=_('GitHub OAuth2'),
category_slug='github',
depends_on=['TOWER_URL_BASE'],
@@ -697,9 +716,11 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github-org'),
label=_('GitHub Organization OAuth2 Callback URL'),
- help_text=_('Provide this URL as the callback URL for your application as part '
- 'of your registration process. Refer to the Ansible Tower '
- 'documentation for more detail.'),
+ help_text=_(
+ 'Provide this URL as the callback URL for your application as part '
+ 'of your registration process. Refer to the Ansible Tower '
+ 'documentation for more detail.'
+ ),
category=_('GitHub Organization OAuth2'),
category_slug='github-org',
depends_on=['TOWER_URL_BASE'],
@@ -734,8 +755,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Organization Name'),
- help_text=_('The name of your GitHub organization, as used in your '
- 'organization\'s URL: https://github.com/<yourorg>/.'),
+ help_text=_('The name of your GitHub organization, as used in your organization\'s URL: https://github.com/<yourorg>/.'),
category=_('GitHub Organization OAuth2'),
category_slug='github-org',
)
@@ -774,10 +794,12 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github-team'),
label=_('GitHub Team OAuth2 Callback URL'),
- help_text=_('Create an organization-owned application at '
- 'https://github.com/organizations/<yourorg>/settings/applications '
- 'and obtain an OAuth2 key (Client ID) and secret (Client Secret). '
- 'Provide this URL as the callback URL for your application.'),
+ help_text=_(
+ 'Create an organization-owned application at '
+ 'https://github.com/organizations/<yourorg>/settings/applications '
+ 'and obtain an OAuth2 key (Client ID) and secret (Client Secret). '
+ 'Provide this URL as the callback URL for your application.'
+ ),
category=_('GitHub Team OAuth2'),
category_slug='github-team',
depends_on=['TOWER_URL_BASE'],
@@ -812,8 +834,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Team ID'),
- help_text=_('Find the numeric team ID using the Github API: '
- 'http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
+ help_text=_('Find the numeric team ID using the GitHub API: http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
category=_('GitHub Team OAuth2'),
category_slug='github-team',
)
@@ -852,9 +873,11 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github-enterprise'),
label=_('GitHub Enterprise OAuth2 Callback URL'),
- help_text=_('Provide this URL as the callback URL for your application as part '
- 'of your registration process. Refer to the Ansible Tower '
- 'documentation for more detail.'),
+ help_text=_(
+ 'Provide this URL as the callback URL for your application as part '
+ 'of your registration process. Refer to the Ansible Tower '
+ 'documentation for more detail.'
+ ),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise',
depends_on=['TOWER_URL_BASE'],
@@ -866,8 +889,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise URL'),
- help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise '
- 'documentation for more details.'),
+ help_text=_('The URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/. Refer to GitHub Enterprise documentation for more details.'),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise',
)
@@ -878,8 +900,9 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise API URL'),
- help_text=_('The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github '
- 'Enterprise documentation for more details.'),
+ help_text=_(
+ 'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to GitHub Enterprise documentation for more details.'
+ ),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise',
)
@@ -941,9 +964,11 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github-enterprise-org'),
label=_('GitHub Enterprise Organization OAuth2 Callback URL'),
- help_text=_('Provide this URL as the callback URL for your application as part '
- 'of your registration process. Refer to the Ansible Tower '
- 'documentation for more detail.'),
+ help_text=_(
+ 'Provide this URL as the callback URL for your application as part '
+ 'of your registration process. Refer to the Ansible Tower '
+ 'documentation for more detail.'
+ ),
category=_('GitHub Enterprise Organization OAuth2'),
category_slug='github-enterprise-org',
depends_on=['TOWER_URL_BASE'],
@@ -955,8 +980,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Organization URL'),
- help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise '
- 'documentation for more details.'),
+ help_text=_('The URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/. Refer to GitHub Enterprise documentation for more details.'),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise-org',
)
@@ -967,8 +991,9 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Organization API URL'),
- help_text=_('The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github '
- 'Enterprise documentation for more details.'),
+ help_text=_(
+ 'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to GitHub Enterprise documentation for more details.'
+ ),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise-org',
)
@@ -1002,8 +1027,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Organization Name'),
- help_text=_('The name of your GitHub Enterprise organization, as used in your '
- 'organization\'s URL: https://github.com/<yourorg>/.'),
+ help_text=_('The name of your GitHub Enterprise organization, as used in your organization\'s URL: https://github.com/<yourorg>/.'),
category=_('GitHub Enterprise Organization OAuth2'),
category_slug='github-enterprise-org',
)
@@ -1042,10 +1066,12 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github-enterprise-team'),
label=_('GitHub Enterprise Team OAuth2 Callback URL'),
- help_text=_('Create an organization-owned application at '
- 'https://github.com/organizations/<yourorg>/settings/applications '
- 'and obtain an OAuth2 key (Client ID) and secret (Client Secret). '
- 'Provide this URL as the callback URL for your application.'),
+ help_text=_(
+ 'Create an organization-owned application at '
+ 'https://github.com/organizations/<yourorg>/settings/applications '
+ 'and obtain an OAuth2 key (Client ID) and secret (Client Secret). '
+ 'Provide this URL as the callback URL for your application.'
+ ),
category=_('GitHub Enterprise Team OAuth2'),
category_slug='github-enterprise-team',
depends_on=['TOWER_URL_BASE'],
@@ -1057,8 +1083,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Team URL'),
- help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise '
- 'documentation for more details.'),
+ help_text=_('The URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/. Refer to GitHub Enterprise documentation for more details.'),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise-team',
)
@@ -1069,8 +1094,9 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Team API URL'),
- help_text=_('The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github '
- 'Enterprise documentation for more details.'),
+ help_text=_(
+ 'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to GitHub Enterprise documentation for more details.'
+ ),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise-team',
)
@@ -1104,8 +1130,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Team ID'),
- help_text=_('Find the numeric team ID using the Github Enterprise API: '
- 'http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
+ help_text=_('Find the numeric team ID using the GitHub Enterprise API: http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
category=_('GitHub Enterprise Team OAuth2'),
category_slug='github-enterprise-team',
)
@@ -1144,9 +1169,11 @@ register(
read_only=True,
default=SocialAuthCallbackURL('azuread-oauth2'),
label=_('Azure AD OAuth2 Callback URL'),
- help_text=_('Provide this URL as the callback URL for your application as part'
- ' of your registration process. Refer to the Ansible Tower'
- ' documentation for more detail. '),
+ help_text=_(
+ 'Provide this URL as the callback URL for your application as part'
+ ' of your registration process. Refer to the Ansible Tower'
+ ' documentation for more detail.'
+ ),
category=_('Azure AD OAuth2'),
category_slug='azuread-oauth2',
depends_on=['TOWER_URL_BASE'],
@@ -1217,8 +1244,7 @@ register(
field_class=fields.BooleanField,
default=True,
label=_('Automatically Create Organizations and Teams on SAML Login'),
- help_text=_('When enabled (the default), mapped Organizations and Teams '
- 'will be created automatically on successful SAML login.'),
+ help_text=_('When enabled (the default), mapped Organizations and Teams will be created automatically on successful SAML login.'),
category=_('SAML'),
category_slug='saml',
)
@@ -1229,9 +1255,11 @@ register(
read_only=True,
default=SocialAuthCallbackURL('saml'),
label=_('SAML Assertion Consumer Service (ACS) URL'),
- help_text=_('Register Tower as a service provider (SP) with each identity '
- 'provider (IdP) you have configured. Provide your SP Entity ID '
- 'and this ACS URL for your application.'),
+ help_text=_(
+ 'Register Tower as a service provider (SP) with each identity '
+ 'provider (IdP) you have configured. Provide your SP Entity ID '
+ 'and this ACS URL for your application.'
+ ),
category=_('SAML'),
category_slug='saml',
depends_on=['TOWER_URL_BASE'],
@@ -1243,8 +1271,7 @@ register(
read_only=True,
default=get_saml_metadata_url,
label=_('SAML Service Provider Metadata URL'),
- help_text=_('If your identity provider (IdP) allows uploading an XML '
- 'metadata file, you can download one from this URL.'),
+ help_text=_('If your identity provider (IdP) allows uploading an XML metadata file, you can download one from this URL.'),
category=_('SAML'),
category_slug='saml',
)
@@ -1255,9 +1282,11 @@ register(
allow_blank=True,
default=get_saml_entity_id,
label=_('SAML Service Provider Entity ID'),
- help_text=_('The application-defined unique identifier used as the '
- 'audience of the SAML service provider (SP) configuration. '
- 'This is usually the URL for Tower.'),
+ help_text=_(
+ 'The application-defined unique identifier used as the '
+ 'audience of the SAML service provider (SP) configuration. '
+ 'This is usually the URL for Tower.'
+ ),
category=_('SAML'),
category_slug='saml',
depends_on=['TOWER_URL_BASE'],
@@ -1270,8 +1299,7 @@ register(
required=True,
validators=[validate_certificate],
label=_('SAML Service Provider Public Certificate'),
- help_text=_('Create a keypair for Tower to use as a service provider (SP) '
- 'and include the certificate content here.'),
+ help_text=_('Create a keypair for Tower to use as a service provider (SP) and include the certificate content here.'),
category=_('SAML'),
category_slug='saml',
)
@@ -1283,8 +1311,7 @@ register(
required=True,
validators=[validate_private_key],
label=_('SAML Service Provider Private Key'),
- help_text=_('Create a keypair for Tower to use as a service provider (SP) '
- 'and include the private key content here.'),
+ help_text=_('Create a keypair for Tower to use as a service provider (SP) and include the private key content here.'),
category=_('SAML'),
category_slug='saml',
encrypted=True,
@@ -1295,17 +1322,12 @@ register(
field_class=SAMLOrgInfoField,
required=True,
label=_('SAML Service Provider Organization Info'),
- help_text=_('Provide the URL, display name, and the name of your app. Refer to'
- ' the Ansible Tower documentation for example syntax.'),
+ help_text=_('Provide the URL, display name, and the name of your app. Refer to the Ansible Tower documentation for example syntax.'),
category=_('SAML'),
category_slug='saml',
- placeholder=collections.OrderedDict([
- ('en-US', collections.OrderedDict([
- ('name', 'example'),
- ('displayname', 'Example'),
- ('url', 'http://www.example.com'),
- ])),
- ]),
+ placeholder=collections.OrderedDict(
+ [('en-US', collections.OrderedDict([('name', 'example'), ('displayname', 'Example'), ('url', 'http://www.example.com')]))]
+ ),
)
register(
@@ -1314,15 +1336,14 @@ register(
allow_blank=True,
required=True,
label=_('SAML Service Provider Technical Contact'),
- help_text=_('Provide the name and email address of the technical contact for'
- ' your service provider. Refer to the Ansible Tower documentation'
- ' for example syntax.'),
+ help_text=_(
+ 'Provide the name and email address of the technical contact for'
+ ' your service provider. Refer to the Ansible Tower documentation'
+ ' for example syntax.'
+ ),
category=_('SAML'),
category_slug='saml',
- placeholder=collections.OrderedDict([
- ('givenName', 'Technical Contact'),
- ('emailAddress', 'techsup@example.com'),
- ]),
+ placeholder=collections.OrderedDict([('givenName', 'Technical Contact'), ('emailAddress', 'techsup@example.com')]),
)
register(
@@ -1331,15 +1352,14 @@ register(
allow_blank=True,
required=True,
label=_('SAML Service Provider Support Contact'),
- help_text=_('Provide the name and email address of the support contact for your'
- ' service provider. Refer to the Ansible Tower documentation for'
- ' example syntax.'),
+ help_text=_(
+ 'Provide the name and email address of the support contact for your'
+ ' service provider. Refer to the Ansible Tower documentation for'
+ ' example syntax.'
+ ),
category=_('SAML'),
category_slug='saml',
- placeholder=collections.OrderedDict([
- ('givenName', 'Support Contact'),
- ('emailAddress', 'support@example.com'),
- ]),
+ placeholder=collections.OrderedDict([('givenName', 'Support Contact'), ('emailAddress', 'support@example.com')]),
)
register(
@@ -1347,35 +1367,49 @@ register(
field_class=SAMLEnabledIdPsField,
default={},
label=_('SAML Enabled Identity Providers'),
- help_text=_('Configure the Entity ID, SSO URL and certificate for each identity'
- ' provider (IdP) in use. Multiple SAML IdPs are supported. Some IdPs'
- ' may provide user data using attribute names that differ from the'
- ' default OIDs. Attribute names may be overridden for each IdP. Refer'
- ' to the Ansible documentation for additional details and syntax.'),
+ help_text=_(
+ 'Configure the Entity ID, SSO URL and certificate for each identity'
+ ' provider (IdP) in use. Multiple SAML IdPs are supported. Some IdPs'
+ ' may provide user data using attribute names that differ from the'
+ ' default OIDs. Attribute names may be overridden for each IdP. Refer'
+ ' to the Ansible documentation for additional details and syntax.'
+ ),
category=_('SAML'),
category_slug='saml',
- placeholder=collections.OrderedDict([
- ('Okta', collections.OrderedDict([
- ('entity_id', 'http://www.okta.com/HHniyLkaxk9e76wD0Thh'),
- ('url', 'https://dev-123456.oktapreview.com/app/ansibletower/HHniyLkaxk9e76wD0Thh/sso/saml'),
- ('x509cert', 'MIIDpDCCAoygAwIBAgIGAVVZ4rPzMA0GCSqGSIb3...'),
- ('attr_user_permanent_id', 'username'),
- ('attr_first_name', 'first_name'),
- ('attr_last_name', 'last_name'),
- ('attr_username', 'username'),
- ('attr_email', 'email'),
- ])),
- ('OneLogin', collections.OrderedDict([
- ('entity_id', 'https://app.onelogin.com/saml/metadata/123456'),
- ('url', 'https://example.onelogin.com/trust/saml2/http-post/sso/123456'),
- ('x509cert', 'MIIEJjCCAw6gAwIBAgIUfuSD54OPSBhndDHh3gZo...'),
- ('attr_user_permanent_id', 'name_id'),
- ('attr_first_name', 'User.FirstName'),
- ('attr_last_name', 'User.LastName'),
- ('attr_username', 'User.email'),
- ('attr_email', 'User.email'),
- ])),
- ]),
+ placeholder=collections.OrderedDict(
+ [
+ (
+ 'Okta',
+ collections.OrderedDict(
+ [
+ ('entity_id', 'http://www.okta.com/HHniyLkaxk9e76wD0Thh'),
+ ('url', 'https://dev-123456.oktapreview.com/app/ansibletower/HHniyLkaxk9e76wD0Thh/sso/saml'),
+ ('x509cert', 'MIIDpDCCAoygAwIBAgIGAVVZ4rPzMA0GCSqGSIb3...'),
+ ('attr_user_permanent_id', 'username'),
+ ('attr_first_name', 'first_name'),
+ ('attr_last_name', 'last_name'),
+ ('attr_username', 'username'),
+ ('attr_email', 'email'),
+ ]
+ ),
+ ),
+ (
+ 'OneLogin',
+ collections.OrderedDict(
+ [
+ ('entity_id', 'https://app.onelogin.com/saml/metadata/123456'),
+ ('url', 'https://example.onelogin.com/trust/saml2/http-post/sso/123456'),
+ ('x509cert', 'MIIEJjCCAw6gAwIBAgIUfuSD54OPSBhndDHh3gZo...'),
+ ('attr_user_permanent_id', 'name_id'),
+ ('attr_first_name', 'User.FirstName'),
+ ('attr_last_name', 'User.LastName'),
+ ('attr_username', 'User.email'),
+ ('attr_email', 'User.email'),
+ ]
+ ),
+ ),
+ ]
+ ),
)
register(
@@ -1384,30 +1418,32 @@ register(
allow_null=True,
default={'requestedAuthnContext': False},
label=_('SAML Security Config'),
- help_text=_('A dict of key value pairs that are passed to the underlying'
- ' python-saml security setting'
- ' https://github.com/onelogin/python-saml#settings'),
+ help_text=_(
+ 'A dict of key value pairs that are passed to the underlying python-saml security setting https://github.com/onelogin/python-saml#settings'
+ ),
category=_('SAML'),
category_slug='saml',
- placeholder=collections.OrderedDict([
- ("nameIdEncrypted", False),
- ("authnRequestsSigned", False),
- ("logoutRequestSigned", False),
- ("logoutResponseSigned", False),
- ("signMetadata", False),
- ("wantMessagesSigned", False),
- ("wantAssertionsSigned", False),
- ("wantAssertionsEncrypted", False),
- ("wantNameId", True),
- ("wantNameIdEncrypted", False),
- ("wantAttributeStatement", True),
- ("requestedAuthnContext", True),
- ("requestedAuthnContextComparison", "exact"),
- ("metadataValidUntil", "2015-06-26T20:00:00Z"),
- ("metadataCacheDuration", "PT518400S"),
- ("signatureAlgorithm", "http://www.w3.org/2000/09/xmldsig#rsa-sha1"),
- ("digestAlgorithm", "http://www.w3.org/2000/09/xmldsig#sha1"),
- ]),
+ placeholder=collections.OrderedDict(
+ [
+ ("nameIdEncrypted", False),
+ ("authnRequestsSigned", False),
+ ("logoutRequestSigned", False),
+ ("logoutResponseSigned", False),
+ ("signMetadata", False),
+ ("wantMessagesSigned", False),
+ ("wantAssertionsSigned", False),
+ ("wantAssertionsEncrypted", False),
+ ("wantNameId", True),
+ ("wantNameIdEncrypted", False),
+ ("wantAttributeStatement", True),
+ ("requestedAuthnContext", True),
+ ("requestedAuthnContextComparison", "exact"),
+ ("metadataValidUntil", "2015-06-26T20:00:00Z"),
+ ("metadataCacheDuration", "PT518400S"),
+ ("signatureAlgorithm", "http://www.w3.org/2000/09/xmldsig#rsa-sha1"),
+ ("digestAlgorithm", "http://www.w3.org/2000/09/xmldsig#sha1"),
+ ]
+ ),
)
register(
@@ -1416,8 +1452,7 @@ register(
allow_null=True,
default=None,
label=_('SAML Service Provider extra configuration data'),
- help_text=_('A dict of key value pairs to be passed to the underlying'
- ' python-saml Service Provider configuration setting.'),
+ help_text=_('A dict of key value pairs to be passed to the underlying python-saml Service Provider configuration setting.'),
category=_('SAML'),
category_slug='saml',
placeholder=collections.OrderedDict(),
@@ -1429,15 +1464,10 @@ register(
allow_null=True,
default=None,
label=_('SAML IDP to extra_data attribute mapping'),
- help_text=_('A list of tuples that maps IDP attributes to extra_attributes.'
- ' Each attribute will be a list of values, even if only 1 value.'),
+ help_text=_('A list of tuples that maps IDP attributes to extra_attributes. Each attribute will be a list of values, even if it contains only one value.'),
category=_('SAML'),
category_slug='saml',
- placeholder=[
- ('attribute_name', 'extra_data_name_for_attribute'),
- ('department', 'department'),
- ('manager_full_name', 'manager_full_name')
- ],
+ placeholder=[('attribute_name', 'extra_data_name_for_attribute'), ('department', 'department'), ('manager_full_name', 'manager_full_name')],
)
register(
@@ -1473,14 +1503,16 @@ register(
help_text=_('Used to translate user organization membership into Tower.'),
category=_('SAML'),
category_slug='saml',
- placeholder=collections.OrderedDict([
- ('saml_attr', 'organization'),
- ('saml_admin_attr', 'organization_admin'),
- ('saml_auditor_attr', 'organization_auditor'),
- ('remove', True),
- ('remove_admins', True),
- ('remove_auditors', True),
- ]),
+ placeholder=collections.OrderedDict(
+ [
+ ('saml_attr', 'organization'),
+ ('saml_admin_attr', 'organization_admin'),
+ ('saml_auditor_attr', 'organization_auditor'),
+ ('remove', True),
+ ('remove_admins', True),
+ ('remove_auditors', True),
+ ]
+ ),
)
register(
@@ -1492,43 +1524,28 @@ register(
help_text=_('Used to translate user team membership into Tower.'),
category=_('SAML'),
category_slug='saml',
- placeholder=collections.OrderedDict([
- ('saml_attr', 'team'),
- ('remove', True),
- ('team_org_map', [
- collections.OrderedDict([
- ('team', 'Marketing'),
- ('organization', 'Red Hat'),
- ]),
- collections.OrderedDict([
- ('team', 'Human Resources'),
- ('organization', 'Red Hat'),
- ]),
- collections.OrderedDict([
- ('team', 'Engineering'),
- ('organization', 'Red Hat'),
- ]),
- collections.OrderedDict([
- ('team', 'Engineering'),
- ('organization', 'Ansible'),
- ]),
- collections.OrderedDict([
- ('team', 'Quality Engineering'),
- ('organization', 'Ansible'),
- ]),
- collections.OrderedDict([
- ('team', 'Sales'),
- ('organization', 'Ansible'),
- ]),
- ]),
- ]),
+ placeholder=collections.OrderedDict(
+ [
+ ('saml_attr', 'team'),
+ ('remove', True),
+ (
+ 'team_org_map',
+ [
+ collections.OrderedDict([('team', 'Marketing'), ('organization', 'Red Hat')]),
+ collections.OrderedDict([('team', 'Human Resources'), ('organization', 'Red Hat')]),
+ collections.OrderedDict([('team', 'Engineering'), ('organization', 'Red Hat')]),
+ collections.OrderedDict([('team', 'Engineering'), ('organization', 'Ansible')]),
+ collections.OrderedDict([('team', 'Quality Engineering'), ('organization', 'Ansible')]),
+ collections.OrderedDict([('team', 'Sales'), ('organization', 'Ansible')]),
+ ],
+ ),
+ ]
+ ),
)
def tacacs_validate(serializer, attrs):
- if not serializer.instance or \
- not hasattr(serializer.instance, 'TACACSPLUS_HOST') or \
- not hasattr(serializer.instance, 'TACACSPLUS_SECRET'):
+ if not serializer.instance or not hasattr(serializer.instance, 'TACACSPLUS_HOST') or not hasattr(serializer.instance, 'TACACSPLUS_SECRET'):
return attrs
errors = []
host = serializer.instance.TACACSPLUS_HOST
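
The help_text values throughout the hunks above rely on Python's implicit concatenation of adjacent string literals, which black preserves rather than merging. A tiny self-contained illustration (plain Python, nothing AWX-specific) of why the split and merged forms are interchangeable:

    # Adjacent string literals are joined at compile time, so both names below
    # hold the identical string; splitting is purely a source-layout choice.
    split = ('Hostname/IP of RADIUS server. RADIUS authentication is '
             'disabled if this setting is empty.')
    merged = 'Hostname/IP of RADIUS server. RADIUS authentication is disabled if this setting is empty.'
    assert split == merged
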
diff --git a/awx/sso/fields.py b/awx/sso/fields.py
index 5df5f894c9..7ee1725cb4 100644
--- a/awx/sso/fields.py
+++ b/awx/sso/fields.py
@@ -14,10 +14,7 @@ from django.utils.translation import ugettext_lazy as _
# Django Auth LDAP
import django_auth_ldap.config
-from django_auth_ldap.config import (
- LDAPSearch,
- LDAPSearchUnion,
-)
+from django_auth_ldap.config import LDAPSearch, LDAPSearchUnion
from rest_framework.exceptions import ValidationError
from rest_framework.fields import empty, Field, SkipField
@@ -46,9 +43,9 @@ def get_subclasses(cls):
def find_class_in_modules(class_name):
- '''
+ """
Used to find LDAP subclasses by string
- '''
+ """
module_search_space = [django_auth_ldap.config, awx.sso.ldap_group_types]
for m in module_search_space:
cls = getattr(m, class_name, None)
@@ -57,7 +54,7 @@ def find_class_in_modules(class_name):
return None
-class DependsOnMixin():
+class DependsOnMixin:
def get_depends_on(self):
"""
Get the value of the dependent field.
@@ -65,38 +62,34 @@ class DependsOnMixin():
Then fall back to the raw value from the setting in the DB.
"""
from django.conf import settings
+
dependent_key = next(iter(self.depends_on))
if self.context:
request = self.context.get('request', None)
- if request and request.data and \
- request.data.get(dependent_key, None):
+ if request and request.data and request.data.get(dependent_key, None):
return request.data.get(dependent_key)
res = settings._get_local(dependent_key, validate=False)
return res
class _Forbidden(Field):
- default_error_messages = {
- 'invalid': _('Invalid field.'),
- }
+ default_error_messages = {'invalid': _('Invalid field.')}
def run_validation(self, value):
self.fail('invalid')
class HybridDictField(fields.DictField):
- """A DictField, but with defined fixed Fields for certain keys.
- """
+ """A DictField, but with defined fixed Fields for certain keys."""
def __init__(self, *args, **kwargs):
self.allow_blank = kwargs.pop('allow_blank', False)
fields = [
sorted(
- ((field_name, obj) for field_name, obj in cls.__dict__.items()
- if isinstance(obj, Field) and field_name != 'child'),
- key=lambda x: x[1]._creation_counter
+ ((field_name, obj) for field_name, obj in cls.__dict__.items() if isinstance(obj, Field) and field_name != 'child'),
+ key=lambda x: x[1]._creation_counter,
)
for cls in reversed(self.__class__.__mro__)
]
@@ -108,10 +101,7 @@ class HybridDictField(fields.DictField):
fields = copy.deepcopy(self._declared_fields)
return {
key: field.to_representation(val) if val is not None else None
- for key, val, field in (
- (six.text_type(key), val, fields.get(key, self.child))
- for key, val in value.items()
- )
+ for key, val, field in ((six.text_type(key), val, fields.get(key, self.child)) for key, val in value.items())
if not field.write_only
}
@@ -147,81 +137,67 @@ class AuthenticationBackendsField(fields.StringListField):
# Mapping of settings that must be set in order to enable each
# authentication backend.
- REQUIRED_BACKEND_SETTINGS = collections.OrderedDict([
- ('awx.sso.backends.LDAPBackend', [
- 'AUTH_LDAP_SERVER_URI',
- ]),
- ('awx.sso.backends.LDAPBackend1', [
- 'AUTH_LDAP_1_SERVER_URI',
- ]),
- ('awx.sso.backends.LDAPBackend2', [
- 'AUTH_LDAP_2_SERVER_URI',
- ]),
- ('awx.sso.backends.LDAPBackend3', [
- 'AUTH_LDAP_3_SERVER_URI',
- ]),
- ('awx.sso.backends.LDAPBackend4', [
- 'AUTH_LDAP_4_SERVER_URI',
- ]),
- ('awx.sso.backends.LDAPBackend5', [
- 'AUTH_LDAP_5_SERVER_URI',
- ]),
- ('awx.sso.backends.RADIUSBackend', [
- 'RADIUS_SERVER',
- ]),
- ('social_core.backends.google.GoogleOAuth2', [
- 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY',
- 'SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET',
- ]),
- ('social_core.backends.github.GithubOAuth2', [
- 'SOCIAL_AUTH_GITHUB_KEY',
- 'SOCIAL_AUTH_GITHUB_SECRET',
- ]),
- ('social_core.backends.github.GithubOrganizationOAuth2', [
- 'SOCIAL_AUTH_GITHUB_ORG_KEY',
- 'SOCIAL_AUTH_GITHUB_ORG_SECRET',
- 'SOCIAL_AUTH_GITHUB_ORG_NAME',
- ]),
- ('social_core.backends.github.GithubTeamOAuth2', [
- 'SOCIAL_AUTH_GITHUB_TEAM_KEY',
- 'SOCIAL_AUTH_GITHUB_TEAM_SECRET',
- 'SOCIAL_AUTH_GITHUB_TEAM_ID',
- ]),
- ('social_core.backends.github_enterprise.GithubEnterpriseOAuth2', [
- 'SOCIAL_AUTH_GITHUB_ENTERPRISE_URL',
- 'SOCIAL_AUTH_GITHUB_ENTERPRISE_API_URL',
- 'SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY',
- 'SOCIAL_AUTH_GITHUB_ENTERPRISE_SECRET',
- ]),
- ('social_core.backends.github_enterprise.GithubEnterpriseOrganizationOAuth2', [
- 'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_URL',
- 'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_API_URL',
- 'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_KEY',
- 'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_SECRET',
- 'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_NAME',
- ]),
- ('social_core.backends.github_enterprise.GithubEnterpriseTeamOAuth2', [
- 'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_URL',
- 'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_API_URL',
- 'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_KEY',
- 'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_SECRET',
- 'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_ID',
- ]),
- ('social_core.backends.azuread.AzureADOAuth2', [
- 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY',
- 'SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET',
- ]),
- ('awx.sso.backends.SAMLAuth', [
- 'SOCIAL_AUTH_SAML_SP_ENTITY_ID',
- 'SOCIAL_AUTH_SAML_SP_PUBLIC_CERT',
- 'SOCIAL_AUTH_SAML_SP_PRIVATE_KEY',
- 'SOCIAL_AUTH_SAML_ORG_INFO',
- 'SOCIAL_AUTH_SAML_TECHNICAL_CONTACT',
- 'SOCIAL_AUTH_SAML_SUPPORT_CONTACT',
- 'SOCIAL_AUTH_SAML_ENABLED_IDPS',
- ]),
- ('django.contrib.auth.backends.ModelBackend', []),
- ])
+ REQUIRED_BACKEND_SETTINGS = collections.OrderedDict(
+ [
+ ('awx.sso.backends.LDAPBackend', ['AUTH_LDAP_SERVER_URI']),
+ ('awx.sso.backends.LDAPBackend1', ['AUTH_LDAP_1_SERVER_URI']),
+ ('awx.sso.backends.LDAPBackend2', ['AUTH_LDAP_2_SERVER_URI']),
+ ('awx.sso.backends.LDAPBackend3', ['AUTH_LDAP_3_SERVER_URI']),
+ ('awx.sso.backends.LDAPBackend4', ['AUTH_LDAP_4_SERVER_URI']),
+ ('awx.sso.backends.LDAPBackend5', ['AUTH_LDAP_5_SERVER_URI']),
+ ('awx.sso.backends.RADIUSBackend', ['RADIUS_SERVER']),
+ ('social_core.backends.google.GoogleOAuth2', ['SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', 'SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET']),
+ ('social_core.backends.github.GithubOAuth2', ['SOCIAL_AUTH_GITHUB_KEY', 'SOCIAL_AUTH_GITHUB_SECRET']),
+ (
+ 'social_core.backends.github.GithubOrganizationOAuth2',
+ ['SOCIAL_AUTH_GITHUB_ORG_KEY', 'SOCIAL_AUTH_GITHUB_ORG_SECRET', 'SOCIAL_AUTH_GITHUB_ORG_NAME'],
+ ),
+ ('social_core.backends.github.GithubTeamOAuth2', ['SOCIAL_AUTH_GITHUB_TEAM_KEY', 'SOCIAL_AUTH_GITHUB_TEAM_SECRET', 'SOCIAL_AUTH_GITHUB_TEAM_ID']),
+ (
+ 'social_core.backends.github_enterprise.GithubEnterpriseOAuth2',
+ [
+ 'SOCIAL_AUTH_GITHUB_ENTERPRISE_URL',
+ 'SOCIAL_AUTH_GITHUB_ENTERPRISE_API_URL',
+ 'SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY',
+ 'SOCIAL_AUTH_GITHUB_ENTERPRISE_SECRET',
+ ],
+ ),
+ (
+ 'social_core.backends.github_enterprise.GithubEnterpriseOrganizationOAuth2',
+ [
+ 'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_URL',
+ 'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_API_URL',
+ 'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_KEY',
+ 'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_SECRET',
+ 'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_NAME',
+ ],
+ ),
+ (
+ 'social_core.backends.github_enterprise.GithubEnterpriseTeamOAuth2',
+ [
+ 'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_URL',
+ 'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_API_URL',
+ 'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_KEY',
+ 'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_SECRET',
+ 'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_ID',
+ ],
+ ),
+ ('social_core.backends.azuread.AzureADOAuth2', ['SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', 'SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET']),
+ (
+ 'awx.sso.backends.SAMLAuth',
+ [
+ 'SOCIAL_AUTH_SAML_SP_ENTITY_ID',
+ 'SOCIAL_AUTH_SAML_SP_PUBLIC_CERT',
+ 'SOCIAL_AUTH_SAML_SP_PRIVATE_KEY',
+ 'SOCIAL_AUTH_SAML_ORG_INFO',
+ 'SOCIAL_AUTH_SAML_TECHNICAL_CONTACT',
+ 'SOCIAL_AUTH_SAML_SUPPORT_CONTACT',
+ 'SOCIAL_AUTH_SAML_ENABLED_IDPS',
+ ],
+ ),
+ ('django.contrib.auth.backends.ModelBackend', []),
+ ]
+ )
@classmethod
def get_all_required_settings(cls):
@@ -236,6 +212,7 @@ class AuthenticationBackendsField(fields.StringListField):
def _default_from_required_settings(self):
from django.conf import settings
+
try:
backends = settings._awx_conf_settings._get_default('AUTHENTICATION_BACKENDS')
except AttributeError:
@@ -252,7 +229,6 @@ class AuthenticationBackendsField(fields.StringListField):
class LDAPServerURIField(fields.URLField):
-
def __init__(self, **kwargs):
kwargs.setdefault('schemes', ('ldap', 'ldaps'))
kwargs.setdefault('allow_plain_hostname', True)
@@ -266,9 +242,7 @@ class LDAPServerURIField(fields.URLField):
class LDAPConnectionOptionsField(fields.DictField):
- default_error_messages = {
- 'invalid_options': _('Invalid connection option(s): {invalid_options}.'),
- }
+ default_error_messages = {'invalid_options': _('Invalid connection option(s): {invalid_options}.')}
def to_representation(self, value):
value = value or {}
@@ -296,7 +270,6 @@ class LDAPConnectionOptionsField(fields.DictField):
class LDAPDNField(fields.CharField):
-
def __init__(self, **kwargs):
super(LDAPDNField, self).__init__(**kwargs)
self.validators.append(validate_ldap_dn)
@@ -309,7 +282,6 @@ class LDAPDNField(fields.CharField):
class LDAPDNListField(fields.StringListField):
-
def __init__(self, **kwargs):
super(LDAPDNListField, self).__init__(**kwargs)
self.validators.append(lambda dn: list(map(validate_ldap_dn, dn)))
@@ -321,7 +293,6 @@ class LDAPDNListField(fields.StringListField):
class LDAPDNWithUserField(fields.CharField):
-
def __init__(self, **kwargs):
super(LDAPDNWithUserField, self).__init__(**kwargs)
self.validators.append(validate_ldap_dn_with_user)
@@ -334,27 +305,20 @@ class LDAPDNWithUserField(fields.CharField):
class LDAPFilterField(fields.CharField):
-
def __init__(self, **kwargs):
super(LDAPFilterField, self).__init__(**kwargs)
self.validators.append(validate_ldap_filter)
class LDAPFilterWithUserField(fields.CharField):
-
def __init__(self, **kwargs):
super(LDAPFilterWithUserField, self).__init__(**kwargs)
self.validators.append(validate_ldap_filter_with_user)
class LDAPScopeField(fields.ChoiceField):
-
def __init__(self, choices=None, **kwargs):
- choices = choices or [
- ('SCOPE_BASE', _('Base')),
- ('SCOPE_ONELEVEL', _('One Level')),
- ('SCOPE_SUBTREE', _('Subtree')),
- ]
+ choices = choices or [('SCOPE_BASE', _('Base')), ('SCOPE_ONELEVEL', _('One Level')), ('SCOPE_SUBTREE', _('Subtree'))]
super(LDAPScopeField, self).__init__(choices, **kwargs)
def to_representation(self, value):
@@ -394,9 +358,7 @@ class LDAPSearchField(fields.ListField):
if len(data) != 3:
self.fail('invalid_length', length=len(data))
return LDAPSearch(
- LDAPDNField().run_validation(data[0]),
- LDAPScopeField().run_validation(data[1]),
- self.ldap_filter_field_class().run_validation(data[2]),
+ LDAPDNField().run_validation(data[0]), LDAPScopeField().run_validation(data[1]), self.ldap_filter_field_class().run_validation(data[2])
)
@@ -407,9 +369,7 @@ class LDAPSearchWithUserField(LDAPSearchField):
class LDAPSearchUnionField(fields.ListField):
- default_error_messages = {
- 'type_error': _('Expected an instance of LDAPSearch or LDAPSearchUnion but got {input_type} instead.'),
- }
+ default_error_messages = {'type_error': _('Expected an instance of LDAPSearch or LDAPSearchUnion but got {input_type} instead.')}
ldap_search_field_class = LDAPSearchWithUserField
def to_representation(self, value):
@@ -432,8 +392,7 @@ class LDAPSearchUnionField(fields.ListField):
search_args = []
for i in range(len(data)):
if not isinstance(data[i], list):
- raise ValidationError('In order to ultilize LDAP Union, input element No. %d'
- ' should be a search query array.' % (i + 1))
+                raise ValidationError('In order to utilize LDAP Union, input element No. %d' ' should be a search query array.' % (i + 1))
try:
search_args.append(self.ldap_search_field_class().run_validation(data[i]))
except Exception as e:
@@ -445,15 +404,13 @@ class LDAPSearchUnionField(fields.ListField):
class LDAPUserAttrMapField(fields.DictField):
- default_error_messages = {
- 'invalid_attrs': _('Invalid user attribute(s): {invalid_attrs}.'),
- }
+ default_error_messages = {'invalid_attrs': _('Invalid user attribute(s): {invalid_attrs}.')}
valid_user_attrs = {'first_name', 'last_name', 'email'}
child = fields.CharField()
def to_internal_value(self, data):
data = super(LDAPUserAttrMapField, self).to_internal_value(data)
- invalid_attrs = (set(data.keys()) - self.valid_user_attrs)
+ invalid_attrs = set(data.keys()) - self.valid_user_attrs
if invalid_attrs:
invalid_attrs = sorted(list(invalid_attrs))
attrs_display = json.dumps(invalid_attrs).lstrip('[').rstrip(']')
@@ -466,7 +423,7 @@ class LDAPGroupTypeField(fields.ChoiceField, DependsOnMixin):
default_error_messages = {
'type_error': _('Expected an instance of LDAPGroupType but got {input_type} instead.'),
'missing_parameters': _('Missing required parameters in {dependency}.'),
- 'invalid_parameters': _('Invalid group_type parameters. Expected instance of dict but got {parameters_type} instead.')
+ 'invalid_parameters': _('Invalid group_type parameters. Expected instance of dict but got {parameters_type} instead.'),
}
def __init__(self, choices=None, **kwargs):
@@ -515,9 +472,7 @@ class LDAPGroupTypeField(fields.ChoiceField, DependsOnMixin):
class LDAPGroupTypeParamsField(fields.DictField, DependsOnMixin):
- default_error_messages = {
- 'invalid_keys': _('Invalid key(s): {invalid_keys}.'),
- }
+ default_error_messages = {'invalid_keys': _('Invalid key(s): {invalid_keys}.')}
def to_internal_value(self, value):
value = super(LDAPGroupTypeParamsField, self).to_internal_value(value)
@@ -541,15 +496,13 @@ class LDAPGroupTypeParamsField(fields.DictField, DependsOnMixin):
class LDAPUserFlagsField(fields.DictField):
- default_error_messages = {
- 'invalid_flag': _('Invalid user flag: "{invalid_flag}".'),
- }
+ default_error_messages = {'invalid_flag': _('Invalid user flag: "{invalid_flag}".')}
valid_user_flags = {'is_superuser', 'is_system_auditor'}
child = LDAPDNListField()
def to_internal_value(self, data):
data = super(LDAPUserFlagsField, self).to_internal_value(data)
- invalid_flags = (set(data.keys()) - self.valid_user_flags)
+ invalid_flags = set(data.keys()) - self.valid_user_flags
if invalid_flags:
self.fail('invalid_flag', invalid_flag=list(invalid_flags)[0])
return data
@@ -592,7 +545,6 @@ class LDAPTeamMapField(fields.DictField):
class SocialMapStringRegexField(fields.CharField):
-
def to_representation(self, value):
if isinstance(value, type(re.compile(''))):
flags = []
@@ -623,9 +575,7 @@ class SocialMapStringRegexField(fields.CharField):
class SocialMapField(fields.ListField):
- default_error_messages = {
- 'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.'),
- }
+ default_error_messages = {'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.')}
child = SocialMapStringRegexField()
def to_representation(self, value):
@@ -695,9 +645,7 @@ class SAMLOrgInfoValueField(HybridDictField):
class SAMLOrgInfoField(fields.DictField):
- default_error_messages = {
- 'invalid_lang_code': _('Invalid language code(s) for org info: {invalid_lang_codes}.'),
- }
+ default_error_messages = {'invalid_lang_code': _('Invalid language code(s) for org info: {invalid_lang_codes}.')}
child = SAMLOrgInfoValueField()
def to_internal_value(self, data):
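
For reference, the list-of-lists shape LDAPSearchUnionField validates above looks roughly like this minimal sketch; the setting name and DNs are invented for illustration, not taken from this commit:

    # Hypothetical union search: each inner element must itself be a
    # three-item [base DN, scope, filter] query, per LDAPSearchField.
    AUTH_LDAP_USER_SEARCH = [
        ['ou=people,dc=example,dc=org', 'SCOPE_SUBTREE', '(uid=%(user)s)'],
        ['ou=service,dc=example,dc=org', 'SCOPE_ONELEVEL', '(uid=%(user)s)'],
    ]
    # A non-list inner element raises the "In order to utilize LDAP Union,
    # input element No. N should be a search query array." error above.
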
diff --git a/awx/sso/ldap_group_types.py b/awx/sso/ldap_group_types.py
index 69c655e040..2a5434c154 100644
--- a/awx/sso/ldap_group_types.py
+++ b/awx/sso/ldap_group_types.py
@@ -12,7 +12,6 @@ from django_auth_ldap.config import LDAPGroupType
class PosixUIDGroupType(LDAPGroupType):
-
def __init__(self, name_attr='cn', ldap_group_user_attr='uid'):
self.ldap_group_user_attr = ldap_group_user_attr
super(PosixUIDGroupType, self).__init__(name_attr)
@@ -20,6 +19,7 @@ class PosixUIDGroupType(LDAPGroupType):
"""
An LDAPGroupType subclass that handles non-standard DS.
"""
+
def user_groups(self, ldap_user, group_search):
"""
Searches for any group that is either the user's primary or contains the
@@ -34,12 +34,10 @@ class PosixUIDGroupType(LDAPGroupType):
user_gid = ldap_user.attrs['gidNumber'][0]
filterstr = u'(|(gidNumber=%s)(memberUid=%s))' % (
self.ldap.filter.escape_filter_chars(user_gid),
- self.ldap.filter.escape_filter_chars(user_uid)
- )
- else:
- filterstr = u'(memberUid=%s)' % (
self.ldap.filter.escape_filter_chars(user_uid),
)
+ else:
+ filterstr = u'(memberUid=%s)' % (self.ldap.filter.escape_filter_chars(user_uid),)
search = group_search.search_with_additional_term_string(filterstr)
search.attrlist = [str(self.name_attr)]
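
For reference, the filter strings user_groups builds above come out like this standalone sketch (using python-ldap's module-level ldap.filter rather than the instance attribute, with invented uid/gid values):

    import ldap.filter

    user_uid, user_gid = 'jdoe', '1000'

    # With a gidNumber on the user record, both clauses are searched:
    filterstr = u'(|(gidNumber=%s)(memberUid=%s))' % (
        ldap.filter.escape_filter_chars(user_gid),
        ldap.filter.escape_filter_chars(user_uid),
    )
    print(filterstr)  # (|(gidNumber=1000)(memberUid=jdoe))

    # Without one, only memberUid is matched:
    print(u'(memberUid=%s)' % (ldap.filter.escape_filter_chars(user_uid),))
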
diff --git a/awx/sso/middleware.py b/awx/sso/middleware.py
index fcdf3e9b2e..f8b2b79741 100644
--- a/awx/sso/middleware.py
+++ b/awx/sso/middleware.py
@@ -17,7 +17,6 @@ from social_django.middleware import SocialAuthExceptionMiddleware
class SocialAuthMiddleware(SocialAuthExceptionMiddleware):
-
def process_request(self, request):
if request.path.startswith('/sso'):
# See upgrade blocker note in requirements/README.md
diff --git a/awx/sso/migrations/0001_initial.py b/awx/sso/migrations/0001_initial.py
index 69bc5ec7c7..e1fc1ba189 100644
--- a/awx/sso/migrations/0001_initial.py
+++ b/awx/sso/migrations/0001_initial.py
@@ -7,9 +7,7 @@ from django.conf import settings
class Migration(migrations.Migration):
- dependencies = [
- migrations.swappable_dependency(settings.AUTH_USER_MODEL),
- ]
+ dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]
operations = [
migrations.CreateModel(
@@ -20,8 +18,5 @@ class Migration(migrations.Migration):
('user', models.ForeignKey(related_name='enterprise_auth', on_delete=models.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
- migrations.AlterUniqueTogether(
- name='userenterpriseauth',
- unique_together=set([('user', 'provider')]),
- ),
+ migrations.AlterUniqueTogether(name='userenterpriseauth', unique_together=set([('user', 'provider')])),
]
diff --git a/awx/sso/migrations/0002_expand_provider_options.py b/awx/sso/migrations/0002_expand_provider_options.py
index 600b244efc..d7fd032c75 100644
--- a/awx/sso/migrations/0002_expand_provider_options.py
+++ b/awx/sso/migrations/0002_expand_provider_options.py
@@ -6,14 +6,12 @@ from django.db import migrations, models
class Migration(migrations.Migration):
- dependencies = [
- ('sso', '0001_initial'),
- ]
+ dependencies = [('sso', '0001_initial')]
operations = [
migrations.AlterField(
model_name='userenterpriseauth',
name='provider',
field=models.CharField(max_length=32, choices=[('radius', 'RADIUS'), ('tacacs+', 'TACACS+'), ('saml', 'SAML')]),
- ),
+ )
]
diff --git a/awx/sso/models.py b/awx/sso/models.py
index 474c3d8c57..901386e586 100644
--- a/awx/sso/models.py
+++ b/awx/sso/models.py
@@ -10,18 +10,10 @@ from django.utils.translation import ugettext_lazy as _
class UserEnterpriseAuth(models.Model):
"""Tower Enterprise Auth association model"""
- PROVIDER_CHOICES = (
- ('radius', _('RADIUS')),
- ('tacacs+', _('TACACS+')),
- ('saml', _('SAML')),
- )
+ PROVIDER_CHOICES = (('radius', _('RADIUS')), ('tacacs+', _('TACACS+')), ('saml', _('SAML')))
class Meta:
unique_together = ('user', 'provider')
- user = models.ForeignKey(
- User, related_name='enterprise_auth', on_delete=models.CASCADE
- )
- provider = models.CharField(
- max_length=32, choices=PROVIDER_CHOICES
- )
+ user = models.ForeignKey(User, related_name='enterprise_auth', on_delete=models.CASCADE)
+ provider = models.CharField(max_length=32, choices=PROVIDER_CHOICES)
diff --git a/awx/sso/pipeline.py b/awx/sso/pipeline.py
index 3e73974474..70adbc520a 100644
--- a/awx/sso/pipeline.py
+++ b/awx/sso/pipeline.py
@@ -19,7 +19,6 @@ logger = logging.getLogger('awx.sso.pipeline')
class AuthNotFound(AuthException):
-
def __init__(self, backend, email_or_uid, *args, **kwargs):
self.email_or_uid = email_or_uid
super(AuthNotFound, self).__init__(backend, *args, **kwargs)
@@ -29,7 +28,6 @@ class AuthNotFound(AuthException):
class AuthInactive(AuthException):
-
def __str__(self):
return _('Your account is inactive')
@@ -52,10 +50,10 @@ def prevent_inactive_login(backend, details, user=None, *args, **kwargs):
def _update_m2m_from_expression(user, related, expr, remove=True):
- '''
+ """
Helper function to update m2m relationship based on user matching one or
more expressions.
- '''
+ """
should_add = False
if expr is None:
return
@@ -98,31 +96,28 @@ def _update_org_from_attr(user, related, attr, remove, remove_admins, remove_aud
getattr(org, related).members.add(user)
if remove:
- [o.member_role.members.remove(user) for o in
- Organization.objects.filter(Q(member_role__members=user) & ~Q(id__in=org_ids))]
+ [o.member_role.members.remove(user) for o in Organization.objects.filter(Q(member_role__members=user) & ~Q(id__in=org_ids))]
if remove_admins:
- [o.admin_role.members.remove(user) for o in
- Organization.objects.filter(Q(admin_role__members=user) & ~Q(id__in=org_ids))]
+ [o.admin_role.members.remove(user) for o in Organization.objects.filter(Q(admin_role__members=user) & ~Q(id__in=org_ids))]
if remove_auditors:
- [o.auditor_role.members.remove(user) for o in
- Organization.objects.filter(Q(auditor_role__members=user) & ~Q(id__in=org_ids))]
+ [o.auditor_role.members.remove(user) for o in Organization.objects.filter(Q(auditor_role__members=user) & ~Q(id__in=org_ids))]
def update_user_orgs(backend, details, user=None, *args, **kwargs):
- '''
+ """
Update organization memberships for the given user based on mapping rules
defined in settings.
- '''
+ """
if not user:
return
from awx.main.models import Organization
+
org_map = backend.setting('ORGANIZATION_MAP') or {}
for org_name, org_opts in org_map.items():
org = Organization.objects.get_or_create(name=org_name)[0]
-
# Update org admins from expression(s).
remove = bool(org_opts.get('remove', True))
admins_expr = org_opts.get('admins', None)
@@ -136,13 +131,14 @@ def update_user_orgs(backend, details, user=None, *args, **kwargs):
def update_user_teams(backend, details, user=None, *args, **kwargs):
- '''
+ """
Update team memberships for the given user based on mapping rules defined
in settings.
- '''
+ """
if not user:
return
from awx.main.models import Organization, Team
+
team_map = backend.setting('TEAM_MAP') or {}
for team_name, team_opts in team_map.items():
# Get or create the org to update.
@@ -150,7 +146,6 @@ def update_user_teams(backend, details, user=None, *args, **kwargs):
continue
org = Organization.objects.get_or_create(name=team_opts['organization'])[0]
-
# Update team members from expression(s).
team = Team.objects.get_or_create(name=team_name, organization=org)[0]
users_expr = team_opts.get('users', None)
@@ -162,6 +157,7 @@ def update_user_orgs_by_saml_attr(backend, details, user=None, *args, **kwargs):
if not user:
return
from django.conf import settings
+
org_map = settings.SOCIAL_AUTH_SAML_ORGANIZATION_ATTR
if org_map.get('saml_attr') is None and org_map.get('saml_admin_attr') is None and org_map.get('saml_auditor_attr') is None:
return
@@ -184,14 +180,12 @@ def update_user_teams_by_saml_attr(backend, details, user=None, *args, **kwargs)
return
from awx.main.models import Organization, Team
from django.conf import settings
+
team_map = settings.SOCIAL_AUTH_SAML_TEAM_ATTR
if team_map.get('saml_attr') is None:
return
- saml_team_names = set(kwargs
- .get('response', {})
- .get('attributes', {})
- .get(team_map['saml_attr'], []))
+ saml_team_names = set(kwargs.get('response', {}).get('attributes', {}).get(team_map['saml_attr'], []))
team_ids = []
for team_name_map in team_map.get('team_org_map', []):
@@ -230,5 +224,4 @@ def update_user_teams_by_saml_attr(backend, details, user=None, *args, **kwargs)
team.member_role.members.add(user)
if team_map.get('remove', True):
- [t.member_role.members.remove(user) for t in
- Team.objects.filter(Q(member_role__members=user) & ~Q(id__in=team_ids))]
+ [t.member_role.members.remove(user) for t in Team.objects.filter(Q(member_role__members=user) & ~Q(id__in=team_ids))]
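
For reference, the mapping dictionaries these pipeline steps consume look roughly like the sketch below; the SOCIAL_AUTH_ prefix is the usual python-social-auth convention for backend.setting(), and all values are invented:

    SOCIAL_AUTH_ORGANIZATION_MAP = {
        'Default': {
            'users': 'foo',        # expression(s) matched against the user
            'remove': True,        # drop members who no longer match
            'admins': 'foobar',
            'remove_admins': True,
        },
    }
    SOCIAL_AUTH_TEAM_MAP = {
        'Blue': {'organization': 'Default', 'users': '', 'remove': True},
    }
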
diff --git a/awx/sso/tests/functional/test_get_or_set_enterprise_user.py b/awx/sso/tests/functional/test_get_or_set_enterprise_user.py
index 9844c17295..9e6feec080 100644
--- a/awx/sso/tests/functional/test_get_or_set_enterprise_user.py
+++ b/awx/sso/tests/functional/test_get_or_set_enterprise_user.py
@@ -19,9 +19,7 @@ def test_fetch_user_if_exist(existing_tacacsplus_user):
def test_create_user_if_not_exist(existing_tacacsplus_user):
with mock.patch('awx.sso.backends.logger') as mocked_logger:
new_user = _get_or_set_enterprise_user("bar", "password", "tacacs+")
- mocked_logger.debug.assert_called_once_with(
- u'Created enterprise user bar via TACACS+ backend.'
- )
+ mocked_logger.debug.assert_called_once_with(u'Created enterprise user bar via TACACS+ backend.')
assert new_user != existing_tacacsplus_user
@@ -35,7 +33,5 @@ def test_created_user_has_no_usable_password():
def test_non_enterprise_user_does_not_get_pass(existing_normal_user):
with mock.patch('awx.sso.backends.logger') as mocked_logger:
new_user = _get_or_set_enterprise_user("alice", "password", "tacacs+")
- mocked_logger.warn.assert_called_once_with(
- u'Enterprise user alice already defined in Tower.'
- )
+ mocked_logger.warn.assert_called_once_with(u'Enterprise user alice already defined in Tower.')
assert new_user is None
diff --git a/awx/sso/tests/functional/test_ldap.py b/awx/sso/tests/functional/test_ldap.py
index fcb2a8bc4b..881ab29e2b 100644
--- a/awx/sso/tests/functional/test_ldap.py
+++ b/awx/sso/tests/functional/test_ldap.py
@@ -5,20 +5,15 @@ import pytest
from awx.sso.backends import LDAPSettings
-@override_settings(AUTH_LDAP_CONNECTION_OPTIONS = {ldap.OPT_NETWORK_TIMEOUT: 60})
+@override_settings(AUTH_LDAP_CONNECTION_OPTIONS={ldap.OPT_NETWORK_TIMEOUT: 60})
@pytest.mark.django_db
def test_ldap_with_custom_timeout():
settings = LDAPSettings()
- assert settings.CONNECTION_OPTIONS == {
- ldap.OPT_NETWORK_TIMEOUT: 60
- }
+ assert settings.CONNECTION_OPTIONS == {ldap.OPT_NETWORK_TIMEOUT: 60}
-@override_settings(AUTH_LDAP_CONNECTION_OPTIONS = {ldap.OPT_REFERRALS: 0})
+@override_settings(AUTH_LDAP_CONNECTION_OPTIONS={ldap.OPT_REFERRALS: 0})
@pytest.mark.django_db
def test_ldap_with_missing_timeout():
settings = LDAPSettings()
- assert settings.CONNECTION_OPTIONS == {
- ldap.OPT_REFERRALS: 0,
- ldap.OPT_NETWORK_TIMEOUT: 30
- }
+ assert settings.CONNECTION_OPTIONS == {ldap.OPT_REFERRALS: 0, ldap.OPT_NETWORK_TIMEOUT: 30}
diff --git a/awx/sso/tests/functional/test_pipeline.py b/awx/sso/tests/functional/test_pipeline.py
index e691939752..489113a952 100644
--- a/awx/sso/tests/functional/test_pipeline.py
+++ b/awx/sso/tests/functional/test_pipeline.py
@@ -1,20 +1,10 @@
-
import pytest
import re
from unittest import mock
-from awx.sso.pipeline import (
- update_user_orgs,
- update_user_teams,
- update_user_orgs_by_saml_attr,
- update_user_teams_by_saml_attr,
-)
+from awx.sso.pipeline import update_user_orgs, update_user_teams, update_user_orgs_by_saml_attr, update_user_teams_by_saml_attr
-from awx.main.models import (
- User,
- Team,
- Organization
-)
+from awx.main.models import User, Team, Organization
@pytest.fixture
@@ -26,33 +16,13 @@ def users():
@pytest.mark.django_db
-class TestSAMLMap():
-
+class TestSAMLMap:
@pytest.fixture
def backend(self):
class Backend:
s = {
- 'ORGANIZATION_MAP': {
- 'Default': {
- 'remove': True,
- 'admins': 'foobar',
- 'remove_admins': True,
- 'users': 'foo',
- 'remove_users': True,
- }
- },
- 'TEAM_MAP': {
- 'Blue': {
- 'organization': 'Default',
- 'remove': True,
- 'users': '',
- },
- 'Red': {
- 'organization': 'Default',
- 'remove': True,
- 'users': '',
- }
- }
+ 'ORGANIZATION_MAP': {'Default': {'remove': True, 'admins': 'foobar', 'remove_admins': True, 'users': 'foo', 'remove_users': True}},
+ 'TEAM_MAP': {'Blue': {'organization': 'Default', 'remove': True, 'users': ''}, 'Red': {'organization': 'Default', 'remove': True, 'users': ''}},
}
def setting(self, key):
@@ -132,17 +102,13 @@ class TestSAMLMap():
@pytest.mark.django_db
-class TestSAMLAttr():
-
+class TestSAMLAttr:
@pytest.fixture
def kwargs(self):
return {
'username': u'cmeyers@redhat.com',
'uid': 'idp:cmeyers@redhat.com',
- 'request': {
- u'SAMLResponse': [],
- u'RelayState': [u'idp']
- },
+ 'request': {u'SAMLResponse': [], u'RelayState': [u'idp']},
'is_new': False,
'response': {
'session_index': '_0728f0e0-b766-0135-75fa-02842b07c044',
@@ -156,14 +122,14 @@ class TestSAMLAttr():
'User.LastName': ['Meyers'],
'name_id': 'cmeyers@redhat.com',
'User.FirstName': ['Chris'],
- 'PersonImmutableID': []
- }
+ 'PersonImmutableID': [],
+ },
},
#'social': <UserSocialAuth: cmeyers@redhat.com>,
'social': None,
#'strategy': <awx.sso.strategies.django_strategy.AWXDjangoStrategy object at 0x8523a10>,
'strategy': None,
- 'new_association': False
+ 'new_association': False,
}
@pytest.fixture
@@ -181,7 +147,7 @@ class TestSAMLAttr():
else:
autocreate = True
- class MockSettings():
+ class MockSettings:
SAML_AUTO_CREATE_OBJECTS = autocreate
SOCIAL_AUTH_SAML_ORGANIZATION_ATTR = {
'saml_attr': 'memberOf',
@@ -200,12 +166,10 @@ class TestSAMLAttr():
{'team': 'Red', 'organization': 'Default1'},
{'team': 'Green', 'organization': 'Default1'},
{'team': 'Green', 'organization': 'Default3'},
- {
- 'team': 'Yellow', 'team_alias': 'Yellow_Alias',
- 'organization': 'Default4', 'organization_alias': 'Default4_Alias'
- },
- ]
+ {'team': 'Yellow', 'team_alias': 'Yellow_Alias', 'organization': 'Default4', 'organization_alias': 'Default4_Alias'},
+ ],
}
+
return MockSettings()
def test_update_user_orgs_by_saml_attr(self, orgs, users, kwargs, mock_settings):
@@ -308,8 +272,7 @@ class TestSAMLAttr():
assert Team.objects.filter(name='Yellow', organization__name='Default4').count() == 0
assert Team.objects.filter(name='Yellow_Alias', organization__name='Default4_Alias').count() == 1
- assert Team.objects.get(
- name='Yellow_Alias', organization__name='Default4_Alias').member_role.members.count() == 1
+ assert Team.objects.get(name='Yellow_Alias', organization__name='Default4_Alias').member_role.members.count() == 1
@pytest.mark.fixture_args(autocreate=False)
def test_autocreate_disabled(self, users, kwargs, mock_settings):
diff --git a/awx/sso/tests/test_env.py b/awx/sso/tests/test_env.py
index 135c90d99b..b63da8ed8a 100644
--- a/awx/sso/tests/test_env.py
+++ b/awx/sso/tests/test_env.py
@@ -1,5 +1,3 @@
-
-
# Ensure that our autouse overwrites are working
def test_cache(settings):
assert settings.CACHES['default']['BACKEND'] == 'django.core.cache.backends.locmem.LocMemCache'
diff --git a/awx/sso/tests/unit/test_fields.py b/awx/sso/tests/unit/test_fields.py
index 6d7505e022..8109f9f237 100644
--- a/awx/sso/tests/unit/test_fields.py
+++ b/awx/sso/tests/unit/test_fields.py
@@ -1,51 +1,48 @@
-
import pytest
from unittest import mock
from rest_framework.exceptions import ValidationError
-from awx.sso.fields import (
- SAMLOrgAttrField,
- SAMLTeamAttrField,
- LDAPGroupTypeParamsField,
- LDAPServerURIField
-)
-
-
-class TestSAMLOrgAttrField():
-
- @pytest.mark.parametrize("data, expected", [
- ({}, {}),
- ({'remove': True, 'saml_attr': 'foobar'}, {'remove': True, 'saml_attr': 'foobar'}),
- ({'remove': True, 'saml_attr': 1234}, {'remove': True, 'saml_attr': '1234'}),
- ({'remove': True, 'saml_attr': 3.14}, {'remove': True, 'saml_attr': '3.14'}),
- ({'saml_attr': 'foobar'}, {'saml_attr': 'foobar'}),
- ({'remove': True}, {'remove': True}),
- ({'remove': True, 'saml_admin_attr': 'foobar'}, {'remove': True, 'saml_admin_attr': 'foobar'}),
- ({'saml_admin_attr': 'foobar'}, {'saml_admin_attr': 'foobar'}),
- ({'remove_admins': True, 'saml_admin_attr': 'foobar'}, {'remove_admins': True, 'saml_admin_attr': 'foobar'}),
- ({'remove': True, 'saml_attr': 'foo', 'remove_admins': True, 'saml_admin_attr': 'bar'},
- {'remove': True, 'saml_attr': 'foo', 'remove_admins': True, 'saml_admin_attr': 'bar'}),
- ])
+from awx.sso.fields import SAMLOrgAttrField, SAMLTeamAttrField, LDAPGroupTypeParamsField, LDAPServerURIField
+
+
+class TestSAMLOrgAttrField:
+ @pytest.mark.parametrize(
+ "data, expected",
+ [
+ ({}, {}),
+ ({'remove': True, 'saml_attr': 'foobar'}, {'remove': True, 'saml_attr': 'foobar'}),
+ ({'remove': True, 'saml_attr': 1234}, {'remove': True, 'saml_attr': '1234'}),
+ ({'remove': True, 'saml_attr': 3.14}, {'remove': True, 'saml_attr': '3.14'}),
+ ({'saml_attr': 'foobar'}, {'saml_attr': 'foobar'}),
+ ({'remove': True}, {'remove': True}),
+ ({'remove': True, 'saml_admin_attr': 'foobar'}, {'remove': True, 'saml_admin_attr': 'foobar'}),
+ ({'saml_admin_attr': 'foobar'}, {'saml_admin_attr': 'foobar'}),
+ ({'remove_admins': True, 'saml_admin_attr': 'foobar'}, {'remove_admins': True, 'saml_admin_attr': 'foobar'}),
+ (
+ {'remove': True, 'saml_attr': 'foo', 'remove_admins': True, 'saml_admin_attr': 'bar'},
+ {'remove': True, 'saml_attr': 'foo', 'remove_admins': True, 'saml_admin_attr': 'bar'},
+ ),
+ ],
+ )
def test_internal_value_valid(self, data, expected):
field = SAMLOrgAttrField()
res = field.to_internal_value(data)
assert res == expected
- @pytest.mark.parametrize("data, expected", [
- ({'remove': 'blah', 'saml_attr': 'foobar'},
- {'remove': ['Must be a valid boolean.']}),
- ({'remove': True, 'saml_attr': False},
- {'saml_attr': ['Not a valid string.']}),
- ({'remove': True, 'saml_attr': False, 'foo': 'bar', 'gig': 'ity'},
- {'saml_attr': ['Not a valid string.'],
- 'foo': ['Invalid field.'],
- 'gig': ['Invalid field.']}),
- ({'remove_admins': True, 'saml_admin_attr': False},
- {'saml_admin_attr': ['Not a valid string.']}),
- ({'remove_admins': 'blah', 'saml_admin_attr': 'foobar'},
- {'remove_admins': ['Must be a valid boolean.']}),
- ])
+ @pytest.mark.parametrize(
+ "data, expected",
+ [
+ ({'remove': 'blah', 'saml_attr': 'foobar'}, {'remove': ['Must be a valid boolean.']}),
+ ({'remove': True, 'saml_attr': False}, {'saml_attr': ['Not a valid string.']}),
+ (
+ {'remove': True, 'saml_attr': False, 'foo': 'bar', 'gig': 'ity'},
+ {'saml_attr': ['Not a valid string.'], 'foo': ['Invalid field.'], 'gig': ['Invalid field.']},
+ ),
+ ({'remove_admins': True, 'saml_admin_attr': False}, {'saml_admin_attr': ['Not a valid string.']}),
+ ({'remove_admins': 'blah', 'saml_admin_attr': 'foobar'}, {'remove_admins': ['Must be a valid boolean.']}),
+ ],
+ )
def test_internal_value_invalid(self, data, expected):
field = SAMLOrgAttrField()
with pytest.raises(ValidationError) as e:
@@ -53,51 +50,64 @@ class TestSAMLOrgAttrField():
assert e.value.detail == expected
-class TestSAMLTeamAttrField():
-
- @pytest.mark.parametrize("data", [
- {},
- {'remove': True, 'saml_attr': 'foobar', 'team_org_map': []},
- {'remove': True, 'saml_attr': 'foobar', 'team_org_map': [
- {'team': 'Engineering', 'organization': 'Ansible'}
- ]},
- {'remove': True, 'saml_attr': 'foobar', 'team_org_map': [
- {'team': 'Engineering', 'organization': 'Ansible'},
- {'team': 'Engineering', 'organization': 'Ansible2'},
- {'team': 'Engineering2', 'organization': 'Ansible'},
- ]},
- {'remove': True, 'saml_attr': 'foobar', 'team_org_map': [
- {'team': 'Engineering', 'organization': 'Ansible'},
- {'team': 'Engineering', 'organization': 'Ansible2'},
- {'team': 'Engineering2', 'organization': 'Ansible'},
- ]},
- {'remove': True, 'saml_attr': 'foobar', 'team_org_map': [
+class TestSAMLTeamAttrField:
+ @pytest.mark.parametrize(
+ "data",
+ [
+ {},
+ {'remove': True, 'saml_attr': 'foobar', 'team_org_map': []},
+ {'remove': True, 'saml_attr': 'foobar', 'team_org_map': [{'team': 'Engineering', 'organization': 'Ansible'}]},
{
- 'team': 'Engineering', 'team_alias': 'Engineering Team',
- 'organization': 'Ansible', 'organization_alias': 'Awesome Org'
+ 'remove': True,
+ 'saml_attr': 'foobar',
+ 'team_org_map': [
+ {'team': 'Engineering', 'organization': 'Ansible'},
+ {'team': 'Engineering', 'organization': 'Ansible2'},
+ {'team': 'Engineering2', 'organization': 'Ansible'},
+ ],
},
- {'team': 'Engineering', 'organization': 'Ansible2'},
- {'team': 'Engineering2', 'organization': 'Ansible'},
- ]},
- ])
+ {
+ 'remove': True,
+ 'saml_attr': 'foobar',
+ 'team_org_map': [
+ {'team': 'Engineering', 'organization': 'Ansible'},
+ {'team': 'Engineering', 'organization': 'Ansible2'},
+ {'team': 'Engineering2', 'organization': 'Ansible'},
+ ],
+ },
+ {
+ 'remove': True,
+ 'saml_attr': 'foobar',
+ 'team_org_map': [
+ {'team': 'Engineering', 'team_alias': 'Engineering Team', 'organization': 'Ansible', 'organization_alias': 'Awesome Org'},
+ {'team': 'Engineering', 'organization': 'Ansible2'},
+ {'team': 'Engineering2', 'organization': 'Ansible'},
+ ],
+ },
+ ],
+ )
def test_internal_value_valid(self, data):
field = SAMLTeamAttrField()
res = field.to_internal_value(data)
assert res == data
- @pytest.mark.parametrize("data, expected", [
- ({'remove': True, 'saml_attr': 'foobar', 'team_org_map': [
- {'team': 'foobar', 'not_a_valid_key': 'blah', 'organization': 'Ansible'},
- ]}, {'team_org_map': {0: {'not_a_valid_key': ['Invalid field.']}}}),
- ({'remove': False, 'saml_attr': 'foobar', 'team_org_map': [
- {'organization': 'Ansible'},
- ]}, {'team_org_map': {0: {'team': ['This field is required.']}}}),
- ({'remove': False, 'saml_attr': 'foobar', 'team_org_map': [
- {},
- ]}, {'team_org_map': {
- 0: {'organization': ['This field is required.'],
- 'team': ['This field is required.']}}}),
- ])
+ @pytest.mark.parametrize(
+ "data, expected",
+ [
+ (
+ {'remove': True, 'saml_attr': 'foobar', 'team_org_map': [{'team': 'foobar', 'not_a_valid_key': 'blah', 'organization': 'Ansible'}]},
+ {'team_org_map': {0: {'not_a_valid_key': ['Invalid field.']}}},
+ ),
+ (
+ {'remove': False, 'saml_attr': 'foobar', 'team_org_map': [{'organization': 'Ansible'}]},
+ {'team_org_map': {0: {'team': ['This field is required.']}}},
+ ),
+ (
+ {'remove': False, 'saml_attr': 'foobar', 'team_org_map': [{}]},
+ {'team_org_map': {0: {'organization': ['This field is required.'], 'team': ['This field is required.']}}},
+ ),
+ ],
+ )
def test_internal_value_invalid(self, data, expected):
field = SAMLTeamAttrField()
with pytest.raises(ValidationError) as e:
@@ -105,17 +115,19 @@ class TestSAMLTeamAttrField():
assert e.value.detail == expected
-class TestLDAPGroupTypeParamsField():
-
- @pytest.mark.parametrize("group_type, data, expected", [
- ('LDAPGroupType', {'name_attr': 'user', 'bob': ['a', 'b'], 'scooter': 'hello'},
- ['Invalid key(s): "bob", "scooter".']),
- ('MemberDNGroupType', {'name_attr': 'user', 'member_attr': 'west', 'bob': ['a', 'b'], 'scooter': 'hello'},
- ['Invalid key(s): "bob", "scooter".']),
- ('PosixUIDGroupType', {'name_attr': 'user', 'member_attr': 'west', 'ldap_group_user_attr': 'legacyThing',
- 'bob': ['a', 'b'], 'scooter': 'hello'},
- ['Invalid key(s): "bob", "member_attr", "scooter".']),
- ])
+class TestLDAPGroupTypeParamsField:
+ @pytest.mark.parametrize(
+ "group_type, data, expected",
+ [
+ ('LDAPGroupType', {'name_attr': 'user', 'bob': ['a', 'b'], 'scooter': 'hello'}, ['Invalid key(s): "bob", "scooter".']),
+ ('MemberDNGroupType', {'name_attr': 'user', 'member_attr': 'west', 'bob': ['a', 'b'], 'scooter': 'hello'}, ['Invalid key(s): "bob", "scooter".']),
+ (
+ 'PosixUIDGroupType',
+ {'name_attr': 'user', 'member_attr': 'west', 'ldap_group_user_attr': 'legacyThing', 'bob': ['a', 'b'], 'scooter': 'hello'},
+ ['Invalid key(s): "bob", "member_attr", "scooter".'],
+ ),
+ ],
+ )
def test_internal_value_invalid(self, group_type, data, expected):
field = LDAPGroupTypeParamsField()
field.get_depends_on = mock.MagicMock(return_value=group_type)
@@ -125,14 +137,16 @@ class TestLDAPGroupTypeParamsField():
assert e.value.detail == expected
-class TestLDAPServerURIField():
-
- @pytest.mark.parametrize("ldap_uri, exception, expected", [
- (r'ldap://servername.com:444', None, r'ldap://servername.com:444'),
- (r'ldap://servername.so3:444', None, r'ldap://servername.so3:444'),
- (r'ldaps://servername3.s300:344', None, r'ldaps://servername3.s300:344'),
- (r'ldap://servername.-so3:444', ValidationError, None),
- ])
+class TestLDAPServerURIField:
+ @pytest.mark.parametrize(
+ "ldap_uri, exception, expected",
+ [
+ (r'ldap://servername.com:444', None, r'ldap://servername.com:444'),
+ (r'ldap://servername.so3:444', None, r'ldap://servername.so3:444'),
+ (r'ldaps://servername3.s300:344', None, r'ldaps://servername3.s300:344'),
+ (r'ldap://servername.-so3:444', ValidationError, None),
+ ],
+ )
def test_run_validators_valid(self, ldap_uri, exception, expected):
field = LDAPServerURIField()
if exception is None:
diff --git a/awx/sso/tests/unit/test_tacacsplus.py b/awx/sso/tests/unit/test_tacacsplus.py
index e475694d63..60ed0c4799 100644
--- a/awx/sso/tests/unit/test_tacacsplus.py
+++ b/awx/sso/tests/unit/test_tacacsplus.py
@@ -10,17 +10,15 @@ def test_empty_host_fails_auth(tacacsplus_backend):
def test_client_raises_exception(tacacsplus_backend):
client = mock.MagicMock()
- client.authenticate.side_effect=Exception("foo")
- with mock.patch('awx.sso.backends.django_settings') as settings,\
- mock.patch('awx.sso.backends.logger') as logger,\
- mock.patch('tacacs_plus.TACACSClient', return_value=client):
+ client.authenticate.side_effect = Exception("foo")
+ with mock.patch('awx.sso.backends.django_settings') as settings, mock.patch('awx.sso.backends.logger') as logger, mock.patch(
+ 'tacacs_plus.TACACSClient', return_value=client
+ ):
settings.TACACSPLUS_HOST = 'localhost'
settings.TACACSPLUS_AUTH_PROTOCOL = 'ascii'
ret_user = tacacsplus_backend.authenticate(None, u"user", u"pass")
assert ret_user is None
- logger.exception.assert_called_once_with(
- "TACACS+ Authentication Error: foo"
- )
+ logger.exception.assert_called_once_with("TACACS+ Authentication Error: foo")
def test_client_return_invalid_fails_auth(tacacsplus_backend):
@@ -28,8 +26,7 @@ def test_client_return_invalid_fails_auth(tacacsplus_backend):
auth.valid = False
client = mock.MagicMock()
client.authenticate.return_value = auth
- with mock.patch('awx.sso.backends.django_settings') as settings,\
- mock.patch('tacacs_plus.TACACSClient', return_value=client):
+ with mock.patch('awx.sso.backends.django_settings') as settings, mock.patch('tacacs_plus.TACACSClient', return_value=client):
settings.TACACSPLUS_HOST = 'localhost'
settings.TACACSPLUS_AUTH_PROTOCOL = 'ascii'
ret_user = tacacsplus_backend.authenticate(None, u"user", u"pass")
@@ -43,9 +40,9 @@ def test_client_return_valid_passes_auth(tacacsplus_backend):
client.authenticate.return_value = auth
user = mock.MagicMock()
user.has_usable_password = mock.MagicMock(return_value=False)
- with mock.patch('awx.sso.backends.django_settings') as settings,\
- mock.patch('tacacs_plus.TACACSClient', return_value=client),\
- mock.patch('awx.sso.backends._get_or_set_enterprise_user', return_value=user):
+ with mock.patch('awx.sso.backends.django_settings') as settings, mock.patch('tacacs_plus.TACACSClient', return_value=client), mock.patch(
+ 'awx.sso.backends._get_or_set_enterprise_user', return_value=user
+ ):
settings.TACACSPLUS_HOST = 'localhost'
settings.TACACSPLUS_AUTH_PROTOCOL = 'ascii'
ret_user = tacacsplus_backend.authenticate(None, u"user", u"pass")
diff --git a/awx/sso/urls.py b/awx/sso/urls.py
index 99b705f790..a32b11d6d6 100644
--- a/awx/sso/urls.py
+++ b/awx/sso/urls.py
@@ -2,12 +2,7 @@
# All Rights Reserved.
from django.conf.urls import url
-from awx.sso.views import (
- sso_complete,
- sso_error,
- sso_inactive,
- saml_metadata,
-)
+from awx.sso.views import sso_complete, sso_error, sso_inactive, saml_metadata
app_name = 'sso'
diff --git a/awx/sso/validators.py b/awx/sso/validators.py
index 7e89958236..821abc3b15 100644
--- a/awx/sso/validators.py
+++ b/awx/sso/validators.py
@@ -8,10 +8,14 @@ import ldap
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
-__all__ = ['validate_ldap_dn', 'validate_ldap_dn_with_user',
- 'validate_ldap_bind_dn', 'validate_ldap_filter',
- 'validate_ldap_filter_with_user',
- 'validate_tacacsplus_disallow_nonascii']
+__all__ = [
+ 'validate_ldap_dn',
+ 'validate_ldap_dn_with_user',
+ 'validate_ldap_bind_dn',
+ 'validate_ldap_filter',
+ 'validate_ldap_filter_with_user',
+ 'validate_tacacsplus_disallow_nonascii',
+]
def validate_ldap_dn(value, with_user=False):
@@ -32,8 +36,9 @@ def validate_ldap_dn_with_user(value):
def validate_ldap_bind_dn(value):
- if not re.match(r'^[A-Za-z][A-Za-z0-9._-]*?\\[A-Za-z0-9 ._-]+?$', value.strip()) and \
- not re.match(r'^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$', value.strip()):
+ if not re.match(r'^[A-Za-z][A-Za-z0-9._-]*?\\[A-Za-z0-9 ._-]+?$', value.strip()) and not re.match(
+ r'^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$', value.strip()
+ ):
validate_ldap_dn(value)
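
To spell out the reflowed condition above: validate_ldap_bind_dn short-circuits for down-level (DOMAIN\user) and UPN (user@domain) logins and only validates anything else as a DN. A standalone sketch with invented values:

    import re

    down_level = r'^[A-Za-z][A-Za-z0-9._-]*?\\[A-Za-z0-9 ._-]+?$'
    upn = r'^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$'

    assert re.match(down_level, r'EXAMPLE\jdoe')  # DOMAIN\user form
    assert re.match(upn, 'jdoe@example.com')      # user@domain form
    # 'cn=admin,dc=example,dc=org' matches neither pattern, so it falls
    # through to validate_ldap_dn() and must parse as a distinguished name.
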
diff --git a/awx/sso/views.py b/awx/sso/views.py
index 1f2dcad0b9..20117c2af2 100644
--- a/awx/sso/views.py
+++ b/awx/sso/views.py
@@ -37,7 +37,6 @@ sso_inactive = BaseRedirectView.as_view()
class CompleteView(BaseRedirectView):
-
def dispatch(self, request, *args, **kwargs):
response = super(CompleteView, self).dispatch(request, *args, **kwargs)
if self.request.user and self.request.user.is_authenticated:
@@ -54,16 +53,12 @@ sso_complete = CompleteView.as_view()
class MetadataView(View):
-
def get(self, request, *args, **kwargs):
from social_django.utils import load_backend, load_strategy
- complete_url = reverse('social:complete', args=('saml', ))
+
+ complete_url = reverse('social:complete', args=('saml',))
try:
- saml_backend = load_backend(
- load_strategy(request),
- 'saml',
- redirect_uri=complete_url,
- )
+ saml_backend = load_backend(load_strategy(request), 'saml', redirect_uri=complete_url)
metadata, errors = saml_backend.generate_metadata_xml()
except Exception as e:
logger.exception('unable to generate SAML metadata')
diff --git a/awx/ui/__init__.py b/awx/ui/__init__.py
index bfb3e776cd..ac6a554356 100644
--- a/awx/ui/__init__.py
+++ b/awx/ui/__init__.py
@@ -2,4 +2,3 @@
# All Rights Reserved.
default_app_config = 'awx.ui.apps.UIConfig'
-
diff --git a/awx/ui/apps.py b/awx/ui/apps.py
index 5b8e5083c1..40943c6f53 100644
--- a/awx/ui/apps.py
+++ b/awx/ui/apps.py
@@ -7,4 +7,3 @@ class UIConfig(AppConfig):
name = 'awx.ui'
verbose_name = _('UI')
-
diff --git a/awx/ui/conf.py b/awx/ui/conf.py
index 3148aec6ee..cc930113e8 100644
--- a/awx/ui/conf.py
+++ b/awx/ui/conf.py
@@ -12,11 +12,7 @@ from awx.ui.fields import PendoTrackingStateField, CustomLogoField # noqa
register(
'PENDO_TRACKING_STATE',
field_class=PendoTrackingStateField,
- choices=[
- ('off', _('Off')),
- ('anonymous', _('Anonymous')),
- ('detailed', _('Detailed')),
- ],
+ choices=[('off', _('Off')), ('anonymous', _('Anonymous')), ('detailed', _('Detailed'))],
label=_('User Analytics Tracking State'),
help_text=_('Enable or Disable User Analytics Tracking.'),
category=_('UI'),
@@ -29,10 +25,12 @@ register(
allow_blank=True,
default='',
label=_('Custom Login Info'),
- help_text=_('If needed, you can add specific information (such as a legal '
- 'notice or a disclaimer) to a text box in the login modal using '
- 'this setting. Any content added must be in plain text or an '
- 'HTML fragment, as other markup languages are not supported.'),
+ help_text=_(
+ 'If needed, you can add specific information (such as a legal '
+ 'notice or a disclaimer) to a text box in the login modal using '
+ 'this setting. Any content added must be in plain text or an '
+ 'HTML fragment, as other markup languages are not supported.'
+ ),
category=_('UI'),
category_slug='ui',
)
@@ -43,9 +41,11 @@ register(
allow_blank=True,
default='',
label=_('Custom Logo'),
- help_text=_('To set up a custom logo, provide a file that you create. For '
- 'the custom logo to look its best, use a .png file with a '
- 'transparent background. GIF, PNG and JPEG formats are supported.'),
+ help_text=_(
+ 'To set up a custom logo, provide a file that you create. For '
+ 'the custom logo to look its best, use a .png file with a '
+ 'transparent background. GIF, PNG and JPEG formats are supported.'
+ ),
placeholder='data:image/gif;base64,R0lGODlhAQABAIABAP///wAAACwAAAAAAQABAAACAkQBADs=',
category=_('UI'),
category_slug='ui',
@@ -56,8 +56,7 @@ register(
field_class=fields.IntegerField,
min_value=100,
label=_('Max Job Events Retrieved by UI'),
- help_text=_('Maximum number of job events for the UI to retrieve within a '
- 'single request.'),
+ help_text=_('Maximum number of job events for the UI to retrieve within a ' 'single request.'),
category=_('UI'),
category_slug='ui',
)
@@ -66,9 +65,7 @@ register(
'UI_LIVE_UPDATES_ENABLED',
field_class=fields.BooleanField,
label=_('Enable Live Updates in the UI'),
- help_text=_('If disabled, the page will not refresh when events are received. '
- 'Reloading the page will be required to get the latest details.'),
+ help_text=_('If disabled, the page will not refresh when events are received. ' 'Reloading the page will be required to get the latest details.'),
category=_('UI'),
category_slug='ui',
)
-
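
One side effect of the reflow worth noting: several help_text strings above are now split across adjacent literals, which Python concatenates at compile time. A one-line sanity check:

    s = 'Maximum number of job events for the UI to retrieve within a ' 'single request.'
    assert s == 'Maximum number of job events for the UI to retrieve within a single request.'
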
diff --git a/awx/ui/context_processors.py b/awx/ui/context_processors.py
index 87c071c285..fb06da9bd8 100644
--- a/awx/ui/context_processors.py
+++ b/awx/ui/context_processors.py
@@ -3,6 +3,4 @@ import os
def csp(request):
- return {
- 'csp_nonce': base64.encodebytes(os.urandom(32)).decode().rstrip(),
- }
+ return {'csp_nonce': base64.encodebytes(os.urandom(32)).decode().rstrip()}
diff --git a/awx/ui/fields.py b/awx/ui/fields.py
index 4d96165d4d..6893ed2068 100644
--- a/awx/ui/fields.py
+++ b/awx/ui/fields.py
@@ -14,7 +14,6 @@ from awx.conf import fields, register
class PendoTrackingStateField(fields.ChoiceField):
-
def to_internal_value(self, data):
# Any false/null values get converted to 'off'.
if data in fields.NullBooleanField.FALSE_VALUES or data in fields.NullBooleanField.NULL_VALUES:
@@ -42,4 +41,3 @@ class CustomLogoField(fields.CharField):
except (TypeError, binascii.Error):
self.fail('invalid_data')
return data
-
diff --git a/awx/ui_next/apps.py b/awx/ui_next/apps.py
index 99555337f2..73d2ec3d32 100644
--- a/awx/ui_next/apps.py
+++ b/awx/ui_next/apps.py
@@ -7,4 +7,3 @@ class UINextConfig(AppConfig):
name = 'awx.ui_next'
verbose_name = _('UI_Next')
-
diff --git a/awx/ui_next/urls.py b/awx/ui_next/urls.py
index 58ec7ab251..233cd8d0af 100644
--- a/awx/ui_next/urls.py
+++ b/awx/ui_next/urls.py
@@ -13,7 +13,7 @@ class IndexView(TemplateView):
class MigrationsNotran(TemplateView):
template_name = 'installing.html'
-
+
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
product_name = get_licenser().validate()['product_name']
@@ -27,7 +27,4 @@ class MigrationsNotran(TemplateView):
app_name = 'ui_next'
-urlpatterns = [
- url(r'^$', IndexView.as_view(), name='index'),
- url(r'^migrations_notran/$', MigrationsNotran.as_view(), name='migrations_notran'),
-]
+urlpatterns = [url(r'^$', IndexView.as_view(), name='index'), url(r'^migrations_notran/$', MigrationsNotran.as_view(), name='migrations_notran')]
diff --git a/awx/urls.py b/awx/urls.py
index d26ea0d6f5..d835580505 100644
--- a/awx/urls.py
+++ b/awx/urls.py
@@ -3,14 +3,7 @@
from django.conf.urls import url, include
from django.conf import settings
-from awx.main.views import (
- handle_400,
- handle_403,
- handle_404,
- handle_500,
- handle_csp_violation,
- handle_login_redirect,
-)
+from awx.main.views import handle_400, handle_403, handle_404, handle_500, handle_csp_violation, handle_login_redirect
urlpatterns = [
@@ -29,9 +22,8 @@ urlpatterns = [
if settings.SETTINGS_MODULE == 'awx.settings.development':
try:
import debug_toolbar
- urlpatterns += [
- url(r'^__debug__/', include(debug_toolbar.urls))
- ]
+
+ urlpatterns += [url(r'^__debug__/', include(debug_toolbar.urls))]
except ImportError:
pass
diff --git a/awx/wsgi.py b/awx/wsgi.py
index af8290cea3..77302f1573 100644
--- a/awx/wsgi.py
+++ b/awx/wsgi.py
@@ -6,12 +6,13 @@ from awx import __version__ as tower_version
# Prepare the AWX environment.
from awx import prepare_env, MODE
+
prepare_env()
import django # NOQA
from django.conf import settings # NOQA
from django.urls import resolve # NOQA
-from django.core.wsgi import get_wsgi_application # NOQA
+from django.core.wsgi import get_wsgi_application # NOQA
import social_django # NOQA
diff --git a/awx_collection/plugins/doc_fragments/auth.py b/awx_collection/plugins/doc_fragments/auth.py
index 1e77a63b4b..cc8d449bad 100644
--- a/awx_collection/plugins/doc_fragments/auth.py
+++ b/awx_collection/plugins/doc_fragments/auth.py
@@ -3,7 +3,8 @@
# Copyright: (c) 2017, Wayne Witzel III <wayne@riotousliving.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
diff --git a/awx_collection/plugins/doc_fragments/auth_legacy.py b/awx_collection/plugins/doc_fragments/auth_legacy.py
index bf0ea28832..29c91507f2 100644
--- a/awx_collection/plugins/doc_fragments/auth_legacy.py
+++ b/awx_collection/plugins/doc_fragments/auth_legacy.py
@@ -3,7 +3,8 @@
# Copyright: (c) 2017, Wayne Witzel III <wayne@riotousliving.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
diff --git a/awx_collection/plugins/doc_fragments/auth_plugin.py b/awx_collection/plugins/doc_fragments/auth_plugin.py
index 527054ed27..f3c49ca9ba 100644
--- a/awx_collection/plugins/doc_fragments/auth_plugin.py
+++ b/awx_collection/plugins/doc_fragments/auth_plugin.py
@@ -3,7 +3,8 @@
# Copyright: (c) 2020, Ansible by Red Hat, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
diff --git a/awx_collection/plugins/inventory/tower.py b/awx_collection/plugins/inventory/tower.py
index 5be3af2756..b90f7f782c 100644
--- a/awx_collection/plugins/inventory/tower.py
+++ b/awx_collection/plugins/inventory/tower.py
@@ -1,7 +1,7 @@
# Copyright (c) 2018 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
__metaclass__ = type
@@ -109,10 +109,7 @@ class InventoryModule(BaseInventoryPlugin):
if opt_val is not None:
module_params[module_param] = opt_val
- module = TowerAPIModule(
- argument_spec={}, direct_params=module_params,
- error_callback=handle_error, warn_callback=self.warn_callback
- )
+ module = TowerAPIModule(argument_spec={}, direct_params=module_params, error_callback=handle_error, warn_callback=self.warn_callback)
# validate type of inventory_id because we allow two types as special case
inventory_id = self.get_option('inventory_id')
@@ -123,15 +120,12 @@ class InventoryModule(BaseInventoryPlugin):
inventory_id = ensure_type(inventory_id, 'str')
except ValueError as e:
raise AnsibleOptionsError(
- 'Invalid type for configuration option inventory_id, '
- 'not integer, and cannot convert to string: {err}'.format(err=to_native(e))
+ 'Invalid type for configuration option inventory_id, ' 'not integer, and cannot convert to string: {err}'.format(err=to_native(e))
)
inventory_id = inventory_id.replace('/', '')
inventory_url = '/api/v2/inventories/{inv_id}/script/'.format(inv_id=inventory_id)
- inventory = module.get_endpoint(
- inventory_url, data={'hostvars': '1', 'towervars': '1', 'all': '1'}
- )['json']
+ inventory = module.get_endpoint(inventory_url, data={'hostvars': '1', 'towervars': '1', 'all': '1'})['json']
# To start with, create all the groups.
for group_name in inventory:
diff --git a/awx_collection/plugins/lookup/tower_api.py b/awx_collection/plugins/lookup/tower_api.py
index a10d9c4c8e..c38b33d139 100644
--- a/awx_collection/plugins/lookup/tower_api.py
+++ b/awx_collection/plugins/lookup/tower_api.py
@@ -1,6 +1,7 @@
# (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -145,10 +146,7 @@ class LookupModule(LookupBase):
module_params[module_param] = opt_val
# Create our module
- module = TowerAPIModule(
- argument_spec={}, direct_params=module_params,
- error_callback=self.handle_error, warn_callback=self.warn_callback
- )
+ module = TowerAPIModule(argument_spec={}, direct_params=module_params, error_callback=self.handle_error, warn_callback=self.warn_callback)
response = module.get_endpoint(terms[0], data=self.get_option('query_params', {}))
@@ -162,17 +160,11 @@ class LookupModule(LookupBase):
if self.get_option('expect_objects') or self.get_option('expect_one'):
if ('id' not in return_data) and ('results' not in return_data):
- raise AnsibleError(
- 'Did not obtain a list or detail view at {0}, and '
- 'expect_objects or expect_one is set to True'.format(terms[0])
- )
+ raise AnsibleError('Did not obtain a list or detail view at {0}, and ' 'expect_objects or expect_one is set to True'.format(terms[0]))
if self.get_option('expect_one'):
if 'results' in return_data and len(return_data['results']) != 1:
- raise AnsibleError(
- 'Expected one object from endpoint {0}, '
- 'but obtained {1} from API'.format(terms[0], len(return_data['results']))
- )
+ raise AnsibleError('Expected one object from endpoint {0}, ' 'but obtained {1} from API'.format(terms[0], len(return_data['results'])))
if self.get_option('return_all') and 'results' in return_data:
if return_data['count'] > self.get_option('max_objects'):
diff --git a/awx_collection/plugins/lookup/tower_schedule_rrule.py b/awx_collection/plugins/lookup/tower_schedule_rrule.py
index 918b9fa1d0..9876be153b 100644
--- a/awx_collection/plugins/lookup/tower_schedule_rrule.py
+++ b/awx_collection/plugins/lookup/tower_schedule_rrule.py
@@ -1,6 +1,7 @@
# (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
DOCUMENTATION = """
@@ -104,6 +105,7 @@ except ImportError:
# Validate the version of python.dateutil
try:
import dateutil
+
if LooseVersion(dateutil.__version__) < LooseVersion("2.7.0"):
raise Exception
except Exception:
diff --git a/awx_collection/plugins/module_utils/tower_api.py b/awx_collection/plugins/module_utils/tower_api.py
index cb31f428a4..1ef663fb26 100644
--- a/awx_collection/plugins/module_utils/tower_api.py
+++ b/awx_collection/plugins/module_utils/tower_api.py
@@ -1,7 +1,8 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-from . tower_module import TowerModule
+from .tower_module import TowerModule
from ansible.module_utils.urls import Request, SSLValidationError, ConnectionError
from ansible.module_utils.six import PY2
from ansible.module_utils.six.moves.urllib.error import HTTPError
@@ -22,18 +23,15 @@ class TowerAPIModule(TowerModule):
'tower': 'Red Hat Ansible Tower',
}
session = None
- IDENTITY_FIELDS = {
- 'users': 'username',
- 'workflow_job_template_nodes': 'identifier',
- 'instances': 'hostname'
- }
+ IDENTITY_FIELDS = {'users': 'username', 'workflow_job_template_nodes': 'identifier', 'instances': 'hostname'}
ENCRYPTED_STRING = "$encrypted$"
def __init__(self, argument_spec, direct_params=None, error_callback=None, warn_callback=None, **kwargs):
kwargs['supports_check_mode'] = True
- super(TowerAPIModule, self).__init__(argument_spec=argument_spec, direct_params=direct_params,
- error_callback=error_callback, warn_callback=warn_callback, **kwargs)
+ super(TowerAPIModule, self).__init__(
+ argument_spec=argument_spec, direct_params=direct_params, error_callback=error_callback, warn_callback=warn_callback, **kwargs
+ )
self.session = Request(cookies=CookieJar(), validate_certs=self.verify_ssl)
if 'update_secrets' in self.params:
@@ -43,11 +41,7 @@ class TowerAPIModule(TowerModule):
@staticmethod
def param_to_endpoint(name):
- exceptions = {
- 'inventory': 'inventories',
- 'target_team': 'teams',
- 'workflow': 'workflow_job_templates'
- }
+ exceptions = {'inventory': 'inventories', 'target_team': 'teams', 'workflow': 'workflow_job_templates'}
return exceptions.get(name, '{0}s'.format(name))
@staticmethod
@@ -168,14 +162,12 @@ class TowerAPIModule(TowerModule):
if len(sample['json']['results']) > 1:
sample['json']['results'] = sample['json']['results'][:2] + ['...more results snipped...']
url = self.build_url(endpoint, query_params)
- display_endpoint = url.geturl()[len(self.host):] # truncate to not include the base URL
+ display_endpoint = url.geturl()[len(self.host) :] # truncate to not include the base URL
self.fail_json(
- msg="Request to {0} returned {1} items, expected 1".format(
- display_endpoint, response['json']['count']
- ),
+ msg="Request to {0} returned {1} items, expected 1".format(display_endpoint, response['json']['count']),
query=query_params,
response=sample,
- total_results=response['json']['count']
+ total_results=response['json']['count'],
)
def get_exactly_one(self, endpoint, name_or_id=None, **kwargs):
@@ -215,11 +207,11 @@ class TowerAPIModule(TowerModule):
try:
response = self.session.open(method, url.geturl(), headers=headers, validate_certs=self.verify_ssl, follow_redirects=True, data=data)
- except(SSLValidationError) as ssl_err:
+ except (SSLValidationError) as ssl_err:
self.fail_json(msg="Could not establish a secure connection to your host ({1}): {0}.".format(url.netloc, ssl_err))
- except(ConnectionError) as con_err:
+ except (ConnectionError) as con_err:
self.fail_json(msg="There was a network error of some kind trying to connect to your host ({1}): {0}.".format(url.netloc, con_err))
- except(HTTPError) as he:
+ except (HTTPError) as he:
# Sanity check: Did the server send back some kind of internal error?
if he.code >= 500:
                self.fail_json(msg='The host sent back a server error ({0}): {1}. Please check the logs and try again later'.format(url.path, he))
@@ -254,7 +246,7 @@ class TowerAPIModule(TowerModule):
pass
else:
self.fail_json(msg="Unexpected return code when calling {0}: {1}".format(url.geturl(), he))
- except(Exception) as e:
+ except (Exception) as e:
self.fail_json(msg="There was an unknown error when trying to connect to {2}: {0} {1}".format(type(e).__name__, e, url.geturl()))
if not self.version_checked:
@@ -268,26 +260,22 @@ class TowerAPIModule(TowerModule):
tower_version = response.info().getheader('X-API-Product-Version', None)
if self._COLLECTION_TYPE not in self.collection_to_version or self.collection_to_version[self._COLLECTION_TYPE] != tower_type:
- self.warn("You are using the {0} version of this collection but connecting to {1}".format(
- self._COLLECTION_TYPE, tower_type
- ))
+ self.warn("You are using the {0} version of this collection but connecting to {1}".format(self._COLLECTION_TYPE, tower_type))
elif self._COLLECTION_VERSION != tower_version:
- self.warn("You are running collection version {0} but connecting to tower version {1}".format(
- self._COLLECTION_VERSION, tower_version
- ))
+ self.warn("You are running collection version {0} but connecting to tower version {1}".format(self._COLLECTION_VERSION, tower_version))
self.version_checked = True
response_body = ''
try:
response_body = response.read()
- except(Exception) as e:
+ except (Exception) as e:
self.fail_json(msg="Failed to read response body: {0}".format(e))
response_json = {}
if response_body and response_body != '':
try:
response_json = loads(response_body)
- except(Exception) as e:
+ except (Exception) as e:
self.fail_json(msg="Failed to parse the response json: {0}".format(e))
if PY2:
@@ -310,10 +298,15 @@ class TowerAPIModule(TowerModule):
try:
response = self.session.open(
- 'POST', api_token_url,
- validate_certs=self.verify_ssl, follow_redirects=True,
- force_basic_auth=True, url_username=self.username, url_password=self.password,
- data=dumps(login_data), headers={'Content-Type': 'application/json'}
+ 'POST',
+ api_token_url,
+ validate_certs=self.verify_ssl,
+ follow_redirects=True,
+ force_basic_auth=True,
+ url_username=self.username,
+ url_password=self.password,
+ data=dumps(login_data),
+ headers={'Content-Type': 'application/json'},
)
except HTTPError as he:
try:
@@ -321,7 +314,7 @@ class TowerAPIModule(TowerModule):
except Exception as e:
resp = 'unknown {0}'.format(e)
self.fail_json(msg='Failed to get token: {0}'.format(he), response=resp)
- except(Exception) as e:
+ except (Exception) as e:
# Sanity check: Did the server send back some kind of internal error?
self.fail_json(msg='Failed to get token: {0}'.format(e))
@@ -331,7 +324,7 @@ class TowerAPIModule(TowerModule):
response_json = loads(token_response)
self.oauth_token_id = response_json['id']
self.oauth_token = response_json['token']
- except(Exception) as e:
+ except (Exception) as e:
self.fail_json(msg="Failed to extract token information from login response: {0}".format(e), **{'response': token_response})
# If we have neither of these, then we can try unauthenticated access
@@ -429,9 +422,13 @@ class TowerAPIModule(TowerModule):
if item_type == 'workflow_job_template':
copy_get_check = self.get_endpoint(copy_from_lookup['related']['copy'])
if copy_get_check['status_code'] in [200]:
- if (copy_get_check['json']['can_copy'] and copy_get_check['json']['can_copy_without_user_input'] and not
- copy_get_check['json']['templates_unable_to_copy'] and not copy_get_check['json']['credentials_unable_to_copy'] and not
- copy_get_check['json']['inventories_unable_to_copy']):
+ if (
+ copy_get_check['json']['can_copy']
+ and copy_get_check['json']['can_copy_without_user_input']
+ and not copy_get_check['json']['templates_unable_to_copy']
+ and not copy_get_check['json']['credentials_unable_to_copy']
+ and not copy_get_check['json']['inventories_unable_to_copy']
+ ):
# Because checks have passed
self.json_output['copy_checks'] = 'passed'
else:
@@ -521,7 +518,8 @@ class TowerAPIModule(TowerModule):
self.warn(
'The field {0} of {1} {2} has encrypted data and may inaccurately report task is changed.'.format(
field, old.get('type', 'unknown'), old.get('id', 'unknown')
- ))
+ )
+ )
@staticmethod
def has_encrypted_values(obj):
@@ -612,8 +610,7 @@ class TowerAPIModule(TowerModule):
if response['status_code'] == 200:
# compare apples-to-apples, old API data to new API data
# but do so considering the fields given in parameters
- self.json_output['changed'] = self.objects_could_be_different(
- existing_item, response['json'], field_set=new_item.keys(), warning=True)
+ self.json_output['changed'] = self.objects_could_be_different(existing_item, response['json'], field_set=new_item.keys(), warning=True)
elif 'json' in response and '__all__' in response['json']:
self.fail_json(msg=response['json']['__all__'])
else:
@@ -651,7 +648,8 @@ class TowerAPIModule(TowerModule):
return self.update_if_needed(existing_item, new_item, on_update=on_update, auto_exit=auto_exit, associations=associations)
else:
return self.create_if_needed(
- existing_item, new_item, endpoint, on_create=on_create, item_type=item_type, auto_exit=auto_exit, associations=associations)
+ existing_item, new_item, endpoint, on_create=on_create, item_type=item_type, auto_exit=auto_exit, associations=associations
+ )
def logout(self):
if self.authenticated and self.oauth_token_id:
@@ -659,8 +657,7 @@ class TowerAPIModule(TowerModule):
# Build the URL for this token so we can revoke it with basic auth
api_token_url = (
self.url._replace(
- path='/api/v2/tokens/{0}/'.format(self.oauth_token_id),
- query=None # in error cases, fail_json exits before exception handling
+ path='/api/v2/tokens/{0}/'.format(self.oauth_token_id), query=None # in error cases, fail_json exits before exception handling
)
).geturl()
@@ -672,7 +669,7 @@ class TowerAPIModule(TowerModule):
follow_redirects=True,
force_basic_auth=True,
url_username=self.username,
- url_password=self.password
+ url_password=self.password,
)
self.oauth_token_id = None
self.authenticated = False
@@ -682,7 +679,7 @@ class TowerAPIModule(TowerModule):
except Exception as e:
resp = 'unknown {0}'.format(e)
self.warn('Failed to release tower token: {0}, response: {1}'.format(he, resp))
- except(Exception) as e:
+ except (Exception) as e:
# Catch-all: something other than an HTTP error went wrong revoking the token
self.warn('Failed to release tower token {0}: {1}'.format(self.oauth_token_id, e))
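The recurring except hunks above are black normalizing keyword spacing: it writes a space after `except`, but it leaves the (redundant) parentheses around a single exception class alone. A minimal sketch of the rule, using stand-in names that are not from this patch:

    def risky():
        raise ValueError("boom")

    try:
        risky()
    except (ValueError) as err:  # black emits `except (ValueError)`, never `except(ValueError)`
        print("handled: {0}".format(err))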
diff --git a/awx_collection/plugins/module_utils/tower_awxkit.py b/awx_collection/plugins/module_utils/tower_awxkit.py
index 03eb3a2a8f..83bb2990cd 100644
--- a/awx_collection/plugins/module_utils/tower_awxkit.py
+++ b/awx_collection/plugins/module_utils/tower_awxkit.py
@@ -1,13 +1,15 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-from . tower_module import TowerModule
+from .tower_module import TowerModule
from ansible.module_utils.basic import missing_required_lib
try:
from awxkit.api.client import Connection
from awxkit.api.pages.api import ApiV2
from awxkit.api import get_registered_page
+
HAS_AWX_KIT = True
except ImportError:
HAS_AWX_KIT = False
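The hunk above also shows the blank line black inserts between an import block and the code that follows it, even inside a try guard. A sketch of the same optional-dependency pattern using only the standard library (sqlite3 stands in for awxkit):

    try:
        import sqlite3  # stand-in for the optional awxkit dependency

        HAS_SQLITE3 = True
    except ImportError:
        HAS_SQLITE3 = False

    print(HAS_SQLITE3)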
diff --git a/awx_collection/plugins/module_utils/tower_legacy.py b/awx_collection/plugins/module_utils/tower_legacy.py
index 3c8408610d..faed980a8a 100644
--- a/awx_collection/plugins/module_utils/tower_legacy.py
+++ b/awx_collection/plugins/module_utils/tower_legacy.py
@@ -26,7 +26,8 @@
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import os
@@ -47,13 +48,13 @@ from ansible.module_utils.basic import AnsibleModule, missing_required_lib
def tower_auth_config(module):
- '''
+ """
`tower_auth_config` attempts to load the tower-cli.cfg file
specified from the `tower_config_file` parameter. If found,
it returns the contents of the file as a dictionary, else
it will attempt to fetch values from the module params and
only pass those values that have been set.
- '''
+ """
config_file = module.params.pop('tower_config_file', None)
if config_file:
if not os.path.exists(config_file):
@@ -103,15 +104,16 @@ class TowerLegacyModule(AnsibleModule):
args.update(argument_spec)
kwargs.setdefault('mutually_exclusive', [])
- kwargs['mutually_exclusive'].extend((
- ('tower_config_file', 'tower_host'),
- ('tower_config_file', 'tower_username'),
- ('tower_config_file', 'tower_password'),
- ('tower_config_file', 'validate_certs'),
- ))
+ kwargs['mutually_exclusive'].extend(
+ (
+ ('tower_config_file', 'tower_host'),
+ ('tower_config_file', 'tower_username'),
+ ('tower_config_file', 'tower_password'),
+ ('tower_config_file', 'validate_certs'),
+ )
+ )
super(TowerLegacyModule, self).__init__(argument_spec=args, **kwargs)
if not HAS_TOWER_CLI:
- self.fail_json(msg=missing_required_lib('ansible-tower-cli'),
- exception=TOWER_CLI_IMP_ERR)
+ self.fail_json(msg=missing_required_lib('ansible-tower-cli'), exception=TOWER_CLI_IMP_ERR)
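The docstring hunk in tower_auth_config is black's string normalization: it prefers double quotes, so '''-delimited docstrings become """-delimited. An illustrative stub, not taken from this patch:

    def stub():
        """Black rewrites '''-quoted docstrings into this double-quoted form."""
        return None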
diff --git a/awx_collection/plugins/module_utils/tower_module.py b/awx_collection/plugins/module_utils/tower_module.py
index 470ea80de3..60cc44efa5 100644
--- a/awx_collection/plugins/module_utils/tower_module.py
+++ b/awx_collection/plugins/module_utils/tower_module.py
@@ -1,4 +1,5 @@
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
from ansible.module_utils.basic import AnsibleModule, env_fallback
@@ -14,6 +15,7 @@ from distutils.util import strtobool
try:
import yaml
+
HAS_YAML = True
except ImportError:
HAS_YAML = False
@@ -139,15 +141,14 @@ class TowerModule(AnsibleModule):
# If we have a specified tower config, load it
if self.params.get('tower_config_file'):
- duplicated_params = [
- fn for fn in self.AUTH_ARGSPEC
- if fn != 'tower_config_file' and self.params.get(fn) is not None
- ]
+ duplicated_params = [fn for fn in self.AUTH_ARGSPEC if fn != 'tower_config_file' and self.params.get(fn) is not None]
if duplicated_params:
- self.warn((
- 'The parameter(s) {0} were provided at the same time as tower_config_file. '
- 'Precedence may be unstable, we suggest either using config file or params.'
- ).format(', '.join(duplicated_params)))
+ self.warn(
+ (
+ 'The parameter(s) {0} were provided at the same time as tower_config_file. '
+ 'Precedence may be unstable, we suggest either using config file or params.'
+ ).format(', '.join(duplicated_params))
+ )
try:
# TODO: warn if there are conflicts with other params
self.load_config(self.params.get('tower_config_file'))
@@ -186,7 +187,7 @@ class TowerModule(AnsibleModule):
raise AssertionError("The yaml config file is not properly formatted as a dict.")
try_config_parsing = False
- except(AttributeError, yaml.YAMLError, AssertionError):
+ except (AttributeError, yaml.YAMLError, AssertionError):
try_config_parsing = True
if try_config_parsing:
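The `except (AttributeError, yaml.YAMLError, AssertionError)` line above handles several exception types at once; the parentheses are mandatory for the tuple, and black only corrects the keyword spacing. A standard-library-only sketch of the same construct:

    def to_int(value):
        try:
            return int(value)
        except (TypeError, ValueError):  # one handler covering several exception types
            return None

    print(to_int("42"), to_int(None), to_int("x"))  # 42 None None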
diff --git a/awx_collection/plugins/modules/tower_ad_hoc_command.py b/awx_collection/plugins/modules/tower_ad_hoc_command.py
index 4ee416ccdf..3926df9471 100644
--- a/awx_collection/plugins/modules/tower_ad_hoc_command.py
+++ b/awx_collection/plugins/modules/tower_ad_hoc_command.py
@@ -6,12 +6,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
---
@@ -168,26 +167,25 @@ def main():
module.fail_json(msg="Failed to launch command, see response for details", **{'response': results})
if not wait:
- module.exit_json(**{
+ module.exit_json(
+ **{
+ 'changed': True,
+ 'id': results['json']['id'],
+ 'status': results['json']['status'],
+ }
+ )
+
+ # Invoke wait function
+ results = module.wait_on_url(url=results['json']['url'], object_name=module_name, object_type='Ad Hoc Command', timeout=timeout, interval=interval)
+
+ module.exit_json(
+ **{
'changed': True,
'id': results['json']['id'],
'status': results['json']['status'],
- })
-
- # Invoke wait function
- results = module.wait_on_url(
- url=results['json']['url'],
- object_name=module_name,
- object_type='Ad Hoc Command',
- timeout=timeout, interval=interval
+ }
)
- module.exit_json(**{
- 'changed': True,
- 'id': results['json']['id'],
- 'status': results['json']['status'],
- })
-
if __name__ == '__main__':
main()
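One thing worth noting about the restructured `if not wait:` branch above: there is no `return` after the early `module.exit_json(...)`, which is safe because AnsibleModule.exit_json ends the process. A simplified stand-in (exit_json_stub is hypothetical, not the real API):

    import sys

    def exit_json_stub(**result):
        # Like AnsibleModule.exit_json: report the result, then end the
        # process, so code after the call never runs.
        print(result)
        sys.exit(0)

    exit_json_stub(changed=True, id=1, status='pending')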
diff --git a/awx_collection/plugins/modules/tower_ad_hoc_command_cancel.py b/awx_collection/plugins/modules/tower_ad_hoc_command_cancel.py
index 4e88b22f7a..c909250e4f 100644
--- a/awx_collection/plugins/modules/tower_ad_hoc_command_cancel.py
+++ b/awx_collection/plugins/modules/tower_ad_hoc_command_cancel.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -85,11 +84,14 @@ def main():
timeout = module.params.get('timeout')
# Attempt to look up the command based on the provided name
- command = module.get_one('ad_hoc_commands', **{
- 'data': {
- 'id': command_id,
+ command = module.get_one(
+ 'ad_hoc_commands',
+ **{
+ 'data': {
+ 'id': command_id,
+ }
}
- })
+ )
if command is None:
module.fail_json(msg="Unable to find command with id {0}".format(command_id))
diff --git a/awx_collection/plugins/modules/tower_ad_hoc_command_wait.py b/awx_collection/plugins/modules/tower_ad_hoc_command_wait.py
index 7d1cc19418..13f890c33e 100644
--- a/awx_collection/plugins/modules/tower_ad_hoc_command_wait.py
+++ b/awx_collection/plugins/modules/tower_ad_hoc_command_wait.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -103,22 +102,20 @@ def main():
interval = module.params.get('interval')
# Attempt to look up command based on the provided id
- command = module.get_one('ad_hoc_commands', **{
- 'data': {
- 'id': command_id,
+ command = module.get_one(
+ 'ad_hoc_commands',
+ **{
+ 'data': {
+ 'id': command_id,
+ }
}
- })
+ )
if command is None:
module.fail_json(msg='Unable to wait on ad hoc command {0}; that ID does not exist in Tower.'.format(command_id))
# Invoke wait function
- module.wait_on_url(
- url=command['url'],
- object_name=command_id,
- object_type='ad hoc command',
- timeout=timeout, interval=interval
- )
+ module.wait_on_url(url=command['url'], object_name=command_id, object_type='ad hoc command', timeout=timeout, interval=interval)
module.exit_json(**module.json_output)
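The `**{'data': {...}}` idiom reformatted throughout these modules is ordinary keyword unpacking, equivalent to passing data= directly, as this sketch with a hypothetical get_one_stub demonstrates:

    def get_one_stub(endpoint, name_or_id=None, data=None):
        return (endpoint, name_or_id, data)

    assert get_one_stub('ad_hoc_commands', **{'data': {'id': 7}}) == get_one_stub('ad_hoc_commands', data={'id': 7})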
diff --git a/awx_collection/plugins/modules/tower_application.py b/awx_collection/plugins/modules/tower_application.py
index 54f53417d2..5841c74318 100644
--- a/awx_collection/plugins/modules/tower_application.py
+++ b/awx_collection/plugins/modules/tower_application.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -105,7 +104,7 @@ def main():
organization=dict(required=True),
redirect_uris=dict(type="list", elements='str'),
state=dict(choices=['present', 'absent'], default='present'),
- skip_authorization=dict(type='bool')
+ skip_authorization=dict(type='bool'),
)
# Create a module for ourselves
@@ -124,11 +123,7 @@ def main():
org_id = module.resolve_name_to_id('organizations', organization)
# Attempt to look up application based on the provided name and org ID
- application = module.get_one('applications', name_or_id=name, **{
- 'data': {
- 'organization': org_id
- }
- })
+ application = module.get_one('applications', name_or_id=name, **{'data': {'organization': org_id}})
if state == 'absent':
# If the state was absent, we can let the module delete it if needed; the module will handle exiting on its own
@@ -152,10 +147,7 @@ def main():
application_fields['redirect_uris'] = ' '.join(redirect_uris)
# If the state was present, we can let the module build or update the existing application; this will return on its own
- module.create_or_update_if_needed(
- application, application_fields,
- endpoint='applications', item_type='application'
- )
+ module.create_or_update_if_needed(application, application_fields, endpoint='applications', item_type='application')
if __name__ == '__main__':
diff --git a/awx_collection/plugins/modules/tower_credential.py b/awx_collection/plugins/modules/tower_credential.py
index a3609b818d..ad09769d2c 100644
--- a/awx_collection/plugins/modules/tower_credential.py
+++ b/awx_collection/plugins/modules/tower_credential.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -318,12 +317,25 @@ KIND_CHOICES = {
OLD_INPUT_NAMES = (
- 'authorize', 'authorize_password', 'client',
- 'security_token', 'secret', 'tenant', 'subscription',
- 'domain', 'become_method', 'become_username',
- 'become_password', 'vault_password', 'project', 'host',
- 'username', 'password', 'ssh_key_data', 'vault_id',
- 'ssh_key_unlock'
+ 'authorize',
+ 'authorize_password',
+ 'client',
+ 'security_token',
+ 'secret',
+ 'tenant',
+ 'subscription',
+ 'domain',
+ 'become_method',
+ 'become_username',
+ 'become_password',
+ 'vault_password',
+ 'project',
+ 'host',
+ 'username',
+ 'password',
+ 'ssh_key_data',
+ 'vault_id',
+ 'ssh_key_unlock',
)
@@ -409,8 +421,11 @@ def main():
if copy_from:
# copying creates a new item, which is returned and treated as the existing item from here on
credential = module.copy_item(
- credential, copy_from, name,
- endpoint='credentials', item_type='credential',
+ credential,
+ copy_from,
+ name,
+ endpoint='credentials',
+ item_type='credential',
copy_lookup_data=copy_lookup_data,
)
@@ -459,9 +474,7 @@ def main():
credential_fields['team'] = team_id
# If the state was present, we can let the module build or update the existing credential; this will return on its own
- module.create_or_update_if_needed(
- credential, credential_fields, endpoint='credentials', item_type='credential'
- )
+ module.create_or_update_if_needed(credential, credential_fields, endpoint='credentials', item_type='credential')
if __name__ == '__main__':
diff --git a/awx_collection/plugins/modules/tower_credential_input_source.py b/awx_collection/plugins/modules/tower_credential_input_source.py
index e53c632777..2fa32d612c 100644
--- a/awx_collection/plugins/modules/tower_credential_input_source.py
+++ b/awx_collection/plugins/modules/tower_credential_input_source.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
diff --git a/awx_collection/plugins/modules/tower_credential_type.py b/awx_collection/plugins/modules/tower_credential_type.py
index 37bb7f6502..d960f47603 100644
--- a/awx_collection/plugins/modules/tower_credential_type.py
+++ b/awx_collection/plugins/modules/tower_credential_type.py
@@ -6,12 +6,11 @@
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'status': ['preview'],
- 'supported_by': 'community',
- 'metadata_version': '1.1'}
+ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'metadata_version': '1.1'}
DOCUMENTATION = '''
@@ -83,14 +82,7 @@ RETURN = ''' # '''
from ..module_utils.tower_api import TowerAPIModule
-KIND_CHOICES = {
- 'ssh': 'Machine',
- 'vault': 'Ansible Vault',
- 'net': 'Network',
- 'scm': 'Source Control',
- 'cloud': 'Lots of others',
- 'insights': 'Insights'
-}
+KIND_CHOICES = {'ssh': 'Machine', 'vault': 'Ansible Vault', 'net': 'Network', 'scm': 'Source Control', 'cloud': 'Lots of others', 'insights': 'Insights'}
def main():
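The collapsed KIND_CHOICES line runs to roughly 150 characters, so the project has evidently configured black with a line length well above the default 88. The flip side is black's magic trailing comma: a collection that already ends in a comma stays exploded even when it would fit. A sketch of both stable layouts:

    # No trailing comma: black collapses this when it fits the line length.
    one_line = {'ssh': 'Machine', 'vault': 'Ansible Vault'}

    # Trailing comma present: black keeps one pair per line regardless of width.
    kept_exploded = {
        'ssh': 'Machine',
        'vault': 'Ansible Vault',
    }

    assert one_line == kept_exploded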
diff --git a/awx_collection/plugins/modules/tower_execution_environment.py b/awx_collection/plugins/modules/tower_execution_environment.py
index b728810323..4200016132 100644
--- a/awx_collection/plugins/modules/tower_execution_environment.py
+++ b/awx_collection/plugins/modules/tower_execution_environment.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -81,7 +80,7 @@ def main():
organization=dict(),
credential=dict(default=''),
state=dict(choices=['present', 'absent'], default='present'),
- pull=dict(choices=['always', 'missing', 'never'], default='missing')
+ pull=dict(choices=['always', 'missing', 'never'], default='missing'),
)
# Create a module for ourselves
@@ -118,11 +117,7 @@ def main():
if credential:
new_fields['credential'] = module.resolve_name_to_id('credentials', credential)
- module.create_or_update_if_needed(
- existing_item, new_fields,
- endpoint='execution_environments',
- item_type='execution_environment'
- )
+ module.create_or_update_if_needed(existing_item, new_fields, endpoint='execution_environments', item_type='execution_environment')
if __name__ == '__main__':
diff --git a/awx_collection/plugins/modules/tower_export.py b/awx_collection/plugins/modules/tower_export.py
index e3b2fa37d6..4e6b3b9a3a 100644
--- a/awx_collection/plugins/modules/tower_export.py
+++ b/awx_collection/plugins/modules/tower_export.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -104,6 +103,7 @@ from ..module_utils.tower_awxkit import TowerAWXKitModule
try:
from awxkit.api.pages.api import EXPORTABLE_RESOURCES
+
HAS_EXPORTABLE_RESOURCES = True
except ImportError:
HAS_EXPORTABLE_RESOURCES = False
diff --git a/awx_collection/plugins/modules/tower_group.py b/awx_collection/plugins/modules/tower_group.py
index e93fc02c09..4c1674bb7c 100644
--- a/awx_collection/plugins/modules/tower_group.py
+++ b/awx_collection/plugins/modules/tower_group.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -136,11 +135,7 @@ def main():
inventory_id = module.resolve_name_to_id('inventories', inventory)
# Attempt to look up the object based on the provided name and inventory ID
- group = module.get_one('groups', name_or_id=name, **{
- 'data': {
- 'inventory': inventory_id
- }
- })
+ group = module.get_one('groups', name_or_id=name, **{'data': {'inventory': inventory_id}})
if state == 'absent':
# If the state was absent, we can let the module delete it if needed; the module will handle exiting on its own
@@ -163,9 +158,13 @@ def main():
continue
id_list = []
for sub_name in name_list:
- sub_obj = module.get_one(resource, name_or_id=sub_name, **{
- 'data': {'inventory': inventory_id},
- })
+ sub_obj = module.get_one(
+ resource,
+ name_or_id=sub_name,
+ **{
+ 'data': {'inventory': inventory_id},
+ }
+ )
if sub_obj is None:
module.fail_json(msg='Could not find {0} with name {1}'.format(resource, sub_name))
id_list.append(sub_obj['id'])
@@ -178,10 +177,7 @@ def main():
association_fields[relationship] = id_list
# If the state was present, we can let the module build or update the existing group; this will return on its own
- module.create_or_update_if_needed(
- group, group_fields, endpoint='groups', item_type='group',
- associations=association_fields
- )
+ module.create_or_update_if_needed(group, group_fields, endpoint='groups', item_type='group', associations=association_fields)
if __name__ == '__main__':
diff --git a/awx_collection/plugins/modules/tower_host.py b/awx_collection/plugins/modules/tower_host.py
index 6c84f88a99..c56d5f877d 100644
--- a/awx_collection/plugins/modules/tower_host.py
+++ b/awx_collection/plugins/modules/tower_host.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -104,11 +103,7 @@ def main():
inventory_id = module.resolve_name_to_id('inventories', inventory)
# Attempt to look up host based on the provided name and inventory ID
- host = module.get_one('hosts', name_or_id=name, **{
- 'data': {
- 'inventory': inventory_id
- }
- })
+ host = module.get_one('hosts', name_or_id=name, **{'data': {'inventory': inventory_id}})
if state == 'absent':
# If the state was absent, we can let the module delete it if needed; the module will handle exiting on its own
diff --git a/awx_collection/plugins/modules/tower_import.py b/awx_collection/plugins/modules/tower_import.py
index 6b3282a847..76169f88ba 100644
--- a/awx_collection/plugins/modules/tower_import.py
+++ b/awx_collection/plugins/modules/tower_import.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -58,15 +57,14 @@ import logging
# In this module we don't use EXPORTABLE_RESOURCES; we just want to validate that our installed awxkit has import/export support
try:
from awxkit.api.pages.api import EXPORTABLE_RESOURCES
+
HAS_EXPORTABLE_RESOURCES = True
except ImportError:
HAS_EXPORTABLE_RESOURCES = False
def main():
- argument_spec = dict(
- assets=dict(type='dict', required=True)
- )
+ argument_spec = dict(assets=dict(type='dict', required=True))
module = TowerAWXKitModule(argument_spec=argument_spec, supports_check_mode=False)
diff --git a/awx_collection/plugins/modules/tower_instance_group.py b/awx_collection/plugins/modules/tower_instance_group.py
index d80e2e6ed1..4ec76174d3 100644
--- a/awx_collection/plugins/modules/tower_instance_group.py
+++ b/awx_collection/plugins/modules/tower_instance_group.py
@@ -6,12 +6,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
---
@@ -149,11 +148,13 @@ def main():
# If the state was present, we can let the module build or update the existing item; this will return on its own
module.create_or_update_if_needed(
- existing_item, new_fields,
- endpoint='instance_groups', item_type='instance_group',
+ existing_item,
+ new_fields,
+ endpoint='instance_groups',
+ item_type='instance_group',
associations={
'instances': instances_ids,
- }
+ },
)
diff --git a/awx_collection/plugins/modules/tower_inventory.py b/awx_collection/plugins/modules/tower_inventory.py
index 1ec8c5d271..10d469d047 100644
--- a/awx_collection/plugins/modules/tower_inventory.py
+++ b/awx_collection/plugins/modules/tower_inventory.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -127,18 +126,17 @@ def main():
org_id = module.resolve_name_to_id('organizations', organization)
# Attempt to look up inventory based on the provided name and org ID
- inventory = module.get_one('inventories', name_or_id=name, **{
- 'data': {
- 'organization': org_id
- }
- })
+ inventory = module.get_one('inventories', name_or_id=name, **{'data': {'organization': org_id}})
# Attempt to look up the inventory to copy based on the provided name
if copy_from:
# copying creates a new item, which is returned and treated as the existing item from here on
inventory = module.copy_item(
- inventory, copy_from, name,
- endpoint='inventories', item_type='inventory',
+ inventory,
+ copy_from,
+ name,
+ endpoint='inventories',
+ item_type='inventory',
copy_lookup_data={},
)
diff --git a/awx_collection/plugins/modules/tower_inventory_source.py b/awx_collection/plugins/modules/tower_inventory_source.py
index 0bde4d7af3..376317c9a4 100644
--- a/awx_collection/plugins/modules/tower_inventory_source.py
+++ b/awx_collection/plugins/modules/tower_inventory_source.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -163,9 +162,7 @@ def main():
#
# How do we handle manual and file? Tower does not seem to be able to activate them
#
- source=dict(choices=["scm", "ec2", "gce",
- "azure_rm", "vmware", "satellite6",
- "openstack", "rhv", "tower", "custom"]),
+ source=dict(choices=["scm", "ec2", "gce", "azure_rm", "vmware", "satellite6", "openstack", "rhv", "tower", "custom"]),
source_path=dict(),
source_script=dict(),
source_vars=dict(type='dict'),
@@ -211,11 +208,15 @@ def main():
if not inventory_object:
module.fail_json(msg='The specified inventory, {0}, was not found.'.format(lookup_data))
- inventory_source_object = module.get_one('inventory_sources', name_or_id=name, **{
- 'data': {
- 'inventory': inventory_object['id'],
+ inventory_source_object = module.get_one(
+ 'inventory_sources',
+ name_or_id=name,
+ **{
+ 'data': {
+ 'inventory': inventory_object['id'],
+ }
}
- })
+ )
if state == 'absent':
# If the state was absent, we can let the module delete it if needed; the module will handle exiting on its own
@@ -259,10 +260,20 @@ def main():
inventory_source_fields['source_script'] = module.resolve_name_to_id('inventory_scripts', source_script)
OPTIONAL_VARS = (
- 'description', 'source', 'source_path', 'source_vars',
- 'overwrite', 'overwrite_vars',
- 'timeout', 'verbosity', 'update_on_launch', 'update_cache_timeout',
- 'update_on_project_update', 'enabled_var', 'enabled_value', 'host_filter',
+ 'description',
+ 'source',
+ 'source_path',
+ 'source_vars',
+ 'overwrite',
+ 'overwrite_vars',
+ 'timeout',
+ 'verbosity',
+ 'update_on_launch',
+ 'update_cache_timeout',
+ 'update_on_project_update',
+ 'enabled_var',
+ 'enabled_value',
+ 'host_filter',
)
# Layer in all remaining optional information
@@ -281,9 +292,7 @@ def main():
# If the state was present, we can let the module build or update the existing inventory_source_object; this will return on its own
module.create_or_update_if_needed(
- inventory_source_object, inventory_source_fields,
- endpoint='inventory_sources', item_type='inventory source',
- associations=association_fields
+ inventory_source_object, inventory_source_fields, endpoint='inventory_sources', item_type='inventory source', associations=association_fields
)
diff --git a/awx_collection/plugins/modules/tower_inventory_source_update.py b/awx_collection/plugins/modules/tower_inventory_source_update.py
index 332dbc60ef..9026720fd5 100644
--- a/awx_collection/plugins/modules/tower_inventory_source_update.py
+++ b/awx_collection/plugins/modules/tower_inventory_source_update.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -117,9 +116,7 @@ def main():
if not inventory_object:
module.fail_json(msg='The specified inventory, {0}, was not found.'.format(lookup_data))
- inventory_source_object = module.get_one('inventory_sources',
- name_or_id=name,
- data={'inventory': inventory_object['id']})
+ inventory_source_object = module.get_one('inventory_sources', name_or_id=name, data={'inventory': inventory_object['id']})
if not inventory_source_object:
module.fail_json(msg='The specified inventory source was not found.')
@@ -139,10 +136,7 @@ def main():
# Invoke wait function
module.wait_on_url(
- url=inventory_source_update_results['json']['url'],
- object_name=inventory_object,
- object_type='inventory_update',
- timeout=timeout, interval=interval
+ url=inventory_source_update_results['json']['url'], object_name=inventory_object, object_type='inventory_update', timeout=timeout, interval=interval
)
module.exit_json(**module.json_output)
diff --git a/awx_collection/plugins/modules/tower_job_cancel.py b/awx_collection/plugins/modules/tower_job_cancel.py
index 7404d452a4..4e1fdbf620 100644
--- a/awx_collection/plugins/modules/tower_job_cancel.py
+++ b/awx_collection/plugins/modules/tower_job_cancel.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -68,11 +67,14 @@ def main():
fail_if_not_running = module.params.get('fail_if_not_running')
# Attempt to look up the job based on the provided name
- job = module.get_one('jobs', **{
- 'data': {
- 'id': job_id,
+ job = module.get_one(
+ 'jobs',
+ **{
+ 'data': {
+ 'id': job_id,
+ }
}
- })
+ )
if job is None:
module.fail_json(msg="Unable to find job with id {0}".format(job_id))
diff --git a/awx_collection/plugins/modules/tower_job_launch.py b/awx_collection/plugins/modules/tower_job_launch.py
index 94828c8b77..d1e47c9b68 100644
--- a/awx_collection/plugins/modules/tower_job_launch.py
+++ b/awx_collection/plugins/modules/tower_job_launch.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -248,26 +247,25 @@ def main():
module.fail_json(msg="Failed to launch job, see response for details", **{'response': results})
if not wait:
- module.exit_json(**{
+ module.exit_json(
+ **{
+ 'changed': True,
+ 'id': results['json']['id'],
+ 'status': results['json']['status'],
+ }
+ )
+
+ # Invoke wait function
+ results = module.wait_on_url(url=results['json']['url'], object_name=name, object_type='Job', timeout=timeout, interval=interval)
+
+ module.exit_json(
+ **{
'changed': True,
'id': results['json']['id'],
'status': results['json']['status'],
- })
-
- # Invoke wait function
- results = module.wait_on_url(
- url=results['json']['url'],
- object_name=name,
- object_type='Job',
- timeout=timeout, interval=interval
+ }
)
- module.exit_json(**{
- 'changed': True,
- 'id': results['json']['id'],
- 'status': results['json']['status'],
- })
-
if __name__ == '__main__':
main()
diff --git a/awx_collection/plugins/modules/tower_job_list.py b/awx_collection/plugins/modules/tower_job_list.py
index 642a48b03b..0bbb79c7e1 100644
--- a/awx_collection/plugins/modules/tower_job_list.py
+++ b/awx_collection/plugins/modules/tower_job_list.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -97,7 +96,7 @@ def main():
argument_spec=argument_spec,
mutually_exclusive=[
('page', 'all_pages'),
- ]
+ ],
)
# Extract our parameters
diff --git a/awx_collection/plugins/modules/tower_job_template.py b/awx_collection/plugins/modules/tower_job_template.py
index 094de9127b..e7ae3f96ff 100644
--- a/awx_collection/plugins/modules/tower_job_template.py
+++ b/awx_collection/plugins/modules/tower_job_template.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -447,8 +446,11 @@ def main():
if copy_from:
# copying creates a new item, which is returned and treated as the existing item from here on
existing_item = module.copy_item(
- existing_item, copy_from, name,
- endpoint='job_templates', item_type='job_template',
+ existing_item,
+ copy_from,
+ name,
+ endpoint='job_templates',
+ item_type='job_template',
copy_lookup_data={},
)
@@ -459,12 +461,36 @@ def main():
# Create the data that gets sent for create and update
new_fields['name'] = new_name if new_name else (module.get_item_name(existing_item) if existing_item else name)
for field_name in (
- 'description', 'job_type', 'playbook', 'scm_branch', 'forks', 'limit', 'verbosity',
- 'job_tags', 'force_handlers', 'skip_tags', 'start_at_task', 'timeout', 'use_fact_cache',
- 'host_config_key', 'ask_scm_branch_on_launch', 'ask_diff_mode_on_launch', 'ask_variables_on_launch',
- 'ask_limit_on_launch', 'ask_tags_on_launch', 'ask_skip_tags_on_launch', 'ask_job_type_on_launch',
- 'ask_verbosity_on_launch', 'ask_inventory_on_launch', 'ask_credential_on_launch', 'survey_enabled',
- 'become_enabled', 'diff_mode', 'allow_simultaneous', 'job_slice_count', 'webhook_service',
+ 'description',
+ 'job_type',
+ 'playbook',
+ 'scm_branch',
+ 'forks',
+ 'limit',
+ 'verbosity',
+ 'job_tags',
+ 'force_handlers',
+ 'skip_tags',
+ 'start_at_task',
+ 'timeout',
+ 'use_fact_cache',
+ 'host_config_key',
+ 'ask_scm_branch_on_launch',
+ 'ask_diff_mode_on_launch',
+ 'ask_variables_on_launch',
+ 'ask_limit_on_launch',
+ 'ask_tags_on_launch',
+ 'ask_skip_tags_on_launch',
+ 'ask_job_type_on_launch',
+ 'ask_verbosity_on_launch',
+ 'ask_inventory_on_launch',
+ 'ask_credential_on_launch',
+ 'survey_enabled',
+ 'become_enabled',
+ 'diff_mode',
+ 'allow_simultaneous',
+ 'job_slice_count',
+ 'webhook_service',
):
field_val = module.params.get(field_name)
if field_val is not None:
@@ -484,15 +510,17 @@ def main():
new_fields['inventory'] = module.resolve_name_to_id('inventories', inventory)
if project is not None:
if organization_id is not None:
- project_data = module.get_one('projects', name_or_id=project, **{
- 'data': {
- 'organization': organization_id,
+ project_data = module.get_one(
+ 'projects',
+ name_or_id=project,
+ **{
+ 'data': {
+ 'organization': organization_id,
+ }
}
- })
+ )
if project_data is None:
- module.fail_json(msg="The project {0} in organization {1} was not found on the Tower server".format(
- project, organization
- ))
+ module.fail_json(msg="The project {0} in organization {1} was not found on the Tower server".format(project, organization))
new_fields['project'] = project_data['id']
else:
new_fields['project'] = module.resolve_name_to_id('projects', project)
@@ -511,12 +539,12 @@ def main():
association_fields['labels'] = []
for item in labels:
association_fields['labels'].append(module.resolve_name_to_id('labels', item))
-# Code to use once Issue #7567 is resolved
-# search_fields = {'name': item}
-# if organization:
-# search_fields['organization'] = organization_id
-# label_id = module.get_one('labels', **{'data': search_fields})
-# association_fields['labels'].append(label_id)
+ # Code to use once Issue #7567 is resolved
+ # search_fields = {'name': item}
+ # if organization:
+ # search_fields['organization'] = organization_id
+ # label_id = module.get_one('labels', **{'data': search_fields})
+ # association_fields['labels'].append(label_id)
notifications_start = module.params.get('notification_templates_started')
if notifications_start is not None:
@@ -551,10 +579,13 @@ def main():
# If the state was present, we can let the module build or update the existing item; this will return on its own
module.create_or_update_if_needed(
- existing_item, new_fields,
- endpoint='job_templates', item_type='job_template',
+ existing_item,
+ new_fields,
+ endpoint='job_templates',
+ item_type='job_template',
associations=association_fields,
- on_create=on_change, on_update=on_change,
+ on_create=on_change,
+ on_update=on_change,
)
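The create_or_update_if_needed hunk above shows black's layout for a call that no longer fits on one line: every argument moves to its own line and a trailing comma is added. A self-contained sketch with a hypothetical stub:

    def create_or_update_stub(
        existing,
        fields,
        endpoint=None,
        item_type=None,
        associations=None,
        on_create=None,
        on_update=None,
    ):
        return (endpoint, item_type)

    print(
        create_or_update_stub(
            None,
            {'name': 'demo'},
            endpoint='job_templates',
            item_type='job_template',
            associations={},
            on_create=None,
            on_update=None,
        )
    )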
diff --git a/awx_collection/plugins/modules/tower_job_wait.py b/awx_collection/plugins/modules/tower_job_wait.py
index 9bbcd096ca..3c77e3f354 100644
--- a/awx_collection/plugins/modules/tower_job_wait.py
+++ b/awx_collection/plugins/modules/tower_job_wait.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -134,26 +133,24 @@ def main():
interval = abs((min_interval + max_interval) / 2)
module.deprecate(
msg="Min and max interval have been deprecated, please use interval instead; interval will be set to {0}".format(interval),
- version="ansible.tower:4.0.0"
+ version="ansible.tower:4.0.0",
)
# Attempt to look up job based on the provided id
- job = module.get_one(job_type, **{
- 'data': {
- 'id': job_id,
+ job = module.get_one(
+ job_type,
+ **{
+ 'data': {
+ 'id': job_id,
+ }
}
- })
+ )
if job is None:
module.fail_json(msg='Unable to wait on ' + job_type.rstrip("s") + ' {0}; that ID does not exist in Tower.'.format(job_id))
# Invoke wait function
- result = module.wait_on_url(
- url=job['url'],
- object_name=job_id,
- object_type='legacy_job_wait',
- timeout=timeout, interval=interval
- )
+ result = module.wait_on_url(url=job['url'], object_name=job_id, object_type='legacy_job_wait', timeout=timeout, interval=interval)
module.exit_json(**module.json_output)
diff --git a/awx_collection/plugins/modules/tower_label.py b/awx_collection/plugins/modules/tower_label.py
index 7c520064a8..52a67ed64f 100644
--- a/awx_collection/plugins/modules/tower_label.py
+++ b/awx_collection/plugins/modules/tower_label.py
@@ -6,12 +6,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
---
@@ -80,11 +79,15 @@ def main():
organization_id = module.resolve_name_to_id('organizations', organization)
# Attempt to look up an existing item based on the provided data
- existing_item = module.get_one('labels', name_or_id=name, **{
- 'data': {
- 'organization': organization_id,
+ existing_item = module.get_one(
+ 'labels',
+ name_or_id=name,
+ **{
+ 'data': {
+ 'organization': organization_id,
+ }
}
- })
+ )
# Create the data that gets sent for create and update
new_fields = {}
@@ -92,12 +95,7 @@ def main():
if organization:
new_fields['organization'] = organization_id
- module.create_or_update_if_needed(
- existing_item, new_fields,
- endpoint='labels', item_type='label',
- associations={
- }
- )
+ module.create_or_update_if_needed(existing_item, new_fields, endpoint='labels', item_type='label', associations={})
if __name__ == '__main__':
diff --git a/awx_collection/plugins/modules/tower_license.py b/awx_collection/plugins/modules/tower_license.py
index 165d5255ab..b751f4fe7d 100644
--- a/awx_collection/plugins/modules/tower_license.py
+++ b/awx_collection/plugins/modules/tower_license.py
@@ -5,11 +5,10 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -62,9 +61,7 @@ def main():
module.fail_json(msg='You must accept the EULA by passing in the param eula_accepted as True')
try:
- manifest = base64.b64encode(
- open(module.params.get('manifest'), 'rb').read()
- )
+ manifest = base64.b64encode(open(module.params.get('manifest'), 'rb').read())
except OSError as e:
module.fail_json(msg=str(e))
@@ -72,10 +69,7 @@ def main():
if module.check_mode:
module.exit_json(**json_output)
- module.post_endpoint('config', data={
- 'eula_accepted': True,
- 'manifest': manifest.decode()
- })
+ module.post_endpoint('config', data={'eula_accepted': True, 'manifest': manifest.decode()})
module.exit_json(**json_output)
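A caveat on the collapsed manifest line above: `open(...).read()` leaves the file handle to be closed by garbage collection. A minimal sketch of the same read-and-encode step with a context manager, reading this script itself as a stand-in for the manifest file:

    import base64

    with open(__file__, 'rb') as manifest_file:
        manifest = base64.b64encode(manifest_file.read())

    print(manifest[:16].decode())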
diff --git a/awx_collection/plugins/modules/tower_meta.py b/awx_collection/plugins/modules/tower_meta.py
index 9455bdf0f4..a62c3f5113 100644
--- a/awx_collection/plugins/modules/tower_meta.py
+++ b/awx_collection/plugins/modules/tower_meta.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
---
@@ -67,17 +66,9 @@ from ..module_utils.tower_api import TowerAPIModule
def main():
module = TowerAPIModule(argument_spec={})
- namespace = {
- 'awx': 'awx',
- 'tower': 'ansible'
- }.get(module._COLLECTION_TYPE, 'unknown')
+ namespace = {'awx': 'awx', 'tower': 'ansible'}.get(module._COLLECTION_TYPE, 'unknown')
namespace_name = '{0}.{1}'.format(namespace, module._COLLECTION_TYPE)
- module.exit_json(
- prefix=namespace_name,
- name=module._COLLECTION_TYPE,
- namespace=namespace,
- version=module._COLLECTION_VERSION
- )
+ module.exit_json(prefix=namespace_name, name=module._COLLECTION_TYPE, namespace=namespace, version=module._COLLECTION_VERSION)
if __name__ == '__main__':
diff --git a/awx_collection/plugins/modules/tower_notification_template.py b/awx_collection/plugins/modules/tower_notification_template.py
index 26f41787e8..6d2af69a97 100644
--- a/awx_collection/plugins/modules/tower_notification_template.py
+++ b/awx_collection/plugins/modules/tower_notification_template.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -317,13 +316,31 @@ RETURN = ''' # '''
from ..module_utils.tower_api import TowerAPIModule
OLD_INPUT_NAMES = (
- 'username', 'sender', 'recipients', 'use_tls',
- 'host', 'use_ssl', 'password', 'port',
- 'channels', 'token', 'account_token', 'from_number',
- 'to_numbers', 'account_sid', 'subdomain', 'service_key',
- 'client_name', 'message_from', 'color',
- 'notify', 'url', 'headers', 'server',
- 'nickname', 'targets',
+ 'username',
+ 'sender',
+ 'recipients',
+ 'use_tls',
+ 'host',
+ 'use_ssl',
+ 'password',
+ 'port',
+ 'channels',
+ 'token',
+ 'account_token',
+ 'from_number',
+ 'to_numbers',
+ 'account_sid',
+ 'subdomain',
+ 'service_key',
+ 'client_name',
+ 'message_from',
+ 'color',
+ 'notify',
+ 'url',
+ 'headers',
+ 'server',
+ 'nickname',
+ 'targets',
)
@@ -335,10 +352,7 @@ def main():
copy_from=dict(),
description=dict(),
organization=dict(),
- notification_type=dict(choices=[
- 'email', 'grafana', 'irc', 'mattermost',
- 'pagerduty', 'rocketchat', 'slack', 'twilio', 'webhook'
- ]),
+ notification_type=dict(choices=['email', 'grafana', 'irc', 'mattermost', 'pagerduty', 'rocketchat', 'slack', 'twilio', 'webhook']),
notification_configuration=dict(type='dict'),
messages=dict(type='dict'),
username=dict(),
@@ -387,8 +401,8 @@ def main():
for legacy_input in OLD_INPUT_NAMES:
if module.params.get(legacy_input) is not None:
module.deprecate(
- msg='{0} parameter has been deprecated, please use notification_configuration instead'.format(legacy_input),
- version="ansible.tower:4.0.0")
+ msg='{0} parameter has been deprecated, please use notification_configuration instead'.format(legacy_input), version="ansible.tower:4.0.0"
+ )
# Attempt to look up the related items the user specified (these will fail the module if not found)
organization_id = None
@@ -396,18 +410,25 @@ def main():
organization_id = module.resolve_name_to_id('organizations', organization)
# Attempt to look up an existing item based on the provided data
- existing_item = module.get_one('notification_templates', name_or_id=name, **{
- 'data': {
- 'organization': organization_id,
+ existing_item = module.get_one(
+ 'notification_templates',
+ name_or_id=name,
+ **{
+ 'data': {
+ 'organization': organization_id,
+ }
}
- })
+ )
# Attempt to look up the notification template to copy based on the provided name
if copy_from:
# copying creates a new item, which is returned and treated as the existing item from here on
existing_item = module.copy_item(
- existing_item, copy_from, name,
- endpoint='notification_templates', item_type='notification_template',
+ existing_item,
+ copy_from,
+ name,
+ endpoint='notification_templates',
+ item_type='notification_template',
copy_lookup_data={},
)
@@ -439,12 +460,7 @@ def main():
new_fields['messages'] = messages
# If the state was present, we can let the module build or update the existing item; this will return on its own
- module.create_or_update_if_needed(
- existing_item, new_fields,
- endpoint='notification_templates', item_type='notification_template',
- associations={
- }
- )
+ module.create_or_update_if_needed(existing_item, new_fields, endpoint='notification_templates', item_type='notification_template', associations={})
if __name__ == '__main__':
diff --git a/awx_collection/plugins/modules/tower_organization.py b/awx_collection/plugins/modules/tower_organization.py
index 197099d391..4576094c3a 100644
--- a/awx_collection/plugins/modules/tower_organization.py
+++ b/awx_collection/plugins/modules/tower_organization.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -178,8 +177,10 @@ def main():
# If the state was present, we can let the module build or update the existing organization; this will return on its own
module.create_or_update_if_needed(
- organization, org_fields,
- endpoint='organizations', item_type='organization',
+ organization,
+ org_fields,
+ endpoint='organizations',
+ item_type='organization',
associations=association_fields,
)
diff --git a/awx_collection/plugins/modules/tower_project.py b/awx_collection/plugins/modules/tower_project.py
index 408aa203e9..1cc36c1471 100644
--- a/awx_collection/plugins/modules/tower_project.py
+++ b/awx_collection/plugins/modules/tower_project.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -226,10 +225,7 @@ def wait_for_project_update(module, last_request):
# Invoke wait function
module.wait_on_url(
- url=result['json']['url'],
- object_name=module.get_item_name(last_request),
- object_type='Project Update',
- timeout=timeout, interval=interval
+ url=result['json']['url'], object_name=module.get_item_name(last_request), object_type='Project Update', timeout=timeout, interval=interval
)
module.exit_json(**module.json_output)
@@ -298,8 +294,11 @@ def main():
if copy_from:
# copying creates a new item, which is returned and treated as the existing item from here on
project = module.copy_item(
- project, copy_from, name,
- endpoint='projects', item_type='project',
+ project,
+ copy_from,
+ name,
+ endpoint='projects',
+ item_type='project',
copy_lookup_data={},
)
@@ -341,9 +340,16 @@ def main():
}
for field_name in (
- 'scm_url', 'scm_branch', 'scm_refspec', 'scm_clean', 'scm_delete_on_update',
- 'timeout', 'scm_update_cache_timeout', 'custom_virtualenv',
- 'description', 'allow_override',
+ 'scm_url',
+ 'scm_branch',
+ 'scm_refspec',
+ 'scm_clean',
+ 'scm_delete_on_update',
+ 'timeout',
+ 'scm_update_cache_timeout',
+ 'custom_virtualenv',
+ 'description',
+ 'allow_override',
):
field_val = module.params.get(field_name)
if field_val is not None:
@@ -368,10 +374,7 @@ def main():
# If the state was present, we can let the module build or update the existing project; this will return on its own
module.create_or_update_if_needed(
- project, project_fields,
- endpoint='projects', item_type='project',
- associations=association_fields,
- on_create=on_change, on_update=on_change
+ project, project_fields, endpoint='projects', item_type='project', associations=association_fields, on_create=on_change, on_update=on_change
)
diff --git a/awx_collection/plugins/modules/tower_project_update.py b/awx_collection/plugins/modules/tower_project_update.py
index 80493b94ff..0ee764a5e4 100644
--- a/awx_collection/plugins/modules/tower_project_update.py
+++ b/awx_collection/plugins/modules/tower_project_update.py
@@ -4,11 +4,10 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.0',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
---
@@ -127,12 +126,7 @@ def main():
start = time.time()
# Invoke wait function
- module.wait_on_url(
- url=result['json']['url'],
- object_name=module.get_item_name(project),
- object_type='Project Update',
- timeout=timeout, interval=interval
- )
+ module.wait_on_url(url=result['json']['url'], object_name=module.get_item_name(project), object_type='Project Update', timeout=timeout, interval=interval)
module.exit_json(**module.json_output)
diff --git a/awx_collection/plugins/modules/tower_receive.py b/awx_collection/plugins/modules/tower_receive.py
index bd08682503..7952c88d5e 100644
--- a/awx_collection/plugins/modules/tower_receive.py
+++ b/awx_collection/plugins/modules/tower_receive.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['deprecated'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['deprecated'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -142,6 +141,7 @@ try:
from tower_cli.utils.exceptions import TowerCLIError
from tower_cli.conf import settings
+
TOWER_CLI_HAS_EXPORT = True
except ImportError:
TOWER_CLI_HAS_EXPORT = False
diff --git a/awx_collection/plugins/modules/tower_role.py b/awx_collection/plugins/modules/tower_role.py
index 980c187450..2b73f8eab4 100644
--- a/awx_collection/plugins/modules/tower_role.py
+++ b/awx_collection/plugins/modules/tower_role.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -157,9 +156,26 @@ def main():
argument_spec = dict(
user=dict(),
team=dict(),
- role=dict(choices=["admin", "read", "member", "execute", "adhoc", "update", "use", "approval",
- "auditor", "project_admin", "inventory_admin", "credential_admin",
- "workflow_admin", "notification_admin", "job_template_admin"], required=True),
+ role=dict(
+ choices=[
+ "admin",
+ "read",
+ "member",
+ "execute",
+ "adhoc",
+ "update",
+ "use",
+ "approval",
+ "auditor",
+ "project_admin",
+ "inventory_admin",
+ "credential_admin",
+ "workflow_admin",
+ "notification_admin",
+ "job_template_admin",
+ ],
+ required=True,
+ ),
target_team=dict(),
target_teams=dict(type='list', elements='str'),
inventory=dict(),
@@ -194,12 +210,10 @@ def main():
'organizations': 'organization',
'projects': 'project',
'target_teams': 'target_team',
- 'workflows': 'workflow'
+ 'workflows': 'workflow',
}
# Singular parameters
- resource_param_keys = (
- 'user', 'team', 'lookup_organization'
- )
+ resource_param_keys = ('user', 'team', 'lookup_organization')
resources = {}
for resource_group in resource_list_param_keys:
@@ -256,9 +270,9 @@ def main():
resource_roles = resource['summary_fields']['object_roles']
if role_field not in resource_roles:
available_roles = ', '.join(list(resource_roles.keys()))
- module.fail_json(msg='Resource {0} has no role {1}, available roles: {2}'.format(
- resource['url'], role_field, available_roles
- ), changed=False)
+ module.fail_json(
+ msg='Resource {0} has no role {1}, available roles: {2}'.format(resource['url'], role_field, available_roles), changed=False
+ )
role_data = resource_roles[role_field]
endpoint = '/roles/{0}/{1}/'.format(role_data['id'], module.param_to_endpoint(actor_type))
associations.setdefault(endpoint, [])
diff --git a/awx_collection/plugins/modules/tower_schedule.py b/awx_collection/plugins/modules/tower_schedule.py
index 66498c36cd..921b8e8761 100644
--- a/awx_collection/plugins/modules/tower_schedule.py
+++ b/awx_collection/plugins/modules/tower_schedule.py
@@ -6,12 +6,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
---
@@ -227,12 +226,7 @@ def main():
module.delete_if_needed(existing_item)
elif state == 'present':
# If the state was present and we can let the module build or update the existing item, this will return on its own
- module.create_or_update_if_needed(
- existing_item, new_fields,
- endpoint='schedules', item_type='schedule',
- associations={
- }
- )
+ module.create_or_update_if_needed(existing_item, new_fields, endpoint='schedules', item_type='schedule', associations={})
if __name__ == '__main__':
diff --git a/awx_collection/plugins/modules/tower_send.py b/awx_collection/plugins/modules/tower_send.py
index 772b2b67ec..dd87174b61 100644
--- a/awx_collection/plugins/modules/tower_send.py
+++ b/awx_collection/plugins/modules/tower_send.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['deprecated'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['deprecated'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -90,6 +89,7 @@ try:
from tower_cli.utils.exceptions import TowerCLIError
from tower_cli.conf import settings
+
TOWER_CLI_HAS_EXPORT = True
except ImportError:
TOWER_CLI_HAS_EXPORT = False
diff --git a/awx_collection/plugins/modules/tower_settings.py b/awx_collection/plugins/modules/tower_settings.py
index b0f39126c3..4e2f91dd14 100644
--- a/awx_collection/plugins/modules/tower_settings.py
+++ b/awx_collection/plugins/modules/tower_settings.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -74,6 +73,7 @@ from ..module_utils.tower_api import TowerAPIModule
try:
import yaml
+
HAS_YAML = True
except ImportError:
HAS_YAML = False
@@ -84,11 +84,7 @@ def coerce_type(module, value):
if value is None:
return value
- yaml_ish = bool((
- value.startswith('{') and value.endswith('}')
- ) or (
- value.startswith('[') and value.endswith(']'))
- )
+ yaml_ish = bool((value.startswith('{') and value.endswith('}')) or (value.startswith('[') and value.endswith(']')))
if yaml_ish:
if not HAS_YAML:
module.fail_json(msg="yaml is not installed, try 'pip install pyyaml'")
@@ -115,7 +111,7 @@ def main():
argument_spec=argument_spec,
required_one_of=[['name', 'settings']],
mutually_exclusive=[['name', 'settings']],
- required_if=[['name', 'present', ['value']]]
+ required_if=[['name', 'present', ['value']]],
)
# Extract our parameters
@@ -145,10 +141,7 @@ def main():
json_output['new_values'][a_setting] = new_settings[a_setting]
if module._diff:
- json_output['diff'] = {
- 'before': json_output['old_values'],
- 'after': json_output['new_values']
- }
+ json_output['diff'] = {'before': json_output['old_values'], 'after': json_output['new_values']}
# If nothing needs an update we can simply exit with the response (as not changed)
if not needs_update:
diff --git a/awx_collection/plugins/modules/tower_team.py b/awx_collection/plugins/modules/tower_team.py
index 9339610c75..a060223f61 100644
--- a/awx_collection/plugins/modules/tower_team.py
+++ b/awx_collection/plugins/modules/tower_team.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
@@ -87,21 +86,14 @@ def main():
org_id = module.resolve_name_to_id('organizations', organization)
# Attempt to look up team based on the provided name and org ID
- team = module.get_one('teams', name_or_id=name, **{
- 'data': {
- 'organization': org_id
- }
- })
+ team = module.get_one('teams', name_or_id=name, **{'data': {'organization': org_id}})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(team)
# Create the data that gets sent for create and update
- team_fields = {
- 'name': new_name if new_name else (module.get_item_name(team) if team else name),
- 'organization': org_id
- }
+ team_fields = {'name': new_name if new_name else (module.get_item_name(team) if team else name), 'organization': org_id}
if description is not None:
team_fields['description'] = description
diff --git a/awx_collection/plugins/modules/tower_token.py b/awx_collection/plugins/modules/tower_token.py
index ee6fd5c200..541a6c7dd1 100644
--- a/awx_collection/plugins/modules/tower_token.py
+++ b/awx_collection/plugins/modules/tower_token.py
@@ -6,12 +6,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
---
@@ -150,7 +149,12 @@ def main():
],
# If we are state absent make sure one of existing_token or existing_token_id are present
required_if=[
- ['state', 'absent', ('existing_token', 'existing_token_id'), True, ],
+ [
+ 'state',
+ 'absent',
+ ('existing_token', 'existing_token_id'),
+ True,
+ ],
],
)
@@ -164,11 +168,14 @@ def main():
if state == 'absent':
if not existing_token:
- existing_token = module.get_one('tokens', **{
- 'data': {
- 'id': existing_token_id,
+ existing_token = module.get_one(
+ 'tokens',
+ **{
+ 'data': {
+ 'id': existing_token_id,
+ }
}
- })
+ )
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(existing_token)
@@ -189,10 +196,11 @@ def main():
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(
- None, new_fields,
- endpoint='tokens', item_type='token',
- associations={
- },
+ None,
+ new_fields,
+ endpoint='tokens',
+ item_type='token',
+ associations={},
on_create=return_token,
)
diff --git a/awx_collection/plugins/modules/tower_user.py b/awx_collection/plugins/modules/tower_user.py
index b57266dbc0..871ca872ac 100644
--- a/awx_collection/plugins/modules/tower_user.py
+++ b/awx_collection/plugins/modules/tower_user.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
diff --git a/awx_collection/plugins/modules/tower_workflow_job_template.py b/awx_collection/plugins/modules/tower_workflow_job_template.py
index 6faa4c1353..ec0eecd41a 100644
--- a/awx_collection/plugins/modules/tower_workflow_job_template.py
+++ b/awx_collection/plugins/modules/tower_workflow_job_template.py
@@ -6,12 +6,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
---
@@ -240,8 +239,11 @@ def main():
if copy_from:
# a new existing item is formed when copying and is returned.
existing_item = module.copy_item(
- existing_item, copy_from, name,
- endpoint='workflow_job_templates', item_type='workflow_job_template',
+ existing_item,
+ copy_from,
+ name,
+ endpoint='workflow_job_templates',
+ item_type='workflow_job_template',
copy_lookup_data={},
)
@@ -260,10 +262,18 @@ def main():
# Create the data that gets sent for create and update
new_fields['name'] = new_name if new_name else (module.get_item_name(existing_item) if existing_item else name)
for field_name in (
- 'description', 'survey_enabled', 'allow_simultaneous',
- 'limit', 'scm_branch', 'extra_vars',
- 'ask_inventory_on_launch', 'ask_scm_branch_on_launch', 'ask_limit_on_launch', 'ask_variables_on_launch',
- 'webhook_service',):
+ 'description',
+ 'survey_enabled',
+ 'allow_simultaneous',
+ 'limit',
+ 'scm_branch',
+ 'extra_vars',
+ 'ask_inventory_on_launch',
+ 'ask_scm_branch_on_launch',
+ 'ask_limit_on_launch',
+ 'ask_variables_on_launch',
+ 'webhook_service',
+ ):
field_val = module.params.get(field_name)
if field_val:
new_fields[field_name] = field_val
@@ -302,12 +312,12 @@ def main():
association_fields['labels'] = []
for item in labels:
association_fields['labels'].append(module.resolve_name_to_id('labels', item))
-# Code to use once Issue #7567 is resolved
-# search_fields = {'name': item}
-# if organization:
-# search_fields['organization'] = organization_id
-# label_id = module.get_one('labels', **{'data': search_fields})
-# association_fields['labels'].append(label_id)
+ # Code to use once Issue #7567 is resolved
+ # search_fields = {'name': item}
+ # if organization:
+ # search_fields['organization'] = organization_id
+ # label_id = module.get_one('labels', **{'data': search_fields})
+ # association_fields['labels'].append(label_id)
on_change = None
new_spec = module.params.get('survey_spec')
@@ -324,10 +334,13 @@ def main():
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(
- existing_item, new_fields,
- endpoint='workflow_job_templates', item_type='workflow_job_template',
+ existing_item,
+ new_fields,
+ endpoint='workflow_job_templates',
+ item_type='workflow_job_template',
associations=association_fields,
- on_create=on_change, on_update=on_change
+ on_create=on_change,
+ on_update=on_change,
)
diff --git a/awx_collection/plugins/modules/tower_workflow_job_template_node.py b/awx_collection/plugins/modules/tower_workflow_job_template_node.py
index 0ca4021247..bb3c64fcab 100644
--- a/awx_collection/plugins/modules/tower_workflow_job_template_node.py
+++ b/awx_collection/plugins/modules/tower_workflow_job_template_node.py
@@ -6,12 +6,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
---
@@ -234,13 +233,9 @@ def main():
if organization:
organization_id = module.resolve_name_to_id('organizations', organization)
wfjt_search_fields['organization'] = organization_id
- wfjt_data = module.get_one('workflow_job_templates', name_or_id=workflow_job_template, **{
- 'data': wfjt_search_fields
- })
+ wfjt_data = module.get_one('workflow_job_templates', name_or_id=workflow_job_template, **{'data': wfjt_search_fields})
if wfjt_data is None:
- module.fail_json(msg="The workflow {0} in organization {1} was not found on the Tower server".format(
- workflow_job_template, organization
- ))
+ module.fail_json(msg="The workflow {0} in organization {1} was not found on the Tower server".format(workflow_job_template, organization))
workflow_job_template_id = wfjt_data['id']
search_fields['workflow_job_template'] = new_fields['workflow_job_template'] = workflow_job_template_id
@@ -261,8 +256,17 @@ def main():
# Create the data that gets sent for create and update
for field_name in (
- 'identifier', 'extra_data', 'scm_branch', 'job_type', 'job_tags', 'skip_tags',
- 'limit', 'diff_mode', 'verbosity', 'all_parents_must_converge',):
+ 'identifier',
+ 'extra_data',
+ 'scm_branch',
+ 'job_type',
+ 'job_tags',
+ 'skip_tags',
+ 'limit',
+ 'diff_mode',
+ 'verbosity',
+ 'all_parents_must_converge',
+ ):
field_val = module.params.get(field_name)
if field_val:
new_fields[field_name] = field_val
@@ -294,9 +298,12 @@ def main():
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(
- existing_item, new_fields,
- endpoint='workflow_job_template_nodes', item_type='workflow_job_template_node', auto_exit=not approval_node,
- associations=association_fields
+ existing_item,
+ new_fields,
+ endpoint='workflow_job_template_nodes',
+ item_type='workflow_job_template_node',
+ auto_exit=not approval_node,
+ associations=association_fields,
)
# Create approval node unified template or update existing
@@ -326,9 +333,7 @@ def main():
existing_item = module.get_endpoint(workflow_job_template_node['related']['unified_job_template'])['json']
approval_endpoint = 'workflow_job_template_nodes/{0}/create_approval_template/'.format(workflow_job_template_node_id)
module.create_or_update_if_needed(
- existing_item, new_fields,
- endpoint=approval_endpoint, item_type='workflow_job_template_approval_node',
- associations=association_fields
+ existing_item, new_fields, endpoint=approval_endpoint, item_type='workflow_job_template_approval_node', associations=association_fields
)
module.exit_json(**module.json_output)
diff --git a/awx_collection/plugins/modules/tower_workflow_launch.py b/awx_collection/plugins/modules/tower_workflow_launch.py
index aebf890d56..cee0c2c649 100644
--- a/awx_collection/plugins/modules/tower_workflow_launch.py
+++ b/awx_collection/plugins/modules/tower_workflow_launch.py
@@ -4,11 +4,10 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
+ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
DOCUMENTATION = '''
---
@@ -180,12 +179,7 @@ def main():
module.exit_json(**module.json_output)
# Invoke wait function
- module.wait_on_url(
- url=result['json']['url'],
- object_name=name,
- object_type='Workflow Job',
- timeout=timeout, interval=interval
- )
+ module.wait_on_url(url=result['json']['url'], object_name=name, object_type='Workflow Job', timeout=timeout, interval=interval)
module.exit_json(**module.json_output)
diff --git a/awx_collection/plugins/modules/tower_workflow_template.py b/awx_collection/plugins/modules/tower_workflow_template.py
index a8557b2ad2..328823bd58 100644
--- a/awx_collection/plugins/modules/tower_workflow_template.py
+++ b/awx_collection/plugins/modules/tower_workflow_template.py
@@ -5,12 +5,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
-ANSIBLE_METADATA = {'status': ['deprecated'],
- 'supported_by': 'community',
- 'metadata_version': '1.1'}
+ANSIBLE_METADATA = {'status': ['deprecated'], 'supported_by': 'community', 'metadata_version': '1.1'}
DOCUMENTATION = '''
@@ -108,11 +107,7 @@ EXAMPLES = '''
RETURN = ''' # '''
-from ..module_utils.tower_legacy import (
- TowerLegacyModule,
- tower_auth_config,
- tower_check_mode
-)
+from ..module_utils.tower_legacy import TowerLegacyModule, tower_auth_config, tower_check_mode
import json
@@ -140,16 +135,16 @@ def main():
state=dict(choices=['present', 'absent'], default='present'),
)
- module = TowerLegacyModule(
- argument_spec=argument_spec,
- supports_check_mode=False
- )
+ module = TowerLegacyModule(argument_spec=argument_spec, supports_check_mode=False)
- module.deprecate(msg=(
- "This module is replaced by the combination of tower_workflow_job_template and "
- "tower_workflow_job_template_node. This uses the old tower-cli and wll be "
- "removed in 2022."
- ), version='awx.awx:14.0.0')
+ module.deprecate(
+ msg=(
+ "This module is replaced by the combination of tower_workflow_job_template and "
+ "tower_workflow_job_template_node. This uses the old tower-cli and wll be "
+ "removed in 2022."
+ ),
+ version='awx.awx:14.0.0',
+ )
name = module.params.get('name')
state = module.params.get('state')
@@ -159,10 +154,7 @@ def main():
schema = module.params.get('schema')
if schema and state == 'absent':
- module.fail_json(
- msg='Setting schema when state is absent is not allowed',
- changed=False
- )
+ module.fail_json(msg='Setting schema when state is absent is not allowed', changed=False)
json_output = {'workflow_template': name, 'state': state}
@@ -179,15 +171,10 @@ def main():
if module.params.get('organization'):
organization_res = tower_cli.get_resource('organization')
try:
- organization = organization_res.get(
- name=module.params.get('organization'))
+ organization = organization_res.get(name=module.params.get('organization'))
params['organization'] = organization['id']
except exc.NotFound as excinfo:
- module.fail_json(
- msg='Failed to update organization source, '
- 'organization not found: {0}'.format(excinfo),
- changed=False
- )
+ module.fail_json(msg='Failed to update organization source, ' 'organization not found: {0}'.format(excinfo), changed=False)
if module.params.get('survey'):
params['survey_spec'] = module.params.get('survey')
@@ -198,8 +185,7 @@ def main():
if module.params.get('ask_inventory'):
params['ask_inventory_on_launch'] = module.params.get('ask_inventory')
- for key in ('allow_simultaneous', 'inventory',
- 'survey_enabled', 'description'):
+ for key in ('allow_simultaneous', 'inventory', 'survey_enabled', 'description'):
if module.params.get(key):
params[key] = module.params.get(key)
@@ -219,8 +205,13 @@ def main():
params['fail_on_missing'] = False
result = wfjt_res.delete(**params)
except (exc.ConnectionError, exc.BadRequest, exc.AuthError) as excinfo:
- module.fail_json(msg='Failed to update workflow template: \
- {0}'.format(excinfo), changed=False)
+ module.fail_json(
+ msg='Failed to update workflow template: \
+ {0}'.format(
+ excinfo
+ ),
+ changed=False,
+ )
json_output['changed'] = result['changed']
module.exit_json(**json_output)
diff --git a/awx_collection/setup.cfg b/awx_collection/setup.cfg
deleted file mode 100644
index fdfea44c7e..0000000000
--- a/awx_collection/setup.cfg
+++ /dev/null
@@ -1,3 +0,0 @@
-[flake8]
-max-line-length=160
-ignore=E402
\ No newline at end of file
diff --git a/awx_collection/test/awx/conftest.py b/awx_collection/test/awx/conftest.py
index 10774b9b34..969bb96da0 100644
--- a/awx_collection/test/awx/conftest.py
+++ b/awx_collection/test/awx/conftest.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import io
@@ -21,6 +22,7 @@ from django.db import transaction
try:
import tower_cli # noqa
+
HAS_TOWER_CLI = True
except ImportError:
HAS_TOWER_CLI = False
@@ -30,6 +32,7 @@ try:
# However, awxkit will not contain api which causes a stack failure down on line 170 when we try to mock it.
# So here we are importing awxkit.api to prevent that. Then you only get an error on tests for awxkit functionality.
import awxkit.api
+
HAS_AWX_KIT = True
except ImportError:
HAS_AWX_KIT = False
@@ -38,9 +41,9 @@ logger = logging.getLogger('awx.main.tests')
def sanitize_dict(din):
- '''Sanitize Django response data to purge it of internal types
+ """Sanitize Django response data to purge it of internal types
so it may be used to cast a requests response object
- '''
+ """
if isinstance(din, (int, str, type(None), bool)):
return din # native JSON types, no problem
elif isinstance(din, datetime.datetime):
@@ -62,9 +65,7 @@ def collection_path_set(monkeypatch):
"""Monkey patch sys.path, insert the root of the collection folder
so that content can be imported without being fully packaged
"""
- base_folder = os.path.abspath(
- os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)
- )
+ base_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
monkeypatch.syspath_prepend(base_folder)
@@ -76,15 +77,16 @@ def collection_import():
go through this fixture so that can be changed if needed.
For instance, we could switch to fully-qualified import paths.
"""
+
def rf(path):
return importlib.import_module(path)
+
return rf
@pytest.fixture
def run_module(request, collection_import):
def rf(module_name, module_params, request_user):
-
def new_request(self, method, url, **kwargs):
kwargs_copy = kwargs.copy()
if 'data' in kwargs:
@@ -95,8 +97,7 @@ def run_module(request, collection_import):
elif isinstance(kwargs['data'], str):
kwargs_copy['data'] = json.loads(kwargs['data'])
else:
- raise RuntimeError('Expected data to be dict or str, got {0}, data: {1}'.format(
- type(kwargs['data']), kwargs['data']))
+ raise RuntimeError('Expected data to be dict or str, got {0}, data: {1}'.format(type(kwargs['data']), kwargs['data']))
if 'params' in kwargs and method == 'GET':
# query params for GET are handled a bit differently by
# tower-cli and python requests as opposed to REST framework APIRequestFactory
@@ -123,11 +124,7 @@ def run_module(request, collection_import):
resp.headers = {'X-API-Product-Name': 'AWX', 'X-API-Product-Version': '0.0.1-devel'}
if request.config.getoption('verbose') > 0:
- logger.info(
- '%s %s by %s, code:%s',
- method, '/api/' + url.split('/api/')[1],
- request_user.username, resp.status_code
- )
+ logger.info('%s %s by %s, code:%s', method, '/api/' + url.split('/api/')[1], request_user.username, resp.status_code)
resp.request = PreparedRequest()
resp.request.prepare(method=method, url=url)
@@ -135,10 +132,7 @@ def run_module(request, collection_import):
def new_open(self, method, url, **kwargs):
r = new_request(self, method, url, **kwargs)
- m = mock.MagicMock(read=mock.MagicMock(return_value=r._content),
- status=r.status_code,
- getheader=mock.MagicMock(side_effect=r.headers.get)
- )
+ m = mock.MagicMock(read=mock.MagicMock(return_value=r._content), status=r.status_code, getheader=mock.MagicMock(side_effect=r.headers.get))
return m
stdout_buffer = io.StringIO()
@@ -163,7 +157,7 @@ def run_module(request, collection_import):
elif getattr(resource_module, 'TowerLegacyModule', None):
resource_class = resource_module.TowerLegacyModule
else:
- raise("The module has neither a TowerLegacyModule, TowerAWXKitModule or a TowerAPIModule")
+ raise ("The module has neither a TowerLegacyModule, TowerAWXKitModule or a TowerAPIModule")
with mock.patch.object(resource_class, '_load_params', new=mock_load_params):
# Call the test utility (like a mock server) instead of issuing HTTP requests
@@ -204,18 +198,9 @@ def run_module(request, collection_import):
@pytest.fixture
def survey_spec():
return {
- "spec": [
- {
- "index": 0,
- "question_name": "my question?",
- "default": "mydef",
- "variable": "myvar",
- "type": "text",
- "required": False
- }
- ],
+ "spec": [{"index": 0, "question_name": "my question?", "default": "mydef", "variable": "myvar", "type": "text", "required": False}],
"description": "test",
- "name": "test"
+ "name": "test",
}
@@ -234,46 +219,32 @@ def project(organization):
local_path='_92__test_proj',
scm_revision='1234567890123456789012345678901234567890',
scm_url='localhost',
- scm_type='git'
+ scm_type='git',
)
@pytest.fixture
def inventory(organization):
- return Inventory.objects.create(
- name='test-inv',
- organization=organization
- )
+ return Inventory.objects.create(name='test-inv', organization=organization)
@pytest.fixture
def job_template(project, inventory):
- return JobTemplate.objects.create(
- name='test-jt',
- project=project,
- inventory=inventory,
- playbook='helloworld.yml'
- )
+ return JobTemplate.objects.create(name='test-jt', project=project, inventory=inventory, playbook='helloworld.yml')
@pytest.fixture
def machine_credential(organization):
ssh_type = CredentialType.defaults['ssh']()
ssh_type.save()
- return Credential.objects.create(
- credential_type=ssh_type, name='machine-cred',
- inputs={'username': 'test_user', 'password': 'pas4word'}
- )
+ return Credential.objects.create(credential_type=ssh_type, name='machine-cred', inputs={'username': 'test_user', 'password': 'pas4word'})
@pytest.fixture
def vault_credential(organization):
ct = CredentialType.defaults['vault']()
ct.save()
- return Credential.objects.create(
- credential_type=ct, name='vault-cred',
- inputs={'vault_id': 'foo', 'vault_password': 'pas4word'}
- )
+ return Credential.objects.create(credential_type=ct, name='vault-cred', inputs={'vault_id': 'foo', 'vault_password': 'pas4word'})
@pytest.fixture
diff --git a/awx_collection/test/awx/test_ad_hoc_wait.py b/awx_collection/test/awx/test_ad_hoc_wait.py
index 976e3d0e80..943268184a 100644
--- a/awx_collection/test/awx/test_ad_hoc_wait.py
+++ b/awx_collection/test/awx/test_ad_hoc_wait.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -10,26 +11,17 @@ from awx.main.models.ad_hoc_commands import AdHocCommand
@pytest.mark.django_db
def test_ad_hoc_command_wait_successful(run_module, admin_user):
command = AdHocCommand.objects.create(status='successful', started=now(), finished=now())
- result = run_module('tower_ad_hoc_command_wait', dict(
- command_id=command.id
- ), admin_user)
+ result = run_module('tower_ad_hoc_command_wait', dict(command_id=command.id), admin_user)
result.pop('invocation', None)
assert result.pop('finished', '')[:10] == str(command.finished)[:10]
assert result.pop('started', '')[:10] == str(command.started)[:10]
- assert result == {
- "status": "successful",
- "changed": False,
- "elapsed": str(command.elapsed),
- "id": command.id
- }
+ assert result == {"status": "successful", "changed": False, "elapsed": str(command.elapsed), "id": command.id}
@pytest.mark.django_db
def test_ad_hoc_command_wait_failed(run_module, admin_user):
command = AdHocCommand.objects.create(status='failed', started=now(), finished=now())
- result = run_module('tower_ad_hoc_command_wait', dict(
- command_id=command.id
- ), admin_user)
+ result = run_module('tower_ad_hoc_command_wait', dict(command_id=command.id), admin_user)
result.pop('invocation', None)
assert result.pop('finished', '')[:10] == str(command.finished)[:10]
assert result.pop('started', '')[:10] == str(command.started)[:10]
@@ -39,17 +31,12 @@ def test_ad_hoc_command_wait_failed(run_module, admin_user):
"changed": False,
"elapsed": str(command.elapsed),
"id": command.id,
- "msg": "The ad hoc command - 1, failed"
+ "msg": "The ad hoc command - 1, failed",
}
@pytest.mark.django_db
def test_ad_hoc_command_wait_not_found(run_module, admin_user):
- result = run_module('tower_ad_hoc_command_wait', dict(
- command_id=42
- ), admin_user)
+ result = run_module('tower_ad_hoc_command_wait', dict(command_id=42), admin_user)
result.pop('invocation', None)
- assert result == {
- "failed": True,
- "msg": "Unable to wait on ad hoc command 42; that ID does not exist in Tower."
- }
+ assert result == {"failed": True, "msg": "Unable to wait on ad hoc command 42; that ID does not exist in Tower."}
diff --git a/awx_collection/test/awx/test_application.py b/awx_collection/test/awx/test_application.py
index ad5f99d430..8b924205b2 100644
--- a/awx_collection/test/awx/test_application.py
+++ b/awx_collection/test/awx/test_application.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
diff --git a/awx_collection/test/awx/test_completeness.py b/awx_collection/test/awx/test_completeness.py
index cb34660959..9deced2485 100644
--- a/awx_collection/test/awx/test_completeness.py
+++ b/awx_collection/test/awx/test_completeness.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -23,15 +24,26 @@ no_module_for_endpoint = []
# Some modules work on the related fields of an endpoint. These modules will not have an auto-associated endpoint
no_endpoint_for_module = [
- 'tower_import', 'tower_meta', 'tower_export', 'tower_inventory_source_update', 'tower_job_launch', 'tower_job_wait',
- 'tower_job_list', 'tower_license', 'tower_ping', 'tower_receive', 'tower_send', 'tower_workflow_launch',
- 'tower_job_cancel', 'tower_workflow_template', 'tower_ad_hoc_command_wait', 'tower_ad_hoc_command_cancel',
+ 'tower_import',
+ 'tower_meta',
+ 'tower_export',
+ 'tower_inventory_source_update',
+ 'tower_job_launch',
+ 'tower_job_wait',
+ 'tower_job_list',
+ 'tower_license',
+ 'tower_ping',
+ 'tower_receive',
+ 'tower_send',
+ 'tower_workflow_launch',
+ 'tower_job_cancel',
+ 'tower_workflow_template',
+ 'tower_ad_hoc_command_wait',
+ 'tower_ad_hoc_command_cancel',
]
# Global module parameters we can ignore
-ignore_parameters = [
- 'state', 'new_name', 'update_secrets', 'copy_from'
-]
+ignore_parameters = ['state', 'new_name', 'update_secrets', 'copy_from']
# Some modules take additional parameters that do not appear in the API
# Add the module name as the key with the value being the list of params to ignore
@@ -57,9 +69,7 @@ no_api_parameter_ok = {
# When this tool was created we were not feature complete. Adding something in here indicates a module
# that needs to be developed. If the module is found on the file system it will auto-detect that the
# work is being done and will bypass this check. At some point this module should be removed from this list.
-needs_development = [
- 'tower_inventory_script', 'tower_workflow_approval'
-]
+needs_development = ['tower_inventory_script', 'tower_workflow_approval']
needs_param_development = {
'tower_host': ['instance_id'],
}
@@ -150,9 +160,7 @@ def determine_state(module_id, endpoint, module, parameter, api_option, module_o
def test_completeness(collection_import, request, admin_user, job_template):
option_comparison = {}
# Load a list of existing module files from disk
- base_folder = os.path.abspath(
- os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)
- )
+ base_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
module_directory = os.path.join(base_folder, 'plugins', 'modules')
for root, dirs, files in os.walk(module_directory):
if root == module_directory:
@@ -166,10 +174,7 @@ def test_completeness(collection_import, request, admin_user, job_template):
'module_name': module_name,
}
resource_module = collection_import('plugins.modules.{0}'.format(module_name))
- option_comparison[module_name]['module_options'] = yaml.load(
- resource_module.DOCUMENTATION,
- Loader=yaml.SafeLoader
- )['options']
+ option_comparison[module_name]['module_options'] = yaml.load(resource_module.DOCUMENTATION, Loader=yaml.SafeLoader)['options']
endpoint_response = _request('get')(
url='/api/v2/',
@@ -222,51 +227,86 @@ def test_completeness(collection_import, request, admin_user, job_template):
longest_option_name = len(option)
# Print out some headers
- print("".join([
- "End Point", " " * (longest_endpoint - len("End Point")),
- " | Module Name", " " * (longest_module_name - len("Module Name")),
- " | Option", " " * (longest_option_name - len("Option")),
- " | API | Module | State",
- ]))
- print("-|-".join([
- "-" * longest_endpoint,
- "-" * longest_module_name,
- "-" * longest_option_name,
- "---",
- "------",
- "---------------------------------------------",
- ]))
+ print(
+ "".join(
+ [
+ "End Point",
+ " " * (longest_endpoint - len("End Point")),
+ " | Module Name",
+ " " * (longest_module_name - len("Module Name")),
+ " | Option",
+ " " * (longest_option_name - len("Option")),
+ " | API | Module | State",
+ ]
+ )
+ )
+ print(
+ "-|-".join(
+ [
+ "-" * longest_endpoint,
+ "-" * longest_module_name,
+ "-" * longest_option_name,
+ "---",
+ "------",
+ "---------------------------------------------",
+ ]
+ )
+ )
# Print out all of our data
for module in sorted(option_comparison):
module_data = option_comparison[module]
all_param_names = list(set(module_data['api_options']) | set(module_data['module_options']))
for parameter in sorted(all_param_names):
- print("".join([
- module_data['endpoint'], " " * (longest_endpoint - len(module_data['endpoint'])), " | ",
- module_data['module_name'], " " * (longest_module_name - len(module_data['module_name'])), " | ",
- parameter, " " * (longest_option_name - len(parameter)), " | ",
- " X " if (parameter in module_data['api_options']) else ' ', " | ",
- ' X ' if (parameter in module_data['module_options']) else ' ', " | ",
- determine_state(
- module,
- module_data['endpoint'],
- module_data['module_name'],
- parameter,
- module_data['api_options'][parameter] if (parameter in module_data['api_options']) else None,
- module_data['module_options'][parameter] if (parameter in module_data['module_options']) else None,
- ),
- ]))
+ print(
+ "".join(
+ [
+ module_data['endpoint'],
+ " " * (longest_endpoint - len(module_data['endpoint'])),
+ " | ",
+ module_data['module_name'],
+ " " * (longest_module_name - len(module_data['module_name'])),
+ " | ",
+ parameter,
+ " " * (longest_option_name - len(parameter)),
+ " | ",
+ " X " if (parameter in module_data['api_options']) else ' ',
+ " | ",
+ ' X ' if (parameter in module_data['module_options']) else ' ',
+ " | ",
+ determine_state(
+ module,
+ module_data['endpoint'],
+ module_data['module_name'],
+ parameter,
+ module_data['api_options'][parameter] if (parameter in module_data['api_options']) else None,
+ module_data['module_options'][parameter] if (parameter in module_data['module_options']) else None,
+ ),
+ ]
+ )
+ )
# This handles cases where we got no params from the options page nor from the modules
if len(all_param_names) == 0:
- print("".join([
- module_data['endpoint'], " " * (longest_endpoint - len(module_data['endpoint'])), " | ",
- module_data['module_name'], " " * (longest_module_name - len(module_data['module_name'])), " | ",
- "N/A", " " * (longest_option_name - len("N/A")), " | ",
- ' ', " | ",
- ' ', " | ",
- determine_state(module, module_data['endpoint'], module_data['module_name'], 'N/A', None, None),
- ]))
+ print(
+ "".join(
+ [
+ module_data['endpoint'],
+ " " * (longest_endpoint - len(module_data['endpoint'])),
+ " | ",
+ module_data['module_name'],
+ " " * (longest_module_name - len(module_data['module_name'])),
+ " | ",
+ "N/A",
+ " " * (longest_option_name - len("N/A")),
+ " | ",
+ ' ',
+ " | ",
+ ' ',
+ " | ",
+ determine_state(module, module_data['endpoint'], module_data['module_name'], 'N/A', None, None),
+ ]
+ )
+ )
if return_value != 0:
raise Exception("One or more failures caused issues")
diff --git a/awx_collection/test/awx/test_credential.py b/awx_collection/test/awx/test_credential.py
index 0ab7017158..79e8560190 100644
--- a/awx_collection/test/awx/test_credential.py
+++ b/awx_collection/test/awx/test_credential.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -11,22 +12,12 @@ def cred_type():
# Make a credential type which will be used by the credential
ct = CredentialType.objects.create(
name='Ansible Galaxy Token',
- inputs={
- "fields": [
- {
- "id": "token",
- "type": "string",
- "secret": True,
- "label": "Ansible Galaxy Secret Token Value"
- }
- ],
- "required": ["token"]
- },
+ inputs={"fields": [{"id": "token", "type": "string", "secret": True, "label": "Ansible Galaxy Secret Token Value"}], "required": ["token"]},
injectors={
"extra_vars": {
"galaxy_token": "{{token}}",
}
- }
+ },
)
return ct
@@ -38,12 +29,7 @@ def test_create_machine_credential(run_module, admin_user, organization, silence
ct = CredentialType.defaults['ssh']()
ct.save()
# Example from docs
- result = run_module('tower_credential', dict(
- name='Test Machine Credential',
- organization=organization.name,
- kind='ssh',
- state='present'
- ), admin_user)
+ result = run_module('tower_credential', dict(name='Test Machine Credential', organization=organization.name, kind='ssh', state='present'), admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
@@ -61,14 +47,11 @@ def test_create_vault_credential(run_module, admin_user, organization, silence_d
ct = CredentialType.defaults['vault']()
ct.save()
- result = run_module('tower_credential', dict(
- name='Test Vault Credential',
- organization=organization.name,
- kind='vault',
- vault_id='bar',
- vault_password='foobar',
- state='present'
- ), admin_user)
+ result = run_module(
+ 'tower_credential',
+ dict(name='Test Vault Credential', organization=organization.name, kind='vault', vault_id='bar', vault_password='foobar', state='present'),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
@@ -83,13 +66,9 @@ def test_create_vault_credential(run_module, admin_user, organization, silence_d
@pytest.mark.django_db
def test_ct_precedence_over_kind(run_module, admin_user, organization, cred_type, silence_deprecation):
- result = run_module('tower_credential', dict(
- name='A credential',
- organization=organization.name,
- kind='ssh',
- credential_type=cred_type.name,
- state='present'
- ), admin_user)
+ result = run_module(
+ 'tower_credential', dict(name='A credential', organization=organization.name, kind='ssh', credential_type=cred_type.name, state='present'), admin_user
+ )
assert not result.get('failed', False), result.get('msg', result)
cred = Credential.objects.get(name='A credential')
@@ -102,14 +81,18 @@ def test_input_overrides_old_fields(run_module, admin_user, organization, silenc
# create the vault credential type
ct = CredentialType.defaults['vault']()
ct.save()
- result = run_module('tower_credential', dict(
- name='A Vault credential',
- organization=organization.name,
- kind='vault',
- vault_id='1234',
- inputs={'vault_id': 'asdf'},
- state='present',
- ), admin_user)
+ result = run_module(
+ 'tower_credential',
+ dict(
+ name='A Vault credential',
+ organization=organization.name,
+ kind='vault',
+ vault_id='1234',
+ inputs={'vault_id': 'asdf'},
+ state='present',
+ ),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
cred = Credential.objects.get(name='A Vault credential')
@@ -120,12 +103,7 @@ def test_input_overrides_old_fields(run_module, admin_user, organization, silenc
@pytest.mark.django_db
def test_missing_credential_type(run_module, admin_user, organization):
Organization.objects.create(name='test-org')
- result = run_module('tower_credential', dict(
- name='A credential',
- organization=organization.name,
- credential_type='foobar',
- state='present'
- ), admin_user)
+ result = run_module('tower_credential', dict(name='A credential', organization=organization.name, credential_type='foobar', state='present'), admin_user)
assert result.get('failed', False), result
assert 'credential_type' in result['msg']
assert 'foobar' in result['msg']
@@ -134,12 +112,11 @@ def test_missing_credential_type(run_module, admin_user, organization):
@pytest.mark.django_db
def test_make_use_of_custom_credential_type(run_module, organization, admin_user, cred_type):
- result = run_module('tower_credential', dict(
- name='Galaxy Token for Steve',
- organization=organization.name,
- credential_type=cred_type.name,
- inputs={'token': '7rEZK38DJl58A7RxA6EC7lLvUHbBQ1'}
- ), admin_user)
+ result = run_module(
+ 'tower_credential',
+ dict(name='Galaxy Token for Steve', organization=organization.name, credential_type=cred_type.name, inputs={'token': '7rEZK38DJl58A7RxA6EC7lLvUHbBQ1'}),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed', False), result
@@ -159,13 +136,17 @@ def test_secret_field_write_twice(run_module, organization, admin_user, cred_typ
val1 = '7rEZK38DJl58A7RxA6EC7lLvUHbBQ1'
val2 = '7rEZ238DJl5837rxA6xxxlLvUHbBQ1'
for val in (val1, val2):
- result = run_module('tower_credential', dict(
- name='Galaxy Token for Steve',
- organization=organization.name,
- credential_type=cred_type.name,
- inputs={'token': val},
- update_secrets=update_secrets
- ), admin_user)
+ result = run_module(
+ 'tower_credential',
+ dict(
+ name='Galaxy Token for Steve',
+ organization=organization.name,
+ credential_type=cred_type.name,
+ inputs={'token': val},
+ update_secrets=update_secrets,
+ ),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
if update_secrets:
diff --git a/awx_collection/test/awx/test_credential_input_source.py b/awx_collection/test/awx/test_credential_input_source.py
index 9eea6f3fe5..bb04535910 100644
--- a/awx_collection/test/awx/test_credential_input_source.py
+++ b/awx_collection/test/awx/test_credential_input_source.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -17,13 +18,7 @@ def aim_cred_type():
@pytest.fixture
def source_cred_aim(aim_cred_type):
return Credential.objects.create(
- name='CyberArk AIM Cred',
- credential_type=aim_cred_type,
- inputs={
- "url": "https://cyberark.example.com",
- "app_id": "myAppID",
- "verify": "false"
- }
+ name='CyberArk AIM Cred', credential_type=aim_cred_type, inputs={"url": "https://cyberark.example.com", "app_id": "myAppID", "verify": "false"}
)
@@ -31,21 +26,20 @@ def source_cred_aim(aim_cred_type):
def test_aim_credential_source(run_module, admin_user, organization, source_cred_aim, silence_deprecation):
ct = CredentialType.defaults['ssh']()
ct.save()
- tgt_cred = Credential.objects.create(
- name='Test Machine Credential',
- organization=organization,
- credential_type=ct,
- inputs={'username': 'bob'}
+ tgt_cred = Credential.objects.create(name='Test Machine Credential', organization=organization, credential_type=ct, inputs={'username': 'bob'})
+
+ result = run_module(
+ 'tower_credential_input_source',
+ dict(
+ source_credential=source_cred_aim.name,
+ target_credential=tgt_cred.name,
+ input_field_name='password',
+ metadata={"object_query": "Safe=SUPERSAFE;Object=MyAccount"},
+ state='present',
+ ),
+ admin_user,
)
- result = run_module('tower_credential_input_source', dict(
- source_credential=source_cred_aim.name,
- target_credential=tgt_cred.name,
- input_field_name='password',
- metadata={"object_query": "Safe=SUPERSAFE;Object=MyAccount"},
- state='present'
- ), admin_user)
-
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
@@ -68,12 +62,7 @@ def source_cred_conjur(organization):
return Credential.objects.create(
name='CyberArk CONJUR Cred',
credential_type=ct,
- inputs={
- "url": "https://cyberark.example.com",
- "api_key": "myApiKey",
- "account": "account",
- "username": "username"
- }
+ inputs={"url": "https://cyberark.example.com", "api_key": "myApiKey", "account": "account", "username": "username"},
)
@@ -81,21 +70,20 @@ def source_cred_conjur(organization):
def test_conjur_credential_source(run_module, admin_user, organization, source_cred_conjur, silence_deprecation):
ct = CredentialType.defaults['ssh']()
ct.save()
- tgt_cred = Credential.objects.create(
- name='Test Machine Credential',
- organization=organization,
- credential_type=ct,
- inputs={'username': 'bob'}
+ tgt_cred = Credential.objects.create(name='Test Machine Credential', organization=organization, credential_type=ct, inputs={'username': 'bob'})
+
+ result = run_module(
+ 'tower_credential_input_source',
+ dict(
+ source_credential=source_cred_conjur.name,
+ target_credential=tgt_cred.name,
+ input_field_name='password',
+ metadata={"secret_path": "/path/to/secret"},
+ state='present',
+ ),
+ admin_user,
)
- result = run_module('tower_credential_input_source', dict(
- source_credential=source_cred_conjur.name,
- target_credential=tgt_cred.name,
- input_field_name='password',
- metadata={"secret_path": "/path/to/secret"},
- state='present'
- ), admin_user)
-
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
@@ -123,8 +111,8 @@ def source_cred_hashi_secret(organization):
"token": "myApiKey",
"role_id": "role",
"secret_id": "secret",
- "default_auth_path": "path-to-approle"
- }
+ "default_auth_path": "path-to-approle",
+ },
)
@@ -132,21 +120,20 @@ def source_cred_hashi_secret(organization):
def test_hashi_secret_credential_source(run_module, admin_user, organization, source_cred_hashi_secret, silence_deprecation):
ct = CredentialType.defaults['ssh']()
ct.save()
- tgt_cred = Credential.objects.create(
- name='Test Machine Credential',
- organization=organization,
- credential_type=ct,
- inputs={'username': 'bob'}
+ tgt_cred = Credential.objects.create(name='Test Machine Credential', organization=organization, credential_type=ct, inputs={'username': 'bob'})
+
+ result = run_module(
+ 'tower_credential_input_source',
+ dict(
+ source_credential=source_cred_hashi_secret.name,
+ target_credential=tgt_cred.name,
+ input_field_name='password',
+ metadata={"secret_path": "/path/to/secret", "auth_path": "/path/to/auth", "secret_backend": "backend", "secret_key": "a_key"},
+ state='present',
+ ),
+ admin_user,
)
- result = run_module('tower_credential_input_source', dict(
- source_credential=source_cred_hashi_secret.name,
- target_credential=tgt_cred.name,
- input_field_name='password',
- metadata={"secret_path": "/path/to/secret", "auth_path": "/path/to/auth", "secret_backend": "backend", "secret_key": "a_key"},
- state='present'
- ), admin_user)
-
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
@@ -172,12 +159,7 @@ def source_cred_hashi_ssh(organization):
return Credential.objects.create(
name='HashiCorp ssh Cred',
credential_type=ct,
- inputs={
- "url": "https://ssh.hash.example.com",
- "token": "myApiKey",
- "role_id": "role",
- "secret_id": "secret"
- }
+ inputs={"url": "https://ssh.hash.example.com", "token": "myApiKey", "role_id": "role", "secret_id": "secret"},
)
@@ -185,21 +167,20 @@ def source_cred_hashi_ssh(organization):
def test_hashi_ssh_credential_source(run_module, admin_user, organization, source_cred_hashi_ssh, silence_deprecation):
ct = CredentialType.defaults['ssh']()
ct.save()
- tgt_cred = Credential.objects.create(
- name='Test Machine Credential',
- organization=organization,
- credential_type=ct,
- inputs={'username': 'bob'}
+ tgt_cred = Credential.objects.create(name='Test Machine Credential', organization=organization, credential_type=ct, inputs={'username': 'bob'})
+
+ result = run_module(
+ 'tower_credential_input_source',
+ dict(
+ source_credential=source_cred_hashi_ssh.name,
+ target_credential=tgt_cred.name,
+ input_field_name='password',
+ metadata={"secret_path": "/path/to/secret", "auth_path": "/path/to/auth", "role": "role", "public_key": "a_key", "valid_principals": "some_value"},
+ state='present',
+ ),
+ admin_user,
)
- result = run_module('tower_credential_input_source', dict(
- source_credential=source_cred_hashi_ssh.name,
- target_credential=tgt_cred.name,
- input_field_name='password',
- metadata={"secret_path": "/path/to/secret", "auth_path": "/path/to/auth", "role": "role", "public_key": "a_key", "valid_principals": "some_value"},
- state='present'
- ), admin_user)
-
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
@@ -232,7 +213,7 @@ def source_cred_azure_kv(organization):
"secret": "secret",
"tenant": "tenant",
"cloud_name": "the_cloud",
- }
+ },
)
@@ -240,21 +221,20 @@ def source_cred_azure_kv(organization):
def test_azure_kv_credential_source(run_module, admin_user, organization, source_cred_azure_kv, silence_deprecation):
ct = CredentialType.defaults['ssh']()
ct.save()
- tgt_cred = Credential.objects.create(
- name='Test Machine Credential',
- organization=organization,
- credential_type=ct,
- inputs={'username': 'bob'}
+ tgt_cred = Credential.objects.create(name='Test Machine Credential', organization=organization, credential_type=ct, inputs={'username': 'bob'})
+
+ result = run_module(
+ 'tower_credential_input_source',
+ dict(
+ source_credential=source_cred_azure_kv.name,
+ target_credential=tgt_cred.name,
+ input_field_name='password',
+ metadata={"secret_field": "my_pass"},
+ state='present',
+ ),
+ admin_user,
)
- result = run_module('tower_credential_input_source', dict(
- source_credential=source_cred_azure_kv.name,
- target_credential=tgt_cred.name,
- input_field_name='password',
- metadata={"secret_field": "my_pass"},
- state='present'
- ), admin_user)
-
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
@@ -274,11 +254,7 @@ def source_cred_aim_alt(aim_cred_type):
return Credential.objects.create(
name='Alternate CyberArk AIM Cred',
credential_type=aim_cred_type,
- inputs={
- "url": "https://cyberark-alt.example.com",
- "app_id": "myAltID",
- "verify": "false"
- }
+ inputs={"url": "https://cyberark-alt.example.com", "app_id": "myAltID", "verify": "false"},
)
@@ -286,41 +262,43 @@ def source_cred_aim_alt(aim_cred_type):
def test_aim_credential_source(run_module, admin_user, organization, source_cred_aim, source_cred_aim_alt, silence_deprecation):
ct = CredentialType.defaults['ssh']()
ct.save()
- tgt_cred = Credential.objects.create(
- name='Test Machine Credential',
- organization=organization,
- credential_type=ct,
- inputs={'username': 'bob'}
+ tgt_cred = Credential.objects.create(name='Test Machine Credential', organization=organization, credential_type=ct, inputs={'username': 'bob'})
+
+ result = run_module(
+ 'tower_credential_input_source',
+ dict(
+ source_credential=source_cred_aim.name,
+ target_credential=tgt_cred.name,
+ input_field_name='password',
+ metadata={"object_query": "Safe=SUPERSAFE;Object=MyAccount"},
+ state='present',
+ ),
+ admin_user,
)
- result = run_module('tower_credential_input_source', dict(
- source_credential=source_cred_aim.name,
- target_credential=tgt_cred.name,
- input_field_name='password',
- metadata={"object_query": "Safe=SUPERSAFE;Object=MyAccount"},
- state='present'
- ), admin_user)
-
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
- unchangedResult = run_module('tower_credential_input_source', dict(
- source_credential=source_cred_aim.name,
- target_credential=tgt_cred.name,
- input_field_name='password',
- metadata={"object_query": "Safe=SUPERSAFE;Object=MyAccount"},
- state='present'
- ), admin_user)
+ unchangedResult = run_module(
+ 'tower_credential_input_source',
+ dict(
+ source_credential=source_cred_aim.name,
+ target_credential=tgt_cred.name,
+ input_field_name='password',
+ metadata={"object_query": "Safe=SUPERSAFE;Object=MyAccount"},
+ state='present',
+ ),
+ admin_user,
+ )
assert not unchangedResult.get('failed', False), result.get('msg', result)
assert not unchangedResult.get('changed'), result
- changedResult = run_module('tower_credential_input_source', dict(
- source_credential=source_cred_aim_alt.name,
- target_credential=tgt_cred.name,
- input_field_name='password',
- state='present'
- ), admin_user)
+ changedResult = run_module(
+ 'tower_credential_input_source',
+ dict(source_credential=source_cred_aim_alt.name, target_credential=tgt_cred.name, input_field_name='password', state='present'),
+ admin_user,
+ )
assert not changedResult.get('failed', False), changedResult.get('msg', result)
assert changedResult.get('changed'), result
@@ -347,7 +325,7 @@ def source_cred_centrify_secret(organization):
"url": "https://tenant_id.my.centrify-dev.net",
"client_id": "secretuser@tenant",
"client_password": "secretuserpassword",
- }
+ },
)
@@ -355,21 +333,20 @@ def source_cred_centrify_secret(organization):
def test_centrify_vault_credential_source(run_module, admin_user, organization, source_cred_centrify_secret, silence_deprecation):
ct = CredentialType.defaults['ssh']()
ct.save()
- tgt_cred = Credential.objects.create(
- name='Test Machine Credential',
- organization=organization,
- credential_type=ct,
- inputs={'username': 'bob'}
+ tgt_cred = Credential.objects.create(name='Test Machine Credential', organization=organization, credential_type=ct, inputs={'username': 'bob'})
+
+ result = run_module(
+ 'tower_credential_input_source',
+ dict(
+ source_credential=source_cred_centrify_secret.name,
+ target_credential=tgt_cred.name,
+ input_field_name='password',
+ metadata={"system-name": "systemname", "account-name": "accountname"},
+ state='present',
+ ),
+ admin_user,
)
- result = run_module('tower_credential_input_source', dict(
- source_credential=source_cred_centrify_secret.name,
- target_credential=tgt_cred.name,
- input_field_name='password',
- metadata={"system-name": "systemname", "account-name": "accountname"},
- state='present'
- ), admin_user)
-
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
assert CredentialInputSource.objects.count() == 1
diff --git a/awx_collection/test/awx/test_credential_type.py b/awx_collection/test/awx/test_credential_type.py
index 29f4869ddf..8afc92a1e9 100644
--- a/awx_collection/test/awx/test_credential_type.py
+++ b/awx_collection/test/awx/test_credential_type.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -9,14 +10,18 @@ from awx.main.models import CredentialType
@pytest.mark.django_db
def test_create_custom_credential_type(run_module, admin_user, silence_deprecation):
# Example from docs
- result = run_module('tower_credential_type', dict(
- name='Nexus',
- description='Credentials type for Nexus',
- kind='cloud',
- inputs={"fields": [{"id": "server", "type": "string", "default": "", "label": ""}], "required": []},
- injectors={'extra_vars': {'nexus_credential': 'test'}},
- state='present',
- ), admin_user)
+ result = run_module(
+ 'tower_credential_type',
+ dict(
+ name='Nexus',
+ description='Credentials type for Nexus',
+ kind='cloud',
+ inputs={"fields": [{"id": "server", "type": "string", "default": "", "label": ""}], "required": []},
+ injectors={'extra_vars': {'nexus_credential': 'test'}},
+ state='present',
+ ),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
@@ -31,19 +36,27 @@ def test_create_custom_credential_type(run_module, admin_user, silence_deprecati
@pytest.mark.django_db
def test_changed_false_with_api_changes(run_module, admin_user):
- result = run_module('tower_credential_type', dict(
- name='foo',
- kind='cloud',
- inputs={"fields": [{"id": "env_value", "label": "foo", "default": "foo"}]},
- injectors={'env': {'TEST_ENV_VAR': '{{ env_value }}'}},
- ), admin_user)
+ result = run_module(
+ 'tower_credential_type',
+ dict(
+ name='foo',
+ kind='cloud',
+ inputs={"fields": [{"id": "env_value", "label": "foo", "default": "foo"}]},
+ injectors={'env': {'TEST_ENV_VAR': '{{ env_value }}'}},
+ ),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
- result = run_module('tower_credential_type', dict(
- name='foo',
- inputs={"fields": [{"id": "env_value", "label": "foo", "default": "foo"}]},
- injectors={'env': {'TEST_ENV_VAR': '{{ env_value }}'}},
- ), admin_user)
+ result = run_module(
+ 'tower_credential_type',
+ dict(
+ name='foo',
+ inputs={"fields": [{"id": "env_value", "label": "foo", "default": "foo"}]},
+ injectors={'env': {'TEST_ENV_VAR': '{{ env_value }}'}},
+ ),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert not result.get('changed'), result
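
The two runs above spell out the collection's idempotency contract: the first 'present' run creates the credential type and reports changed, while the rerun reports no change even though it omits kind, since unspecified parameters are left untouched. A minimal standalone sketch of that contract (illustrative, not the module's code):

    existing = {}

    def ensure_present(name, **params):
        # Create or update only when the desired state differs from what exists;
        # parameters omitted entirely are not treated as changes.
        desired = dict(existing.get(name, {}), **params)
        if existing.get(name) == desired:
            return {'changed': False}
        existing[name] = desired
        return {'changed': True}

    first = ensure_present('foo', kind='cloud', injectors={'env': {'TEST_ENV_VAR': '{{ env_value }}'}})
    second = ensure_present('foo', injectors={'env': {'TEST_ENV_VAR': '{{ env_value }}'}})
    assert first['changed'] and not second['changed']
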
diff --git a/awx_collection/test/awx/test_group.py b/awx_collection/test/awx/test_group.py
index 3e5bcc6bdd..e7aeaeb0d6 100644
--- a/awx_collection/test/awx/test_group.py
+++ b/awx_collection/test/awx/test_group.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -12,12 +13,7 @@ def test_create_group(run_module, admin_user):
inv = Inventory.objects.create(name='test-inv', organization=org)
variables = {"ansible_network_os": "iosxr"}
- result = run_module('tower_group', dict(
- name='Test Group',
- inventory='test-inv',
- variables=variables,
- state='present'
- ), admin_user)
+ result = run_module('tower_group', dict(name='Test Group', inventory='test-inv', variables=variables, state='present'), admin_user)
assert result.get('changed'), result
group = Group.objects.get(name='Test Group')
@@ -42,13 +38,11 @@ def test_associate_hosts_and_children(run_module, admin_user, organization):
child = Group.objects.create(inventory=inv, name='child_group')
- result = run_module('tower_group', dict(
- name='Test Group',
- inventory='test-inv',
- hosts=[inv_hosts[1].name, inv_hosts[2].name],
- children=[child.name],
- state='present'
- ), admin_user)
+ result = run_module(
+ 'tower_group',
+ dict(name='Test Group', inventory='test-inv', hosts=[inv_hosts[1].name, inv_hosts[2].name], children=[child.name], state='present'),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result['changed'] is True
@@ -62,13 +56,7 @@ def test_associate_on_create(run_module, admin_user, organization):
child = Group.objects.create(name='test-child', inventory=inv)
host = Host.objects.create(name='test-host', inventory=inv)
- result = run_module('tower_group', dict(
- name='Test Group',
- inventory='test-inv',
- hosts=[host.name],
- groups=[child.name],
- state='present'
- ), admin_user)
+ result = run_module('tower_group', dict(name='Test Group', inventory='test-inv', hosts=[host.name], groups=[child.name], state='present'), admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert result['changed'] is True
@@ -82,12 +70,7 @@ def test_children_alias_of_groups(run_module, admin_user, organization):
inv = Inventory.objects.create(name='test-inv', organization=organization)
group = Group.objects.create(name='Test Group', inventory=inv)
child = Group.objects.create(inventory=inv, name='child_group')
- result = run_module('tower_group', dict(
- name='Test Group',
- inventory='test-inv',
- groups=[child.name],
- state='present'
- ), admin_user)
+ result = run_module('tower_group', dict(name='Test Group', inventory='test-inv', groups=[child.name], state='present'), admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert result['changed'] is True
@@ -104,11 +87,7 @@ def test_tower_group_idempotent(run_module, admin_user):
inventory=inv,
)
- result = run_module('tower_group', dict(
- name='Test Group',
- inventory='test-inv',
- state='present'
- ), admin_user)
+ result = run_module('tower_group', dict(name='Test Group', inventory='test-inv', state='present'), admin_user)
result.pop('invocation')
assert result == {
diff --git a/awx_collection/test/awx/test_instance_group.py b/awx_collection/test/awx/test_instance_group.py
index 2516ce20ba..eb06261c99 100644
--- a/awx_collection/test/awx/test_instance_group.py
+++ b/awx_collection/test/awx/test_instance_group.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -9,12 +10,9 @@ from awx.main.tests.functional.conftest import kube_credential, credentialtype_k
@pytest.mark.django_db
def test_instance_group_create(run_module, admin_user):
- result = run_module('tower_instance_group', {
- 'name': 'foo-group',
- 'policy_instance_percentage': 34,
- 'policy_instance_minimum': 12,
- 'state': 'present'
- }, admin_user)
+ result = run_module(
+ 'tower_instance_group', {'name': 'foo-group', 'policy_instance_percentage': 34, 'policy_instance_minimum': 12, 'state': 'present'}, admin_user
+ )
assert not result.get('failed', False), result
assert result['changed']
@@ -26,11 +24,7 @@ def test_instance_group_create(run_module, admin_user):
new_instance = Instance.objects.create(hostname='foo.example.com')
# Set the new instance group only to the one instance
- result = run_module('tower_instance_group', {
- 'name': 'foo-group',
- 'instances': [new_instance.hostname],
- 'state': 'present'
- }, admin_user)
+ result = run_module('tower_instance_group', {'name': 'foo-group', 'instances': [new_instance.hostname], 'state': 'present'}, admin_user)
assert not result.get('failed', False), result
assert result['changed']
@@ -47,25 +41,20 @@ def test_instance_group_create(run_module, admin_user):
def test_container_group_create(run_module, admin_user, kube_credential):
pod_spec = "{ 'Nothing': True }"
- result = run_module('tower_instance_group', {
- 'name': 'foo-c-group',
- 'credential': kube_credential.id,
- 'is_container_group': True,
- 'state': 'present'
- }, admin_user)
+ result = run_module(
+ 'tower_instance_group', {'name': 'foo-c-group', 'credential': kube_credential.id, 'is_container_group': True, 'state': 'present'}, admin_user
+ )
assert not result.get('failed', False), result['msg']
assert result['changed']
ig = InstanceGroup.objects.get(name='foo-c-group')
assert ig.pod_spec_override == ''
- result = run_module('tower_instance_group', {
- 'name': 'foo-c-group',
- 'credential': kube_credential.id,
- 'is_container_group': True,
- 'pod_spec_override': pod_spec,
- 'state': 'present'
- }, admin_user)
+ result = run_module(
+ 'tower_instance_group',
+ {'name': 'foo-c-group', 'credential': kube_credential.id, 'is_container_group': True, 'pod_spec_override': pod_spec, 'state': 'present'},
+ admin_user,
+ )
assert not result.get('failed', False), result['msg']
assert result['changed']
diff --git a/awx_collection/test/awx/test_inventory.py b/awx_collection/test/awx/test_inventory.py
index f642fb87d5..d2e9d7e39f 100644
--- a/awx_collection/test/awx/test_inventory.py
+++ b/awx_collection/test/awx/test_inventory.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -11,13 +12,17 @@ from awx.main.tests.functional.conftest import insights_credential, credentialty
def test_inventory_create(run_module, admin_user, organization, insights_credential):
# Create an insights credential
- result = run_module('tower_inventory', {
- 'name': 'foo-inventory',
- 'organization': organization.name,
- 'variables': {'foo': 'bar', 'another-foo': {'barz': 'bar2'}},
- 'insights_credential': insights_credential.name,
- 'state': 'present'
- }, admin_user)
+ result = run_module(
+ 'tower_inventory',
+ {
+ 'name': 'foo-inventory',
+ 'organization': organization.name,
+ 'variables': {'foo': 'bar', 'another-foo': {'barz': 'bar2'}},
+ 'insights_credential': insights_credential.name,
+ 'state': 'present',
+ },
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
inv = Inventory.objects.get(name='foo-inventory')
@@ -26,24 +31,18 @@ def test_inventory_create(run_module, admin_user, organization, insights_credent
result.pop('module_args', None)
result.pop('invocation', None)
- assert result == {
- "name": "foo-inventory",
- "id": inv.id,
- "changed": True
- }
+ assert result == {"name": "foo-inventory", "id": inv.id, "changed": True}
assert inv.organization_id == organization.id
@pytest.mark.django_db
def test_invalid_smart_inventory_create(run_module, admin_user, organization):
- result = run_module('tower_inventory', {
- 'name': 'foo-inventory',
- 'organization': organization.name,
- 'kind': 'smart',
- 'host_filter': 'ansible',
- 'state': 'present'
- }, admin_user)
+ result = run_module(
+ 'tower_inventory',
+ {'name': 'foo-inventory', 'organization': organization.name, 'kind': 'smart', 'host_filter': 'ansible', 'state': 'present'},
+ admin_user,
+ )
assert result.get('failed', False), result
assert 'Invalid query ansible' in result['msg']
@@ -51,13 +50,11 @@ def test_invalid_smart_inventory_create(run_module, admin_user, organization):
@pytest.mark.django_db
def test_valid_smart_inventory_create(run_module, admin_user, organization):
- result = run_module('tower_inventory', {
- 'name': 'foo-inventory',
- 'organization': organization.name,
- 'kind': 'smart',
- 'host_filter': 'name=my_host',
- 'state': 'present'
- }, admin_user)
+ result = run_module(
+ 'tower_inventory',
+ {'name': 'foo-inventory', 'organization': organization.name, 'kind': 'smart', 'host_filter': 'name=my_host', 'state': 'present'},
+ admin_user,
+ )
assert not result.get('failed', False), result
inv = Inventory.objects.get(name='foo-inventory')
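
The two smart-inventory tests above turn on host_filter validity: the bare word 'ansible' is rejected ('Invalid query ansible') while the field lookup 'name=my_host' is accepted. A toy check capturing just that distinction (assumption: this is not AWX's actual SmartFilter grammar, which is far richer):

    import re

    def looks_like_host_filter(query):
        # Accept simple 'field=value' terms joined by ' and '; a bare token
        # such as 'ansible' contains no lookup and fails.
        return all(re.fullmatch(r'[\w.]+=\S+', term.strip()) for term in query.split(' and '))

    assert not looks_like_host_filter('ansible')
    assert looks_like_host_filter('name=my_host')
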
diff --git a/awx_collection/test/awx/test_inventory_source.py b/awx_collection/test/awx/test_inventory_source.py
index fabd3d9fb8..53aac994b7 100644
--- a/awx_collection/test/awx/test_inventory_source.py
+++ b/awx_collection/test/awx/test_inventory_source.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -26,14 +27,11 @@ def project(base_inventory):
@pytest.mark.django_db
def test_inventory_source_create(run_module, admin_user, base_inventory, project):
source_path = '/var/lib/awx/example_source_path/'
- result = run_module('tower_inventory_source', dict(
- name='foo',
- inventory=base_inventory.name,
- state='present',
- source='scm',
- source_path=source_path,
- source_project=project.name
- ), admin_user)
+ result = run_module(
+ 'tower_inventory_source',
+ dict(name='foo', inventory=base_inventory.name, state='present', source='scm', source_path=source_path, source_project=project.name),
+ admin_user,
+ )
assert result.pop('changed', None), result
inv_src = InventorySource.objects.get(name='foo')
@@ -51,12 +49,7 @@ def test_create_inventory_source_implied_org(run_module, admin_user):
inv = Inventory.objects.create(name='test-inv', organization=org)
# Credential is not required for ec2 source, because of IAM roles
- result = run_module('tower_inventory_source', dict(
- name='Test Inventory Source',
- inventory='test-inv',
- source='ec2',
- state='present'
- ), admin_user)
+ result = run_module('tower_inventory_source', dict(name='Test Inventory Source', inventory='test-inv', source='ec2', state='present'), admin_user)
assert result.pop('changed', None), result
inv_src = InventorySource.objects.get(name='Test Inventory Source')
@@ -78,13 +71,11 @@ def test_create_inventory_source_multiple_orgs(run_module, admin_user):
org2 = Organization.objects.create(name='test-org-number-two')
inv2 = Inventory.objects.create(name='test-inv', organization=org2)
- result = run_module('tower_inventory_source', dict(
- name='Test Inventory Source',
- inventory=inv2.name,
- organization='test-org-number-two',
- source='ec2',
- state='present'
- ), admin_user)
+ result = run_module(
+ 'tower_inventory_source',
+ dict(name='Test Inventory Source', inventory=inv2.name, organization='test-org-number-two', source='ec2', state='present'),
+ admin_user,
+ )
assert result.pop('changed', None), result
inv_src = InventorySource.objects.get(name='Test Inventory Source')
@@ -99,24 +90,14 @@ def test_create_inventory_source_multiple_orgs(run_module, admin_user):
@pytest.mark.django_db
def test_falsy_value(run_module, admin_user, base_inventory):
- result = run_module('tower_inventory_source', dict(
- name='falsy-test',
- inventory=base_inventory.name,
- source='ec2',
- update_on_launch=True
- ), admin_user)
+ result = run_module('tower_inventory_source', dict(name='falsy-test', inventory=base_inventory.name, source='ec2', update_on_launch=True), admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed', None), result
inv_src = InventorySource.objects.get(name='falsy-test')
assert inv_src.update_on_launch is True
- result = run_module('tower_inventory_source', dict(
- name='falsy-test',
- inventory=base_inventory.name,
- source='ec2',
- update_on_launch=False
- ), admin_user)
+ result = run_module('tower_inventory_source', dict(name='falsy-test', inventory=base_inventory.name, source='ec2', update_on_launch=False), admin_user)
inv_src.refresh_from_db()
assert inv_src.update_on_launch is False
@@ -146,12 +127,7 @@ def test_falsy_value(run_module, admin_user, base_inventory):
@pytest.mark.django_db
def test_missing_required_credential(run_module, admin_user, base_inventory):
- result = run_module('tower_inventory_source', dict(
- name='Test Azure Source',
- inventory=base_inventory.name,
- source='azure_rm',
- state='present'
- ), admin_user)
+ result = run_module('tower_inventory_source', dict(name='Test Azure Source', inventory=base_inventory.name, source='azure_rm', state='present'), admin_user)
assert result.pop('failed', None) is True, result
assert 'Credential is required for a cloud source' in result.get('msg', '')
@@ -159,13 +135,11 @@ def test_missing_required_credential(run_module, admin_user, base_inventory):
@pytest.mark.django_db
def test_source_project_not_for_cloud(run_module, admin_user, base_inventory, project):
- result = run_module('tower_inventory_source', dict(
- name='Test ec2 Inventory Source',
- inventory=base_inventory.name,
- source='ec2',
- state='present',
- source_project=project.name
- ), admin_user)
+ result = run_module(
+ 'tower_inventory_source',
+ dict(name='Test ec2 Inventory Source', inventory=base_inventory.name, source='ec2', state='present', source_project=project.name),
+ admin_user,
+ )
assert result.pop('failed', None) is True, result
assert 'Cannot set source_project if not SCM type' in result.get('msg', '')
@@ -173,13 +147,11 @@ def test_source_project_not_for_cloud(run_module, admin_user, base_inventory, pr
@pytest.mark.django_db
def test_source_path_not_for_cloud(run_module, admin_user, base_inventory):
- result = run_module('tower_inventory_source', dict(
- name='Test ec2 Inventory Source',
- inventory=base_inventory.name,
- source='ec2',
- state='present',
- source_path='where/am/I'
- ), admin_user)
+ result = run_module(
+ 'tower_inventory_source',
+ dict(name='Test ec2 Inventory Source', inventory=base_inventory.name, source='ec2', state='present', source_path='where/am/I'),
+ admin_user,
+ )
assert result.pop('failed', None) is True, result
assert 'Cannot set source_path if not SCM type' in result.get('msg', '')
@@ -187,13 +159,13 @@ def test_source_path_not_for_cloud(run_module, admin_user, base_inventory):
@pytest.mark.django_db
def test_scm_source_needs_project(run_module, admin_user, base_inventory):
- result = run_module('tower_inventory_source', dict(
- name='SCM inventory without project',
- inventory=base_inventory.name,
- state='present',
- source='scm',
- source_path='/var/lib/awx/example_source_path/'
- ), admin_user)
+ result = run_module(
+ 'tower_inventory_source',
+ dict(
+ name='SCM inventory without project', inventory=base_inventory.name, state='present', source='scm', source_path='/var/lib/awx/example_source_path/'
+ ),
+ admin_user,
+ )
assert result.pop('failed', None), result
assert 'Project required for scm type sources' in result.get('msg', '')
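
Taken together, the failure-path tests above pin down the source-type constraints and their exact error messages. A condensed sketch of those rules (the messages are the ones asserted above; the validator itself is illustrative):

    def validate_source(source, credential=None, source_project=None, source_path=None):
        # ec2 is exempt from the credential requirement because of IAM roles.
        if source not in ('scm', 'ec2') and credential is None:
            return 'Credential is required for a cloud source'
        if source != 'scm' and source_project:
            return 'Cannot set source_project if not SCM type'
        if source != 'scm' and source_path:
            return 'Cannot set source_path if not SCM type'
        if source == 'scm' and not source_project:
            return 'Project required for scm type sources'
        return None

    assert validate_source('azure_rm') == 'Credential is required for a cloud source'
    assert validate_source('ec2', source_project='proj') == 'Cannot set source_project if not SCM type'
    assert validate_source('ec2', source_path='where/am/I') == 'Cannot set source_path if not SCM type'
    assert validate_source('scm') == 'Project required for scm type sources'
    assert validate_source('ec2') is None
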
diff --git a/awx_collection/test/awx/test_job.py b/awx_collection/test/awx/test_job.py
index 5e478d9685..012c1fd9d1 100644
--- a/awx_collection/test/awx/test_job.py
+++ b/awx_collection/test/awx/test_job.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -10,46 +11,25 @@ from awx.main.models import Job
@pytest.mark.django_db
def test_job_wait_successful(run_module, admin_user):
job = Job.objects.create(status='successful', started=now(), finished=now())
- result = run_module('tower_job_wait', dict(
- job_id=job.id
- ), admin_user)
+ result = run_module('tower_job_wait', dict(job_id=job.id), admin_user)
result.pop('invocation', None)
assert result.pop('finished', '')[:10] == str(job.finished)[:10]
assert result.pop('started', '')[:10] == str(job.started)[:10]
- assert result == {
- "status": "successful",
- "changed": False,
- "elapsed": str(job.elapsed),
- "id": job.id
- }
+ assert result == {"status": "successful", "changed": False, "elapsed": str(job.elapsed), "id": job.id}
@pytest.mark.django_db
def test_job_wait_failed(run_module, admin_user):
job = Job.objects.create(status='failed', started=now(), finished=now())
- result = run_module('tower_job_wait', dict(
- job_id=job.id
- ), admin_user)
+ result = run_module('tower_job_wait', dict(job_id=job.id), admin_user)
result.pop('invocation', None)
assert result.pop('finished', '')[:10] == str(job.finished)[:10]
assert result.pop('started', '')[:10] == str(job.started)[:10]
- assert result == {
- "status": "failed",
- "failed": True,
- "changed": False,
- "elapsed": str(job.elapsed),
- "id": job.id,
- "msg": "Job with id 1 failed"
- }
+ assert result == {"status": "failed", "failed": True, "changed": False, "elapsed": str(job.elapsed), "id": job.id, "msg": "Job with id 1 failed"}
@pytest.mark.django_db
def test_job_wait_not_found(run_module, admin_user):
- result = run_module('tower_job_wait', dict(
- job_id=42
- ), admin_user)
+ result = run_module('tower_job_wait', dict(job_id=42), admin_user)
result.pop('invocation', None)
- assert result == {
- "failed": True,
- "msg": "Unable to wait on job 42; that ID does not exist in Tower."
- }
+ assert result == {"failed": True, "msg": "Unable to wait on job 42; that ID does not exist in Tower."}
diff --git a/awx_collection/test/awx/test_job_template.py b/awx_collection/test/awx/test_job_template.py
index 8ec3d67bfc..60c8f9066f 100644
--- a/awx_collection/test/awx/test_job_template.py
+++ b/awx_collection/test/awx/test_job_template.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -10,11 +11,13 @@ from awx.main.models import ActivityStream, JobTemplate, Job, NotificationTempla
def test_create_job_template(run_module, admin_user, project, inventory):
module_args = {
- 'name': 'foo', 'playbook': 'helloworld.yml',
- 'project': project.name, 'inventory': inventory.name,
+ 'name': 'foo',
+ 'playbook': 'helloworld.yml',
+ 'project': project.name,
+ 'inventory': inventory.name,
'extra_vars': {'foo': 'bar'},
'job_type': 'run',
- 'state': 'present'
+ 'state': 'present',
}
result = run_module('tower_job_template', module_args, admin_user)
@@ -22,14 +25,7 @@ def test_create_job_template(run_module, admin_user, project, inventory):
jt = JobTemplate.objects.get(name='foo')
assert jt.extra_vars == '{"foo": "bar"}'
- assert result == {
- "name": "foo",
- "id": jt.id,
- "changed": True,
- "invocation": {
- "module_args": module_args
- }
- }
+ assert result == {"name": "foo", "id": jt.id, "changed": True, "invocation": {"module_args": module_args}}
assert jt.project_id == project.id
assert jt.inventory_id == inventory.id
@@ -39,8 +35,10 @@ def test_create_job_template(run_module, admin_user, project, inventory):
def test_resets_job_template_values(run_module, admin_user, project, inventory):
module_args = {
- 'name': 'foo', 'playbook': 'helloworld.yml',
- 'project': project.name, 'inventory': inventory.name,
+ 'name': 'foo',
+ 'playbook': 'helloworld.yml',
+ 'project': project.name,
+ 'inventory': inventory.name,
'extra_vars': {'foo': 'bar'},
'job_type': 'run',
'state': 'present',
@@ -59,8 +57,10 @@ def test_resets_job_template_values(run_module, admin_user, project, inventory):
assert jt.ask_limit_on_launch
module_args = {
- 'name': 'foo', 'playbook': 'helloworld.yml',
- 'project': project.name, 'inventory': inventory.name,
+ 'name': 'foo',
+ 'playbook': 'helloworld.yml',
+ 'project': project.name,
+ 'inventory': inventory.name,
'extra_vars': {'foo': 'bar'},
'job_type': 'run',
'state': 'present',
@@ -89,17 +89,18 @@ def test_job_launch_with_prompting(run_module, admin_user, project, organization
playbook='helloworld.yml',
ask_variables_on_launch=True,
ask_inventory_on_launch=True,
- ask_credential_on_launch=True
+ ask_credential_on_launch=True,
+ )
+ result = run_module(
+ 'tower_job_launch',
+ dict(
+ job_template='foo',
+ inventory=inventory.name,
+ credential=machine_credential.name,
+ extra_vars={"var1": "My First Variable", "var2": "My Second Variable", "var3": "My Third Variable"},
+ ),
+ admin_user,
)
- result = run_module('tower_job_launch', dict(
- job_template='foo',
- inventory=inventory.name,
- credential=machine_credential.name,
- extra_vars={"var1": "My First Variable",
- "var2": "My Second Variable",
- "var3": "My Third Variable"
- }
- ), admin_user)
assert result.pop('changed', None), result
job = Job.objects.get(id=result['id'])
@@ -109,51 +110,44 @@ def test_job_launch_with_prompting(run_module, admin_user, project, organization
@pytest.mark.django_db
-def test_job_template_with_new_credentials(
- run_module, admin_user, project, inventory,
- machine_credential, vault_credential):
- result = run_module('tower_job_template', dict(
- name='foo',
- playbook='helloworld.yml',
- project=project.name,
- inventory=inventory.name,
- credentials=[machine_credential.name, vault_credential.name]
- ), admin_user)
+def test_job_template_with_new_credentials(run_module, admin_user, project, inventory, machine_credential, vault_credential):
+ result = run_module(
+ 'tower_job_template',
+ dict(
+ name='foo', playbook='helloworld.yml', project=project.name, inventory=inventory.name, credentials=[machine_credential.name, vault_credential.name]
+ ),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed', False), result
jt = JobTemplate.objects.get(pk=result['id'])
- assert set([machine_credential.id, vault_credential.id]) == set([
- cred.pk for cred in jt.credentials.all()])
+ assert set([machine_credential.id, vault_credential.id]) == set([cred.pk for cred in jt.credentials.all()])
prior_ct = ActivityStream.objects.count()
- result = run_module('tower_job_template', dict(
- name='foo',
- playbook='helloworld.yml',
- project=project.name,
- inventory=inventory.name,
- credentials=[machine_credential.name, vault_credential.name]
- ), admin_user)
+ result = run_module(
+ 'tower_job_template',
+ dict(
+ name='foo', playbook='helloworld.yml', project=project.name, inventory=inventory.name, credentials=[machine_credential.name, vault_credential.name]
+ ),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert not result.get('changed', True), result
jt.refresh_from_db()
assert result['id'] == jt.id
- assert set([machine_credential.id, vault_credential.id]) == set([
- cred.pk for cred in jt.credentials.all()])
+ assert set([machine_credential.id, vault_credential.id]) == set([cred.pk for cred in jt.credentials.all()])
assert ActivityStream.objects.count() == prior_ct
@pytest.mark.django_db
def test_job_template_with_survey_spec(run_module, admin_user, project, inventory, survey_spec):
- result = run_module('tower_job_template', dict(
- name='foo',
- playbook='helloworld.yml',
- project=project.name,
- inventory=inventory.name,
- survey_spec=survey_spec,
- survey_enabled=True
- ), admin_user)
+ result = run_module(
+ 'tower_job_template',
+ dict(name='foo', playbook='helloworld.yml', project=project.name, inventory=inventory.name, survey_spec=survey_spec, survey_enabled=True),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed', False), result
jt = JobTemplate.objects.get(pk=result['id'])
@@ -161,14 +155,11 @@ def test_job_template_with_survey_spec(run_module, admin_user, project, inventor
assert jt.survey_spec == survey_spec
prior_ct = ActivityStream.objects.count()
- result = run_module('tower_job_template', dict(
- name='foo',
- playbook='helloworld.yml',
- project=project.name,
- inventory=inventory.name,
- survey_spec=survey_spec,
- survey_enabled=True
- ), admin_user)
+ result = run_module(
+ 'tower_job_template',
+ dict(name='foo', playbook='helloworld.yml', project=project.name, inventory=inventory.name, survey_spec=survey_spec, survey_enabled=True),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert not result.get('changed', True), result
jt.refresh_from_db()
@@ -180,14 +171,11 @@ def test_job_template_with_survey_spec(run_module, admin_user, project, inventor
@pytest.mark.django_db
def test_job_template_with_wrong_survey_spec(run_module, admin_user, project, inventory, survey_spec):
- result = run_module('tower_job_template', dict(
- name='foo',
- playbook='helloworld.yml',
- project=project.name,
- inventory=inventory.name,
- survey_spec=survey_spec,
- survey_enabled=True
- ), admin_user)
+ result = run_module(
+ 'tower_job_template',
+ dict(name='foo', playbook='helloworld.yml', project=project.name, inventory=inventory.name, survey_spec=survey_spec, survey_enabled=True),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed', False), result
jt = JobTemplate.objects.get(pk=result['id'])
@@ -198,14 +186,11 @@ def test_job_template_with_wrong_survey_spec(run_module, admin_user, project, in
del survey_spec['description']
- result = run_module('tower_job_template', dict(
- name='foo',
- playbook='helloworld.yml',
- project=project.name,
- inventory=inventory.name,
- survey_spec=survey_spec,
- survey_enabled=True
- ), admin_user)
+ result = run_module(
+ 'tower_job_template',
+ dict(name='foo', playbook='helloworld.yml', project=project.name, inventory=inventory.name, survey_spec=survey_spec, survey_enabled=True),
+ admin_user,
+ )
assert result.get('failed', True)
assert result.get('msg') == "Failed to update survey: Field 'description' is missing from survey spec."
@@ -213,35 +198,23 @@ def test_job_template_with_wrong_survey_spec(run_module, admin_user, project, in
@pytest.mark.django_db
def test_job_template_with_survey_encrypted_default(run_module, admin_user, project, inventory, silence_warning):
spec = {
- "spec": [
- {
- "index": 0,
- "question_name": "my question?",
- "default": "very_secret_value",
- "variable": "myvar",
- "type": "password",
- "required": False
- }
- ],
+ "spec": [{"index": 0, "question_name": "my question?", "default": "very_secret_value", "variable": "myvar", "type": "password", "required": False}],
"description": "test",
- "name": "test"
+ "name": "test",
}
for i in range(2):
- result = run_module('tower_job_template', dict(
- name='foo',
- playbook='helloworld.yml',
- project=project.name,
- inventory=inventory.name,
- survey_spec=spec,
- survey_enabled=True
- ), admin_user)
+ result = run_module(
+ 'tower_job_template',
+ dict(name='foo', playbook='helloworld.yml', project=project.name, inventory=inventory.name, survey_spec=spec, survey_enabled=True),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed', False), result # not actually desired, but assert for sanity
silence_warning.assert_called_once_with(
- "The field survey_spec of job_template {0} has encrypted data and "
- "may inaccurately report task is changed.".format(result['id']))
+ "The field survey_spec of job_template {0} has encrypted data and " "may inaccurately report task is changed.".format(result['id'])
+ )
@pytest.mark.django_db
@@ -253,15 +226,9 @@ def test_associate_only_on_success(run_module, admin_user, organization, project
ask_inventory_on_launch=True,
)
create_kwargs = dict(
- notification_configuration={
- 'url': 'http://www.example.com/hook',
- 'headers': {
- 'X-Custom-Header': 'value123'
- },
- 'password': 'bar'
- },
+ notification_configuration={'url': 'http://www.example.com/hook', 'headers': {'X-Custom-Header': 'value123'}, 'password': 'bar'},
notification_type='webhook',
- organization=organization
+ organization=organization,
)
nt1 = NotificationTemplate.objects.create(name='nt1', **create_kwargs)
nt2 = NotificationTemplate.objects.create(name='nt2', **create_kwargs)
@@ -269,12 +236,9 @@ def test_associate_only_on_success(run_module, admin_user, organization, project
jt.notification_templates_error.add(nt1)
# test preservation of error NTs when success NTs are added
- result = run_module('tower_job_template', dict(
- name='foo',
- playbook='helloworld.yml',
- project=project.name,
- notification_templates_success=['nt2']
- ), admin_user)
+ result = run_module(
+ 'tower_job_template', dict(name='foo', playbook='helloworld.yml', project=project.name, notification_templates_success=['nt2']), admin_user
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed', True), result
@@ -282,12 +246,7 @@ def test_associate_only_on_success(run_module, admin_user, organization, project
assert list(jt.notification_templates_error.values_list('id', flat=True)) == [nt1.id]
# test removal to empty list
- result = run_module('tower_job_template', dict(
- name='foo',
- playbook='helloworld.yml',
- project=project.name,
- notification_templates_success=[]
- ), admin_user)
+ result = run_module('tower_job_template', dict(name='foo', playbook='helloworld.yml', project=project.name, notification_templates_success=[]), admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed', True), result
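
test_job_template_with_survey_encrypted_default above documents the one deliberate idempotency gap in this file: password-type survey defaults round-trip from the API as '$encrypted$', so a resubmitted spec can never compare equal to the stored one and the module warns instead. A minimal illustration of the mismatch:

    submitted = {'spec': [{'variable': 'myvar', 'type': 'password', 'default': 'very_secret_value'}]}
    stored = {'spec': [{'variable': 'myvar', 'type': 'password', 'default': '$encrypted$'}]}

    def has_encrypted_defaults(spec):
        # Any password question with a default is masked on read-back.
        return any(q.get('type') == 'password' and q.get('default') for q in spec.get('spec', []))

    # The module cannot tell whether the secret actually changed, hence the
    # warning and the changed=True reported on every run.
    assert has_encrypted_defaults(submitted) and submitted != stored
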
diff --git a/awx_collection/test/awx/test_label.py b/awx_collection/test/awx/test_label.py
index 9ede40f3aa..9797c66323 100644
--- a/awx_collection/test/awx/test_label.py
+++ b/awx_collection/test/awx/test_label.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -8,10 +9,7 @@ from awx.main.models import Label
@pytest.mark.django_db
def test_create_label(run_module, admin_user, organization):
- result = run_module('tower_label', dict(
- name='test-label',
- organization=organization.name
- ), admin_user)
+ result = run_module('tower_label', dict(name='test-label', organization=organization.name), admin_user)
assert not result.get('failed'), result.get('msg', result)
assert result.get('changed', False)
@@ -20,10 +18,7 @@ def test_create_label(run_module, admin_user, organization):
@pytest.mark.django_db
def test_create_label_using_org_id(run_module, admin_user, organization):
- result = run_module('tower_label', dict(
- name='test-label',
- organization=organization.id
- ), admin_user)
+ result = run_module('tower_label', dict(name='test-label', organization=organization.id), admin_user)
assert not result.get('failed'), result.get('msg', result)
assert result.get('changed', False)
@@ -34,11 +29,7 @@ def test_create_label_using_org_id(run_module, admin_user, organization):
def test_modify_label(run_module, admin_user, organization):
label = Label.objects.create(name='test-label', organization=organization)
- result = run_module('tower_label', dict(
- name='test-label',
- new_name='renamed-label',
- organization=organization.name
- ), admin_user)
+ result = run_module('tower_label', dict(name='test-label', new_name='renamed-label', organization=organization.name), admin_user)
assert not result.get('failed'), result.get('msg', result)
assert result.get('changed', False)
diff --git a/awx_collection/test/awx/test_module_utils.py b/awx_collection/test/awx/test_module_utils.py
index a1f0b77594..473bfe9457 100644
--- a/awx_collection/test/awx/test_module_utils.py
+++ b/awx_collection/test/awx/test_module_utils.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import json
@@ -41,9 +42,7 @@ def test_version_warning(collection_import, silence_warning):
my_module._COLLECTION_TYPE = "not-junk"
my_module.collection_to_version['not-junk'] = 'not-junk'
my_module.get_endpoint('ping')
- silence_warning.assert_called_once_with(
- 'You are running collection version 1.0.0 but connecting to tower version 1.2.3'
- )
+ silence_warning.assert_called_once_with('You are running collection version 1.0.0 but connecting to tower version 1.2.3')
def test_type_warning(collection_import, silence_warning):
@@ -57,20 +56,13 @@ def test_type_warning(collection_import, silence_warning):
my_module._COLLECTION_TYPE = "junk"
my_module.collection_to_version['junk'] = 'junk'
my_module.get_endpoint('ping')
- silence_warning.assert_called_once_with(
- 'You are using the junk version of this collection but connecting to not-junk'
- )
+ silence_warning.assert_called_once_with('You are using the junk version of this collection but connecting to not-junk')
def test_duplicate_config(collection_import, silence_warning):
# imports done here because of PATH issues unique to this test suite
TowerAPIModule = collection_import('plugins.module_utils.tower_api').TowerAPIModule
- data = {
- 'name': 'zigzoom',
- 'zig': 'zoom',
- 'tower_username': 'bob',
- 'tower_config_file': 'my_config'
- }
+ data = {'name': 'zigzoom', 'zig': 'zoom', 'tower_username': 'bob', 'tower_config_file': 'my_config'}
with mock.patch.object(TowerAPIModule, 'load_config') as mock_load:
argument_spec = dict(
@@ -95,13 +87,11 @@ def test_no_templated_values(collection_import):
"""
TowerAPIModule = collection_import('plugins.module_utils.tower_api').TowerAPIModule
assert TowerAPIModule._COLLECTION_VERSION == "0.0.1-devel", (
- 'The collection version is templated when the collection is built '
- 'and the code should retain the placeholder of "0.0.1-devel".'
+ 'The collection version is templated when the collection is built ' 'and the code should retain the placeholder of "0.0.1-devel".'
)
InventoryModule = collection_import('plugins.inventory.tower').InventoryModule
assert InventoryModule.NAME == 'awx.awx.tower', (
- 'The inventory plugin FQCN is templated when the collection is built '
- 'and the code should retain the default of awx.awx.'
+ 'The inventory plugin FQCN is templated when the collection is built ' 'and the code should retain the default of awx.awx.'
)
@@ -115,15 +105,10 @@ def test_conflicting_name_and_id(run_module, admin_user):
org_by_id = Organization.objects.create(name='foo')
slug = str(org_by_id.id)
org_by_name = Organization.objects.create(name=slug)
- result = run_module('tower_team', {
- 'name': 'foo_team', 'description': 'fooin around',
- 'organization': slug
- }, admin_user)
+ result = run_module('tower_team', {'name': 'foo_team', 'description': 'fooin around', 'organization': slug}, admin_user)
assert not result.get('failed', False), result.get('msg', result)
team = Team.objects.filter(name='foo_team').first()
- assert str(team.organization_id) == slug, (
- 'Lookup by id should be preferenced over name in cases of conflict.'
- )
+ assert str(team.organization_id) == slug, 'Lookup by id should be preferred over name in cases of conflict.'
assert team.organization.name == 'foo'
@@ -131,11 +116,21 @@ def test_multiple_lookup(run_module, admin_user):
org1 = Organization.objects.create(name='foo')
org2 = Organization.objects.create(name='bar')
inv = Inventory.objects.create(name='Foo Inv')
- proj1 = Project.objects.create(name='foo', organization=org1, scm_type='git', scm_url="https://github.com/ansible/ansible-tower-samples",)
- proj2 = Project.objects.create(name='foo', organization=org2, scm_type='git', scm_url="https://github.com/ansible/ansible-tower-samples",)
- result = run_module('tower_job_template', {
- 'name': 'Demo Job Template', 'project': proj1.name, 'inventory': inv.id, 'playbook': 'hello_world.yml'
- }, admin_user)
+ proj1 = Project.objects.create(
+ name='foo',
+ organization=org1,
+ scm_type='git',
+ scm_url="https://github.com/ansible/ansible-tower-samples",
+ )
+ proj2 = Project.objects.create(
+ name='foo',
+ organization=org2,
+ scm_type='git',
+ scm_url="https://github.com/ansible/ansible-tower-samples",
+ )
+ result = run_module(
+ 'tower_job_template', {'name': 'Demo Job Template', 'project': proj1.name, 'inventory': inv.id, 'playbook': 'hello_world.yml'}, admin_user
+ )
assert result.get('failed', False)
assert 'projects' in result['msg']
assert 'foo' in result['msg']
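
test_conflicting_name_and_id above fixes the resolution order when a value is simultaneously a valid primary key and another object's name: the id match wins. A standalone sketch of that rule (illustrative; the real lookup lives in the module utils):

    def resolve(value, objects):
        # objects maps primary key -> name; prefer an id match over a name match.
        if value.isdigit() and int(value) in objects:
            return int(value)
        matches = [pk for pk, name in objects.items() if name == value]
        return matches[0] if matches else None

    orgs = {1: 'foo', 2: '1'}  # organization 2 is literally named '1'
    assert resolve('1', orgs) == 1  # the id interpretation is preferred
    assert resolve('foo', orgs) == 1
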
diff --git a/awx_collection/test/awx/test_notification_template.py b/awx_collection/test/awx/test_notification_template.py
index cb1ffdca6b..cc3ca6518e 100644
--- a/awx_collection/test/awx/test_notification_template.py
+++ b/awx_collection/test/awx/test_notification_template.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -7,19 +8,17 @@ from awx.main.models import NotificationTemplate, Job
def compare_with_encrypted(model_config, param_config):
- '''Given a model_config from the database, assure that this is consistent
+ """Given a model_config from the database, assure that this is consistent
with the config given in the notification_configuration parameter;
this requires handling of password fields.
- '''
+ """
for key, model_val in model_config.items():
param_val = param_config.get(key, 'missing')
if isinstance(model_val, str) and (model_val.startswith('$encrypted$') or param_val.startswith('$encrypted$')):
assert model_val.startswith('$encrypted$') # must be saved as encrypted
assert len(model_val) > len('$encrypted$')
else:
- assert model_val == param_val, 'Config key {0} did not match, (model: {1}, input: {2})'.format(
- key, model_val, param_val
- )
+ assert model_val == param_val, 'Config key {0} did not match, (model: {1}, input: {2})'.format(key, model_val, param_val)
@pytest.mark.django_db
@@ -31,15 +30,20 @@ def test_create_modify_notification_template(run_module, admin_user, organizatio
'recipients': ['foo2@invalid.com'],
'host': 'smtp.example.com',
'port': 25,
- 'use_tls': False, 'use_ssl': False,
- 'timeout': 4
+ 'use_tls': False,
+ 'use_ssl': False,
+ 'timeout': 4,
}
- result = run_module('tower_notification_template', dict(
- name='foo-notification-template',
- organization=organization.name,
- notification_type='email',
- notification_configuration=nt_config,
- ), admin_user)
+ result = run_module(
+ 'tower_notification_template',
+ dict(
+ name='foo-notification-template',
+ organization=organization.name,
+ notification_type='email',
+ notification_configuration=nt_config,
+ ),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.pop('changed', None), result
@@ -49,22 +53,30 @@ def test_create_modify_notification_template(run_module, admin_user, organizatio
# Test no-op; this is impossible if the notification_configuration is given
# because we cannot determine if password fields changed
- result = run_module('tower_notification_template', dict(
- name='foo-notification-template',
- organization=organization.name,
- notification_type='email',
- ), admin_user)
+ result = run_module(
+ 'tower_notification_template',
+ dict(
+ name='foo-notification-template',
+ organization=organization.name,
+ notification_type='email',
+ ),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert not result.pop('changed', None), result
# Test a change in the configuration
nt_config['timeout'] = 12
- result = run_module('tower_notification_template', dict(
- name='foo-notification-template',
- organization=organization.name,
- notification_type='email',
- notification_configuration=nt_config,
- ), admin_user)
+ result = run_module(
+ 'tower_notification_template',
+ dict(
+ name='foo-notification-template',
+ organization=organization.name,
+ notification_type='email',
+ notification_configuration=nt_config,
+ ),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.pop('changed', None), result
@@ -74,39 +86,46 @@ def test_create_modify_notification_template(run_module, admin_user, organizatio
@pytest.mark.django_db
def test_invalid_notification_configuration(run_module, admin_user, organization):
- result = run_module('tower_notification_template', dict(
- name='foo-notification-template',
- organization=organization.name,
- notification_type='email',
- notification_configuration={},
- ), admin_user)
+ result = run_module(
+ 'tower_notification_template',
+ dict(
+ name='foo-notification-template',
+ organization=organization.name,
+ notification_type='email',
+ notification_configuration={},
+ ),
+ admin_user,
+ )
assert result.get('failed', False), result.get('msg', result)
assert 'Missing required fields for Notification Configuration' in result['msg']
@pytest.mark.django_db
def test_deprecated_to_modern_no_op(run_module, admin_user, organization):
- nt_config = {
- 'url': 'http://www.example.com/hook',
- 'headers': {
- 'X-Custom-Header': 'value123'
- }
- }
- result = run_module('tower_notification_template', dict(
- name='foo-notification-template',
- organization=organization.name,
- notification_type='webhook',
- notification_configuration=nt_config,
- ), admin_user)
+ nt_config = {'url': 'http://www.example.com/hook', 'headers': {'X-Custom-Header': 'value123'}}
+ result = run_module(
+ 'tower_notification_template',
+ dict(
+ name='foo-notification-template',
+ organization=organization.name,
+ notification_type='webhook',
+ notification_configuration=nt_config,
+ ),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.pop('changed', None), result
- result = run_module('tower_notification_template', dict(
- name='foo-notification-template',
- organization=organization.name,
- notification_type='webhook',
- notification_configuration=nt_config,
- ), admin_user)
+ result = run_module(
+ 'tower_notification_template',
+ dict(
+ name='foo-notification-template',
+ organization=organization.name,
+ notification_type='webhook',
+ notification_configuration=nt_config,
+ ),
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert not result.pop('changed', None), result
@@ -119,21 +138,20 @@ def test_build_notification_message_undefined(run_module, admin_user, organizati
like "{{ job.created_by.first_name | default('unknown') }}"."""
job = Job.objects.create(name='foobar')
- nt_config = {
- 'url': 'http://www.example.com/hook',
- 'headers': {
- 'X-Custom-Header': 'value123'
- }
- }
+ nt_config = {'url': 'http://www.example.com/hook', 'headers': {'X-Custom-Header': 'value123'}}
custom_start_template = {'body': '{"started_by": "{{ job.summary_fields.created_by.username | default(\'My Placeholder\') }}"}'}
messages = {'started': custom_start_template, 'success': None, 'error': None, 'workflow_approval': None}
- result = run_module('tower_notification_template', dict(
- name='foo-notification-template',
- organization=organization.name,
- notification_type='webhook',
- notification_configuration=nt_config,
- messages=messages,
- ), admin_user)
+ result = run_module(
+ 'tower_notification_template',
+ dict(
+ name='foo-notification-template',
+ organization=organization.name,
+ notification_type='webhook',
+ notification_configuration=nt_config,
+ messages=messages,
+ ),
+ admin_user,
+ )
nt = NotificationTemplate.objects.get(id=result['id'])
body = job.build_notification_message(nt, 'running')
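
A usage sketch for the compare_with_encrypted helper defined at the top of this file (condensed here so the snippet runs standalone): masked keys only need to carry the '$encrypted$' marker, while every other key must match exactly.

    def compare_with_encrypted(model_config, param_config):
        for key, model_val in model_config.items():
            param_val = param_config.get(key, 'missing')
            if isinstance(model_val, str) and (model_val.startswith('$encrypted$') or str(param_val).startswith('$encrypted$')):
                assert model_val.startswith('$encrypted$')  # must be saved as encrypted
                assert len(model_val) > len('$encrypted$')
            else:
                assert model_val == param_val, 'Config key {0} did not match, (model: {1}, input: {2})'.format(key, model_val, param_val)

    compare_with_encrypted(
        {'host': 'smtp.example.com', 'port': 25, 'password': '$encrypted$abc123'},
        {'host': 'smtp.example.com', 'port': 25, 'password': 'sekrit'},
    )
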
diff --git a/awx_collection/test/awx/test_organization.py b/awx_collection/test/awx/test_organization.py
index ee58ab3a2c..8777b8c309 100644
--- a/awx_collection/test/awx/test_organization.py
+++ b/awx_collection/test/awx/test_organization.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -26,13 +27,6 @@ def test_create_organization(run_module, admin_user):
assert result.get('changed'), result
org = Organization.objects.get(name='foo')
- assert result == {
- "name": "foo",
- "changed": True,
- "id": org.id,
- "invocation": {
- "module_args": module_args
- }
- }
+ assert result == {"name": "foo", "changed": True, "id": org.id, "invocation": {"module_args": module_args}}
assert org.description == 'barfoo'
diff --git a/awx_collection/test/awx/test_project.py b/awx_collection/test/awx/test_project.py
index 9ef1596d3f..43046a8564 100644
--- a/awx_collection/test/awx/test_project.py
+++ b/awx_collection/test/awx/test_project.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -8,17 +9,12 @@ from awx.main.models import Project
@pytest.mark.django_db
def test_create_project(run_module, admin_user, organization, silence_warning):
- result = run_module('tower_project', dict(
- name='foo',
- organization=organization.name,
- scm_type='git',
- scm_url='https://foo.invalid',
- wait=False,
- scm_update_cache_timeout=5
- ), admin_user)
- silence_warning.assert_called_once_with(
- 'scm_update_cache_timeout will be ignored since scm_update_on_launch '
- 'was not set to true')
+ result = run_module(
+ 'tower_project',
+ dict(name='foo', organization=organization.name, scm_type='git', scm_url='https://foo.invalid', wait=False, scm_update_cache_timeout=5),
+ admin_user,
+ )
+ silence_warning.assert_called_once_with('scm_update_cache_timeout will be ignored since scm_update_on_launch ' 'was not set to true')
assert result.pop('changed', None), result
@@ -27,7 +23,4 @@ def test_create_project(run_module, admin_user, organization, silence_warning):
assert proj.organization == organization
result.pop('invocation')
- assert result == {
- 'name': 'foo',
- 'id': proj.id
- }
+ assert result == {'name': 'foo', 'id': proj.id}
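
The warning asserted in test_create_project encodes a simple dependency between two options; a minimal sketch of the check (illustrative, not the module's code):

    def cache_timeout_warning(params):
        # The cache timeout only takes effect when updates run on launch.
        if params.get('scm_update_cache_timeout') and not params.get('scm_update_on_launch'):
            return 'scm_update_cache_timeout will be ignored since scm_update_on_launch was not set to true'
        return None

    assert cache_timeout_warning({'scm_update_cache_timeout': 5}) is not None
    assert cache_timeout_warning({'scm_update_cache_timeout': 5, 'scm_update_on_launch': True}) is None
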
diff --git a/awx_collection/test/awx/test_role.py b/awx_collection/test/awx/test_role.py
index fcf8bf5900..5580c6b577 100644
--- a/awx_collection/test/awx/test_role.py
+++ b/awx_collection/test/awx/test_role.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -13,12 +14,7 @@ def test_grant_organization_permission(run_module, admin_user, organization, sta
if state == 'absent':
organization.admin_role.members.add(rando)
- result = run_module('tower_role', {
- 'user': rando.username,
- 'organization': organization.name,
- 'role': 'admin',
- 'state': state
- }, admin_user)
+ result = run_module('tower_role', {'user': rando.username, 'organization': organization.name, 'role': 'admin', 'state': state}, admin_user)
assert not result.get('failed', False), result.get('msg', result)
if state == 'present':
@@ -35,12 +31,7 @@ def test_grant_workflow_permission(run_module, admin_user, organization, state):
if state == 'absent':
wfjt.execute_role.members.add(rando)
- result = run_module('tower_role', {
- 'user': rando.username,
- 'workflow': wfjt.name,
- 'role': 'execute',
- 'state': state
- }, admin_user)
+ result = run_module('tower_role', {'user': rando.username, 'workflow': wfjt.name, 'role': 'execute', 'state': state}, admin_user)
assert not result.get('failed', False), result.get('msg', result)
if state == 'present':
@@ -57,13 +48,11 @@ def test_grant_workflow_list_permission(run_module, admin_user, organization, st
if state == 'absent':
wfjt.execute_role.members.add(rando)
- result = run_module('tower_role', {
- 'user': rando.username,
- 'lookup_organization': wfjt.organization.name,
- 'workflows': [wfjt.name],
- 'role': 'execute',
- 'state': state
- }, admin_user)
+ result = run_module(
+ 'tower_role',
+ {'user': rando.username, 'lookup_organization': wfjt.organization.name, 'workflows': [wfjt.name], 'role': 'execute', 'state': state},
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
if state == 'present':
@@ -80,12 +69,7 @@ def test_grant_workflow_approval_permission(run_module, admin_user, organization
if state == 'absent':
wfjt.execute_role.members.add(rando)
- result = run_module('tower_role', {
- 'user': rando.username,
- 'workflow': wfjt.name,
- 'role': 'approval',
- 'state': state
- }, admin_user)
+ result = run_module('tower_role', {'user': rando.username, 'workflow': wfjt.name, 'role': 'approval', 'state': state}, admin_user)
assert not result.get('failed', False), result.get('msg', result)
if state == 'present':
@@ -97,12 +81,7 @@ def test_grant_workflow_approval_permission(run_module, admin_user, organization
@pytest.mark.django_db
def test_invalid_role(run_module, admin_user, project):
rando = User.objects.create(username='rando')
- result = run_module('tower_role', {
- 'user': rando.username,
- 'project': project.name,
- 'role': 'adhoc',
- 'state': 'present'
- }, admin_user)
+ result = run_module('tower_role', {'user': rando.username, 'project': project.name, 'role': 'adhoc', 'state': 'present'}, admin_user)
assert result.get('failed', False)
msg = result.get('msg')
assert 'has no role adhoc_role' in msg
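
Each tower_role test above follows the same grant/revoke symmetry: 'present' adds the user to the role's member list, 'absent' removes them, and the fixtures pre-seed membership so the 'absent' path has something to undo. A standalone sketch of that contract:

    def apply_role(members, user, state):
        # Returns True when membership actually changed, mirroring changed=True.
        if state == 'present' and user not in members:
            members.add(user)
            return True
        if state == 'absent' and user in members:
            members.remove(user)
            return True
        return False

    members = set()
    assert apply_role(members, 'rando', 'present') is True
    assert apply_role(members, 'rando', 'present') is False  # already granted
    assert apply_role(members, 'rando', 'absent') is True
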
diff --git a/awx_collection/test/awx/test_schedule.py b/awx_collection/test/awx/test_schedule.py
index 7a58892dcf..1dfb150607 100644
--- a/awx_collection/test/awx/test_schedule.py
+++ b/awx_collection/test/awx/test_schedule.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -12,11 +13,7 @@ from awx.api.serializers import SchedulePreviewSerializer
@pytest.mark.django_db
def test_create_schedule(run_module, job_template, admin_user):
my_rrule = 'DTSTART;TZID=Zulu:20200416T034507 RRULE:FREQ=MONTHLY;INTERVAL=1'
- result = run_module('tower_schedule', {
- 'name': 'foo_schedule',
- 'unified_job_template': job_template.name,
- 'rrule': my_rrule
- }, admin_user)
+ result = run_module('tower_schedule', {'name': 'foo_schedule', 'unified_job_template': job_template.name, 'rrule': my_rrule}, admin_user)
assert not result.get('failed', False), result.get('msg', result)
schedule = Schedule.objects.filter(name='foo_schedule').first()
@@ -27,32 +24,49 @@ def test_create_schedule(run_module, job_template, admin_user):
assert schedule.rrule == my_rrule
-@pytest.mark.parametrize("freq, kwargs, expect", [
- # Test with a valid start date (no time) (also tests none frequency and count)
- ('none', {'start_date': '2020-04-16'}, 'DTSTART;TZID=America/New_York:20200416T000000 RRULE:FREQ=DAILY;COUNT=1;INTERVAL=1'),
- # Test with a valid start date and time
- ('none', {'start_date': '2020-04-16 03:45:07'}, 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=DAILY;COUNT=1;INTERVAL=1'),
- # Test end_on as count (also integration test)
- ('minute', {'start_date': '2020-4-16 03:45:07', 'end_on': '2'}, 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=MINUTELY;COUNT=2;INTERVAL=1'),
- # Test end_on as date
- ('minute', {'start_date': '2020-4-16 03:45:07', 'end_on': '2020-4-17 03:45:07'},
- 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=MINUTELY;UNTIL=20200417T034507;INTERVAL=1'),
- # Test on_days as a single day
- ('week', {'start_date': '2020-4-16 03:45:07', 'on_days': 'saturday'},
- 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=WEEKLY;BYDAY=SA;INTERVAL=1'),
- # Test on_days as multiple days (with some whitespaces)
- ('week', {'start_date': '2020-4-16 03:45:07', 'on_days': 'saturday,monday , friday'},
- 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=WEEKLY;BYDAY=MO,FR,SA;INTERVAL=1'),
- # Test valid month_day_number
- ('month', {'start_date': '2020-4-16 03:45:07', 'month_day_number': '18'},
- 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=MONTHLY;BYMONTHDAY=18;INTERVAL=1'),
- # Test a valid on_the
- ('month', {'start_date': '2020-4-16 03:45:07', 'on_the': 'second sunday'},
- 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=MONTHLY;BYSETPOS=2;BYDAY=SU;INTERVAL=1'),
- # Test an valid timezone
- ('month', {'start_date': '2020-4-16 03:45:07', 'timezone': 'Zulu'},
- 'DTSTART;TZID=Zulu:20200416T034507 RRULE:FREQ=MONTHLY;INTERVAL=1'),
-])
+@pytest.mark.parametrize(
+ "freq, kwargs, expect",
+ [
+ # Test with a valid start date (no time) (also tests none frequency and count)
+ ('none', {'start_date': '2020-04-16'}, 'DTSTART;TZID=America/New_York:20200416T000000 RRULE:FREQ=DAILY;COUNT=1;INTERVAL=1'),
+ # Test with a valid start date and time
+ ('none', {'start_date': '2020-04-16 03:45:07'}, 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=DAILY;COUNT=1;INTERVAL=1'),
+ # Test end_on as count (also integration test)
+ ('minute', {'start_date': '2020-4-16 03:45:07', 'end_on': '2'}, 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=MINUTELY;COUNT=2;INTERVAL=1'),
+ # Test end_on as date
+ (
+ 'minute',
+ {'start_date': '2020-4-16 03:45:07', 'end_on': '2020-4-17 03:45:07'},
+ 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=MINUTELY;UNTIL=20200417T034507;INTERVAL=1',
+ ),
+ # Test on_days as a single day
+ (
+ 'week',
+ {'start_date': '2020-4-16 03:45:07', 'on_days': 'saturday'},
+ 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=WEEKLY;BYDAY=SA;INTERVAL=1',
+ ),
+ # Test on_days as multiple days (with some whitespaces)
+ (
+ 'week',
+ {'start_date': '2020-4-16 03:45:07', 'on_days': 'saturday,monday , friday'},
+ 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=WEEKLY;BYDAY=MO,FR,SA;INTERVAL=1',
+ ),
+ # Test valid month_day_number
+ (
+ 'month',
+ {'start_date': '2020-4-16 03:45:07', 'month_day_number': '18'},
+ 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=MONTHLY;BYMONTHDAY=18;INTERVAL=1',
+ ),
+ # Test a valid on_the
+ (
+ 'month',
+ {'start_date': '2020-4-16 03:45:07', 'on_the': 'second sunday'},
+ 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=MONTHLY;BYSETPOS=2;BYDAY=SU;INTERVAL=1',
+ ),
+ # Test a valid timezone
+ ('month', {'start_date': '2020-4-16 03:45:07', 'timezone': 'Zulu'}, 'DTSTART;TZID=Zulu:20200416T034507 RRULE:FREQ=MONTHLY;INTERVAL=1'),
+ ],
+)
def test_rrule_lookup_plugin(collection_import, freq, kwargs, expect):
LookupModule = collection_import('plugins.lookup.tower_schedule_rrule').LookupModule
generated_rule = LookupModule.get_rrule(freq, kwargs)
@@ -75,31 +89,39 @@ def test_empty_schedule_rrule(collection_import, freq):
assert LookupModule.get_rrule(freq, {}).endswith(' RRULE:FREQ={0};INTERVAL=1'.format(pfreq))
-@pytest.mark.parametrize("freq, kwargs, msg", [
- # Test end_on as junk
- ('minute', {'start_date': '2020-4-16 03:45:07', 'end_on': 'junk'},
- 'Parameter end_on must either be an integer or in the format YYYY-MM-DD'),
- # Test on_days as junk
- ('week', {'start_date': '2020-4-16 03:45:07', 'on_days': 'junk'},
- 'Parameter on_days must only contain values monday, tuesday, wednesday, thursday, friday, saturday, sunday'),
- # Test combo of both month_day_number and on_the
- ('month', dict(start_date='2020-4-16 03:45:07', on_the='something', month_day_number='else'),
- "Month based frequencies can have month_day_number or on_the but not both"),
- # Test month_day_number as not an integer
- ('month', dict(start_date='2020-4-16 03:45:07', month_day_number='junk'), "month_day_number must be between 1 and 31"),
- # Test month_day_number < 1
- ('month', dict(start_date='2020-4-16 03:45:07', month_day_number='0'), "month_day_number must be between 1 and 31"),
- # Test month_day_number > 31
- ('month', dict(start_date='2020-4-16 03:45:07', month_day_number='32'), "month_day_number must be between 1 and 31"),
- # Test on_the as junk
- ('month', dict(start_date='2020-4-16 03:45:07', on_the='junk'), "on_the parameter must be two words separated by a space"),
- # Test on_the with invalid occurrence
- ('month', dict(start_date='2020-4-16 03:45:07', on_the='junk wednesday'), "The first string of the on_the parameter is not valid"),
- # Test on_the with invalid weekday
- ('month', dict(start_date='2020-4-16 03:45:07', on_the='second junk'), "Weekday portion of on_the parameter is not valid"),
- # Test an invalid timezone
- ('month', dict(start_date='2020-4-16 03:45:07', timezone='junk'), 'Timezone parameter is not valid'),
-])
+@pytest.mark.parametrize(
+ "freq, kwargs, msg",
+ [
+ # Test end_on as junk
+ ('minute', {'start_date': '2020-4-16 03:45:07', 'end_on': 'junk'}, 'Parameter end_on must either be an integer or in the format YYYY-MM-DD'),
+ # Test on_days as junk
+ (
+ 'week',
+ {'start_date': '2020-4-16 03:45:07', 'on_days': 'junk'},
+ 'Parameter on_days must only contain values monday, tuesday, wednesday, thursday, friday, saturday, sunday',
+ ),
+ # Test combo of both month_day_number and on_the
+ (
+ 'month',
+ dict(start_date='2020-4-16 03:45:07', on_the='something', month_day_number='else'),
+ "Month based frequencies can have month_day_number or on_the but not both",
+ ),
+ # Test month_day_number as not an integer
+ ('month', dict(start_date='2020-4-16 03:45:07', month_day_number='junk'), "month_day_number must be between 1 and 31"),
+ # Test month_day_number < 1
+ ('month', dict(start_date='2020-4-16 03:45:07', month_day_number='0'), "month_day_number must be between 1 and 31"),
+ # Test month_day_number > 31
+ ('month', dict(start_date='2020-4-16 03:45:07', month_day_number='32'), "month_day_number must be between 1 and 31"),
+ # Test on_the as junk
+ ('month', dict(start_date='2020-4-16 03:45:07', on_the='junk'), "on_the parameter must be two words separated by a space"),
+ # Test on_the with invalid occurrence
+ ('month', dict(start_date='2020-4-16 03:45:07', on_the='junk wednesday'), "The first string of the on_the parameter is not valid"),
+ # Test on_the with invalid weekday
+ ('month', dict(start_date='2020-4-16 03:45:07', on_the='second junk'), "Weekday portion of on_the parameter is not valid"),
+ # Test an invalid timezone
+ ('month', dict(start_date='2020-4-16 03:45:07', timezone='junk'), 'Timezone parameter is not valid'),
+ ],
+)
def test_rrule_lookup_plugin_failure(collection_import, freq, kwargs, msg):
LookupModule = collection_import('plugins.lookup.tower_schedule_rrule').LookupModule
with pytest.raises(AnsibleError) as e:
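The expected strings in the passing cases above are plain iCalendar recurrence rules, so they can be sanity-checked outside of AWX. Below is a minimal sketch (not part of this patch) that expands one of them with python-dateutil; it assumes dateutil >= 2.7 for TZID support in DTSTART, and swaps the plugin's single-space separator for the newline that rrulestr expects:

    from dateutil import rrule

    generated = 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=MINUTELY;COUNT=2;INTERVAL=1'
    rule = rrule.rrulestr(generated.replace(' RRULE:', '\nRRULE:'))
    print(list(rule))
    # two occurrences, one minute apart, in America/New_York (EDT):
    # 2020-04-16 03:45:07-04:00 and 2020-04-16 03:46:07-04:00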
diff --git a/awx_collection/test/awx/test_send_receive.py b/awx_collection/test/awx/test_send_receive.py
index 14f3c89426..c8a9b927e3 100644
--- a/awx_collection/test/awx/test_send_receive.py
+++ b/awx_collection/test/awx/test_send_receive.py
@@ -1,18 +1,11 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
import json
-from awx.main.models import (
- Organization,
- Project,
- Inventory,
- Host,
- CredentialType,
- Credential,
- JobTemplate
-)
+from awx.main.models import Organization, Project, Inventory, Host, CredentialType, Credential, JobTemplate
# warns based on password_management param, but not security issue
@@ -25,23 +18,14 @@ def test_receive_send_jt(run_module, admin_user, mocker, silence_deprecation):
scm_type='git',
scm_url='https://github.com/ansible/test-playbooks.git',
organization=org,
- allow_override=True # so we do not require playbooks populated
+ allow_override=True, # so we do not require playbooks populated
)
inv = Inventory.objects.create(name='SRtest', organization=org)
Host.objects.create(name='SRtest', inventory=inv)
ct = CredentialType.defaults['ssh']()
ct.save()
- cred = Credential.objects.create(
- name='SRtest',
- credential_type=ct,
- organization=org
- )
- jt = JobTemplate.objects.create(
- name='SRtest',
- project=proj,
- inventory=inv,
- playbook='helloworld.yml'
- )
+ cred = Credential.objects.create(name='SRtest', credential_type=ct, organization=org)
+ jt = JobTemplate.objects.create(name='SRtest', project=proj, inventory=inv, playbook='helloworld.yml')
jt.credentials.add(cred)
jt.admin_role.members.add(admin_user) # work around send/receive bug
@@ -51,10 +35,7 @@ def test_receive_send_jt(run_module, admin_user, mocker, silence_deprecation):
assert 'assets' in result, result
assets = result['assets']
assert not result.get('changed', True)
- assert set(a['asset_type'] for a in assets) == set((
- 'organization', 'inventory', 'job_template', 'credential', 'project',
- 'user'
- ))
+ assert set(a['asset_type'] for a in assets) == set(('organization', 'inventory', 'job_template', 'credential', 'project', 'user'))
# delete everything
for obj in (jt, inv, proj, cred, org):
diff --git a/awx_collection/test/awx/test_settings.py b/awx_collection/test/awx/test_settings.py
index e39d7eaa0b..241dafcfaa 100644
--- a/awx_collection/test/awx/test_settings.py
+++ b/awx_collection/test/awx/test_settings.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -9,10 +10,7 @@ from awx.conf.models import Setting
@pytest.mark.django_db
def test_setting_flat_value(run_module, admin_user):
the_value = 'CN=service_account,OU=ServiceAccounts,DC=domain,DC=company,DC=org'
- result = run_module('tower_settings', dict(
- name='AUTH_LDAP_BIND_DN',
- value=the_value
- ), admin_user)
+ result = run_module('tower_settings', dict(name='AUTH_LDAP_BIND_DN', value=the_value), admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
@@ -21,15 +19,8 @@ def test_setting_flat_value(run_module, admin_user):
@pytest.mark.django_db
def test_setting_dict_value(run_module, admin_user):
- the_value = {
- 'email': 'mail',
- 'first_name': 'givenName',
- 'last_name': 'surname'
- }
- result = run_module('tower_settings', dict(
- name='AUTH_LDAP_USER_ATTR_MAP',
- value=the_value
- ), admin_user)
+ the_value = {'email': 'mail', 'first_name': 'givenName', 'last_name': 'surname'}
+ result = run_module('tower_settings', dict(name='AUTH_LDAP_USER_ATTR_MAP', value=the_value), admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
@@ -38,16 +29,8 @@ def test_setting_dict_value(run_module, admin_user):
@pytest.mark.django_db
def test_setting_nested_type(run_module, admin_user):
- the_value = {
- 'email': 'mail',
- 'first_name': 'givenName',
- 'last_name': 'surname'
- }
- result = run_module('tower_settings', dict(
- settings={
- 'AUTH_LDAP_USER_ATTR_MAP': the_value
- }
- ), admin_user)
+ the_value = {'email': 'mail', 'first_name': 'givenName', 'last_name': 'surname'}
+ result = run_module('tower_settings', dict(settings={'AUTH_LDAP_USER_ATTR_MAP': the_value}), admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
@@ -57,10 +40,7 @@ def test_setting_nested_type(run_module, admin_user):
@pytest.mark.django_db
def test_setting_bool_value(run_module, admin_user):
for the_value in (True, False):
- result = run_module('tower_settings', dict(
- name='ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC',
- value=the_value
- ), admin_user)
+ result = run_module('tower_settings', dict(name='ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC', value=the_value), admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
diff --git a/awx_collection/test/awx/test_team.py b/awx_collection/test/awx/test_team.py
index ccc164dcdf..e6c069ed74 100644
--- a/awx_collection/test/awx/test_team.py
+++ b/awx_collection/test/awx/test_team.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -10,12 +11,7 @@ from awx.main.models import Organization, Team
def test_create_team(run_module, admin_user):
org = Organization.objects.create(name='foo')
- result = run_module('tower_team', {
- 'name': 'foo_team',
- 'description': 'fooin around',
- 'state': 'present',
- 'organization': 'foo'
- }, admin_user)
+ result = run_module('tower_team', {'name': 'foo_team', 'description': 'fooin around', 'state': 'present', 'organization': 'foo'}, admin_user)
team = Team.objects.filter(name='foo_team').first()
@@ -33,18 +29,10 @@ def test_create_team(run_module, admin_user):
@pytest.mark.django_db
def test_modify_team(run_module, admin_user):
org = Organization.objects.create(name='foo')
- team = Team.objects.create(
- name='foo_team',
- organization=org,
- description='flat foo'
- )
+ team = Team.objects.create(name='foo_team', organization=org, description='flat foo')
assert team.description == 'flat foo'
- result = run_module('tower_team', {
- 'name': 'foo_team',
- 'description': 'fooin around',
- 'organization': 'foo'
- }, admin_user)
+ result = run_module('tower_team', {'name': 'foo_team', 'description': 'fooin around', 'organization': 'foo'}, admin_user)
team.refresh_from_db()
result.pop('invocation')
assert result == {
@@ -54,13 +42,6 @@ def test_modify_team(run_module, admin_user):
assert team.description == 'fooin around'
# 2nd modification, should cause no change
- result = run_module('tower_team', {
- 'name': 'foo_team',
- 'description': 'fooin around',
- 'organization': 'foo'
- }, admin_user)
+ result = run_module('tower_team', {'name': 'foo_team', 'description': 'fooin around', 'organization': 'foo'}, admin_user)
result.pop('invocation')
- assert result == {
- "id": team.id,
- "changed": False
- }
+ assert result == {"id": team.id, "changed": False}
diff --git a/awx_collection/test/awx/test_token.py b/awx_collection/test/awx/test_token.py
index 442fa2e9fb..9633ff2024 100644
--- a/awx_collection/test/awx/test_token.py
+++ b/awx_collection/test/awx/test_token.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
diff --git a/awx_collection/test/awx/test_user.py b/awx_collection/test/awx/test_user.py
index 6a0dfa123d..49b857576b 100644
--- a/awx_collection/test/awx/test_user.py
+++ b/awx_collection/test/awx/test_user.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -19,10 +20,7 @@ def mock_auth_stuff():
@pytest.mark.django_db
def test_create_user(run_module, admin_user, mock_auth_stuff):
- result = run_module('tower_user', dict(
- username='Bob',
- password='pass4word'
- ), admin_user)
+ result = run_module('tower_user', dict(username='Bob', password='pass4word'), admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
@@ -33,27 +31,20 @@ def test_create_user(run_module, admin_user, mock_auth_stuff):
@pytest.mark.django_db
def test_password_no_op_warning(run_module, admin_user, mock_auth_stuff, silence_warning):
for i in range(2):
- result = run_module('tower_user', dict(
- username='Bob',
- password='pass4word'
- ), admin_user)
+ result = run_module('tower_user', dict(username='Bob', password='pass4word'), admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed') # not actually desired, but assert for sanity
silence_warning.assert_called_once_with(
- "The field password of user {0} has encrypted data and "
- "may inaccurately report task is changed.".format(result['id']))
+ "The field password of user {0} has encrypted data and " "may inaccurately report task is changed.".format(result['id'])
+ )
@pytest.mark.django_db
def test_update_password_on_create(run_module, admin_user, mock_auth_stuff):
for i in range(2):
- result = run_module('tower_user', dict(
- username='Bob',
- password='pass4word',
- update_secrets=False
- ), admin_user)
+ result = run_module('tower_user', dict(username='Bob', password='pass4word', update_secrets=False), admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert not result.get('changed')
@@ -61,18 +52,11 @@ def test_update_password_on_create(run_module, admin_user, mock_auth_stuff):
@pytest.mark.django_db
def test_update_user(run_module, admin_user, mock_auth_stuff):
- result = run_module('tower_user', dict(
- username='Bob',
- password='pass4word',
- is_system_auditor=True
- ), admin_user)
+ result = run_module('tower_user', dict(username='Bob', password='pass4word', is_system_auditor=True), admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
- update_result = run_module('tower_user', dict(
- username='Bob',
- is_system_auditor=False
- ), admin_user)
+ update_result = run_module('tower_user', dict(username='Bob', is_system_auditor=False), admin_user)
assert update_result.get('changed')
user = User.objects.get(id=result['id'])
diff --git a/awx_collection/test/awx/test_workflow_job_template.py b/awx_collection/test/awx/test_workflow_job_template.py
index 9e5d914cfb..0ad0d12aa6 100644
--- a/awx_collection/test/awx/test_workflow_job_template.py
+++ b/awx_collection/test/awx/test_workflow_job_template.py
@@ -1,4 +1,5 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -8,14 +9,18 @@ from awx.main.models import WorkflowJobTemplate, NotificationTemplate
@pytest.mark.django_db
def test_create_workflow_job_template(run_module, admin_user, organization, survey_spec):
- result = run_module('tower_workflow_job_template', {
- 'name': 'foo-workflow',
- 'organization': organization.name,
- 'extra_vars': {'foo': 'bar', 'another-foo': {'barz': 'bar2'}},
- 'survey_spec': survey_spec,
- 'survey_enabled': True,
- 'state': 'present'
- }, admin_user)
+ result = run_module(
+ 'tower_workflow_job_template',
+ {
+ 'name': 'foo-workflow',
+ 'organization': organization.name,
+ 'extra_vars': {'foo': 'bar', 'another-foo': {'barz': 'bar2'}},
+ 'survey_spec': survey_spec,
+ 'survey_enabled': True,
+ 'state': 'present',
+ },
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
wfjt = WorkflowJobTemplate.objects.get(name='foo-workflow')
@@ -30,10 +35,7 @@ def test_create_workflow_job_template(run_module, admin_user, organization, surv
@pytest.mark.django_db
def test_create_modify_no_survey(run_module, admin_user, organization, survey_spec):
- result = run_module('tower_workflow_job_template', {
- 'name': 'foo-workflow',
- 'organization': organization.name
- }, admin_user)
+ result = run_module('tower_workflow_job_template', {'name': 'foo-workflow', 'organization': organization.name}, admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed', False), result
@@ -43,25 +45,15 @@ def test_create_modify_no_survey(run_module, admin_user, organization, survey_sp
result.pop('invocation', None)
assert result == {"name": "foo-workflow", "id": wfjt.id, "changed": True}
- result = run_module('tower_workflow_job_template', {
- 'name': 'foo-workflow',
- 'organization': organization.name
- }, admin_user)
+ result = run_module('tower_workflow_job_template', {'name': 'foo-workflow', 'organization': organization.name}, admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert not result.get('changed', True), result
@pytest.mark.django_db
def test_survey_spec_only_changed(run_module, admin_user, organization, survey_spec):
- wfjt = WorkflowJobTemplate.objects.create(
- organization=organization, name='foo-workflow',
- survey_enabled=True, survey_spec=survey_spec
- )
- result = run_module('tower_workflow_job_template', {
- 'name': 'foo-workflow',
- 'organization': organization.name,
- 'state': 'present'
- }, admin_user)
+ wfjt = WorkflowJobTemplate.objects.create(organization=organization, name='foo-workflow', survey_enabled=True, survey_spec=survey_spec)
+ result = run_module('tower_workflow_job_template', {'name': 'foo-workflow', 'organization': organization.name, 'state': 'present'}, admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert not result.get('changed', True), result
wfjt.refresh_from_db()
@@ -69,12 +61,9 @@ def test_survey_spec_only_changed(run_module, admin_user, organization, survey_s
survey_spec['description'] = 'changed description'
- result = run_module('tower_workflow_job_template', {
- 'name': 'foo-workflow',
- 'organization': organization.name,
- 'survey_spec': survey_spec,
- 'state': 'present'
- }, admin_user)
+ result = run_module(
+ 'tower_workflow_job_template', {'name': 'foo-workflow', 'organization': organization.name, 'survey_spec': survey_spec, 'state': 'present'}, admin_user
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed', True), result
wfjt.refresh_from_db()
@@ -83,15 +72,8 @@ def test_survey_spec_only_changed(run_module, admin_user, organization, survey_s
@pytest.mark.django_db
def test_survey_spec_only_changed(run_module, admin_user, organization, survey_spec):
- wfjt = WorkflowJobTemplate.objects.create(
- organization=organization, name='foo-workflow',
- survey_enabled=True, survey_spec=survey_spec
- )
- result = run_module('tower_workflow_job_template', {
- 'name': 'foo-workflow',
- 'organization': organization.name,
- 'state': 'present'
- }, admin_user)
+ wfjt = WorkflowJobTemplate.objects.create(organization=organization, name='foo-workflow', survey_enabled=True, survey_spec=survey_spec)
+ result = run_module('tower_workflow_job_template', {'name': 'foo-workflow', 'organization': organization.name, 'state': 'present'}, admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert not result.get('changed', True), result
wfjt.refresh_from_db()
@@ -99,12 +81,9 @@ def test_survey_spec_only_changed(run_module, admin_user, organization, survey_s
del survey_spec['description']
- result = run_module('tower_workflow_job_template', {
- 'name': 'foo-workflow',
- 'organization': organization.name,
- 'survey_spec': survey_spec,
- 'state': 'present'
- }, admin_user)
+ result = run_module(
+ 'tower_workflow_job_template', {'name': 'foo-workflow', 'organization': organization.name, 'survey_spec': survey_spec, 'state': 'present'}, admin_user
+ )
assert result.get('failed', True)
assert result.get('msg') == "Failed to update survey: Field 'description' is missing from survey spec."
@@ -112,19 +91,14 @@ def test_survey_spec_only_changed(run_module, admin_user, organization, survey_s
@pytest.mark.django_db
def test_associate_only_on_success(run_module, admin_user, organization, project):
wfjt = WorkflowJobTemplate.objects.create(
- organization=organization, name='foo-workflow',
+ organization=organization,
+ name='foo-workflow',
# survey_enabled=True, survey_spec=survey_spec
)
create_kwargs = dict(
- notification_configuration={
- 'url': 'http://www.example.com/hook',
- 'headers': {
- 'X-Custom-Header': 'value123'
- },
- 'password': 'bar'
- },
+ notification_configuration={'url': 'http://www.example.com/hook', 'headers': {'X-Custom-Header': 'value123'}, 'password': 'bar'},
notification_type='webhook',
- organization=organization
+ organization=organization,
)
nt1 = NotificationTemplate.objects.create(name='nt1', **create_kwargs)
nt2 = NotificationTemplate.objects.create(name='nt2', **create_kwargs)
@@ -132,11 +106,9 @@ def test_associate_only_on_success(run_module, admin_user, organization, project
wfjt.notification_templates_error.add(nt1)
# test preservation of error NTs when success NTs are added
- result = run_module('tower_workflow_job_template', {
- 'name': 'foo-workflow',
- 'organization': organization.name,
- 'notification_templates_success': ['nt2']
- }, admin_user)
+ result = run_module(
+ 'tower_workflow_job_template', {'name': 'foo-workflow', 'organization': organization.name, 'notification_templates_success': ['nt2']}, admin_user
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed', True), result
@@ -144,11 +116,9 @@ def test_associate_only_on_success(run_module, admin_user, organization, project
assert list(wfjt.notification_templates_error.values_list('id', flat=True)) == [nt1.id]
# test removal to empty list
- result = run_module('tower_workflow_job_template', {
- 'name': 'foo-workflow',
- 'organization': organization.name,
- 'notification_templates_success': []
- }, admin_user)
+ result = run_module(
+ 'tower_workflow_job_template', {'name': 'foo-workflow', 'organization': organization.name, 'notification_templates_success': []}, admin_user
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed', True), result
@@ -158,17 +128,9 @@ def test_associate_only_on_success(run_module, admin_user, organization, project
@pytest.mark.django_db
def test_delete_with_spec(run_module, admin_user, organization, survey_spec):
- WorkflowJobTemplate.objects.create(
- organization=organization, name='foo-workflow',
- survey_enabled=True, survey_spec=survey_spec
- )
- result = run_module('tower_workflow_job_template', {
- 'name': 'foo-workflow',
- 'organization': organization.name,
- 'state': 'absent'
- }, admin_user)
+ WorkflowJobTemplate.objects.create(organization=organization, name='foo-workflow', survey_enabled=True, survey_spec=survey_spec)
+ result = run_module('tower_workflow_job_template', {'name': 'foo-workflow', 'organization': organization.name, 'state': 'absent'}, admin_user)
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed', True), result
- assert WorkflowJobTemplate.objects.filter(
- name='foo-workflow', organization=organization).count() == 0
+ assert WorkflowJobTemplate.objects.filter(name='foo-workflow', organization=organization).count() == 0
diff --git a/awx_collection/test/awx/test_workflow_job_template_node.py b/awx_collection/test/awx/test_workflow_job_template_node.py
index 6127fde27e..203eacc7e1 100644
--- a/awx_collection/test/awx/test_workflow_job_template_node.py
+++ b/awx_collection/test/awx/test_workflow_job_template_node.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
@@ -16,7 +17,7 @@ def job_template(project, inventory):
name='foo-jt',
ask_variables_on_launch=True,
ask_credential_on_launch=True,
- ask_limit_on_launch=True
+ ask_limit_on_launch=True,
)
@@ -29,23 +30,23 @@ def wfjt(organization):
@pytest.mark.django_db
def test_create_workflow_job_template_node(run_module, admin_user, wfjt, job_template):
this_identifier = '42🐉'
- result = run_module('tower_workflow_job_template_node', {
- 'identifier': this_identifier,
- 'workflow_job_template': 'foo-workflow',
- 'organization': wfjt.organization.name,
- 'unified_job_template': 'foo-jt',
- 'state': 'present'
- }, admin_user)
+ result = run_module(
+ 'tower_workflow_job_template_node',
+ {
+ 'identifier': this_identifier,
+ 'workflow_job_template': 'foo-workflow',
+ 'organization': wfjt.organization.name,
+ 'unified_job_template': 'foo-jt',
+ 'state': 'present',
+ },
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
node = WorkflowJobTemplateNode.objects.get(identifier=this_identifier)
result.pop('invocation', None)
- assert result == {
- "name": this_identifier, # FIXME: should this be identifier instead
- "id": node.id,
- "changed": True
- }
+ assert result == {"name": this_identifier, "id": node.id, "changed": True} # FIXME: should this be identifier instead?
assert node.identifier == this_identifier
assert node.workflow_job_template_id == wfjt.id
@@ -56,12 +57,16 @@ def test_create_workflow_job_template_node(run_module, admin_user, wfjt, job_tem
def test_create_workflow_job_template_node_approval_node(run_module, admin_user, wfjt, job_template):
"""This is a part of the API contract for creating approval nodes"""
this_identifier = '42🐉'
- result = run_module('tower_workflow_job_template_node', {
- 'identifier': this_identifier,
- 'workflow_job_template': wfjt.name,
- 'organization': wfjt.organization.name,
- 'approval_node': {'name': 'foo-jt-approval'}
- }, admin_user)
+ result = run_module(
+ 'tower_workflow_job_template_node',
+ {
+ 'identifier': this_identifier,
+ 'workflow_job_template': wfjt.name,
+ 'organization': wfjt.organization.name,
+ 'approval_node': {'name': 'foo-jt-approval'},
+ },
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed', False), result
@@ -77,16 +82,20 @@ def test_create_workflow_job_template_node_approval_node(run_module, admin_user,
@pytest.mark.django_db
def test_make_use_of_prompts(run_module, admin_user, wfjt, job_template, machine_credential, vault_credential):
- result = run_module('tower_workflow_job_template_node', {
- 'identifier': '42',
- 'workflow_job_template': 'foo-workflow',
- 'organization': wfjt.organization.name,
- 'unified_job_template': 'foo-jt',
- 'extra_data': {'foo': 'bar', 'another-foo': {'barz': 'bar2'}},
- 'limit': 'foo_hosts',
- 'credentials': [machine_credential.name, vault_credential.name],
- 'state': 'present'
- }, admin_user)
+ result = run_module(
+ 'tower_workflow_job_template_node',
+ {
+ 'identifier': '42',
+ 'workflow_job_template': 'foo-workflow',
+ 'organization': wfjt.organization.name,
+ 'unified_job_template': 'foo-jt',
+ 'extra_data': {'foo': 'bar', 'another-foo': {'barz': 'bar2'}},
+ 'limit': 'foo_hosts',
+ 'credentials': [machine_credential.name, vault_credential.name],
+ 'state': 'present',
+ },
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed', False)
@@ -100,23 +109,23 @@ def test_make_use_of_prompts(run_module, admin_user, wfjt, job_template, machine
@pytest.mark.django_db
def test_create_with_edges(run_module, admin_user, wfjt, job_template):
next_nodes = [
- WorkflowJobTemplateNode.objects.create(
- identifier='foo{0}'.format(i),
- workflow_job_template=wfjt,
- unified_job_template=job_template
- ) for i in range(3)
+ WorkflowJobTemplateNode.objects.create(identifier='foo{0}'.format(i), workflow_job_template=wfjt, unified_job_template=job_template) for i in range(3)
]
- result = run_module('tower_workflow_job_template_node', {
- 'identifier': '42',
- 'workflow_job_template': 'foo-workflow',
- 'organization': wfjt.organization.name,
- 'unified_job_template': 'foo-jt',
- 'success_nodes': ['foo0'],
- 'always_nodes': ['foo1'],
- 'failure_nodes': ['foo2'],
- 'state': 'present'
- }, admin_user)
+ result = run_module(
+ 'tower_workflow_job_template_node',
+ {
+ 'identifier': '42',
+ 'workflow_job_template': 'foo-workflow',
+ 'organization': wfjt.organization.name,
+ 'unified_job_template': 'foo-jt',
+ 'success_nodes': ['foo0'],
+ 'always_nodes': ['foo1'],
+ 'failure_nodes': ['foo2'],
+ 'state': 'present',
+ },
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed', False)
diff --git a/awx_collection/test/awx/test_workflow_template.py b/awx_collection/test/awx/test_workflow_template.py
index c8b401ae1c..b44f6f4234 100644
--- a/awx_collection/test/awx/test_workflow_template.py
+++ b/awx_collection/test/awx/test_workflow_template.py
@@ -1,35 +1,32 @@
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
__metaclass__ = type
import pytest
-from awx.main.models import (
- WorkflowJobTemplate, JobTemplate, Project, InventorySource,
- Inventory, WorkflowJobTemplateNode
-)
+from awx.main.models import WorkflowJobTemplate, JobTemplate, Project, InventorySource, Inventory, WorkflowJobTemplateNode
@pytest.mark.django_db
def test_create_workflow_job_template(run_module, admin_user, organization, survey_spec, silence_deprecation):
- result = run_module('tower_workflow_template', {
- 'name': 'foo-workflow',
- 'organization': organization.name,
- 'extra_vars': {'foo': 'bar', 'another-foo': {'barz': 'bar2'}},
- 'survey': survey_spec,
- 'survey_enabled': True,
- 'state': 'present'
- }, admin_user)
+ result = run_module(
+ 'tower_workflow_template',
+ {
+ 'name': 'foo-workflow',
+ 'organization': organization.name,
+ 'extra_vars': {'foo': 'bar', 'another-foo': {'barz': 'bar2'}},
+ 'survey': survey_spec,
+ 'survey_enabled': True,
+ 'state': 'present',
+ },
+ admin_user,
+ )
wfjt = WorkflowJobTemplate.objects.get(name='foo-workflow')
assert wfjt.extra_vars == '{"foo": "bar", "another-foo": {"barz": "bar2"}}'
result.pop('invocation', None)
- assert result == {
- "workflow_template": "foo-workflow", # TODO: remove after refactor
- "state": "present",
- "id": wfjt.id,
- "changed": True
- }
+ assert result == {"workflow_template": "foo-workflow", "state": "present", "id": wfjt.id, "changed": True} # TODO: remove after refactor
assert wfjt.organization_id == organization.id
assert wfjt.survey_spec == survey_spec
@@ -39,14 +36,11 @@ def test_create_workflow_job_template(run_module, admin_user, organization, surv
def test_with_nested_workflow(run_module, admin_user, organization, silence_deprecation):
wfjt1 = WorkflowJobTemplate.objects.create(name='first', organization=organization)
- result = run_module('tower_workflow_template', {
- 'name': 'foo-workflow',
- 'organization': organization.name,
- 'schema': [
- {'workflow': wfjt1.name}
- ],
- 'state': 'present'
- }, admin_user)
+ result = run_module(
+ 'tower_workflow_template',
+ {'name': 'foo-workflow', 'organization': organization.name, 'schema': [{'workflow': wfjt1.name}], 'state': 'present'},
+ admin_user,
+ )
assert not result.get('failed', False), result.get('msg', result)
wfjt = WorkflowJobTemplate.objects.get(name='foo-workflow')
@@ -60,51 +54,34 @@ def test_schema_with_branches(run_module, admin_user, organization, silence_depr
proj = Project.objects.create(organization=organization, name='Ansible Examples')
inv = Inventory.objects.create(organization=organization, name='test-inv')
- jt = JobTemplate.objects.create(
- project=proj,
- playbook='helloworld.yml',
- inventory=inv,
- name='Hello world'
- )
- inv_src = InventorySource.objects.create(
- inventory=inv,
- name='AWS servers',
- source='ec2'
+ jt = JobTemplate.objects.create(project=proj, playbook='helloworld.yml', inventory=inv, name='Hello world')
+ inv_src = InventorySource.objects.create(inventory=inv, name='AWS servers', source='ec2')
+
+ result = run_module(
+ 'tower_workflow_template',
+ {
+ 'name': 'foo-workflow',
+ 'organization': organization.name,
+ 'schema': [
+ {
+ 'job_template': 'Hello world',
+ 'failure': [{'inventory_source': 'AWS servers', 'success': [{'project': 'Ansible Examples', 'always': [{'job_template': "Hello world"}]}]}],
+ }
+ ],
+ 'state': 'present',
+ },
+ admin_user,
)
-
- result = run_module('tower_workflow_template', {
- 'name': 'foo-workflow',
- 'organization': organization.name,
- 'schema': [
- {
- 'job_template': 'Hello world',
- 'failure': [
- {
- 'inventory_source': 'AWS servers',
- 'success': [
- {
- 'project': 'Ansible Examples',
- 'always': [
- {
- 'job_template': "Hello world"
- }
- ]
- }
- ]
- }
- ]
- }
- ],
- 'state': 'present'
- }, admin_user)
assert not result.get('failed', False), result.get('msg', result)
wfjt = WorkflowJobTemplate.objects.get(name='foo-workflow')
- root_nodes = wfjt.workflow_nodes.filter(**{
- '%ss_success__isnull' % WorkflowJobTemplateNode.__name__.lower(): True,
- '%ss_failure__isnull' % WorkflowJobTemplateNode.__name__.lower(): True,
- '%ss_always__isnull' % WorkflowJobTemplateNode.__name__.lower(): True,
- })
+ root_nodes = wfjt.workflow_nodes.filter(
+ **{
+ '%ss_success__isnull' % WorkflowJobTemplateNode.__name__.lower(): True,
+ '%ss_failure__isnull' % WorkflowJobTemplateNode.__name__.lower(): True,
+ '%ss_always__isnull' % WorkflowJobTemplateNode.__name__.lower(): True,
+ }
+ )
assert len(root_nodes) == 1
node = root_nodes[0]
assert node.unified_job_template == jt
@@ -118,13 +95,8 @@ def test_schema_with_branches(run_module, admin_user, organization, silence_depr
@pytest.mark.django_db
def test_with_missing_ujt(run_module, admin_user, organization, silence_deprecation):
- result = run_module('tower_workflow_template', {
- 'name': 'foo-workflow',
- 'organization': organization.name,
- 'schema': [
- {'foo': 'bar'}
- ],
- 'state': 'present'
- }, admin_user)
+ result = run_module(
+ 'tower_workflow_template', {'name': 'foo-workflow', 'organization': organization.name, 'schema': [{'foo': 'bar'}], 'state': 'present'}, admin_user
+ )
assert result.get('failed', False), result
assert 'You should provide exactly one of the attributes job_template,' in result['msg']
diff --git a/awxkit/awxkit/__init__.py b/awxkit/awxkit/__init__.py
index 23e0598237..c75166cf5a 100644
--- a/awxkit/awxkit/__init__.py
+++ b/awxkit/awxkit/__init__.py
@@ -1,4 +1,4 @@
-from awxkit.api import pages, client, resources # NOQA
-from awxkit.config import config # NOQA
-from awxkit import awx # NOQA
-from awxkit.ws import WSClient # NOQA
+from awxkit.api import pages, client, resources # NOQA
+from awxkit.config import config # NOQA
+from awxkit import awx # NOQA
+from awxkit.ws import WSClient # NOQA
diff --git a/awxkit/awxkit/api/__init__.py b/awxkit/awxkit/api/__init__.py
index 6f65601438..04f0f38514 100644
--- a/awxkit/awxkit/api/__init__.py
+++ b/awxkit/awxkit/api/__init__.py
@@ -1,2 +1,2 @@
-from .pages import * # NOQA
-from .client import * # NOQA
+from .pages import * # NOQA
+from .client import * # NOQA
diff --git a/awxkit/awxkit/api/client.py b/awxkit/awxkit/api/client.py
index 77c71a569b..1cea4a61c2 100644
--- a/awxkit/awxkit/api/client.py
+++ b/awxkit/awxkit/api/client.py
@@ -49,8 +49,7 @@ class Connection(object):
_next = kwargs.get('next')
if _next:
headers = self.session.headers.copy()
- self.post('/api/login/', headers=headers,
- data=dict(username=username, password=password, next=_next))
+ self.post('/api/login/', headers=headers, data=dict(username=username, password=password, next=_next))
self.session_id = self.session.cookies.get('sessionid')
self.uses_session_cookie = True
else:
@@ -79,8 +78,7 @@ class Connection(object):
use_endpoint = use_endpoint[1:]
url = '/'.join([self.server, use_endpoint])
- kwargs = dict(verify=self.verify, params=query_parameters, json=json, data=data,
- hooks=dict(response=log_elapsed))
+ kwargs = dict(verify=self.verify, params=query_parameters, json=json, data=data, hooks=dict(response=log_elapsed))
if headers is not None:
kwargs['headers'] = headers
diff --git a/awxkit/awxkit/api/mixins/has_copy.py b/awxkit/awxkit/api/mixins/has_copy.py
index d05ebd4fcd..79bc5589c0 100644
--- a/awxkit/awxkit/api/mixins/has_copy.py
+++ b/awxkit/awxkit/api/mixins/has_copy.py
@@ -3,7 +3,6 @@ from awxkit.utils import random_title
class HasCopy(object):
-
def can_copy(self):
return self.get_related('copy').can_copy
diff --git a/awxkit/awxkit/api/mixins/has_create.py b/awxkit/awxkit/api/mixins/has_create.py
index 43a2810697..1e8b24db5a 100644
--- a/awxkit/awxkit/api/mixins/has_create.py
+++ b/awxkit/awxkit/api/mixins/has_create.py
@@ -24,7 +24,7 @@ def dependency_graph(page, *provided_dependencies):
return graph
-def optional_dependency_graph(page, *provided_dependencies):
+def optional_dependency_graph(page, *provided_dependencies):
"""Creates a dependency graph for a page including all dependencies and optional_dependencies
Any optional provided_dependencies will be included as if they were dependencies,
without affecting the value of each keyed page.
@@ -104,8 +104,7 @@ def all_instantiated_dependencies(*potential_parents):
"""
scope_provided_dependencies = []
- instantiated = set([x for x in potential_parents
- if not isinstance(x, type) and not isinstance(x, tuple)])
+ instantiated = set([x for x in potential_parents if not isinstance(x, type) and not isinstance(x, tuple)])
for potential_parent in [x for x in instantiated if hasattr(x, '_dependency_store')]:
for dependency in potential_parent._dependency_store.values():
@@ -178,7 +177,6 @@ class DSAdapter(object):
# Hijack json.dumps and simplejson.dumps (used by requests)
# to allow HasCreate.create_payload() serialization without impacting payload.ds access
def filter_ds_from_payload(dumps):
-
def _filter_ds_from_payload(obj, *a, **kw):
if hasattr(obj, 'get') and isinstance(obj.get('ds'), DSAdapter):
filtered = obj.copy()
@@ -191,10 +189,12 @@ def filter_ds_from_payload(dumps):
import json # noqa
+
json.dumps = filter_ds_from_payload(json.dumps)
try:
import simplejson # noqa
+
simplejson.dumps = filter_ds_from_payload(simplejson.dumps)
except ImportError:
pass
@@ -299,8 +299,7 @@ class HasCreate(object):
# remove falsy values
provided_and_desired_dependencies = [x for x in provided_and_desired_dependencies if x]
# (HasCreate(), True) tells HasCreate._update_dependencies to link
- provided_dependencies = [(x, True) for x in provided_and_desired_dependencies
- if not isinstance(x, type) and not isinstance(x, tuple)]
+ provided_dependencies = [(x, True) for x in provided_and_desired_dependencies if not isinstance(x, type) and not isinstance(x, tuple)]
# Since dependencies are often declared at runtime, we need to use some introspection
# to determine previously created ones for proper dependency store linking.
@@ -374,12 +373,7 @@ class HasCreate(object):
to_teardown = all_instantiated_dependencies(self)
to_teardown_types = set(map(get_class_if_instance, to_teardown))
order = [
- set(
- [
- potential for potential in (
- get_class_if_instance(x) for x in group) if potential in to_teardown_types
- ]
- )
+ set([potential for potential in (get_class_if_instance(x) for x in group) if potential in to_teardown_types])
for group in page_creation_order(self, *to_teardown)
]
order.reverse()
diff --git a/awxkit/awxkit/api/mixins/has_instance_groups.py b/awxkit/awxkit/api/mixins/has_instance_groups.py
index 7d06499e21..8a61e7618e 100644
--- a/awxkit/awxkit/api/mixins/has_instance_groups.py
+++ b/awxkit/awxkit/api/mixins/has_instance_groups.py
@@ -3,7 +3,6 @@ import awxkit.exceptions as exc
class HasInstanceGroups(object):
-
def add_instance_group(self, instance_group):
with suppress(exc.NoContent):
self.related['instance_groups'].post(dict(id=instance_group.id))
diff --git a/awxkit/awxkit/api/mixins/has_notifications.py b/awxkit/awxkit/api/mixins/has_notifications.py
index aced603af2..15d387712b 100644
--- a/awxkit/awxkit/api/mixins/has_notifications.py
+++ b/awxkit/awxkit/api/mixins/has_notifications.py
@@ -2,29 +2,25 @@ from awxkit.utils import suppress
import awxkit.exceptions as exc
-notification_endpoints = ("notification_templates", "notification_templates_started", "notification_templates_error",
- "notification_templates_success")
+notification_endpoints = ("notification_templates", "notification_templates_started", "notification_templates_error", "notification_templates_success")
wfjt_notification_endpoints = notification_endpoints + ('notification_templates_approvals',)
class HasNotifications(object):
-
def add_notification_template(self, notification_template, endpoint="notification_templates_success"):
from awxkit.api.pages.workflow_job_templates import WorkflowJobTemplate
- supported_endpoints = wfjt_notification_endpoints if isinstance(self, WorkflowJobTemplate) \
- else notification_endpoints
+
+ supported_endpoints = wfjt_notification_endpoints if isinstance(self, WorkflowJobTemplate) else notification_endpoints
if endpoint not in supported_endpoints:
- raise ValueError('Unsupported notification endpoint "{0}". Please use one of {1}.'
- .format(endpoint, notification_endpoints))
+ raise ValueError('Unsupported notification endpoint "{0}". Please use one of {1}.'.format(endpoint, notification_endpoints))
with suppress(exc.NoContent):
self.related[endpoint].post(dict(id=notification_template.id))
def remove_notification_template(self, notification_template, endpoint="notification_templates_success"):
from awxkit.api.pages.workflow_job_templates import WorkflowJobTemplate
- supported_endpoints = wfjt_notification_endpoints if isinstance(self, WorkflowJobTemplate) \
- else notification_endpoints
+
+ supported_endpoints = wfjt_notification_endpoints if isinstance(self, WorkflowJobTemplate) else notification_endpoints
if endpoint not in supported_endpoints:
- raise ValueError('Unsupported notification endpoint "{0}". Please use one of {1}.'
- .format(endpoint, notification_endpoints))
+ raise ValueError('Unsupported notification endpoint "{0}". Please use one of {1}.'.format(endpoint, notification_endpoints))
with suppress(exc.NoContent):
self.related[endpoint].post(dict(id=notification_template.id, disassociate=notification_template.id))
diff --git a/awxkit/awxkit/api/mixins/has_status.py b/awxkit/awxkit/api/mixins/has_status.py
index db14874b6c..e4bd603327 100644
--- a/awxkit/awxkit/api/mixins/has_status.py
+++ b/awxkit/awxkit/api/mixins/has_status.py
@@ -40,8 +40,7 @@ class HasStatus(object):
if not getattr(self, 'event_processing_finished', True):
elapsed = datetime.utcnow() - start_time
time_left = timeout - elapsed.total_seconds()
- poll_until(lambda: getattr(self.get(), 'event_processing_finished', True),
- interval=interval, timeout=time_left, **kwargs)
+ poll_until(lambda: getattr(self.get(), 'event_processing_finished', True), interval=interval, timeout=time_left, **kwargs)
return self
def wait_until_started(self, interval=1, timeout=60):
@@ -65,9 +64,7 @@ class HasStatus(object):
msg = ''
else:
msg += '\n'
- msg += '{0}-{1} has status of {2}, which is not in {3}.'.format(
- self.type.title(), self.id, self.status, status_list
- )
+ msg += '{0}-{1} has status of {2}, which is not in {3}.'.format(self.type.title(), self.id, self.status, status_list)
if getattr(self, 'job_explanation', ''):
msg += '\njob_explanation: {}'.format(bytes_to_str(self.job_explanation))
if getattr(self, 'result_traceback', ''):
@@ -79,10 +76,8 @@ class HasStatus(object):
try:
data = json.loads(self.job_explanation.replace('Previous Task Failed: ', ''))
dep_output = self.connection.get(
- '{0}/api/v2/{1}s/{2}/stdout/'.format(
- self.endpoint.split('/api')[0], data['job_type'], data['job_id']
- ),
- query_parameters=dict(format='txt_download')
+ '{0}/api/v2/{1}s/{2}/stdout/'.format(self.endpoint.split('/api')[0], data['job_type'], data['job_id']),
+ query_parameters=dict(format='txt_download'),
).content
msg += '\nDependency output:\n{}'.format(bytes_to_str(dep_output))
except Exception as e:
diff --git a/awxkit/awxkit/api/mixins/has_survey.py b/awxkit/awxkit/api/mixins/has_survey.py
index 07729f805e..e447580595 100644
--- a/awxkit/awxkit/api/mixins/has_survey.py
+++ b/awxkit/awxkit/api/mixins/has_survey.py
@@ -3,13 +3,11 @@ from awxkit.utils import random_title
class HasSurvey(object):
def add_survey(self, spec=None, name=None, description=None, required=False, enabled=True):
- payload = dict(name=name or 'Survey - {}'.format(random_title()),
- description=description or random_title(10),
- spec=spec or [dict(required=required,
- question_name="What's the password?",
- variable="secret",
- type="password",
- default="foo")])
+ payload = dict(
+ name=name or 'Survey - {}'.format(random_title()),
+ description=description or random_title(10),
+ spec=spec or [dict(required=required, question_name="What's the password?", variable="secret", type="password", default="foo")],
+ )
if enabled != self.survey_enabled:
self.patch(survey_enabled=enabled)
return self.related.survey_spec.post(payload).get()
diff --git a/awxkit/awxkit/api/mixins/has_variables.py b/awxkit/awxkit/api/mixins/has_variables.py
index f69b9b38e9..d3cb859582 100644
--- a/awxkit/awxkit/api/mixins/has_variables.py
+++ b/awxkit/awxkit/api/mixins/has_variables.py
@@ -4,7 +4,6 @@ from awxkit.utils import PseudoNamespace
class HasVariables(object):
-
@property
def variables(self):
return PseudoNamespace(yaml.safe_load(self.json.variables))
diff --git a/awxkit/awxkit/api/pages/__init__.py b/awxkit/awxkit/api/pages/__init__.py
index 1d78d4ba5e..aa55a938e6 100644
--- a/awxkit/awxkit/api/pages/__init__.py
+++ b/awxkit/awxkit/api/pages/__init__.py
@@ -33,7 +33,7 @@ from .workflow_job_templates import * # NOQA
from .workflow_job_template_nodes import * # NOQA
from .workflow_jobs import * # NOQA
from .workflow_job_nodes import * # NOQA
-from .workflow_approvals import * # NOQA
+from .workflow_approvals import * # NOQA
from .settings import * # NOQA
from .instances import * # NOQA
from .instance_groups import * # NOQA
diff --git a/awxkit/awxkit/api/pages/access_list.py b/awxkit/awxkit/api/pages/access_list.py
index f654f84a01..f037fcfa72 100644
--- a/awxkit/awxkit/api/pages/access_list.py
+++ b/awxkit/awxkit/api/pages/access_list.py
@@ -8,11 +8,16 @@ class AccessList(page.PageList, users.User):
pass
-page.register_page([resources.organization_access_list,
- resources.user_access_list,
- resources.inventory_access_list,
- resources.group_access_list,
- resources.credential_access_list,
- resources.project_access_list,
- resources.job_template_access_list,
- resources.team_access_list], AccessList)
+page.register_page(
+ [
+ resources.organization_access_list,
+ resources.user_access_list,
+ resources.inventory_access_list,
+ resources.group_access_list,
+ resources.credential_access_list,
+ resources.project_access_list,
+ resources.job_template_access_list,
+ resources.team_access_list,
+ ],
+ AccessList,
+)
diff --git a/awxkit/awxkit/api/pages/activity_stream.py b/awxkit/awxkit/api/pages/activity_stream.py
index fda9d429e0..0be25e47c9 100644
--- a/awxkit/awxkit/api/pages/activity_stream.py
+++ b/awxkit/awxkit/api/pages/activity_stream.py
@@ -16,5 +16,4 @@ class ActivityStreams(page.PageList, ActivityStream):
pass
-page.register_page([resources.activity_stream,
- resources.object_activity_stream], ActivityStreams)
+page.register_page([resources.activity_stream, resources.object_activity_stream], ActivityStreams)
diff --git a/awxkit/awxkit/api/pages/ad_hoc_commands.py b/awxkit/awxkit/api/pages/ad_hoc_commands.py
index 374e13c127..39b11d8746 100644
--- a/awxkit/awxkit/api/pages/ad_hoc_commands.py
+++ b/awxkit/awxkit/api/pages/ad_hoc_commands.py
@@ -24,31 +24,40 @@ class AdHocCommand(HasCreate, UnifiedJob):
return self.walk(result.url)
def payload(self, inventory, credential, module_name='ping', **kwargs):
- payload = PseudoNamespace(inventory=inventory.id,
- credential=credential.id,
- module_name=module_name)
+ payload = PseudoNamespace(inventory=inventory.id, credential=credential.id, module_name=module_name)
- optional_fields = ('diff_mode', 'extra_vars', 'module_args', 'job_type', 'limit', 'forks',
- 'verbosity')
+ optional_fields = ('diff_mode', 'extra_vars', 'module_args', 'job_type', 'limit', 'forks', 'verbosity')
return update_payload(payload, optional_fields, kwargs)
- def create_payload(self, module_name='ping', module_args=np, job_type=np, limit=np, verbosity=np,
- inventory=Inventory, credential=Credential, **kwargs):
+ def create_payload(self, module_name='ping', module_args=np, job_type=np, limit=np, verbosity=np, inventory=Inventory, credential=Credential, **kwargs):
self.create_and_update_dependencies(inventory, credential)
- payload = self.payload(module_name=module_name, module_args=module_args, job_type=job_type, limit=limit,
- verbosity=verbosity, inventory=self.ds.inventory, credential=self.ds.credential,
- **kwargs)
+ payload = self.payload(
+ module_name=module_name,
+ module_args=module_args,
+ job_type=job_type,
+ limit=limit,
+ verbosity=verbosity,
+ inventory=self.ds.inventory,
+ credential=self.ds.credential,
+ **kwargs
+ )
payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
return payload
- def create(self, module_name='ping', module_args=np, job_type=np, limit=np, verbosity=np,
- inventory=Inventory, credential=Credential, **kwargs):
-
- payload = self.create_payload(module_name=module_name, module_args=module_args,
- job_type=job_type, limit=limit, verbosity=verbosity,
- inventory=inventory, credential=credential, **kwargs)
+ def create(self, module_name='ping', module_args=np, job_type=np, limit=np, verbosity=np, inventory=Inventory, credential=Credential, **kwargs):
+
+ payload = self.create_payload(
+ module_name=module_name,
+ module_args=module_args,
+ job_type=job_type,
+ limit=limit,
+ verbosity=verbosity,
+ inventory=inventory,
+ credential=credential,
+ **kwargs
+ )
return self.update_identity(AdHocCommands(self.connection).post(payload))
@@ -60,7 +69,7 @@ class AdHocCommands(page.PageList, AdHocCommand):
pass
-page.register_page([resources.ad_hoc_commands,
- resources.inventory_related_ad_hoc_commands,
- resources.group_related_ad_hoc_commands,
- resources.host_related_ad_hoc_commands], AdHocCommands)
+page.register_page(
+ [resources.ad_hoc_commands, resources.inventory_related_ad_hoc_commands, resources.group_related_ad_hoc_commands, resources.host_related_ad_hoc_commands],
+ AdHocCommands,
+)
diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py
index 4edc07857f..a333314fc1 100644
--- a/awxkit/awxkit/api/pages/api.py
+++ b/awxkit/awxkit/api/pages/api.py
@@ -90,18 +90,14 @@ class ApiV2(base.Base):
return None
# Note: doing _page[key] automatically parses json blob strings, which can be a problem.
- fields = {
- key: _page.json[key] for key in post_fields
- if key in _page.json and key not in _page.related and key != 'id'
- }
+ fields = {key: _page.json[key] for key in post_fields if key in _page.json and key not in _page.related and key != 'id'}
for key in post_fields:
if key in _page.related:
related = _page.related[key]
else:
if post_fields[key]['type'] == 'id' and _page.json.get(key) is not None:
- log.warning("Related link %r missing from %s, attempting to reconstruct endpoint.",
- key, _page.endpoint)
+ log.warning("Related link %r missing from %s, attempting to reconstruct endpoint.", key, _page.endpoint)
resource = getattr(self, key, None)
if resource is None:
log.error("Unable to infer endpoint for %r on %s.", key, _page.endpoint)
@@ -119,8 +115,7 @@ class ApiV2(base.Base):
continue
rel_natural_key = rel_endpoint.get_natural_key(self._cache)
if rel_natural_key is None:
- log.error("Unable to construct a natural key for foreign key %r of object %s.",
- key, _page.endpoint)
+ log.error("Unable to construct a natural key for foreign key %r of object %s.", key, _page.endpoint)
return None # This foreign key has unresolvable dependencies
fields[key] = rel_natural_key
@@ -154,10 +149,7 @@ class ApiV2(base.Base):
continue
if 'results' in rel_page:
- results = (
- x.get_natural_key(self._cache) if by_natural_key else self._export(x, rel_post_fields)
- for x in rel_page.results
- )
+ results = (x.get_natural_key(self._cache) if by_natural_key else self._export(x, rel_post_fields) for x in rel_page.results)
related[key] = [x for x in results if x is not None]
else:
related[key] = rel_page.json
@@ -190,8 +182,7 @@ class ApiV2(base.Base):
if isinstance(value, int) or value.isdecimal():
return endpoint.get(id=int(value))
options = self._cache.get_options(endpoint)
- identifier = next(field for field in options['search_fields']
- if field in ('name', 'username', 'hostname'))
+ identifier = next(field for field in options['search_fields'] if field in ('name', 'username', 'hostname'))
return endpoint.get(**{identifier: value})
def export_assets(self, **kwargs):
@@ -214,8 +205,7 @@ class ApiV2(base.Base):
# Import methods
def _dependent_resources(self, data):
- page_resource = {getattr(self, resource)._create().__item_class__: resource
- for resource in self.json}
+ page_resource = {getattr(self, resource)._create().__item_class__: resource for resource in self.json}
data_pages = [getattr(self, resource)._create().__item_class__ for resource in EXPORTABLE_RESOURCES]
for page_cls in itertools.chain(*has_create.page_creation_order(*data_pages)):
diff --git a/awxkit/awxkit/api/pages/applications.py b/awxkit/awxkit/api/pages/applications.py
index 843aefec58..18737cd883 100644
--- a/awxkit/awxkit/api/pages/applications.py
+++ b/awxkit/awxkit/api/pages/applications.py
@@ -12,10 +12,12 @@ class OAuth2Application(HasCreate, base.Base):
dependencies = [Organization]
def payload(self, **kwargs):
- payload = PseudoNamespace(name=kwargs.get('name') or 'OAuth2Application - {}'.format(random_title()),
- description=kwargs.get('description') or random_title(10),
- client_type=kwargs.get('client_type', 'public'),
- authorization_grant_type=kwargs.get('authorization_grant_type', 'password'))
+ payload = PseudoNamespace(
+ name=kwargs.get('name') or 'OAuth2Application - {}'.format(random_title()),
+ description=kwargs.get('description') or random_title(10),
+ client_type=kwargs.get('client_type', 'public'),
+ authorization_grant_type=kwargs.get('authorization_grant_type', 'password'),
+ )
if kwargs.get('organization'):
payload.organization = kwargs['organization'].id
@@ -35,8 +37,7 @@ class OAuth2Application(HasCreate, base.Base):
return self.update_identity(OAuth2Applications(self.connection).post(payload))
-page.register_page((resources.application,
- (resources.applications, 'post')), OAuth2Application)
+page.register_page((resources.application, (resources.applications, 'post')), OAuth2Application)
class OAuth2Applications(page.PageList, OAuth2Application):
@@ -51,8 +52,7 @@ class OAuth2AccessToken(HasCreate, base.Base):
optional_dependencies = [OAuth2Application]
def payload(self, **kwargs):
- payload = PseudoNamespace(description=kwargs.get('description') or random_title(10),
- scope=kwargs.get('scope', 'write'))
+ payload = PseudoNamespace(description=kwargs.get('description') or random_title(10), scope=kwargs.get('scope', 'write'))
if kwargs.get('oauth_2_application'):
payload.application = kwargs['oauth_2_application'].id
@@ -73,8 +73,7 @@ class OAuth2AccessToken(HasCreate, base.Base):
return self.update_identity(OAuth2AccessTokens(self.connection).post(payload))
-page.register_page((resources.token,
- (resources.tokens, 'post')), OAuth2AccessToken)
+page.register_page((resources.token, (resources.tokens, 'post')), OAuth2AccessToken)
class OAuth2AccessTokens(page.PageList, OAuth2AccessToken):
diff --git a/awxkit/awxkit/api/pages/base.py b/awxkit/awxkit/api/pages/base.py
index f3c3957c9d..7e9d210ab3 100644
--- a/awxkit/awxkit/api/pages/base.py
+++ b/awxkit/awxkit/api/pages/base.py
@@ -3,11 +3,7 @@ import logging
from requests.auth import HTTPBasicAuth
-from awxkit.api.pages import (
- Page,
- get_registered_page,
- exception_from_status_code
-)
+from awxkit.api.pages import Page, get_registered_page, exception_from_status_code
from awxkit.config import config
from awxkit.api.resources import resources
import awxkit.exceptions as exc
@@ -17,7 +13,6 @@ log = logging.getLogger(__name__)
class Base(Page):
-
def silent_delete(self):
"""Delete the object. If it's already deleted, ignore the error"""
try:
@@ -129,14 +124,14 @@ class Base(Page):
@property
def object_roles(self):
from awxkit.api.pages import Roles, Role
+
url = self.get().json.related.object_roles
for obj_role in Roles(self.connection, endpoint=url).get().json.results:
yield Role(self.connection, endpoint=obj_role.url).get()
def get_authtoken(self, username='', password=''):
default_cred = config.credentials.default
- payload = dict(username=username or default_cred.username,
- password=password or default_cred.password)
+ payload = dict(username=username or default_cred.username, password=password or default_cred.password)
auth_url = resources.authtoken
return get_registered_page(auth_url)(self.connection, endpoint=auth_url).post(payload).token
@@ -146,9 +141,7 @@ class Base(Page):
load_default_authtoken = load_authtoken
- def get_oauth2_token(self, username='', password='', client_id=None,
- description='AWX CLI',
- client_secret=None, scope='write'):
+ def get_oauth2_token(self, username='', password='', client_id=None, description='AWX CLI', client_secret=None, scope='write'):
default_cred = config.credentials.default
username = username or default_cred.username
password = password or default_cred.password
@@ -157,38 +150,21 @@ class Base(Page):
HTTPBasicAuth(client_id, client_secret)(req)
req.headers['Content-Type'] = 'application/x-www-form-urlencoded'
resp = self.connection.post(
- '/api/o/token/',
- data={
- "grant_type": "password",
- "username": username,
- "password": password,
- "scope": scope
- },
- headers=req.headers
+ '/api/o/token/', data={"grant_type": "password", "username": username, "password": password, "scope": scope}, headers=req.headers
)
elif client_id:
req.headers['Content-Type'] = 'application/x-www-form-urlencoded'
resp = self.connection.post(
'/api/o/token/',
- data={
- "grant_type": "password",
- "username": username,
- "password": password,
- "client_id": client_id,
- "scope": scope
- },
- headers=req.headers
+ data={"grant_type": "password", "username": username, "password": password, "client_id": client_id, "scope": scope},
+ headers=req.headers,
)
else:
HTTPBasicAuth(username, password)(req)
resp = self.connection.post(
'/api/v2/users/{}/personal_tokens/'.format(username),
- json={
- "description": description,
- "application": None,
- "scope": scope
- },
- headers=req.headers
+ json={"description": description, "application": None, "scope": scope},
+ headers=req.headers,
)
if resp.ok:
result = resp.json()
@@ -201,9 +177,9 @@ class Base(Page):
def load_session(self, username='', password=''):
default_cred = config.credentials.default
- self.connection.login(username=username or default_cred.username,
- password=password or default_cred.password,
- **self.connection.get_session_requirements())
+ self.connection.login(
+ username=username or default_cred.username, password=password or default_cred.password, **self.connection.get_session_requirements()
+ )
return self
def cleanup(self):
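For context, the reflowed branch of get_oauth2_token above is a standard OAuth2 password grant against /api/o/token/. A minimal standalone sketch of the same request using only requests (the fetch_token name and base_url parameter are illustrative, not part of awxkit):

    import requests
    from requests.auth import HTTPBasicAuth

    def fetch_token(base_url, username, password, client_id, client_secret, scope='write'):
        # The application credentials ride in the HTTP Basic auth header; the
        # grant itself goes form-encoded, which requests produces for a data dict.
        resp = requests.post(
            base_url + '/api/o/token/',
            auth=HTTPBasicAuth(client_id, client_secret),
            data={"grant_type": "password", "username": username, "password": password, "scope": scope},
        )
        resp.raise_for_status()
        # A standard OAuth2 token response is assumed to carry 'access_token'.
        return resp.json()['access_token']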
diff --git a/awxkit/awxkit/api/pages/config.py b/awxkit/awxkit/api/pages/config.py
index 56a620da7f..e4ea14765f 100644
--- a/awxkit/awxkit/api/pages/config.py
+++ b/awxkit/awxkit/api/pages/config.py
@@ -4,22 +4,17 @@ from . import page
class Config(base.Base):
-
@property
def is_aws_license(self):
- return self.license_info.get('is_aws', False) or \
- 'ami-id' in self.license_info or \
- 'instance-id' in self.license_info
+ return self.license_info.get('is_aws', False) or 'ami-id' in self.license_info or 'instance-id' in self.license_info
@property
def is_valid_license(self):
- return self.license_info.get('valid_key', False) and \
- 'instance_count' in self.license_info
+ return self.license_info.get('valid_key', False) and 'instance_count' in self.license_info
@property
def is_trial_license(self):
- return self.is_valid_license and \
- self.license_info.get('trial', False)
+ return self.is_valid_license and self.license_info.get('trial', False)
@property
def is_awx_license(self):
@@ -27,8 +22,7 @@ class Config(base.Base):
@property
def is_enterprise_license(self):
- return self.is_valid_license and \
- self.license_info.get('license_type', None) == 'enterprise'
+ return self.is_valid_license and self.license_info.get('license_type', None) == 'enterprise'
@property
def features(self):
@@ -37,7 +31,6 @@ class Config(base.Base):
class ConfigAttach(page.Page):
-
def attach(self, **kwargs):
return self.post(json=kwargs).json
diff --git a/awxkit/awxkit/api/pages/credential_input_sources.py b/awxkit/awxkit/api/pages/credential_input_sources.py
index 72112ec098..c500cfca81 100644
--- a/awxkit/awxkit/api/pages/credential_input_sources.py
+++ b/awxkit/awxkit/api/pages/credential_input_sources.py
@@ -16,5 +16,4 @@ class CredentialInputSources(page.PageList, CredentialInputSource):
pass
-page.register_page([resources.credential_input_sources,
- resources.related_input_sources], CredentialInputSources)
+page.register_page([resources.credential_input_sources, resources.related_input_sources], CredentialInputSources)
diff --git a/awxkit/awxkit/api/pages/credentials.py b/awxkit/awxkit/api/pages/credentials.py
index e5b80cf7d0..11fadd8711 100644
--- a/awxkit/awxkit/api/pages/credentials.py
+++ b/awxkit/awxkit/api/pages/credentials.py
@@ -44,7 +44,8 @@ credential_input_fields = (
'tenant',
'username',
'vault_password',
- 'vault_id')
+ 'vault_id',
+)
def generate_private_key():
@@ -52,15 +53,9 @@ def generate_private_key():
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
- key = rsa.generate_private_key(
- public_exponent=65537,
- key_size=4096,
- backend=default_backend()
- )
+ key = rsa.generate_private_key(public_exponent=65537, key_size=4096, backend=default_backend())
return key.private_bytes(
- encoding=serialization.Encoding.PEM,
- format=serialization.PrivateFormat.TraditionalOpenSSL,
- encryption_algorithm=serialization.NoEncryption()
+ encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=serialization.NoEncryption()
).decode('utf-8')
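The consolidated generate_private_key above returns a 4096-bit RSA key, PEM-encoded as text. A quick round-trip sanity check (a sketch assuming the cryptography package; load_pem_private_key is its standard PEM loader):

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import serialization

    pem = generate_private_key()  # the helper defined in the hunk above
    key = serialization.load_pem_private_key(pem.encode('utf-8'), password=None, backend=default_backend())
    assert key.key_size == 4096  # matches the key_size passed to rsa.generate_private_key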
@@ -98,11 +93,10 @@ credential_type_name_to_config_kind_map = {
'source control': 'scm',
'machine': 'ssh',
'vault': 'vault',
- 'vmware vcenter': 'vmware'}
+ 'vmware vcenter': 'vmware',
+}
-config_kind_to_credential_type_name_map = {
- kind: name
- for name, kind in credential_type_name_to_config_kind_map.items()}
+config_kind_to_credential_type_name_map = {kind: name for name, kind in credential_type_name_to_config_kind_map.items()}
def kind_and_config_cred_from_credential_type(credential_type):
@@ -115,8 +109,7 @@ def kind_and_config_cred_from_credential_type(credential_type):
config_cred = config.credentials.network
kind = 'net'
elif credential_type.kind == 'cloud':
- kind = credential_type_name_to_config_kind_map[credential_type.name.lower(
- )]
+ kind = credential_type_name_to_config_kind_map[credential_type.name.lower()]
config_kind = kind if kind != 'azure_rm' else 'azure'
config_cred = config.credentials.cloud[config_kind]
else:
@@ -127,11 +120,8 @@ def kind_and_config_cred_from_credential_type(credential_type):
return kind, PseudoNamespace()
-def get_payload_field_and_value_from_kwargs_or_config_cred(
- field, kind, kwargs, config_cred):
- if field in (
- 'project_id',
- 'project_name'): # Needed to prevent Project kwarg collision
+def get_payload_field_and_value_from_kwargs_or_config_cred(field, kind, kwargs, config_cred):
+ if field in ('project_id', 'project_name'): # Needed to prevent Project kwarg collision
config_field = 'project'
elif field == 'subscription' and 'azure' in kind:
config_field = 'subscription_id'
@@ -159,10 +149,8 @@ class CredentialType(HasCreate, base.Base):
def payload(self, kind='cloud', **kwargs):
payload = PseudoNamespace(
- name=kwargs.get('name') or 'CredentialType - {}'.format(
- random_title()),
- description=kwargs.get('description') or random_title(10),
- kind=kind)
+ name=kwargs.get('name') or 'CredentialType - {}'.format(random_title()), description=kwargs.get('description') or random_title(10), kind=kind
+ )
fields = ('inputs', 'injectors')
update_payload(payload, fields, kwargs)
return payload
@@ -174,17 +162,13 @@ class CredentialType(HasCreate, base.Base):
def create(self, kind='cloud', **kwargs):
payload = self.create_payload(kind=kind, **kwargs)
- return self.update_identity(
- CredentialTypes(
- self.connection).post(payload))
+ return self.update_identity(CredentialTypes(self.connection).post(payload))
def test(self, data):
"""Test the credential type endpoint."""
response = self.connection.post(urljoin(str(self.url), 'test/'), data)
exception = exception_from_status_code(response.status_code)
- exc_str = "%s (%s) received" % (
- http.responses[response.status_code], response.status_code
- )
+ exc_str = "%s (%s) received" % (http.responses[response.status_code], response.status_code)
if exception:
raise exception(exc_str, response.json())
elif response.status_code == http.FORBIDDEN:
@@ -192,8 +176,7 @@ class CredentialType(HasCreate, base.Base):
return response
-page.register_page([resources.credential_type,
- (resources.credential_types, 'post')], CredentialType)
+page.register_page([resources.credential_type, (resources.credential_types, 'post')], CredentialType)
class CredentialTypes(page.PageList, CredentialType):
@@ -210,27 +193,19 @@ class Credential(HasCopy, HasCreate, base.Base):
optional_dependencies = [Organization, User, Team]
NATURAL_KEY = ('organization', 'name', 'credential_type')
- def payload(
- self,
- credential_type,
- user=None,
- team=None,
- organization=None,
- inputs=None,
- **kwargs):
+ def payload(self, credential_type, user=None, team=None, organization=None, inputs=None, **kwargs):
if not any((user, team, organization)):
- raise TypeError(
- '{0.__class__.__name__} requires user, team, and/or organization instances.'.format(self))
+ raise TypeError('{0.__class__.__name__} requires user, team, and/or organization instances.'.format(self))
if inputs is None:
inputs = {}
payload = PseudoNamespace(
- name=kwargs.get('name') or 'Credential - {}'.format(
- random_title()),
+ name=kwargs.get('name') or 'Credential - {}'.format(random_title()),
description=kwargs.get('description') or random_title(10),
credential_type=credential_type.id,
- inputs=inputs)
+ inputs=inputs,
+ )
if user:
payload.user = user.id
if team:
@@ -238,38 +213,26 @@ class Credential(HasCopy, HasCreate, base.Base):
if organization:
payload.organization = organization.id
- kind, config_cred = kind_and_config_cred_from_credential_type(
- credential_type)
+ kind, config_cred = kind_and_config_cred_from_credential_type(credential_type)
for field in credential_input_fields:
- field, value = get_payload_field_and_value_from_kwargs_or_config_cred(
- field, kind, inputs or kwargs, config_cred)
+ field, value = get_payload_field_and_value_from_kwargs_or_config_cred(field, kind, inputs or kwargs, config_cred)
if value != not_provided:
payload.inputs[field] = value
if kind == 'net':
- payload.inputs.authorize = inputs.get(
- 'authorize', bool(inputs.get('authorize_password')))
+ payload.inputs.authorize = inputs.get('authorize', bool(inputs.get('authorize_password')))
if kind in ('ssh', 'net') and 'ssh_key_data' not in payload.inputs:
- payload.inputs.ssh_key_data = inputs.get(
- 'ssh_key_data', generate_private_key())
+ payload.inputs.ssh_key_data = inputs.get('ssh_key_data', generate_private_key())
return payload
- def create_payload(
- self,
- credential_type=CredentialType,
- user=None,
- team=None,
- organization=Organization,
- inputs=None,
- **kwargs):
+ def create_payload(self, credential_type=CredentialType, user=None, team=None, organization=Organization, inputs=None, **kwargs):
if isinstance(credential_type, int):
# if an int was passed, it is assumed to be the pk id of a
# credential type
- credential_type = CredentialTypes(
- self.connection).get(id=credential_type).results.pop()
+ credential_type = CredentialTypes(self.connection).get(id=credential_type).results.pop()
if credential_type == CredentialType:
kind = kwargs.pop('kind', 'ssh')
@@ -282,57 +245,29 @@ class Credential(HasCopy, HasCreate, base.Base):
inputs = config.credentials.cloud['openstack']
else:
credential_type_name = config_kind_to_credential_type_name_map[kind]
- credential_type = CredentialTypes(
- self.connection).get(
- managed_by_tower=True,
- name__icontains=credential_type_name).results.pop()
+ credential_type = CredentialTypes(self.connection).get(managed_by_tower=True, name__icontains=credential_type_name).results.pop()
- credential_type, organization, user, team = filter_by_class(
- (credential_type, CredentialType), (organization, Organization), (user, User), (team, Team))
+ credential_type, organization, user, team = filter_by_class((credential_type, CredentialType), (organization, Organization), (user, User), (team, Team))
if not any((user, team, organization)):
organization = Organization
- self.create_and_update_dependencies(
- credential_type, organization, user, team)
+ self.create_and_update_dependencies(credential_type, organization, user, team)
user = self.ds.user if user else None
team = self.ds.team if team else None
organization = self.ds.organization if organization else None
- payload = self.payload(
- self.ds.credential_type,
- user=user,
- team=team,
- organization=organization,
- inputs=inputs,
- **kwargs)
+ payload = self.payload(self.ds.credential_type, user=user, team=team, organization=organization, inputs=inputs, **kwargs)
payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
return payload
- def create(
- self,
- credential_type=CredentialType,
- user=None,
- team=None,
- organization=None,
- inputs=None,
- **kwargs):
- payload = self.create_payload(
- credential_type=credential_type,
- user=user,
- team=team,
- organization=organization,
- inputs=inputs,
- **kwargs)
- return self.update_identity(
- Credentials(
- self.connection)).post(payload)
+ def create(self, credential_type=CredentialType, user=None, team=None, organization=None, inputs=None, **kwargs):
+ payload = self.create_payload(credential_type=credential_type, user=user, team=team, organization=organization, inputs=inputs, **kwargs)
+ return self.update_identity(Credentials(self.connection)).post(payload)
def test(self, data):
"""Test the credential endpoint."""
response = self.connection.post(urljoin(str(self.url), 'test/'), data)
exception = exception_from_status_code(response.status_code)
- exc_str = "%s (%s) received" % (
- http.responses[response.status_code], response.status_code
- )
+ exc_str = "%s (%s) received" % (http.responses[response.status_code], response.status_code)
if exception:
raise exception(exc_str, response.json())
elif response.status_code == http.FORBIDDEN:
@@ -343,11 +278,7 @@ class Credential(HasCopy, HasCreate, base.Base):
def expected_passwords_needed_to_start(self):
"""Return a list of expected passwords needed to start a job using this credential."""
passwords = []
- for field in (
- 'password',
- 'become_password',
- 'ssh_key_unlock',
- 'vault_password'):
+ for field in ('password', 'become_password', 'ssh_key_unlock', 'vault_password'):
if getattr(self.inputs, field, None) == 'ASK':
if field == 'password':
passwords.append('ssh_password')
@@ -356,9 +287,7 @@ class Credential(HasCopy, HasCreate, base.Base):
return passwords
-page.register_page([resources.credential,
- (resources.credentials, 'post'),
- (resources.credential_copy, 'post')], Credential)
+page.register_page([resources.credential, (resources.credentials, 'post'), (resources.credential_copy, 'post')], Credential)
class Credentials(page.PageList, Credential):
@@ -366,9 +295,7 @@ class Credentials(page.PageList, Credential):
pass
-page.register_page([resources.credentials,
- resources.related_credentials],
- Credentials)
+page.register_page([resources.credentials, resources.related_credentials], Credentials)
class CredentialCopy(base.Base):
diff --git a/awxkit/awxkit/api/pages/execution_environments.py b/awxkit/awxkit/api/pages/execution_environments.py
index 0471b1f1d3..01ee33afb5 100644
--- a/awxkit/awxkit/api/pages/execution_environments.py
+++ b/awxkit/awxkit/api/pages/execution_environments.py
@@ -46,14 +46,13 @@ class ExecutionEnvironment(HasCreate, HasCopy, base.Base):
return payload
-page.register_page([resources.execution_environment,
- (resources.execution_environments, 'post'),
- (resources.organization_execution_environments, 'post')], ExecutionEnvironment)
+page.register_page(
+ [resources.execution_environment, (resources.execution_environments, 'post'), (resources.organization_execution_environments, 'post')], ExecutionEnvironment
+)
class ExecutionEnvironments(page.PageList, ExecutionEnvironment):
pass
-page.register_page([resources.execution_environments,
- resources.organization_execution_environments], ExecutionEnvironments)
+page.register_page([resources.execution_environments, resources.organization_execution_environments], ExecutionEnvironments)
diff --git a/awxkit/awxkit/api/pages/instance_groups.py b/awxkit/awxkit/api/pages/instance_groups.py
index 31af3e82fe..28aa614300 100644
--- a/awxkit/awxkit/api/pages/instance_groups.py
+++ b/awxkit/awxkit/api/pages/instance_groups.py
@@ -7,7 +7,6 @@ from . import page
class InstanceGroup(HasCreate, base.Base):
-
def add_instance(self, instance):
with suppress(exc.NoContent):
self.related.instances.post(dict(id=instance.id))
@@ -17,8 +16,7 @@ class InstanceGroup(HasCreate, base.Base):
self.related.instances.post(dict(id=instance.id, disassociate=True))
def payload(self, **kwargs):
- payload = PseudoNamespace(name=kwargs.get('name') or
- 'Instance Group - {}'.format(random_title()))
+ payload = PseudoNamespace(name=kwargs.get('name') or 'Instance Group - {}'.format(random_title()))
fields = ('policy_instance_percentage', 'policy_instance_minimum', 'policy_instance_list', 'is_container_group')
update_payload(payload, fields, kwargs)
@@ -35,8 +33,7 @@ class InstanceGroup(HasCreate, base.Base):
return self.update_identity(InstanceGroups(self.connection).post(payload))
-page.register_page([resources.instance_group,
- (resources.instance_groups, 'post')], InstanceGroup)
+page.register_page([resources.instance_group, (resources.instance_groups, 'post')], InstanceGroup)
class InstanceGroups(page.PageList, InstanceGroup):
@@ -44,5 +41,4 @@ class InstanceGroups(page.PageList, InstanceGroup):
pass
-page.register_page([resources.instance_groups,
- resources.related_instance_groups], InstanceGroups)
+page.register_page([resources.instance_groups, resources.related_instance_groups], InstanceGroups)
diff --git a/awxkit/awxkit/api/pages/instances.py b/awxkit/awxkit/api/pages/instances.py
index 7e8a0cff6d..38695014bf 100644
--- a/awxkit/awxkit/api/pages/instances.py
+++ b/awxkit/awxkit/api/pages/instances.py
@@ -16,5 +16,4 @@ class Instances(page.PageList, Instance):
pass
-page.register_page([resources.instances,
- resources.related_instances], Instances)
+page.register_page([resources.instances, resources.related_instances], Instances)
diff --git a/awxkit/awxkit/api/pages/inventory.py b/awxkit/awxkit/api/pages/inventory.py
index 31393b9ad4..beeb36a5a3 100644
--- a/awxkit/awxkit/api/pages/inventory.py
+++ b/awxkit/awxkit/api/pages/inventory.py
@@ -2,23 +2,8 @@ import logging
import json
import re
-from awxkit.api.pages import (
- Credential,
- Organization,
- Project,
- UnifiedJob,
- UnifiedJobTemplate
-)
-from awxkit.utils import (
- filter_by_class,
- random_title,
- update_payload,
- suppress,
- not_provided,
- PseudoNamespace,
- poll_until,
- random_utf8
-)
+from awxkit.api.pages import Credential, Organization, Project, UnifiedJob, UnifiedJobTemplate
+from awxkit.utils import filter_by_class, random_title, update_payload, suppress, not_provided, PseudoNamespace, poll_until, random_utf8
from awxkit.api.mixins import DSAdapter, HasCreate, HasInstanceGroups, HasNotifications, HasVariables, HasCopy
from awxkit.api.resources import resources
import awxkit.exceptions as exc
@@ -68,56 +53,31 @@ class Inventory(HasCopy, HasCreate, HasInstanceGroups, HasVariables, base.Base):
def payload(self, organization, **kwargs):
payload = PseudoNamespace(
- name=kwargs.get('name') or 'Inventory - {}'.format(
- random_title()),
+ name=kwargs.get('name') or 'Inventory - {}'.format(random_title()),
description=kwargs.get('description') or random_title(10),
- organization=organization.id)
+ organization=organization.id,
+ )
- optional_fields = (
- 'host_filter',
- 'insights_credential',
- 'kind',
- 'variables')
+ optional_fields = ('host_filter', 'insights_credential', 'kind', 'variables')
update_payload(payload, optional_fields, kwargs)
if 'variables' in payload and isinstance(payload.variables, dict):
payload.variables = json.dumps(payload.variables)
- if 'insights_credential' in payload and isinstance(
- payload.insights_credential, Credential):
+ if 'insights_credential' in payload and isinstance(payload.insights_credential, Credential):
payload.insights_credential = payload.insights_credential.id
return payload
- def create_payload(
- self,
- name='',
- description='',
- organization=Organization,
- **kwargs):
+ def create_payload(self, name='', description='', organization=Organization, **kwargs):
self.create_and_update_dependencies(organization)
- payload = self.payload(
- name=name,
- description=description,
- organization=self.ds.organization,
- **kwargs)
+ payload = self.payload(name=name, description=description, organization=self.ds.organization, **kwargs)
payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
return payload
- def create(
- self,
- name='',
- description='',
- organization=Organization,
- **kwargs):
- payload = self.create_payload(
- name=name,
- description=description,
- organization=organization,
- **kwargs)
- return self.update_identity(
- Inventories(
- self.connection).post(payload))
+ def create(self, name='', description='', organization=Organization, **kwargs):
+ payload = self.create_payload(name=name, description=description, organization=organization, **kwargs)
+ return self.update_identity(Inventories(self.connection).post(payload))
def add_host(self, host=None):
if host is None:
@@ -135,17 +95,16 @@ class Inventory(HasCopy, HasCreate, HasInstanceGroups, HasVariables, base.Base):
self.get()
except exc.NotFound:
return True
+
poll_until(_wait, interval=1, timeout=60)
def update_inventory_sources(self, wait=False):
response = self.related.update_inventory_sources.post()
- source_ids = [entry['inventory_source']
- for entry in response if entry['status'] == 'started']
+ source_ids = [entry['inventory_source'] for entry in response if entry['status'] == 'started']
inv_updates = []
for source_id in source_ids:
- inv_source = self.related.inventory_sources.get(
- id=source_id).results.pop()
+ inv_source = self.related.inventory_sources.get(id=source_id).results.pop()
inv_updates.append(inv_source.related.current_job.get())
if wait:
@@ -154,9 +113,7 @@ class Inventory(HasCopy, HasCreate, HasInstanceGroups, HasVariables, base.Base):
return inv_updates
-page.register_page([resources.inventory,
- (resources.inventories, 'post'),
- (resources.inventory_copy, 'post')], Inventory)
+page.register_page([resources.inventory, (resources.inventories, 'post'), (resources.inventory_copy, 'post')], Inventory)
class Inventories(page.PageList, Inventory):
@@ -164,8 +121,7 @@ class Inventories(page.PageList, Inventory):
pass
-page.register_page([resources.inventories,
- resources.related_inventories], Inventories)
+page.register_page([resources.inventories, resources.related_inventories], Inventories)
class InventoryScript(HasCopy, HasCreate, base.Base):
@@ -174,77 +130,48 @@ class InventoryScript(HasCopy, HasCreate, base.Base):
def payload(self, organization, **kwargs):
payload = PseudoNamespace(
- name=kwargs.get('name') or 'Inventory Script - {}'.format(
- random_title()),
+ name=kwargs.get('name') or 'Inventory Script - {}'.format(random_title()),
description=kwargs.get('description') or random_title(10),
organization=organization.id,
- script=kwargs.get('script') or self._generate_script())
+ script=kwargs.get('script') or self._generate_script(),
+ )
return payload
- def create_payload(
- self,
- name='',
- description='',
- organization=Organization,
- script='',
- **kwargs):
+ def create_payload(self, name='', description='', organization=Organization, script='', **kwargs):
self.create_and_update_dependencies(organization)
- payload = self.payload(
- name=name,
- description=description,
- organization=self.ds.organization,
- script=script,
- **kwargs)
+ payload = self.payload(name=name, description=description, organization=self.ds.organization, script=script, **kwargs)
payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
return payload
- def create(
- self,
- name='',
- description='',
- organization=Organization,
- script='',
- **kwargs):
- payload = self.create_payload(
- name=name,
- description=description,
- organization=organization,
- script=script,
- **kwargs)
- return self.update_identity(
- InventoryScripts(
- self.connection).post(payload))
+ def create(self, name='', description='', organization=Organization, script='', **kwargs):
+ payload = self.create_payload(name=name, description=description, organization=organization, script=script, **kwargs)
+ return self.update_identity(InventoryScripts(self.connection).post(payload))
def _generate_script(self):
- script = '\n'.join([
- '#!/usr/bin/env python',
- '# -*- coding: utf-8 -*-',
- 'import json',
- 'inventory = dict()',
- 'inventory["{0}"] = dict()',
- 'inventory["{0}"]["hosts"] = list()',
- 'inventory["{0}"]["hosts"].append("{1}")',
- 'inventory["{0}"]["hosts"].append("{2}")',
- 'inventory["{0}"]["hosts"].append("{3}")',
- 'inventory["{0}"]["hosts"].append("{4}")',
- 'inventory["{0}"]["hosts"].append("{5}")',
- 'inventory["{0}"]["vars"] = dict(ansible_host="127.0.0.1", ansible_connection="local")',
- 'print(json.dumps(inventory))'
- ])
+ script = '\n'.join(
+ [
+ '#!/usr/bin/env python',
+ '# -*- coding: utf-8 -*-',
+ 'import json',
+ 'inventory = dict()',
+ 'inventory["{0}"] = dict()',
+ 'inventory["{0}"]["hosts"] = list()',
+ 'inventory["{0}"]["hosts"].append("{1}")',
+ 'inventory["{0}"]["hosts"].append("{2}")',
+ 'inventory["{0}"]["hosts"].append("{3}")',
+ 'inventory["{0}"]["hosts"].append("{4}")',
+ 'inventory["{0}"]["hosts"].append("{5}")',
+ 'inventory["{0}"]["vars"] = dict(ansible_host="127.0.0.1", ansible_connection="local")',
+ 'print(json.dumps(inventory))',
+ ]
+ )
group_name = re.sub(r"[\']", "", "group_{}".format(random_title(non_ascii=False)))
- host_names = [
- re.sub(
- r"[\':]",
- "",
- "host_{}".format(
- random_utf8())) for _ in range(5)]
+ host_names = [re.sub(r"[\':]", "", "host_{}".format(random_utf8())) for _ in range(5)]
return script.format(group_name, *host_names)
-page.register_page([resources.inventory_script,
- (resources.inventory_scripts, 'post'),
- (resources.inventory_script_copy, 'post')], InventoryScript)
+page.register_page([resources.inventory_script, (resources.inventory_scripts, 'post'), (resources.inventory_script_copy, 'post')], InventoryScript)
class InventoryScripts(page.PageList, InventoryScript):
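The rewrapped _generate_script above emits a script that prints a one-group JSON inventory with five hosts and local connection variables. Roughly, its output has this shape (group and host names below are hypothetical stand-ins for the randomly generated titles):

    import json

    # Hypothetical names standing in for the random group/host titles.
    inventory = {
        "group_example": {
            "hosts": ["host_a", "host_b", "host_c", "host_d", "host_e"],
            "vars": {"ansible_host": "127.0.0.1", "ansible_connection": "local"},
        }
    }
    print(json.dumps(inventory))  # what the generated script writes to stdout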
@@ -272,11 +199,10 @@ class Group(HasCreate, HasVariables, base.Base):
def payload(self, inventory, credential=None, **kwargs):
payload = PseudoNamespace(
- name=kwargs.get('name') or 'Group{}'.format(
- random_title(
- non_ascii=False)),
+ name=kwargs.get('name') or 'Group{}'.format(random_title(non_ascii=False)),
description=kwargs.get('description') or random_title(10),
- inventory=inventory.id)
+ inventory=inventory.id,
+ )
if credential:
payload.credential = credential.id
@@ -288,38 +214,19 @@ class Group(HasCreate, HasVariables, base.Base):
return payload
- def create_payload(
- self,
- name='',
- description='',
- inventory=Inventory,
- credential=None,
- source_script=None,
- **kwargs):
- credential, source_script = filter_by_class(
- (credential, Credential), (source_script, InventoryScript))
- self.create_and_update_dependencies(
- inventory, credential, source_script)
+ def create_payload(self, name='', description='', inventory=Inventory, credential=None, source_script=None, **kwargs):
+ credential, source_script = filter_by_class((credential, Credential), (source_script, InventoryScript))
+ self.create_and_update_dependencies(inventory, credential, source_script)
credential = self.ds.credential if credential else None
- payload = self.payload(
- inventory=self.ds.inventory,
- credential=credential,
- name=name,
- description=description,
- **kwargs)
+ payload = self.payload(inventory=self.ds.inventory, credential=credential, name=name, description=description, **kwargs)
payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
return payload
def create(self, name='', description='', inventory=Inventory, **kwargs):
- payload = self.create_payload(
- name=name,
- description=description,
- inventory=inventory,
- **kwargs)
+ payload = self.create_payload(name=name, description=description, inventory=inventory, **kwargs)
parent = kwargs.get('parent', None) # parent must be a Group instance
- resource = parent.related.children if parent else Groups(
- self.connection)
+ resource = parent.related.children if parent else Groups(self.connection)
return self.update_identity(resource.post(payload))
def add_host(self, host=None):
@@ -348,8 +255,7 @@ class Group(HasCreate, HasVariables, base.Base):
self.related.children.post(dict(id=group.id, disassociate=True))
-page.register_page([resources.group,
- (resources.groups, 'post')], Group)
+page.register_page([resources.group, (resources.groups, 'post')], Group)
class Groups(page.PageList, Group):
@@ -357,12 +263,17 @@ class Groups(page.PageList, Group):
pass
-page.register_page([resources.groups,
- resources.host_groups,
- resources.inventory_related_groups,
- resources.inventory_related_root_groups,
- resources.group_children,
- resources.group_potential_children], Groups)
+page.register_page(
+ [
+ resources.groups,
+ resources.host_groups,
+ resources.inventory_related_groups,
+ resources.inventory_related_root_groups,
+ resources.group_children,
+ resources.group_potential_children,
+ ],
+ Groups,
+)
class Host(HasCreate, HasVariables, base.Base):
@@ -372,11 +283,10 @@ class Host(HasCreate, HasVariables, base.Base):
def payload(self, inventory, **kwargs):
payload = PseudoNamespace(
- name=kwargs.get('name') or 'Host{}'.format(
- random_title(
- non_ascii=False)),
+ name=kwargs.get('name') or 'Host{}'.format(random_title(non_ascii=False)),
description=kwargs.get('description') or random_title(10),
- inventory=inventory.id)
+ inventory=inventory.id,
+ )
optional_fields = ('enabled', 'instance_id')
@@ -385,9 +295,7 @@ class Host(HasCreate, HasVariables, base.Base):
variables = kwargs.get('variables', not_provided)
if variables is None:
- variables = dict(
- ansible_host='127.0.0.1',
- ansible_connection='local')
+ variables = dict(ansible_host='127.0.0.1', ansible_connection='local')
if variables != not_provided:
if isinstance(variables, dict):
@@ -396,42 +304,18 @@ class Host(HasCreate, HasVariables, base.Base):
return payload
- def create_payload(
- self,
- name='',
- description='',
- variables=None,
- inventory=Inventory,
- **kwargs):
- self.create_and_update_dependencies(
- *filter_by_class((inventory, Inventory)))
- payload = self.payload(
- inventory=self.ds.inventory,
- name=name,
- description=description,
- variables=variables,
- **kwargs)
+ def create_payload(self, name='', description='', variables=None, inventory=Inventory, **kwargs):
+ self.create_and_update_dependencies(*filter_by_class((inventory, Inventory)))
+ payload = self.payload(inventory=self.ds.inventory, name=name, description=description, variables=variables, **kwargs)
payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
return payload
- def create(
- self,
- name='',
- description='',
- variables=None,
- inventory=Inventory,
- **kwargs):
- payload = self.create_payload(
- name=name,
- description=description,
- variables=variables,
- inventory=inventory,
- **kwargs)
+ def create(self, name='', description='', variables=None, inventory=Inventory, **kwargs):
+ payload = self.create_payload(name=name, description=description, variables=variables, inventory=inventory, **kwargs)
return self.update_identity(Hosts(self.connection).post(payload))
-page.register_page([resources.host,
- (resources.hosts, 'post')], Host)
+page.register_page([resources.host, (resources.hosts, 'post')], Host)
class Hosts(page.PageList, Host):
@@ -439,10 +323,7 @@ class Hosts(page.PageList, Host):
pass
-page.register_page([resources.hosts,
- resources.group_related_hosts,
- resources.inventory_related_hosts,
- resources.inventory_sources_related_hosts], Hosts)
+page.register_page([resources.hosts, resources.group_related_hosts, resources.inventory_related_hosts, resources.inventory_sources_related_hosts], Hosts)
class FactVersion(base.Base):
@@ -454,7 +335,6 @@ page.register_page(resources.host_related_fact_version, FactVersion)
class FactVersions(page.PageList, FactVersion):
-
@property
def count(self):
return len(self.results)
@@ -478,20 +358,13 @@ class InventorySource(HasCreate, HasNotifications, UnifiedJobTemplate):
optional_dependencies = [Credential, InventoryScript, Project]
NATURAL_KEY = ('organization', 'name', 'inventory')
- def payload(
- self,
- inventory,
- source='custom',
- credential=None,
- source_script=None,
- project=None,
- **kwargs):
+ def payload(self, inventory, source='custom', credential=None, source_script=None, project=None, **kwargs):
payload = PseudoNamespace(
- name=kwargs.get('name') or 'InventorySource - {}'.format(
- random_title()),
+ name=kwargs.get('name') or 'InventorySource - {}'.format(random_title()),
description=kwargs.get('description') or random_title(10),
inventory=inventory.id,
- source=source)
+ source=source,
+ )
if credential:
payload.credential = credential.id
@@ -509,22 +382,16 @@ class InventorySource(HasCreate, HasNotifications, UnifiedJobTemplate):
'update_cache_timeout',
'update_on_launch',
'update_on_project_update',
- 'verbosity')
+ 'verbosity',
+ )
update_payload(payload, optional_fields, kwargs)
return payload
def create_payload(
- self,
- name='',
- description='',
- source='custom',
- inventory=Inventory,
- credential=None,
- source_script=InventoryScript,
- project=None,
- **kwargs):
+ self, name='', description='', source='custom', inventory=Inventory, credential=None, source_script=InventoryScript, project=None, **kwargs
+ ):
if source != 'custom' and source_script == InventoryScript:
source_script = None
if source == 'scm':
@@ -532,12 +399,10 @@ class InventorySource(HasCreate, HasNotifications, UnifiedJobTemplate):
if project is None:
project = Project
- inventory, credential, source_script, project = filter_by_class((inventory, Inventory),
- (credential, Credential),
- (source_script, InventoryScript),
- (project, Project))
- self.create_and_update_dependencies(
- inventory, credential, source_script, project)
+ inventory, credential, source_script, project = filter_by_class(
+ (inventory, Inventory), (credential, Credential), (source_script, InventoryScript), (project, Project)
+ )
+ self.create_and_update_dependencies(inventory, credential, source_script, project)
if credential:
credential = self.ds.credential
@@ -554,20 +419,12 @@ class InventorySource(HasCreate, HasNotifications, UnifiedJobTemplate):
project=project,
name=name,
description=description,
- **kwargs)
+ **kwargs
+ )
payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
return payload
- def create(
- self,
- name='',
- description='',
- source='custom',
- inventory=Inventory,
- credential=None,
- source_script=InventoryScript,
- project=None,
- **kwargs):
+ def create(self, name='', description='', source='custom', inventory=Inventory, credential=None, source_script=InventoryScript, project=None, **kwargs):
payload = self.create_payload(
name=name,
description=description,
@@ -576,10 +433,9 @@ class InventorySource(HasCreate, HasNotifications, UnifiedJobTemplate):
credential=credential,
source_script=source_script,
project=project,
- **kwargs)
- return self.update_identity(
- InventorySources(
- self.connection).post(payload))
+ **kwargs
+ )
+ return self.update_identity(InventorySources(self.connection).post(payload))
def update(self):
"""Update the inventory_source using related->update endpoint"""
@@ -587,45 +443,37 @@ class InventorySource(HasCreate, HasNotifications, UnifiedJobTemplate):
update_pg = self.get_related('update')
# assert can_update == True
- assert update_pg.can_update, \
- "The specified inventory_source (id:%s) is not able to update (can_update:%s)" % \
- (self.id, update_pg.can_update)
+ assert update_pg.can_update, "The specified inventory_source (id:%s) is not able to update (can_update:%s)" % (self.id, update_pg.can_update)
# start the inventory_update
result = update_pg.post()
# assert JSON response
- assert 'inventory_update' in result.json, \
- "Unexpected JSON response when starting an inventory_update.\n%s" % \
- json.dumps(result.json, indent=2)
+ assert 'inventory_update' in result.json, "Unexpected JSON response when starting an inventory_update.\n%s" % json.dumps(result.json, indent=2)
# locate and return the inventory_update
- jobs_pg = self.related.inventory_updates.get(
- id=result.json['inventory_update'])
- assert jobs_pg.count == 1, \
- "An inventory_update started (id:%s) but job not found in response at %s/inventory_updates/" % \
- (result.json['inventory_update'], self.url)
+ jobs_pg = self.related.inventory_updates.get(id=result.json['inventory_update'])
+ assert jobs_pg.count == 1, "An inventory_update started (id:%s) but job not found in response at %s/inventory_updates/" % (
+ result.json['inventory_update'],
+ self.url,
+ )
return jobs_pg.results[0]
@property
def is_successful(self):
"""An inventory_source is considered successful when source != "" and super().is_successful ."""
- return self.source != "" and super(
- InventorySource, self).is_successful
+ return self.source != "" and super(InventorySource, self).is_successful
def add_credential(self, credential):
with suppress(exc.NoContent):
- self.related.credentials.post(
- dict(id=credential.id, associate=True))
+ self.related.credentials.post(dict(id=credential.id, associate=True))
def remove_credential(self, credential):
with suppress(exc.NoContent):
- self.related.credentials.post(
- dict(id=credential.id, disassociate=True))
+ self.related.credentials.post(dict(id=credential.id, disassociate=True))
-page.register_page([resources.inventory_source,
- (resources.inventory_sources, 'post')], InventorySource)
+page.register_page([resources.inventory_source, (resources.inventory_sources, 'post')], InventorySource)
class InventorySources(page.PageList, InventorySource):
@@ -633,9 +481,7 @@ class InventorySources(page.PageList, InventorySource):
pass
-page.register_page([resources.inventory_sources,
- resources.related_inventory_sources],
- InventorySources)
+page.register_page([resources.inventory_sources, resources.related_inventory_sources], InventorySources)
class InventorySourceGroups(page.PageList, Group):
@@ -643,9 +489,7 @@ class InventorySourceGroups(page.PageList, Group):
pass
-page.register_page(
- resources.inventory_sources_related_groups,
- InventorySourceGroups)
+page.register_page(resources.inventory_sources_related_groups, InventorySourceGroups)
class InventorySourceUpdate(base.Base):
@@ -653,9 +497,7 @@ class InventorySourceUpdate(base.Base):
pass
-page.register_page([resources.inventory_sources_related_update,
- resources.inventory_related_update_inventory_sources],
- InventorySourceUpdate)
+page.register_page([resources.inventory_sources_related_update, resources.inventory_related_update_inventory_sources], InventorySourceUpdate)
class InventoryUpdate(UnifiedJob):
@@ -671,10 +513,7 @@ class InventoryUpdates(page.PageList, InventoryUpdate):
pass
-page.register_page([resources.inventory_updates,
- resources.inventory_source_updates,
- resources.project_update_scm_inventory_updates],
- InventoryUpdates)
+page.register_page([resources.inventory_updates, resources.inventory_source_updates, resources.project_update_scm_inventory_updates], InventoryUpdates)
class InventoryUpdateCancel(base.Base):
diff --git a/awxkit/awxkit/api/pages/job_templates.py b/awxkit/awxkit/api/pages/job_templates.py
index 693112375a..7de4cc902e 100644
--- a/awxkit/awxkit/api/pages/job_templates.py
+++ b/awxkit/awxkit/api/pages/job_templates.py
@@ -1,13 +1,6 @@
import json
-from awxkit.utils import (
- filter_by_class,
- not_provided,
- random_title,
- suppress,
- update_payload,
- set_payload_foreign_key_args,
- PseudoNamespace)
+from awxkit.utils import filter_by_class, not_provided, random_title, suppress, update_payload, set_payload_foreign_key_args, PseudoNamespace
from awxkit.api.pages import Credential, Inventory, Project, UnifiedJobTemplate
from awxkit.api.mixins import HasCreate, HasInstanceGroups, HasNotifications, HasSurvey, HasCopy, DSAdapter
from awxkit.api.resources import resources
@@ -16,13 +9,7 @@ from . import base
from . import page
-class JobTemplate(
- HasCopy,
- HasCreate,
- HasInstanceGroups,
- HasNotifications,
- HasSurvey,
- UnifiedJobTemplate):
+class JobTemplate(HasCopy, HasCreate, HasInstanceGroups, HasNotifications, HasSurvey, UnifiedJobTemplate):
optional_dependencies = [Inventory, Credential, Project]
NATURAL_KEY = ('organization', 'name')
@@ -38,16 +25,13 @@ class JobTemplate(
# return job
if result.json['type'] == 'job':
jobs_pg = self.get_related('jobs', id=result.json['job'])
- assert jobs_pg.count == 1, \
- "job_template launched (id:%s) but job not found in response at %s/jobs/" % \
- (result.json['job'], self.url)
+ assert jobs_pg.count == 1, "job_template launched (id:%s) but job not found in response at %s/jobs/" % (result.json['job'], self.url)
return jobs_pg.results[0]
elif result.json['type'] == 'workflow_job':
- slice_workflow_jobs = self.get_related(
- 'slice_workflow_jobs', id=result.json['id'])
- assert slice_workflow_jobs.count == 1, (
- "job_template launched sliced job (id:%s) but not found in related %s/slice_workflow_jobs/" %
- (result.json['id'], self.url)
+ slice_workflow_jobs = self.get_related('slice_workflow_jobs', id=result.json['id'])
+ assert slice_workflow_jobs.count == 1, "job_template launched sliced job (id:%s) but not found in related %s/slice_workflow_jobs/" % (
+ result.json['id'],
+ self.url,
)
return slice_workflow_jobs.results[0]
else:
@@ -56,10 +40,7 @@ class JobTemplate(
def payload(self, job_type='run', playbook='ping.yml', **kwargs):
name = kwargs.get('name') or 'JobTemplate - {}'.format(random_title())
description = kwargs.get('description') or random_title(10)
- payload = PseudoNamespace(
- name=name,
- description=description,
- job_type=job_type)
+ payload = PseudoNamespace(name=name, description=description, job_type=job_type)
optional_fields = (
'ask_scm_branch_on_launch',
@@ -90,7 +71,8 @@ class JobTemplate(
'job_slice_count',
'webhook_service',
'webhook_credential',
- 'scm_branch')
+ 'scm_branch',
+ )
update_payload(payload, optional_fields, kwargs)
@@ -113,94 +95,53 @@ class JobTemplate(
with suppress(exc.NoContent):
self.related.labels.post(label)
- def create_payload(
- self,
- name='',
- description='',
- job_type='run',
- playbook='ping.yml',
- credential=Credential,
- inventory=Inventory,
- project=None,
- **kwargs):
+ def create_payload(self, name='', description='', job_type='run', playbook='ping.yml', credential=Credential, inventory=Inventory, project=None, **kwargs):
if not project:
project = Project
if not inventory and not kwargs.get('ask_inventory_on_launch', False):
inventory = Inventory
- self.create_and_update_dependencies(
- *
- filter_by_class(
- (credential,
- Credential),
- (inventory,
- Inventory),
- (project,
- Project)))
+ self.create_and_update_dependencies(*filter_by_class((credential, Credential), (inventory, Inventory), (project, Project)))
project = self.ds.project if project else None
inventory = self.ds.inventory if inventory else None
credential = self.ds.credential if credential else None
payload = self.payload(
- name=name,
- description=description,
- job_type=job_type,
- playbook=playbook,
- credential=credential,
- inventory=inventory,
- project=project,
- **kwargs)
+ name=name, description=description, job_type=job_type, playbook=playbook, credential=credential, inventory=inventory, project=project, **kwargs
+ )
payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
return payload, credential
- def create(
- self,
- name='',
- description='',
- job_type='run',
- playbook='ping.yml',
- credential=Credential,
- inventory=Inventory,
- project=None,
- **kwargs):
- payload, credential = self.create_payload(name=name, description=description, job_type=job_type,
- playbook=playbook, credential=credential, inventory=inventory,
- project=project, **kwargs)
- ret = self.update_identity(
- JobTemplates(
- self.connection).post(payload))
+ def create(self, name='', description='', job_type='run', playbook='ping.yml', credential=Credential, inventory=Inventory, project=None, **kwargs):
+ payload, credential = self.create_payload(
+ name=name, description=description, job_type=job_type, playbook=playbook, credential=credential, inventory=inventory, project=project, **kwargs
+ )
+ ret = self.update_identity(JobTemplates(self.connection).post(payload))
if credential:
with suppress(exc.NoContent):
self.related.credentials.post(dict(id=credential.id))
if 'vault_credential' in kwargs:
with suppress(exc.NoContent):
if not isinstance(kwargs['vault_credential'], int):
- raise ValueError(
- "Expected 'vault_credential' value to be an integer, the id of the desired vault credential")
- self.related.credentials.post(
- dict(id=kwargs['vault_credential']))
+ raise ValueError("Expected 'vault_credential' value to be an integer, the id of the desired vault credential")
+ self.related.credentials.post(dict(id=kwargs['vault_credential']))
return ret
def add_credential(self, credential):
with suppress(exc.NoContent):
- self.related.credentials.post(
- dict(id=credential.id, associate=True))
+ self.related.credentials.post(dict(id=credential.id, associate=True))
def remove_credential(self, credential):
with suppress(exc.NoContent):
- self.related.credentials.post(
- dict(id=credential.id, disassociate=True))
+ self.related.credentials.post(dict(id=credential.id, disassociate=True))
def remove_all_credentials(self):
for cred in self.related.credentials.get().results:
with suppress(exc.NoContent):
- self.related.credentials.post(
- dict(id=cred.id, disassociate=True))
+ self.related.credentials.post(dict(id=cred.id, disassociate=True))
-page.register_page([resources.job_template,
- (resources.job_templates, 'post'),
- (resources.job_template_copy, 'post')], JobTemplate)
+page.register_page([resources.job_template, (resources.job_templates, 'post'), (resources.job_template_copy, 'post')], JobTemplate)
class JobTemplates(page.PageList, JobTemplate):
@@ -208,8 +149,7 @@ class JobTemplates(page.PageList, JobTemplate):
pass
-page.register_page([resources.job_templates,
- resources.related_job_templates], JobTemplates)
+page.register_page([resources.job_templates, resources.related_job_templates], JobTemplates)
class JobTemplateCallback(base.Base):
diff --git a/awxkit/awxkit/api/pages/jobs.py b/awxkit/awxkit/api/pages/jobs.py
index 5c0ed24f28..358009e59b 100644
--- a/awxkit/awxkit/api/pages/jobs.py
+++ b/awxkit/awxkit/api/pages/jobs.py
@@ -5,7 +5,6 @@ from . import page
class Job(UnifiedJob):
-
def relaunch(self, payload={}):
result = self.related.relaunch.post(payload)
return self.walk(result.endpoint)
@@ -19,9 +18,7 @@ class Jobs(page.PageList, Job):
pass
-page.register_page([resources.jobs,
- resources.job_template_jobs,
- resources.system_job_template_jobs], Jobs)
+page.register_page([resources.jobs, resources.job_template_jobs, resources.system_job_template_jobs], Jobs)
class JobCancel(UnifiedJob):
@@ -37,8 +34,7 @@ class JobEvent(base.Base):
pass
-page.register_page([resources.job_event,
- resources.job_job_event], JobEvent)
+page.register_page([resources.job_event, resources.job_job_event], JobEvent)
class JobEvents(page.PageList, JobEvent):
@@ -46,10 +42,7 @@ class JobEvents(page.PageList, JobEvent):
pass
-page.register_page([resources.job_events,
- resources.job_job_events,
- resources.job_event_children,
- resources.group_related_job_events], JobEvents)
+page.register_page([resources.job_events, resources.job_job_events, resources.job_event_children, resources.group_related_job_events], JobEvents)
class JobPlay(base.Base):
@@ -97,8 +90,7 @@ class JobHostSummaries(page.PageList, JobHostSummary):
pass
-page.register_page([resources.job_host_summaries,
- resources.group_related_job_host_summaries], JobHostSummaries)
+page.register_page([resources.job_host_summaries, resources.group_related_job_host_summaries], JobHostSummaries)
class JobRelaunch(base.Base):
diff --git a/awxkit/awxkit/api/pages/labels.py b/awxkit/awxkit/api/pages/labels.py
index 34022f66d2..b6cb88b073 100644
--- a/awxkit/awxkit/api/pages/labels.py
+++ b/awxkit/awxkit/api/pages/labels.py
@@ -19,43 +19,24 @@ class Label(HasCreate, base.Base):
def payload(self, organization, **kwargs):
payload = PseudoNamespace(
- name=kwargs.get('name') or 'Label - {}'.format(
- random_title()),
+ name=kwargs.get('name') or 'Label - {}'.format(random_title()),
description=kwargs.get('description') or random_title(10),
- organization=organization.id)
+ organization=organization.id,
+ )
return payload
- def create_payload(
- self,
- name='',
- description='',
- organization=Organization,
- **kwargs):
+ def create_payload(self, name='', description='', organization=Organization, **kwargs):
self.create_and_update_dependencies(organization)
- payload = self.payload(
- organization=self.ds.organization,
- name=name,
- description=description,
- **kwargs)
+ payload = self.payload(organization=self.ds.organization, name=name, description=description, **kwargs)
payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
return payload
- def create(
- self,
- name='',
- description='',
- organization=Organization,
- **kwargs):
- payload = self.create_payload(
- name=name,
- description=description,
- organization=organization,
- **kwargs)
+ def create(self, name='', description='', organization=Organization, **kwargs):
+ payload = self.create_payload(name=name, description=description, organization=organization, **kwargs)
return self.update_identity(Labels(self.connection).post(payload))
-page.register_page([resources.label,
- (resources.labels, 'post')], Label)
+page.register_page([resources.label, (resources.labels, 'post')], Label)
class Labels(page.PageList, Label):
@@ -63,7 +44,4 @@ class Labels(page.PageList, Label):
pass
-page.register_page([resources.labels,
- resources.job_labels,
- resources.job_template_labels,
- resources.workflow_job_template_labels], Labels)
+page.register_page([resources.labels, resources.job_labels, resources.job_template_labels, resources.workflow_job_template_labels], Labels)
diff --git a/awxkit/awxkit/api/pages/metrics.py b/awxkit/awxkit/api/pages/metrics.py
index 2e3cafaafd..88a57b7139 100644
--- a/awxkit/awxkit/api/pages/metrics.py
+++ b/awxkit/awxkit/api/pages/metrics.py
@@ -4,12 +4,9 @@ from . import page
class Metrics(base.Base):
-
def get(self, **query_parameters):
- request = self.connection.get(self.endpoint, query_parameters,
- headers={'Accept': 'application/json'})
+ request = self.connection.get(self.endpoint, query_parameters, headers={'Accept': 'application/json'})
return self.page_identity(request)
-page.register_page([resources.metrics,
- (resources.metrics, 'get')], Metrics)
+page.register_page([resources.metrics, (resources.metrics, 'get')], Metrics)
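Metrics.get above pins an Accept: application/json header, presumably because the metrics endpoint's default rendering is not JSON (AWX also exposes these metrics in Prometheus text form). The equivalent request issued directly (URL and credentials are illustrative):

    import requests

    resp = requests.get(
        'https://awx.example.com/api/v2/metrics/',
        headers={'Accept': 'application/json'},  # request JSON instead of the default rendering
        auth=('admin', 'password'),  # hypothetical credentials
    )
    resp.raise_for_status()
    print(resp.json())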
diff --git a/awxkit/awxkit/api/pages/notification_templates.py b/awxkit/awxkit/api/pages/notification_templates.py
index ff192d4433..0688cc1342 100644
--- a/awxkit/awxkit/api/pages/notification_templates.py
+++ b/awxkit/awxkit/api/pages/notification_templates.py
@@ -9,16 +9,7 @@ from . import page
job_results = ('any', 'error', 'success')
-notification_types = (
- 'email',
- 'irc',
- 'pagerduty',
- 'slack',
- 'twilio',
- 'webhook',
- 'mattermost',
- 'grafana',
- 'rocketchat')
+notification_types = ('email', 'irc', 'pagerduty', 'slack', 'twilio', 'webhook', 'mattermost', 'grafana', 'rocketchat')
class NotificationTemplate(HasCopy, HasCreate, base.Base):
@@ -28,18 +19,17 @@ class NotificationTemplate(HasCopy, HasCreate, base.Base):
def test(self):
"""Create test notification"""
- assert 'test' in self.related, \
- "No such related attribute 'test'"
+ assert 'test' in self.related, "No such related attribute 'test'"
# trigger test notification
notification_id = self.related.test.post().notification
# return notification page
- notifications_pg = self.get_related(
- 'notifications', id=notification_id).wait_until_count(1)
- assert notifications_pg.count == 1, \
- "test notification triggered (id:%s) but notification not found in response at %s/notifications/" % \
- (notification_id, self.url)
+ notifications_pg = self.get_related('notifications', id=notification_id).wait_until_count(1)
+ assert notifications_pg.count == 1, "test notification triggered (id:%s) but notification not found in response at %s/notifications/" % (
+ notification_id,
+ self.url,
+ )
return notifications_pg.results[0]
def silent_delete(self):
@@ -53,41 +43,25 @@ class NotificationTemplate(HasCopy, HasCreate, base.Base):
def payload(self, organization, notification_type='slack', messages=not_provided, **kwargs):
payload = PseudoNamespace(
- name=kwargs.get('name') or 'NotificationTemplate ({0}) - {1}' .format(
- notification_type,
- random_title()),
+ name=kwargs.get('name') or 'NotificationTemplate ({0}) - {1}'.format(notification_type, random_title()),
description=kwargs.get('description') or random_title(10),
organization=organization.id,
- notification_type=notification_type)
+ notification_type=notification_type,
+ )
if messages != not_provided:
payload['messages'] = messages
- notification_configuration = kwargs.get(
- 'notification_configuration', {})
+ notification_configuration = kwargs.get('notification_configuration', {})
payload.notification_configuration = notification_configuration
if payload.notification_configuration == {}:
services = config.credentials.notification_services
if notification_type == 'email':
- fields = (
- 'host',
- 'username',
- 'password',
- 'port',
- 'use_ssl',
- 'use_tls',
- 'sender',
- 'recipients')
+ fields = ('host', 'username', 'password', 'port', 'use_ssl', 'use_tls', 'sender', 'recipients')
cred = services.email
elif notification_type == 'irc':
- fields = (
- 'server',
- 'port',
- 'use_ssl',
- 'password',
- 'nickname',
- 'targets')
+ fields = ('server', 'port', 'use_ssl', 'password', 'nickname', 'targets')
cred = services.irc
elif notification_type == 'pagerduty':
fields = ('client_name', 'service_key', 'subdomain', 'token')
@@ -96,34 +70,22 @@ class NotificationTemplate(HasCopy, HasCreate, base.Base):
fields = ('channels', 'token')
cred = services.slack
elif notification_type == 'twilio':
- fields = (
- 'account_sid',
- 'account_token',
- 'from_number',
- 'to_numbers')
+ fields = ('account_sid', 'account_token', 'from_number', 'to_numbers')
cred = services.twilio
elif notification_type == 'webhook':
fields = ('url', 'headers')
cred = services.webhook
elif notification_type == 'mattermost':
- fields = (
- 'mattermost_url',
- 'mattermost_username',
- 'mattermost_channel',
- 'mattermost_icon_url',
- 'mattermost_no_verify_ssl')
+ fields = ('mattermost_url', 'mattermost_username', 'mattermost_channel', 'mattermost_icon_url', 'mattermost_no_verify_ssl')
cred = services.mattermost
elif notification_type == 'grafana':
- fields = ('grafana_url',
- 'grafana_key')
+ fields = ('grafana_url', 'grafana_key')
cred = services.grafana
elif notification_type == 'rocketchat':
- fields = ('rocketchat_url',
- 'rocketchat_no_verify_ssl')
+ fields = ('rocketchat_url', 'rocketchat_no_verify_ssl')
cred = services.rocketchat
else:
- raise ValueError(
- 'Unknown notification_type {0}'.format(notification_type))
+ raise ValueError('Unknown notification_type {0}'.format(notification_type))
for field in fields:
if field == 'bot_token':
@@ -136,47 +98,21 @@ class NotificationTemplate(HasCopy, HasCreate, base.Base):
return payload
- def create_payload(
- self,
- name='',
- description='',
- notification_type='slack',
- organization=Organization,
- messages=not_provided,
- **kwargs):
+ def create_payload(self, name='', description='', notification_type='slack', organization=Organization, messages=not_provided, **kwargs):
if notification_type not in notification_types:
- raise ValueError(
- 'Unsupported notification type "{0}". Please use one of {1}.' .format(
- notification_type, notification_types))
+ raise ValueError('Unsupported notification type "{0}". Please use one of {1}.'.format(notification_type, notification_types))
self.create_and_update_dependencies(organization)
payload = self.payload(
- organization=self.ds.organization,
- notification_type=notification_type,
- name=name,
- description=description,
- messages=messages,
- **kwargs)
+ organization=self.ds.organization, notification_type=notification_type, name=name, description=description, messages=messages, **kwargs
+ )
payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
return payload
- def create(
- self,
- name='',
- description='',
- notification_type='slack',
- organization=Organization,
- messages=not_provided,
- **kwargs):
+ def create(self, name='', description='', notification_type='slack', organization=Organization, messages=not_provided, **kwargs):
payload = self.create_payload(
- name=name,
- description=description,
- notification_type=notification_type,
- organization=organization,
- messages=messages,
- **kwargs)
- return self.update_identity(
- NotificationTemplates(
- self.connection).post(payload))
+ name=name, description=description, notification_type=notification_type, organization=organization, messages=messages, **kwargs
+ )
+ return self.update_identity(NotificationTemplates(self.connection).post(payload))
def associate(self, resource, job_result='any'):
"""Associates a NotificationTemplate with the provided resource"""
@@ -188,15 +124,11 @@ class NotificationTemplate(HasCopy, HasCreate, base.Base):
def _associate(self, resource, job_result='any', disassociate=False):
if job_result not in job_results:
- raise ValueError(
- 'Unsupported job_result type "{0}". Please use one of {1}.' .format(
- job_result, job_results))
+ raise ValueError('Unsupported job_result type "{0}". Please use one of {1}.'.format(job_result, job_results))
result_attr = 'notification_templates_{0}'.format(job_result)
if result_attr not in resource.related:
- raise ValueError(
- 'Unsupported resource "{0}". Does not have a related {1} field.' .format(
- resource, result_attr))
+ raise ValueError('Unsupported resource "{0}". Does not have a related {1} field.'.format(resource, result_attr))
payload = dict(id=self.id)
if disassociate:
@@ -206,14 +138,19 @@ class NotificationTemplate(HasCopy, HasCreate, base.Base):
getattr(resource.related, result_attr).post(payload)
-page.register_page([resources.notification_template,
- (resources.notification_templates, 'post'),
- (resources.notification_template_copy, 'post'),
- resources.notification_template_any,
- resources.notification_template_started,
- resources.notification_template_error,
- resources.notification_template_success,
- resources.notification_template_approval], NotificationTemplate)
+page.register_page(
+ [
+ resources.notification_template,
+ (resources.notification_templates, 'post'),
+ (resources.notification_template_copy, 'post'),
+ resources.notification_template_any,
+ resources.notification_template_started,
+ resources.notification_template_error,
+ resources.notification_template_success,
+ resources.notification_template_approval,
+ ],
+ NotificationTemplate,
+)
class NotificationTemplates(page.PageList, NotificationTemplate):
@@ -221,14 +158,18 @@ class NotificationTemplates(page.PageList, NotificationTemplate):
pass
-page.register_page([resources.notification_templates,
- resources.related_notification_templates,
- resources.notification_templates_any,
- resources.notification_templates_started,
- resources.notification_templates_error,
- resources.notification_templates_success,
- resources.notification_templates_approvals],
- NotificationTemplates)
+page.register_page(
+ [
+ resources.notification_templates,
+ resources.related_notification_templates,
+ resources.notification_templates_any,
+ resources.notification_templates_started,
+ resources.notification_templates_error,
+ resources.notification_templates_success,
+ resources.notification_templates_approvals,
+ ],
+ NotificationTemplates,
+)
class NotificationTemplateCopy(base.Base):
@@ -244,6 +185,4 @@ class NotificationTemplateTest(base.Base):
pass
-page.register_page(
- resources.notification_template_test,
- NotificationTemplateTest)
+page.register_page(resources.notification_template_test, NotificationTemplateTest)
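
The hunks above show the two black behaviors this commit leans on. A call whose arguments fit within the configured line length is collapsed onto a single line, while an argument list that ends in a trailing comma (black's "magic trailing comma") is kept exploded, one element per line. A minimal illustration using calls from this file:

    # Fits on one line and has no trailing comma: black collapses it.
    page.register_page(resources.notification_template_test, NotificationTemplateTest)

    # The trailing comma after `NotificationTemplate` tells black to keep
    # one element per line, regardless of the available width.
    page.register_page(
        [
            resources.notification_template,
            resources.notification_template_any,
        ],
        NotificationTemplate,
    )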
diff --git a/awxkit/awxkit/api/pages/notifications.py b/awxkit/awxkit/api/pages/notifications.py
index 0342f3565f..f4ee63313e 100644
--- a/awxkit/awxkit/api/pages/notifications.py
+++ b/awxkit/awxkit/api/pages/notifications.py
@@ -6,10 +6,8 @@ from . import page
class Notification(HasStatus, base.Base):
-
def __str__(self):
- items = ['id', 'notification_type', 'status', 'error', 'notifications_sent',
- 'subject', 'recipients']
+ items = ['id', 'notification_type', 'status', 'error', 'notifications_sent', 'subject', 'recipients']
info = []
for item in [x for x in items if hasattr(self, x)]:
info.append('{0}:{1}'.format(item, getattr(self, item)))
@@ -40,13 +38,10 @@ page.register_page(resources.notification, Notification)
class Notifications(page.PageList, Notification):
-
def wait_until_count(self, count, interval=10, timeout=60, **kw):
"""Poll notifications page until it is populated with `count` number of notifications."""
- poll_until(lambda: getattr(self.get(), 'count') == count,
- interval=interval, timeout=timeout, **kw)
+ poll_until(lambda: getattr(self.get(), 'count') == count, interval=interval, timeout=timeout, **kw)
return self
-page.register_page([resources.notifications,
- resources.related_notifications], Notifications)
+page.register_page([resources.notifications, resources.related_notifications], Notifications)
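
`wait_until_count` above is a thin wrapper over `poll_until` from awxkit.utils: re-fetch the list page until its `count` matches. A hedged usage sketch, assuming a `v2` API handle as used elsewhere in awxkit:

    # Illustrative only: block until exactly three notifications exist,
    # re-polling every 10 seconds and giving up after 60 seconds.
    notifications = v2.notifications.get()
    notifications.wait_until_count(3, interval=10, timeout=60)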
diff --git a/awxkit/awxkit/api/pages/organizations.py b/awxkit/awxkit/api/pages/organizations.py
index b03c9e6a3a..4ef9b8a26e 100644
--- a/awxkit/awxkit/api/pages/organizations.py
+++ b/awxkit/awxkit/api/pages/organizations.py
@@ -26,22 +26,27 @@ class Organization(HasCreate, HasInstanceGroups, HasNotifications, base.Base):
if isinstance(credential, page.Page):
credential = credential.json
with suppress(exc.NoContent):
- self.related.galaxy_credentials.post({
- "id": credential.id,
- })
+ self.related.galaxy_credentials.post(
+ {
+ "id": credential.id,
+ }
+ )
def remove_galaxy_credential(self, credential):
if isinstance(credential, page.Page):
credential = credential.json
with suppress(exc.NoContent):
- self.related.galaxy_credentials.post({
- "id": credential.id,
- "disassociate": True,
- })
+ self.related.galaxy_credentials.post(
+ {
+ "id": credential.id,
+ "disassociate": True,
+ }
+ )
def payload(self, **kwargs):
- payload = PseudoNamespace(name=kwargs.get('name') or 'Organization - {}'.format(random_title()),
- description=kwargs.get('description') or random_title(10))
+ payload = PseudoNamespace(
+ name=kwargs.get('name') or 'Organization - {}'.format(random_title()), description=kwargs.get('description') or random_title(10)
+ )
payload = set_payload_foreign_key_args(payload, ('default_environment',), kwargs)
@@ -57,8 +62,7 @@ class Organization(HasCreate, HasInstanceGroups, HasNotifications, base.Base):
return self.update_identity(Organizations(self.connection).post(payload))
-page.register_page([resources.organization,
- (resources.organizations, 'post')], Organization)
+page.register_page([resources.organization, (resources.organizations, 'post')], Organization)
class Organizations(page.PageList, Organization):
@@ -66,6 +70,4 @@ class Organizations(page.PageList, Organization):
pass
-page.register_page([resources.organizations,
- resources.user_organizations,
- resources.project_organizations], Organizations)
+page.register_page([resources.organizations, resources.user_organizations, resources.project_organizations], Organizations)
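
Both helpers above follow AWX's association convention: POST `{"id": X}` to a related list endpoint to associate, include `"disassociate": true` to remove, and treat the empty 204 response (surfaced as `exc.NoContent`, per the exception map later in this diff) as success. A sketch, assuming `org` is an Organization page and `cred` a credential page:

    with suppress(exc.NoContent):
        org.related.galaxy_credentials.post({'id': cred.id})  # associate
    with suppress(exc.NoContent):
        org.related.galaxy_credentials.post({'id': cred.id, 'disassociate': True})  # remove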
diff --git a/awxkit/awxkit/api/pages/page.py b/awxkit/awxkit/api/pages/page.py
index 3ee1c38490..82872fe6f1 100644
--- a/awxkit/awxkit/api/pages/page.py
+++ b/awxkit/awxkit/api/pages/page.py
@@ -6,15 +6,7 @@ import re
from requests import Response
import http.client as http
-from awxkit.utils import (
- PseudoNamespace,
- is_relative_endpoint,
- are_same_endpoint,
- super_dir_set,
- suppress,
- is_list_or_tuple,
- to_str
-)
+from awxkit.utils import PseudoNamespace, is_relative_endpoint, are_same_endpoint, super_dir_set, suppress, is_list_or_tuple, to_str
from awxkit.api import utils
from awxkit.api.client import Connection
from awxkit.api.registry import URLRegistry
@@ -41,17 +33,11 @@ def is_license_invalid(response):
def is_license_exceeded(response):
- if re.match(
- r".*license range of.*instances has been exceeded.*",
- response.text):
+ if re.match(r".*license range of.*instances has been exceeded.*", response.text):
return True
- if re.match(
- r".*License count of.*instances has been reached.*",
- response.text):
+ if re.match(r".*License count of.*instances has been reached.*", response.text):
return True
- if re.match(
- r".*License count of.*instances has been exceeded.*",
- response.text):
+ if re.match(r".*License count of.*instances has been exceeded.*", response.text):
return True
if re.match(r".*License has expired.*", response.text):
return True
@@ -67,6 +53,7 @@ def is_duplicate_error(response):
def register_page(urls, page_cls):
if not _page_registry.default:
from awxkit.api.pages import Base
+
_page_registry.setdefault(Base)
if not is_list_or_tuple(urls):
@@ -108,32 +95,23 @@ class Page(object):
if 'endpoint' in kw:
self.endpoint = kw['endpoint']
- self.connection = connection or Connection(
- config.base_url, kw.get(
- 'verify', not config.assume_untrusted))
+ self.connection = connection or Connection(config.base_url, kw.get('verify', not config.assume_untrusted))
self.r = kw.get('r', None)
- self.json = kw.get(
- 'json', objectify_response_json(
- self.r) if self.r else {})
+ self.json = kw.get('json', objectify_response_json(self.r) if self.r else {})
self.last_elapsed = kw.get('last_elapsed', None)
def __getattr__(self, name):
if 'json' in self.__dict__ and name in self.json:
value = self.json[name]
- if not isinstance(
- value,
- TentativePage) and is_relative_endpoint(value):
+ if not isinstance(value, TentativePage) and is_relative_endpoint(value):
value = TentativePage(value, self.connection)
elif isinstance(value, dict):
for key, item in value.items():
- if not isinstance(
- item, TentativePage) and is_relative_endpoint(item):
+ if not isinstance(item, TentativePage) and is_relative_endpoint(item):
value[key] = TentativePage(item, self.connection)
return value
- raise AttributeError(
- "{!r} object has no attribute {!r}".format(
- self.__class__.__name__, name))
+ raise AttributeError("{!r} object has no attribute {!r}".format(self.__class__.__name__, name))
def __setattr__(self, name, value):
if 'json' in self.__dict__ and name in self.json:
@@ -200,20 +178,15 @@ class Page(object):
text = response.text
if len(text) > 1024:
text = text[:1024] + '... <<< Truncated >>> ...'
- log.debug(
- "Unable to parse JSON response ({0.status_code}): {1} - '{2}'".format(response, e, text))
+ log.debug("Unable to parse JSON response ({0.status_code}): {1} - '{2}'".format(response, e, text))
- exc_str = "%s (%s) received" % (
- http.responses[response.status_code], response.status_code)
+ exc_str = "%s (%s) received" % (http.responses[response.status_code], response.status_code)
exception = exception_from_status_code(response.status_code)
if exception:
raise exception(exc_str, data)
- if response.status_code in (
- http.OK,
- http.CREATED,
- http.ACCEPTED):
+ if response.status_code in (http.OK, http.CREATED, http.ACCEPTED):
# Not all JSON responses include a URL. Grab it from the request
# object, if needed.
@@ -232,13 +205,7 @@ class Page(object):
return self
registered_type = get_registered_page(request_path, request_method)
- return registered_type(
- self.connection,
- endpoint=endpoint,
- json=data,
- last_elapsed=response.elapsed,
- r=response,
- ds=ds)
+ return registered_type(self.connection, endpoint=endpoint, json=data, last_elapsed=response.elapsed, r=response, ds=ds)
elif response.status_code == http.FORBIDDEN:
if is_license_invalid(response):
@@ -341,14 +308,16 @@ class Page(object):
return natural_key
-_exception_map = {http.NO_CONTENT: exc.NoContent,
- http.NOT_FOUND: exc.NotFound,
- http.INTERNAL_SERVER_ERROR: exc.InternalServerError,
- http.BAD_GATEWAY: exc.BadGateway,
- http.METHOD_NOT_ALLOWED: exc.MethodNotAllowed,
- http.UNAUTHORIZED: exc.Unauthorized,
- http.PAYMENT_REQUIRED: exc.PaymentRequired,
- http.CONFLICT: exc.Conflict}
+_exception_map = {
+ http.NO_CONTENT: exc.NoContent,
+ http.NOT_FOUND: exc.NotFound,
+ http.INTERNAL_SERVER_ERROR: exc.InternalServerError,
+ http.BAD_GATEWAY: exc.BadGateway,
+ http.METHOD_NOT_ALLOWED: exc.MethodNotAllowed,
+ http.UNAUTHORIZED: exc.Unauthorized,
+ http.PAYMENT_REQUIRED: exc.PaymentRequired,
+ http.CONFLICT: exc.Conflict,
+}
def exception_from_status_code(status_code):
@@ -380,12 +349,7 @@ class PageList(object):
registered_type = self.__item_class__
else:
registered_type = get_registered_page(endpoint)
- items.append(
- registered_type(
- self.connection,
- endpoint=endpoint,
- json=item,
- r=self.r))
+ items.append(registered_type(self.connection, endpoint=endpoint, json=item, r=self.r))
return items
def go_to_next(self):
@@ -407,7 +371,6 @@ class PageList(object):
class TentativePage(str):
-
def __new__(cls, endpoint, connection):
return super(TentativePage, cls).__new__(cls, to_str(endpoint))
@@ -416,10 +379,7 @@ class TentativePage(str):
self.connection = connection
def _create(self):
- return get_registered_page(
- self.endpoint)(
- self.connection,
- endpoint=self.endpoint)
+ return get_registered_page(self.endpoint)(self.connection, endpoint=self.endpoint)
def get(self, **params):
return self._create().get(**params)
@@ -436,21 +396,15 @@ class TentativePage(str):
page = None
# look up users by username not name
if 'users' in self:
- assert query_parameters.get(
- 'username'), 'For this resource, you must call this method with a "username" to look up the object by'
+ assert query_parameters.get('username'), 'For this resource, you must call this method with a "username" to look up the object by'
page = self.get(username=query_parameters['username'])
else:
- assert query_parameters.get(
- 'name'), 'For this resource, you must call this method with a "name" to look up the object by'
+ assert query_parameters.get('name'), 'For this resource, you must call this method with a "name" to look up the object by'
if query_parameters.get('organization'):
if isinstance(query_parameters.get('organization'), int):
- page = self.get(
- name=query_parameters['name'],
- organization=query_parameters.get('organization'))
+ page = self.get(name=query_parameters['name'], organization=query_parameters.get('organization'))
else:
- page = self.get(
- name=query_parameters['name'],
- organization=query_parameters.get('organization').id)
+ page = self.get(name=query_parameters['name'], organization=query_parameters.get('organization').id)
else:
page = self.get(name=query_parameters['name'])
if page and page.results:
@@ -476,13 +430,9 @@ class TentativePage(str):
if query_parameters.get('name'):
if query_parameters.get('organization'):
if isinstance(query_parameters.get('organization'), int):
- page = self.get(
- name=query_parameters['name'],
- organization=query_parameters.get('organization'))
+ page = self.get(name=query_parameters['name'], organization=query_parameters.get('organization'))
else:
- page = self.get(
- name=query_parameters['name'],
- organization=query_parameters.get('organization').id)
+ page = self.get(name=query_parameters['name'], organization=query_parameters.get('organization').id)
else:
page = self.get(name=query_parameters['name'])
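
The reformatted `_exception_map` above is the whole error-translation mechanism: `exception_from_status_code` is a plain lookup from HTTP status to awxkit exception class, with None meaning "no special handling". A self-contained sketch of the same idea:

    import http.client as http

    class NotFound(Exception):
        pass

    _exception_map = {http.NOT_FOUND: NotFound}

    def exception_from_status_code(status_code):
        # None lets callers fall through to the generic status-code branches.
        return _exception_map.get(status_code)

    assert exception_from_status_code(404) is NotFound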
diff --git a/awxkit/awxkit/api/pages/projects.py b/awxkit/awxkit/api/pages/projects.py
index 047d471c8f..b00109f61b 100644
--- a/awxkit/awxkit/api/pages/projects.py
+++ b/awxkit/awxkit/api/pages/projects.py
@@ -18,13 +18,11 @@ class Project(HasCopy, HasCreate, HasNotifications, UnifiedJobTemplate):
def payload(self, organization, scm_type='git', **kwargs):
payload = PseudoNamespace(
- name=kwargs.get('name') or 'Project - {}'.format(
- random_title()),
+ name=kwargs.get('name') or 'Project - {}'.format(random_title()),
description=kwargs.get('description') or random_title(10),
scm_type=scm_type,
- scm_url=kwargs.get('scm_url') or config.project_urls.get(
- scm_type,
- ''))
+ scm_url=kwargs.get('scm_url') or config.project_urls.get(scm_type, ''),
+ )
if organization is not None:
payload.organization = organization.id
@@ -40,43 +38,25 @@ class Project(HasCopy, HasCreate, HasNotifications, UnifiedJobTemplate):
'scm_update_cache_timeout',
'scm_update_on_launch',
'scm_refspec',
- 'allow_override')
+ 'allow_override',
+ )
update_payload(payload, fields, kwargs)
payload = set_payload_foreign_key_args(payload, ('execution_environment', 'default_environment'), kwargs)
return payload
- def create_payload(
- self,
- name='',
- description='',
- scm_type='git',
- scm_url='',
- scm_branch='',
- organization=Organization,
- credential=None,
- **kwargs):
+ def create_payload(self, name='', description='', scm_type='git', scm_url='', scm_branch='', organization=Organization, credential=None, **kwargs):
if credential:
if isinstance(credential, Credential):
- if credential.ds.credential_type.namespace not in (
- 'scm', 'insights'):
+ if credential.ds.credential_type.namespace not in ('scm', 'insights'):
credential = None # ignore incompatible credential from HasCreate dependency injection
elif credential in (Credential,):
- credential = (
- Credential, dict(
- credential_type=(
- True, dict(
- kind='scm'))))
+ credential = (Credential, dict(credential_type=(True, dict(kind='scm'))))
elif credential is True:
- credential = (
- Credential, dict(
- credential_type=(
- True, dict(
- kind='scm'))))
+ credential = (Credential, dict(credential_type=(True, dict(kind='scm'))))
- self.create_and_update_dependencies(
- *filter_by_class((credential, Credential), (organization, Organization)))
+ self.create_and_update_dependencies(*filter_by_class((credential, Credential), (organization, Organization)))
credential = self.ds.credential if credential else None
organization = self.ds.organization if organization else None
@@ -89,20 +69,12 @@ class Project(HasCopy, HasCreate, HasNotifications, UnifiedJobTemplate):
scm_url=scm_url,
scm_branch=scm_branch,
credential=credential,
- **kwargs)
+ **kwargs
+ )
payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
return payload
- def create(
- self,
- name='',
- description='',
- scm_type='git',
- scm_url='',
- scm_branch='',
- organization=Organization,
- credential=None,
- **kwargs):
+ def create(self, name='', description='', scm_type='git', scm_url='', scm_branch='', organization=Organization, credential=None, **kwargs):
payload = self.create_payload(
name=name,
description=description,
@@ -111,7 +83,8 @@ class Project(HasCopy, HasCreate, HasNotifications, UnifiedJobTemplate):
scm_branch=scm_branch,
organization=organization,
credential=credential,
- **kwargs)
+ **kwargs
+ )
self.update_identity(Projects(self.connection).post(payload))
if kwargs.get('wait', True):
@@ -127,25 +100,20 @@ class Project(HasCopy, HasCreate, HasNotifications, UnifiedJobTemplate):
update_pg = self.get_related('update')
# assert can_update == True
- assert update_pg.can_update, \
- "The specified project (id:%s) is not able to update (can_update:%s)" % \
- (self.id, update_pg.can_update)
+ assert update_pg.can_update, "The specified project (id:%s) is not able to update (can_update:%s)" % (self.id, update_pg.can_update)
# start the update
result = update_pg.post()
# assert JSON response
- assert 'project_update' in result.json, \
- "Unexpected JSON response when starting an project_update.\n%s" % \
- json.dumps(result.json, indent=2)
+ assert 'project_update' in result.json, "Unexpected JSON response when starting a project_update.\n%s" % json.dumps(result.json, indent=2)
# locate and return the specific update
- jobs_pg = self.get_related(
- 'project_updates',
- id=result.json['project_update'])
- assert jobs_pg.count == 1, \
- "An project_update started (id:%s) but job not found in response at %s/inventory_updates/" % \
- (result.json['project_update'], self.url)
+ jobs_pg = self.get_related('project_updates', id=result.json['project_update'])
+ assert jobs_pg.count == 1, "An project_update started (id:%s) but job not found in response at %s/inventory_updates/" % (
+ result.json['project_update'],
+ self.url,
+ )
return jobs_pg.results[0]
@property
@@ -154,13 +122,10 @@ class Project(HasCopy, HasCreate, HasNotifications, UnifiedJobTemplate):
0) scm_type != ""
1) unified_job_template.is_successful
"""
- return self.scm_type != "" and \
- super(Project, self).is_successful
+ return self.scm_type != "" and super(Project, self).is_successful
-page.register_page([resources.project,
- (resources.projects, 'post'),
- (resources.project_copy, 'post')], Project)
+page.register_page([resources.project, (resources.projects, 'post'), (resources.project_copy, 'post')], Project)
class Projects(page.PageList, Project):
@@ -168,8 +133,7 @@ class Projects(page.PageList, Project):
pass
-page.register_page([resources.projects,
- resources.related_projects], Projects)
+page.register_page([resources.projects, resources.related_projects], Projects)
class ProjectUpdate(UnifiedJob):
@@ -185,8 +149,7 @@ class ProjectUpdates(page.PageList, ProjectUpdate):
pass
-page.register_page([resources.project_updates,
- resources.project_project_updates], ProjectUpdates)
+page.register_page([resources.project_updates, resources.project_project_updates], ProjectUpdates)
class ProjectUpdateLaunch(base.Base):
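
`update()` above posts to the project's related update endpoint, asserts that the response names a `project_update`, and then fetches that update by id. A hedged usage sketch, assuming an existing `project` page whose SCM settings allow updating:

    # Illustrative only: trigger an SCM update and wait for the result.
    update = project.update()       # returns the ProjectUpdate page
    update.wait_until_completed()   # HasStatus helper, assumed available
    assert update.is_successful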
diff --git a/awxkit/awxkit/api/pages/roles.py b/awxkit/awxkit/api/pages/roles.py
index 2b317dece1..07f0790b60 100644
--- a/awxkit/awxkit/api/pages/roles.py
+++ b/awxkit/awxkit/api/pages/roles.py
@@ -18,15 +18,11 @@ class Role(base.Base):
cache = page.PageCache()
natural_key = super(Role, self).get_natural_key(cache=cache)
- related_objs = [
- related for name, related in self.related.items()
- if name not in ('users', 'teams')
- ]
+ related_objs = [related for name, related in self.related.items() if name not in ('users', 'teams')]
if related_objs:
related_endpoint = cache.get_page(related_objs[0])
if related_endpoint is None:
- log.error("Unable to obtain content_object %s for role %s",
- related_objs[0], self.endpoint)
+ log.error("Unable to obtain content_object %s for role %s", related_objs[0], self.endpoint)
return None
natural_key['content_object'] = related_endpoint.get_natural_key(cache=cache)
@@ -41,6 +37,4 @@ class Roles(page.PageList, Role):
pass
-page.register_page([resources.roles,
- resources.related_roles,
- resources.related_object_roles], Roles)
+page.register_page([resources.roles, resources.related_roles, resources.related_object_roles], Roles)
diff --git a/awxkit/awxkit/api/pages/schedules.py b/awxkit/awxkit/api/pages/schedules.py
index 8603b2ad5b..d1520a0be4 100644
--- a/awxkit/awxkit/api/pages/schedules.py
+++ b/awxkit/awxkit/api/pages/schedules.py
@@ -11,12 +11,10 @@ class Schedule(UnifiedJob):
NATURAL_KEY = ('unified_job_template', 'name')
-page.register_page([resources.schedule,
- resources.related_schedule], Schedule)
+page.register_page([resources.schedule, resources.related_schedule], Schedule)
class Schedules(page.PageList, Schedule):
-
def get_zoneinfo(self):
return SchedulesZoneInfo(self.connection).get()
@@ -33,8 +31,7 @@ class Schedules(page.PageList, Schedule):
self.related.credentials.post(dict(id=cred.id, disassociate=True))
-page.register_page([resources.schedules,
- resources.related_schedules], Schedules)
+page.register_page([resources.schedules, resources.related_schedules], Schedules)
class SchedulesPreview(base.Base):
@@ -46,7 +43,6 @@ page.register_page(((resources.schedules_preview, 'post'),), SchedulesPreview)
class SchedulesZoneInfo(base.Base):
-
def __getitem__(self, idx):
return self.json[idx]
diff --git a/awxkit/awxkit/api/pages/settings.py b/awxkit/awxkit/api/pages/settings.py
index fbf133dc02..59168d40c5 100644
--- a/awxkit/awxkit/api/pages/settings.py
+++ b/awxkit/awxkit/api/pages/settings.py
@@ -8,27 +8,31 @@ class Setting(base.Base):
pass
-page.register_page([resources.setting,
- resources.settings_all,
- resources.settings_authentication,
- resources.settings_changed,
- resources.settings_github,
- resources.settings_github_org,
- resources.settings_github_team,
- resources.settings_google_oauth2,
- resources.settings_jobs,
- resources.settings_ldap,
- resources.settings_radius,
- resources.settings_saml,
- resources.settings_system,
- resources.settings_tacacsplus,
- resources.settings_ui,
- resources.settings_user,
- resources.settings_user_defaults], Setting)
+page.register_page(
+ [
+ resources.setting,
+ resources.settings_all,
+ resources.settings_authentication,
+ resources.settings_changed,
+ resources.settings_github,
+ resources.settings_github_org,
+ resources.settings_github_team,
+ resources.settings_google_oauth2,
+ resources.settings_jobs,
+ resources.settings_ldap,
+ resources.settings_radius,
+ resources.settings_saml,
+ resources.settings_system,
+ resources.settings_tacacsplus,
+ resources.settings_ui,
+ resources.settings_user,
+ resources.settings_user_defaults,
+ ],
+ Setting,
+)
class Settings(page.PageList, Setting):
-
def get_endpoint(self, endpoint):
"""Helper method used to navigate to a specific settings endpoint.
(Pdb) settings_pg.get_endpoint('all')
diff --git a/awxkit/awxkit/api/pages/subscriptions.py b/awxkit/awxkit/api/pages/subscriptions.py
index 749776c000..8be55ee6fd 100644
--- a/awxkit/awxkit/api/pages/subscriptions.py
+++ b/awxkit/awxkit/api/pages/subscriptions.py
@@ -3,7 +3,6 @@ from . import page
class Subscriptions(page.Page):
-
def get_possible_licenses(self, **kwargs):
return self.post(json=kwargs).json
diff --git a/awxkit/awxkit/api/pages/survey_spec.py b/awxkit/awxkit/api/pages/survey_spec.py
index 28a870cf84..e9ea99bbff 100644
--- a/awxkit/awxkit/api/pages/survey_spec.py
+++ b/awxkit/awxkit/api/pages/survey_spec.py
@@ -5,7 +5,6 @@ from awxkit.api.resources import resources
class SurveySpec(base.Base):
-
def get_variable_default(self, var):
for item in self.spec:
if item.get('variable') == var:
@@ -26,5 +25,4 @@ class SurveySpec(base.Base):
return required_vars
-page.register_page([resources.job_template_survey_spec,
- resources.workflow_job_template_survey_spec], SurveySpec)
+page.register_page([resources.job_template_survey_spec, resources.workflow_job_template_survey_spec], SurveySpec)
diff --git a/awxkit/awxkit/api/pages/system_job_templates.py b/awxkit/awxkit/api/pages/system_job_templates.py
index 0c5f13d7cd..cc0d28857a 100644
--- a/awxkit/awxkit/api/pages/system_job_templates.py
+++ b/awxkit/awxkit/api/pages/system_job_templates.py
@@ -5,16 +5,13 @@ from . import page
class SystemJobTemplate(UnifiedJobTemplate, HasNotifications):
-
def launch(self, payload={}):
"""Launch the system_job_template using related->launch endpoint."""
result = self.related.launch.post(payload)
# return job
jobs_pg = self.get_related('jobs', id=result.json['system_job'])
- assert jobs_pg.count == 1, \
- "system_job_template launched (id:%s) but unable to find matching " \
- "job at %s/jobs/" % (result.json['job'], self.url)
+ assert jobs_pg.count == 1, "system_job_template launched (id:%s) but unable to find matching " "job at %s/jobs/" % (result.json['job'], self.url)
return jobs_pg.results[0]
diff --git a/awxkit/awxkit/api/pages/teams.py b/awxkit/awxkit/api/pages/teams.py
index cb5577b5b2..96fafb3341 100644
--- a/awxkit/awxkit/api/pages/teams.py
+++ b/awxkit/awxkit/api/pages/teams.py
@@ -20,9 +20,11 @@ class Team(HasCreate, base.Base):
self.related.users.post(user)
def payload(self, organization, **kwargs):
- payload = PseudoNamespace(name=kwargs.get('name') or 'Team - {}'.format(random_title()),
- description=kwargs.get('description') or random_title(10),
- organization=organization.id)
+ payload = PseudoNamespace(
+ name=kwargs.get('name') or 'Team - {}'.format(random_title()),
+ description=kwargs.get('description') or random_title(10),
+ organization=organization.id,
+ )
return payload
def create_payload(self, name='', description='', organization=Organization, **kwargs):
@@ -36,8 +38,7 @@ class Team(HasCreate, base.Base):
return self.update_identity(Teams(self.connection).post(payload))
-page.register_page([resources.team,
- (resources.teams, 'post')], Team)
+page.register_page([resources.team, (resources.teams, 'post')], Team)
class Teams(page.PageList, Team):
@@ -45,6 +46,4 @@ class Teams(page.PageList, Team):
pass
-page.register_page([resources.teams,
- resources.credential_owner_teams,
- resources.related_teams], Teams)
+page.register_page([resources.teams, resources.credential_owner_teams, resources.related_teams], Teams)
diff --git a/awxkit/awxkit/api/pages/unified_job_templates.py b/awxkit/awxkit/api/pages/unified_job_templates.py
index 286ca18ed7..22a7a70106 100644
--- a/awxkit/awxkit/api/pages/unified_job_templates.py
+++ b/awxkit/awxkit/api/pages/unified_job_templates.py
@@ -26,38 +26,19 @@ class UnifiedJobTemplate(HasStatus, base.Base):
# formatting issue where result_stdout contained '%s'. This later caused
# a python traceback when attempting to display output from this
# method.
- items = [
- 'id',
- 'name',
- 'status',
- 'source',
- 'last_update_failed',
- 'last_updated',
- 'result_traceback',
- 'job_explanation',
- 'job_args']
+ items = ['id', 'name', 'status', 'source', 'last_update_failed', 'last_updated', 'result_traceback', 'job_explanation', 'job_args']
info = []
for item in [x for x in items if hasattr(self, x)]:
info.append('{0}:{1}'.format(item, getattr(self, item)))
output = '<{0.__class__.__name__} {1}>'.format(self, ', '.join(info))
return output.replace('%', '%%')
- def add_schedule(
- self,
- name='',
- description='',
- enabled=True,
- rrule=None,
- **kwargs):
+ def add_schedule(self, name='', description='', enabled=True, rrule=None, **kwargs):
if rrule is None:
rrule = "DTSTART:30180101T000000Z RRULE:FREQ=YEARLY;INTERVAL=1"
payload = dict(
- name=name or "{0} Schedule {1}".format(
- self.name,
- random_title()),
- description=description or random_title(10),
- enabled=enabled,
- rrule=str(rrule))
+ name=name or "{0} Schedule {1}".format(self.name, random_title()), description=description or random_title(10), enabled=enabled, rrule=str(rrule)
+ )
update_payload(payload, self.optional_schedule_fields, kwargs)
@@ -70,9 +51,7 @@ class UnifiedJobTemplate(HasStatus, base.Base):
2) not last_update_failed
3) last_updated
"""
- return super(
- UnifiedJobTemplate,
- self).is_successful and not self.last_update_failed and self.last_updated is not None
+ return super(UnifiedJobTemplate, self).is_successful and not self.last_update_failed and self.last_updated is not None
page.register_page(resources.unified_job_template, UnifiedJobTemplate)
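
`add_schedule` above defaults `rrule` to a DTSTART in the year 3018 with a yearly frequency, i.e. a schedule that is valid but will never fire during a test run. A usage sketch under that assumption:

    # Illustrative only: attach an effectively inert schedule.
    schedule = job_template.add_schedule(
        name='nightly-noop',
        rrule='DTSTART:30180101T000000Z RRULE:FREQ=YEARLY;INTERVAL=1',
    )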
diff --git a/awxkit/awxkit/api/pages/unified_jobs.py b/awxkit/awxkit/api/pages/unified_jobs.py
index 20c6175ed3..09dea1ebbb 100644
--- a/awxkit/awxkit/api/pages/unified_jobs.py
+++ b/awxkit/awxkit/api/pages/unified_jobs.py
@@ -21,8 +21,7 @@ class UnifiedJob(HasStatus, base.Base):
# NOTE: I use .replace('%', '%%') to workaround an odd string
# formatting issue where result_stdout contained '%s'. This later caused
# a python traceback when attempting to display output from this method.
- items = ['id', 'name', 'status', 'failed', 'result_stdout', 'result_traceback',
- 'job_explanation', 'job_args']
+ items = ['id', 'name', 'status', 'failed', 'result_stdout', 'result_traceback', 'job_explanation', 'job_args']
info = []
for item in [x for x in items if hasattr(self, x)]:
info.append('{0}:{1}'.format(item, getattr(self, item)))
@@ -32,9 +31,7 @@ class UnifiedJob(HasStatus, base.Base):
@property
def result_stdout(self):
if 'result_stdout' not in self.json and 'stdout' in self.related:
- return self.connection.get(
- self.related.stdout, query_parameters=dict(format='txt_download')
- ).content.decode()
+ return self.connection.get(self.related.stdout, query_parameters=dict(format='txt_download')).content.decode()
return self.json.result_stdout.decode()
def assert_text_in_stdout(self, expected_text, replace_spaces=None, replace_newlines=' '):
@@ -55,9 +52,7 @@ class UnifiedJob(HasStatus, base.Base):
stdout = stdout.replace(' ', replace_spaces)
if expected_text not in stdout:
pretty_stdout = pformat(stdout)
- raise AssertionError(
- 'Expected "{}", but it was not found in stdout. Full stdout:\n {}'.format(expected_text, pretty_stdout)
- )
+ raise AssertionError('Expected "{}", but it was not found in stdout. Full stdout:\n {}'.format(expected_text, pretty_stdout))
@property
def is_successful(self):
@@ -103,7 +98,7 @@ class UnifiedJob(HasStatus, base.Base):
# Race condition where job finishes between can_cancel
# check and post.
if not any("not allowed" in field for field in e.msg.values()):
- raise(e)
+ raise e
return self.get()
@property
@@ -114,6 +109,7 @@ class UnifiedJob(HasStatus, base.Base):
```assert dict(extra_var=extra_var_val) in unified_job.job_args```
If you need to ensure the job_args are of awx-provided format use raw unified_job.json.job_args.
"""
+
def attempt_yaml_load(arg):
try:
return yaml.safe_load(arg)
@@ -151,10 +147,7 @@ class UnifiedJob(HasStatus, base.Base):
if host_loc.startswith(expected_prefix):
return host_loc
raise RuntimeError(
- 'Could not find a controller private_data_dir for this job. '
- 'Searched for volume mount to {} inside of args {}'.format(
- expected_prefix, job_args
- )
+ 'Could not find a controller private_data_dir for this job. ' 'Searched for volume mount to {} inside of args {}'.format(expected_prefix, job_args)
)
@@ -163,7 +156,4 @@ class UnifiedJobs(page.PageList, UnifiedJob):
pass
-page.register_page([resources.unified_jobs,
- resources.instance_related_jobs,
- resources.instance_group_related_jobs,
- resources.schedules_jobs], UnifiedJobs)
+page.register_page([resources.unified_jobs, resources.instance_related_jobs, resources.instance_group_related_jobs, resources.schedules_jobs], UnifiedJobs)
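
The `job_args` property (partially visible above) YAML-loads each launch argument so tests can assert on structured values instead of raw strings. A self-contained sketch of the nested helper's fallback behavior; the broad `YAMLError` catch here is an assumption, the module may catch something narrower:

    import yaml

    def attempt_yaml_load(arg):
        # Fall back to the raw string when the value is not valid YAML.
        try:
            return yaml.safe_load(arg)
        except yaml.error.YAMLError:
            return str(arg)

    assert attempt_yaml_load('{"extra_var": 1}') == {'extra_var': 1}
    assert attempt_yaml_load('{') == '{'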
diff --git a/awxkit/awxkit/api/pages/users.py b/awxkit/awxkit/api/pages/users.py
index 22ab78dd11..f8a4d9cc17 100644
--- a/awxkit/awxkit/api/pages/users.py
+++ b/awxkit/awxkit/api/pages/users.py
@@ -13,26 +13,13 @@ class User(HasCreate, base.Base):
def payload(self, **kwargs):
payload = PseudoNamespace(
- username=kwargs.get('username') or 'User-{}'.format(
- random_title(
- non_ascii=False)),
+ username=kwargs.get('username') or 'User-{}'.format(random_title(non_ascii=False)),
password=kwargs.get('password') or config.credentials.default.password,
- is_superuser=kwargs.get(
- 'is_superuser',
- False),
- is_system_auditor=kwargs.get(
- 'is_system_auditor',
- False),
- first_name=kwargs.get(
- 'first_name',
- random_title()),
- last_name=kwargs.get(
- 'last_name',
- random_title()),
- email=kwargs.get(
- 'email',
- '{}@example.com'.format(random_title(5, non_ascii=False))
- )
+ is_superuser=kwargs.get('is_superuser', False),
+ is_system_auditor=kwargs.get('is_system_auditor', False),
+ first_name=kwargs.get('first_name', random_title()),
+ last_name=kwargs.get('last_name', random_title()),
+ email=kwargs.get('email', '{}@example.com'.format(random_title(5, non_ascii=False))),
)
return payload
@@ -42,8 +29,7 @@ class User(HasCreate, base.Base):
return payload
def create(self, username='', password='', organization=None, **kwargs):
- payload = self.create_payload(
- username=username, password=password, **kwargs)
+ payload = self.create_payload(username=username, password=password, **kwargs)
self.password = payload.password
self.update_identity(Users(self.connection).post(payload))
@@ -54,8 +40,7 @@ class User(HasCreate, base.Base):
return self
-page.register_page([resources.user,
- (resources.users, 'post')], User)
+page.register_page([resources.user, (resources.users, 'post')], User)
class Users(page.PageList, User):
@@ -63,11 +48,9 @@ class Users(page.PageList, User):
pass
-page.register_page([resources.users,
- resources.organization_admins,
- resources.related_users,
- resources.credential_owner_users,
- resources.user_admin_organizations], Users)
+page.register_page(
+ [resources.users, resources.organization_admins, resources.related_users, resources.credential_owner_users, resources.user_admin_organizations], Users
+)
class Me(Users):
diff --git a/awxkit/awxkit/api/pages/workflow_approvals.py b/awxkit/awxkit/api/pages/workflow_approvals.py
index d4ededcdec..bd26aae7e3 100644
--- a/awxkit/awxkit/api/pages/workflow_approvals.py
+++ b/awxkit/awxkit/api/pages/workflow_approvals.py
@@ -5,7 +5,6 @@ from awxkit import exceptions
class WorkflowApproval(UnifiedJob):
-
def approve(self):
try:
self.related.approve.post()
diff --git a/awxkit/awxkit/api/pages/workflow_job_nodes.py b/awxkit/awxkit/api/pages/workflow_job_nodes.py
index 320618323a..4eabaebe16 100644
--- a/awxkit/awxkit/api/pages/workflow_job_nodes.py
+++ b/awxkit/awxkit/api/pages/workflow_job_nodes.py
@@ -5,7 +5,6 @@ from . import page
class WorkflowJobNode(base.Base):
-
def wait_for_job(self, interval=5, timeout=60, **kw):
"""Waits until node's job exists"""
adjusted_timeout = timeout - seconds_since_date_string(self.created)
@@ -30,8 +29,13 @@ class WorkflowJobNodes(page.PageList, WorkflowJobNode):
pass
-page.register_page([resources.workflow_job_nodes,
- resources.workflow_job_workflow_nodes,
- resources.workflow_job_node_always_nodes,
- resources.workflow_job_node_failure_nodes,
- resources.workflow_job_node_success_nodes], WorkflowJobNodes)
+page.register_page(
+ [
+ resources.workflow_job_nodes,
+ resources.workflow_job_workflow_nodes,
+ resources.workflow_job_node_always_nodes,
+ resources.workflow_job_node_failure_nodes,
+ resources.workflow_job_node_success_nodes,
+ ],
+ WorkflowJobNodes,
+)
diff --git a/awxkit/awxkit/api/pages/workflow_job_template_nodes.py b/awxkit/awxkit/api/pages/workflow_job_template_nodes.py
index 5494d6063b..8a68476030 100644
--- a/awxkit/awxkit/api/pages/workflow_job_template_nodes.py
+++ b/awxkit/awxkit/api/pages/workflow_job_template_nodes.py
@@ -15,12 +15,9 @@ class WorkflowJobTemplateNode(HasCreate, base.Base):
def payload(self, workflow_job_template, unified_job_template, **kwargs):
if not unified_job_template:
# May pass "None" to explicitly create an approval node
- payload = PseudoNamespace(
- workflow_job_template=workflow_job_template.id)
+ payload = PseudoNamespace(workflow_job_template=workflow_job_template.id)
else:
- payload = PseudoNamespace(
- workflow_job_template=workflow_job_template.id,
- unified_job_template=unified_job_template.id)
+ payload = PseudoNamespace(workflow_job_template=workflow_job_template.id, unified_job_template=unified_job_template.id)
optional_fields = (
'diff_mode',
@@ -33,7 +30,8 @@ class WorkflowJobTemplateNode(HasCreate, base.Base):
'verbosity',
'extra_data',
'identifier',
- 'all_parents_must_converge')
+ 'all_parents_must_converge',
+ )
update_payload(payload, optional_fields, kwargs)
@@ -42,45 +40,23 @@ class WorkflowJobTemplateNode(HasCreate, base.Base):
return payload
- def create_payload(
- self,
- workflow_job_template=WorkflowJobTemplate,
- unified_job_template=JobTemplate,
- **kwargs):
+ def create_payload(self, workflow_job_template=WorkflowJobTemplate, unified_job_template=JobTemplate, **kwargs):
if not unified_job_template:
self.create_and_update_dependencies(workflow_job_template)
- payload = self.payload(
- workflow_job_template=self.ds.workflow_job_template,
- unified_job_template=None,
- **kwargs)
+ payload = self.payload(workflow_job_template=self.ds.workflow_job_template, unified_job_template=None, **kwargs)
else:
- self.create_and_update_dependencies(
- workflow_job_template, unified_job_template)
- payload = self.payload(
- workflow_job_template=self.ds.workflow_job_template,
- unified_job_template=self.ds.unified_job_template,
- **kwargs)
+ self.create_and_update_dependencies(workflow_job_template, unified_job_template)
+ payload = self.payload(workflow_job_template=self.ds.workflow_job_template, unified_job_template=self.ds.unified_job_template, **kwargs)
payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
return payload
- def create(
- self,
- workflow_job_template=WorkflowJobTemplate,
- unified_job_template=JobTemplate,
- **kwargs):
- payload = self.create_payload(
- workflow_job_template=workflow_job_template,
- unified_job_template=unified_job_template,
- **kwargs)
- return self.update_identity(
- WorkflowJobTemplateNodes(
- self.connection).post(payload))
+ def create(self, workflow_job_template=WorkflowJobTemplate, unified_job_template=JobTemplate, **kwargs):
+ payload = self.create_payload(workflow_job_template=workflow_job_template, unified_job_template=unified_job_template, **kwargs)
+ return self.update_identity(WorkflowJobTemplateNodes(self.connection).post(payload))
def _add_node(self, endpoint, unified_job_template, **kwargs):
- node = endpoint.post(
- dict(unified_job_template=unified_job_template.id, **kwargs))
- node.create_and_update_dependencies(
- self.ds.workflow_job_template, unified_job_template)
+ node = endpoint.post(dict(unified_job_template=unified_job_template.id, **kwargs))
+ node.create_and_update_dependencies(self.ds.workflow_job_template, unified_job_template)
return node
def add_always_node(self, unified_job_template, **kwargs):
@@ -94,24 +70,18 @@ class WorkflowJobTemplateNode(HasCreate, base.Base):
def add_credential(self, credential):
with suppress(exc.NoContent):
- self.related.credentials.post(
- dict(id=credential.id, associate=True))
+ self.related.credentials.post(dict(id=credential.id, associate=True))
def remove_credential(self, credential):
with suppress(exc.NoContent):
- self.related.credentials.post(
- dict(id=credential.id, disassociate=True))
+ self.related.credentials.post(dict(id=credential.id, disassociate=True))
def remove_all_credentials(self):
for cred in self.related.credentials.get().results:
with suppress(exc.NoContent):
- self.related.credentials.post(
- dict(id=cred.id, disassociate=True))
+ self.related.credentials.post(dict(id=cred.id, disassociate=True))
- def make_approval_node(
- self,
- **kwargs
- ):
+ def make_approval_node(self, **kwargs):
if 'name' not in kwargs:
kwargs['name'] = 'approval node {}'.format(random_title())
self.related.create_approval_template.post(kwargs)
@@ -122,10 +92,10 @@ class WorkflowJobTemplateNode(HasCreate, base.Base):
return candidates.results.pop()
-page.register_page([resources.workflow_job_template_node,
- (resources.workflow_job_template_nodes, 'post'),
- (resources.workflow_job_template_workflow_nodes, 'post')],
- WorkflowJobTemplateNode)
+page.register_page(
+ [resources.workflow_job_template_node, (resources.workflow_job_template_nodes, 'post'), (resources.workflow_job_template_workflow_nodes, 'post')],
+ WorkflowJobTemplateNode,
+)
class WorkflowJobTemplateNodes(page.PageList, WorkflowJobTemplateNode):
@@ -133,9 +103,13 @@ class WorkflowJobTemplateNodes(page.PageList, WorkflowJobTemplateNode):
pass
-page.register_page([resources.workflow_job_template_nodes,
- resources.workflow_job_template_workflow_nodes,
- resources.workflow_job_template_node_always_nodes,
- resources.workflow_job_template_node_failure_nodes,
- resources.workflow_job_template_node_success_nodes],
- WorkflowJobTemplateNodes)
+page.register_page(
+ [
+ resources.workflow_job_template_nodes,
+ resources.workflow_job_template_workflow_nodes,
+ resources.workflow_job_template_node_always_nodes,
+ resources.workflow_job_template_node_failure_nodes,
+ resources.workflow_job_template_node_success_nodes,
+ ],
+ WorkflowJobTemplateNodes,
+)
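
`make_approval_node` above fills in a random name when none is supplied, POSTs to the node's create_approval_template endpoint, and returns the refreshed node from the workflow's node list. A hedged usage sketch, assuming `node` is a WorkflowJobTemplateNode created with unified_job_template=None (the approval placeholder case shown in `payload`):

    # Illustrative only: turn a bare placeholder node into an approval gate.
    approval_node = node.make_approval_node(name='release gate')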
diff --git a/awxkit/awxkit/api/pages/workflow_job_templates.py b/awxkit/awxkit/api/pages/workflow_job_templates.py
index 17f3b56342..b8254f34d9 100644
--- a/awxkit/awxkit/api/pages/workflow_job_templates.py
+++ b/awxkit/awxkit/api/pages/workflow_job_templates.py
@@ -26,15 +26,14 @@ class WorkflowJobTemplate(HasCopy, HasCreate, HasNotifications, HasSurvey, Unifi
# return job
jobs_pg = self.related.workflow_jobs.get(id=result.workflow_job)
if jobs_pg.count != 1:
- msg = "workflow_job_template launched (id:{}) but job not found in response at {}/workflow_jobs/".format(
- result.json['workflow_job'], self.url
- )
+ msg = "workflow_job_template launched (id:{}) but job not found in response at {}/workflow_jobs/".format(result.json['workflow_job'], self.url)
raise exc.UnexpectedAWXState(msg)
return jobs_pg.results[0]
def payload(self, **kwargs):
- payload = PseudoNamespace(name=kwargs.get('name') or 'WorkflowJobTemplate - {}'.format(random_title()),
- description=kwargs.get('description') or random_title(10))
+ payload = PseudoNamespace(
+ name=kwargs.get('name') or 'WorkflowJobTemplate - {}'.format(random_title()), description=kwargs.get('description') or random_title(10)
+ )
optional_fields = (
"allow_simultaneous",
@@ -91,9 +90,9 @@ class WorkflowJobTemplate(HasCopy, HasCreate, HasNotifications, HasSurvey, Unifi
self.related.labels.post(label)
-page.register_page([resources.workflow_job_template,
- (resources.workflow_job_templates, 'post'),
- (resources.workflow_job_template_copy, 'post')], WorkflowJobTemplate)
+page.register_page(
+ [resources.workflow_job_template, (resources.workflow_job_templates, 'post'), (resources.workflow_job_template_copy, 'post')], WorkflowJobTemplate
+)
class WorkflowJobTemplates(page.PageList, WorkflowJobTemplate):
@@ -101,8 +100,7 @@ class WorkflowJobTemplates(page.PageList, WorkflowJobTemplate):
pass
-page.register_page([resources.workflow_job_templates,
- resources.related_workflow_job_templates], WorkflowJobTemplates)
+page.register_page([resources.workflow_job_templates, resources.related_workflow_job_templates], WorkflowJobTemplates)
class WorkflowJobTemplateLaunch(base.Base):
diff --git a/awxkit/awxkit/api/pages/workflow_jobs.py b/awxkit/awxkit/api/pages/workflow_jobs.py
index 36afc94460..200eb0ef30 100644
--- a/awxkit/awxkit/api/pages/workflow_jobs.py
+++ b/awxkit/awxkit/api/pages/workflow_jobs.py
@@ -4,7 +4,6 @@ from . import page
class WorkflowJob(UnifiedJob):
-
def __str__(self):
# TODO: Update after endpoint's fields are finished filling out
return super(UnifiedJob, self).__str__()
@@ -56,7 +55,4 @@ class WorkflowJobs(page.PageList, WorkflowJob):
pass
-page.register_page([resources.workflow_jobs,
- resources.workflow_job_template_jobs,
- resources.job_template_slice_workflow_jobs],
- WorkflowJobs)
+page.register_page([resources.workflow_jobs, resources.workflow_job_template_jobs, resources.job_template_slice_workflow_jobs], WorkflowJobs)
diff --git a/awxkit/awxkit/api/registry.py b/awxkit/awxkit/api/registry.py
index 67d6bb23b8..c7cea080b9 100644
--- a/awxkit/awxkit/api/registry.py
+++ b/awxkit/awxkit/api/registry.py
@@ -8,7 +8,6 @@ log = logging.getLogger(__name__)
class URLRegistry(object):
-
def __init__(self):
self.store = defaultdict(dict)
self.default = {}
@@ -81,8 +80,7 @@ class URLRegistry(object):
if method_pattern.pattern == not_provided:
exc_msg = '"{0.pattern}" already has methodless registration.'.format(url_pattern)
else:
- exc_msg = ('"{0.pattern}" already has registered method "{1.pattern}"'
- .format(url_pattern, method_pattern))
+ exc_msg = '"{0.pattern}" already has registered method "{1.pattern}"'.format(url_pattern, method_pattern)
raise TypeError(exc_msg)
self.store[url_pattern][method_pattern] = resource
diff --git a/awxkit/awxkit/api/resources.py b/awxkit/awxkit/api/resources.py
index 997ada1e70..573c96598f 100644
--- a/awxkit/awxkit/api/resources.py
+++ b/awxkit/awxkit/api/resources.py
@@ -1,4 +1,3 @@
-
class Resources(object):
_activity = r'activity_stream/\d+/'
diff --git a/awxkit/awxkit/api/utils.py b/awxkit/awxkit/api/utils.py
index a3e6739b26..119d68aa41 100644
--- a/awxkit/awxkit/api/utils.py
+++ b/awxkit/awxkit/api/utils.py
@@ -15,12 +15,11 @@ def freeze(key):
def parse_description(desc):
options = {}
- for line in desc[desc.index('POST'):].splitlines():
+ for line in desc[desc.index('POST') :].splitlines():
match = descRE.match(line)
if not match:
continue
- options[match.group(1)] = {'type': match.group(2),
- 'required': match.group(3) == 'required'}
+ options[match.group(1)] = {'type': match.group(2), 'required': match.group(3) == 'required'}
return options
@@ -45,6 +44,5 @@ def get_post_fields(page, cache):
if 'POST' in options_page.json['actions']:
return options_page.json['actions']['POST']
else:
- log.warning(
- "Insufficient privileges on %s, inferring POST fields from description.", options_page.endpoint)
+ log.warning("Insufficient privileges on %s, inferring POST fields from description.", options_page.endpoint)
return parse_description(options_page.json['description'])
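
`get_post_fields` prefers the OPTIONS action metadata and only falls back to `parse_description` when the caller lacks the privileges to see POST actions; the fallback scrapes field name, type, and required-ness out of the endpoint help text with `descRE`. A simplified, self-contained sketch of that scrape (this regex and line shape are assumptions, not the module's actual `descRE`):

    import re

    # Assumed help-text shape: "* `name`: Name of this project. (string, required)"
    descRE = re.compile(r'^\* `(\w+)`: .*\((\w+), (\w+)\)')

    def parse_description(desc):
        options = {}
        for line in desc[desc.index('POST'):].splitlines():
            match = descRE.match(line)
            if match:
                options[match.group(1)] = {'type': match.group(2), 'required': match.group(3) == 'required'}
        return options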
diff --git a/awxkit/awxkit/awx/inventory.py b/awxkit/awxkit/awx/inventory.py
index b4872eb3e8..61a31c5754 100644
--- a/awxkit/awxkit/awx/inventory.py
+++ b/awxkit/awxkit/awx/inventory.py
@@ -17,13 +17,14 @@ def upload_inventory(ansible_runner, nhosts=10, ini=False):
copy_content = '''#!/bin/bash
cat <<EOF
%s
-EOF''' % json_inventory(nhosts)
+EOF''' % json_inventory(
+ nhosts
+ )
# Copy script to test system
contacted = ansible_runner.copy(dest=copy_dest, force=True, mode=copy_mode, content=copy_content)
for result in contacted.values():
- assert not result.get('failed', False), \
- "Failed to create inventory file: %s" % result
+ assert not result.get('failed', False), "Failed to create inventory file: %s" % result
return copy_dest
@@ -49,8 +50,7 @@ def generate_inventory(nhosts=100):
group_by_10s = 'group-%07dX.example.com' % (n / 10)
group_by_100s = 'group-%06dXX.example.com' % (n / 100)
group_by_1000s = 'group-%05dXXX.example.com' % (n / 1000)
- for group in [group_evens_odds, group_threes, group_fours, group_fives, group_sixes, group_sevens,
- group_eights, group_nines, group_tens, group_by_10s]:
+ for group in [group_evens_odds, group_threes, group_fours, group_fives, group_sixes, group_sevens, group_eights, group_nines, group_tens, group_by_10s]:
if not group:
continue
if group in inv_list:
@@ -58,11 +58,9 @@ def generate_inventory(nhosts=100):
else:
inv_list[group] = {'hosts': [hostname], 'children': [], 'vars': {'group_prefix': group.split('.')[0]}}
if group_by_1000s not in inv_list:
- inv_list[group_by_1000s] = {'hosts': [], 'children': [],
- 'vars': {'group_prefix': group_by_1000s.split('.')[0]}}
+ inv_list[group_by_1000s] = {'hosts': [], 'children': [], 'vars': {'group_prefix': group_by_1000s.split('.')[0]}}
if group_by_100s not in inv_list:
- inv_list[group_by_100s] = {'hosts': [], 'children': [],
- 'vars': {'group_prefix': group_by_100s.split('.')[0]}}
+ inv_list[group_by_100s] = {'hosts': [], 'children': [], 'vars': {'group_prefix': group_by_100s.split('.')[0]}}
if group_by_100s not in inv_list[group_by_1000s]['children']:
inv_list[group_by_1000s]['children'].append(group_by_100s)
if group_by_10s not in inv_list[group_by_100s]['children']:
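
`generate_inventory` above buckets host n into decade/century/millennium rollup groups via %d formatting of n / 10, n / 100, and n / 1000 (the float result is truncated by %d). A quick worked example of the naming rule:

    # Illustrative only: host 1234 lands in these rollup groups.
    n = 1234
    print('group-%07dX.example.com' % (n / 10))      # group-0000123X.example.com
    print('group-%06dXX.example.com' % (n / 100))    # group-000012XX.example.com
    print('group-%05dXXX.example.com' % (n / 1000))  # group-00001XXX.example.com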
diff --git a/awxkit/awxkit/awx/utils.py b/awxkit/awxkit/awx/utils.py
index d25e555ad6..f4aefeeca3 100644
--- a/awxkit/awxkit/awx/utils.py
+++ b/awxkit/awxkit/awx/utils.py
@@ -31,9 +31,22 @@ def _delete_all(endpoint):
def delete_all(v):
- for endpoint in (v.unified_jobs, v.job_templates, v.workflow_job_templates, v.notification_templates,
- v.projects, v.inventory, v.hosts, v.inventory_scripts, v.labels, v.credentials,
- v.teams, v.users, v.organizations, v.schedules):
+ for endpoint in (
+ v.unified_jobs,
+ v.job_templates,
+ v.workflow_job_templates,
+ v.notification_templates,
+ v.projects,
+ v.inventory,
+ v.hosts,
+ v.inventory_scripts,
+ v.labels,
+ v.credentials,
+ v.teams,
+ v.users,
+ v.organizations,
+ v.schedules,
+ ):
_delete_all(endpoint)
diff --git a/awxkit/awxkit/cli/__init__.py b/awxkit/awxkit/cli/__init__.py
index 6b20792eef..a73f18510d 100644
--- a/awxkit/awxkit/cli/__init__.py
+++ b/awxkit/awxkit/cli/__init__.py
@@ -56,14 +56,7 @@ def run(stdout=sys.stdout, stderr=sys.stderr, argv=[]):
json.dump(e.msg, sys.stdout)
print('')
elif cli.get_config('format') == 'yaml':
- sys.stdout.write(to_str(
- yaml.safe_dump(
- e.msg,
- default_flow_style=False,
- encoding='utf-8',
- allow_unicode=True
- )
- ))
+ sys.stdout.write(to_str(yaml.safe_dump(e.msg, default_flow_style=False, encoding='utf-8', allow_unicode=True)))
elif cli.get_config('format') == 'human':
sys.stdout.write(e.__class__.__name__)
print('')
diff --git a/awxkit/awxkit/cli/client.py b/awxkit/awxkit/cli/client.py
index f14d6df135..46dc4fa333 100755
--- a/awxkit/awxkit/cli/client.py
+++ b/awxkit/awxkit/cli/client.py
@@ -8,9 +8,7 @@ import sys
from requests.exceptions import RequestException
from .custom import handle_custom_actions
-from .format import (add_authentication_arguments,
- add_output_formatting_arguments,
- FORMATTERS, format_response)
+from .format import add_authentication_arguments, add_output_formatting_arguments, FORMATTERS, format_response
from .options import ResourceOptionsParser, UNIQUENESS_RULES
from .resource import parse_resource, is_control_resource
from awxkit import api, config, utils, exceptions, WSClient # noqa
@@ -88,7 +86,9 @@ class CLI(object):
token = self.get_config('token')
if token:
self.root.connection.login(
- None, None, token=token,
+ None,
+ None,
+ token=token,
)
else:
config.use_sessions = True
@@ -102,12 +102,14 @@ class CLI(object):
if self.get_config('insecure'):
config.assume_untrusted = True
- config.credentials = utils.PseudoNamespace({
- 'default': {
- 'username': self.get_config('username'),
- 'password': self.get_config('password'),
+ config.credentials = utils.PseudoNamespace(
+ {
+ 'default': {
+ 'username': self.get_config('username'),
+ 'password': self.get_config('password'),
+ }
}
- })
+ )
_, remainder = self.parser.parse_known_args()
if remainder and remainder[0] == 'config':
@@ -133,11 +135,7 @@ class CLI(object):
try:
self.v2 = self.root.get().available_versions.v2.get()
except AttributeError:
- raise RuntimeError(
- 'An error occurred while fetching {}/api/'.format(
- self.get_config('host')
- )
- )
+ raise RuntimeError('An error occurred while fetching {}/api/'.format(self.get_config('host')))
def parse_resource(self, skip_deprecated=False):
"""Attempt to parse the <resource> (e.g., jobs) specified on the CLI
@@ -170,33 +168,15 @@ class CLI(object):
_filter = self.get_config('filter')
# human format for metrics, settings is special
- if (
- self.resource in ('metrics', 'settings') and
- self.get_config('format') == 'human'
- ):
- response.json = {
- 'count': len(response.json),
- 'results': [
- {'key': k, 'value': v}
- for k, v in response.json.items()
- ]
- }
+ if self.resource in ('metrics', 'settings') and self.get_config('format') == 'human':
+ response.json = {'count': len(response.json), 'results': [{'key': k, 'value': v} for k, v in response.json.items()]}
_filter = 'key, value'
- if (
- self.get_config('format') == 'human' and
- _filter == '.' and
- self.resource in UNIQUENESS_RULES
- ):
+ if self.get_config('format') == 'human' and _filter == '.' and self.resource in UNIQUENESS_RULES:
_filter = ', '.join(UNIQUENESS_RULES[self.resource])
formatted = format_response(
- response,
- fmt=self.get_config('format'),
- filter=_filter,
- changed=self.original_action in (
- 'modify', 'create', 'associate', 'disassociate'
- )
+ response, fmt=self.get_config('format'), filter=_filter, changed=self.original_action in ('modify', 'create', 'associate', 'disassociate')
)
if formatted:
print(utils.to_str(formatted), file=self.stdout)
@@ -219,10 +199,7 @@ class CLI(object):
_without_ triggering a SystemExit (argparse's
behavior if required arguments are missing)
"""
- subparsers = self.subparsers[self.resource].add_subparsers(
- dest='action',
- metavar='action'
- )
+ subparsers = self.subparsers[self.resource].add_subparsers(dest='action', metavar='action')
subparsers.required = True
# parse the action from OPTIONS
@@ -252,10 +229,7 @@ class CLI(object):
if self.resource != 'settings':
for method in ('list', 'modify', 'create'):
if method in parser.parser.choices:
- parser.build_query_arguments(
- method,
- 'GET' if method == 'list' else 'POST'
- )
+ parser.build_query_arguments(method, 'GET' if method == 'list' else 'POST')
if from_sphinx:
parsed, extra = self.parser.parse_known_args(self.argv)
else:
@@ -263,10 +237,7 @@ class CLI(object):
if extra and self.verbose:
# If extraneous arguments were provided, warn the user
- cprint('{}: unrecognized arguments: {}'.format(
- self.parser.prog,
- ' '.join(extra)
- ), 'yellow', file=self.stdout)
+ cprint('{}: unrecognized arguments: {}'.format(self.parser.prog, ' '.join(extra)), 'yellow', file=self.stdout)
# build a dictionary of all of the _valid_ flags specified on the
# command line so we can pass them on to the underlying awxkit call
@@ -275,14 +246,7 @@ class CLI(object):
# everything else is a flag used as a query argument for the HTTP
# request we'll make (e.g., --username="Joe", --verbosity=3)
parsed = parsed.__dict__
- parsed = dict(
- (k, v) for k, v in parsed.items()
- if (
- v is not None and
- k not in ('help', 'resource') and
- not k.startswith('conf.')
- )
- )
+ parsed = dict((k, v) for k, v in parsed.items() if (v is not None and k not in ('help', 'resource') and not k.startswith('conf.')))
# if `id` is one of the arguments, it's a detail view
if 'id' in parsed:
@@ -290,9 +254,7 @@ class CLI(object):
# determine the awxkit method to call
action = self.original_action = parsed.pop('action')
- page, action = handle_custom_actions(
- self.resource, action, page
- )
+ page, action = handle_custom_actions(self.resource, action, page)
self.method = {
'list': 'get',
'modify': 'patch',
@@ -327,13 +289,7 @@ class CLI(object):
action='store_true',
help='prints usage information for the awx tool',
)
- self.parser.add_argument(
- '--version',
- dest='conf.version',
- action='version',
- help='display awx CLI version',
- version=__version__
- )
+ self.parser.add_argument('--version', dest='conf.version', action='version', help='display awx CLI version', version=__version__)
add_authentication_arguments(self.parser, env)
add_output_formatting_arguments(self.parser, env)
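
The human-format special case above (resource in metrics/settings) reshapes a flat key-to-value JSON object into the count/results rows the tabular formatter expects, then forces the filter to 'key, value'. A minimal sketch of the reshape:

    # Illustrative only: rows for tabular output from a flat settings dict.
    data = {'TOWER_URL_BASE': 'https://localhost', 'SESSION_COOKIE_AGE': 1800}
    reshaped = {
        'count': len(data),
        'results': [{'key': k, 'value': v} for k, v in data.items()],
    }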
diff --git a/awxkit/awxkit/cli/custom.py b/awxkit/awxkit/cli/custom.py
index 38d97d8895..d2b9bc6590 100644
--- a/awxkit/awxkit/cli/custom.py
+++ b/awxkit/awxkit/cli/custom.py
@@ -16,7 +16,6 @@ def handle_custom_actions(resource, action, page):
class CustomActionRegistryMeta(CustomRegistryMeta):
-
@property
def name(self):
return ' '.join([self.resource, self.action])
@@ -45,33 +44,16 @@ class CustomAction(metaclass=CustomActionRegistryMeta):
class Launchable(object):
-
def add_arguments(self, parser, resource_options_parser, with_pk=True):
from .options import pk_or_name
+
if with_pk:
- parser.choices[self.action].add_argument(
- 'id',
- type=functools.partial(
- pk_or_name, None, self.resource, page=self.page
- ),
- help=''
- )
- parser.choices[self.action].add_argument(
- '--monitor', action='store_true',
- help='If set, prints stdout of the launched job until it finishes.'
- )
- parser.choices[self.action].add_argument(
- '--timeout', type=int,
- help='If set with --monitor or --wait, time out waiting on job completion.' # noqa
- )
- parser.choices[self.action].add_argument(
- '--wait', action='store_true',
- help='If set, waits until the launched job finishes.'
- )
+ parser.choices[self.action].add_argument('id', type=functools.partial(pk_or_name, None, self.resource, page=self.page), help='')
+ parser.choices[self.action].add_argument('--monitor', action='store_true', help='If set, prints stdout of the launched job until it finishes.')
+ parser.choices[self.action].add_argument('--timeout', type=int, help='If set with --monitor or --wait, time out waiting on job completion.') # noqa
+ parser.choices[self.action].add_argument('--wait', action='store_true', help='If set, waits until the launched job finishes.')
- launch_time_options = self.page.connection.options(
- self.page.endpoint + '1/{}/'.format(self.action)
- )
+ launch_time_options = self.page.connection.options(self.page.endpoint + '1/{}/'.format(self.action))
if launch_time_options.ok:
launch_time_options = launch_time_options.json()['actions']['POST']
resource_options_parser.options['LAUNCH'] = launch_time_options
@@ -118,24 +100,15 @@ class ProjectCreate(CustomAction):
resource = 'projects'
def add_arguments(self, parser, resource_options_parser):
- parser.choices[self.action].add_argument(
- '--monitor', action='store_true',
- help=('If set, prints stdout of the project update until '
- 'it finishes.')
- )
- parser.choices[self.action].add_argument(
- '--wait', action='store_true',
- help='If set, waits until the new project has updated.'
- )
+ parser.choices[self.action].add_argument('--monitor', action='store_true', help=('If set, prints stdout of the project update until ' 'it finishes.'))
+ parser.choices[self.action].add_argument('--wait', action='store_true', help='If set, waits until the new project has updated.')
def post(self, kwargs):
should_monitor = kwargs.pop('monitor', False)
wait = kwargs.pop('wait', False)
response = self.page.post(kwargs)
if should_monitor or wait:
- update = response.related.project_updates.get(
- order_by='-created'
- ).results[0]
+ update = response.related.project_updates.get(order_by='-created').results[0]
monitor(
update,
self.page.connection.session,
@@ -154,9 +127,7 @@ class AdhocCommandLaunch(Launchable, CustomAction):
resource = 'ad_hoc_commands'
def add_arguments(self, parser, resource_options_parser):
- Launchable.add_arguments(
- self, parser, resource_options_parser, with_pk=False
- )
+ Launchable.add_arguments(self, parser, resource_options_parser, with_pk=False)
def perform(self, **kwargs):
monitor_kwargs = {
@@ -182,22 +153,14 @@ class HasStdout(object):
def add_arguments(self, parser, resource_options_parser):
from .options import pk_or_name
- parser.choices['stdout'].add_argument(
- 'id',
- type=functools.partial(
- pk_or_name, None, self.resource, page=self.page
- ),
- help=''
- )
+
+ parser.choices['stdout'].add_argument('id', type=functools.partial(pk_or_name, None, self.resource, page=self.page), help='')
def perform(self):
fmt = 'txt_download'
if color_enabled():
fmt = 'ansi_download'
- return self.page.connection.get(
- self.page.get().related.stdout,
- query_parameters=dict(format=fmt)
- ).content.decode('utf-8')
+ return self.page.connection.get(self.page.get().related.stdout, query_parameters=dict(format=fmt)).content.decode('utf-8')
class JobStdout(HasStdout, CustomAction):
@@ -222,13 +185,8 @@ class AssociationMixin(object):
def add_arguments(self, parser, resource_options_parser):
from .options import pk_or_name
- parser.choices[self.action].add_argument(
- 'id',
- type=functools.partial(
- pk_or_name, None, self.resource, page=self.page
- ),
- help=''
- )
+
+ parser.choices[self.action].add_argument('id', type=functools.partial(pk_or_name, None, self.resource, page=self.page), help='')
group = parser.choices[self.action].add_mutually_exclusive_group(required=True)
for param, endpoint in self.targets.items():
field, model_name = endpoint
@@ -237,7 +195,6 @@ class AssociationMixin(object):
help_text = 'The ID (or name) of the {} to {}'.format(model_name, self.action)
class related_page(object):
-
def __init__(self, connection, resource):
self.conn = connection
self.resource = {
@@ -256,20 +213,15 @@ class AssociationMixin(object):
group.add_argument(
'--{}'.format(param),
metavar='',
- type=functools.partial(
- pk_or_name, None, param,
- page=related_page(self.page.connection, param)
- ),
- help=help_text
+ type=functools.partial(pk_or_name, None, param, page=related_page(self.page.connection, param)),
+ help=help_text,
)
def perform(self, **kwargs):
for k, v in kwargs.items():
endpoint, _ = self.targets[k]
try:
- self.page.get().related[endpoint].post(
- {'id': v, self.action: True}
- )
+ self.page.get().related[endpoint].post({'id': v, self.action: True})
except NoContent:
# we expect to enter this block because these endpoints return
# HTTP 204 on success
@@ -279,18 +231,9 @@ class AssociationMixin(object):
class NotificationAssociateMixin(AssociationMixin):
targets = {
- 'start_notification': [
- 'notification_templates_started',
- 'notification_template'
- ],
- 'success_notification': [
- 'notification_templates_success',
- 'notification_template'
- ],
- 'failure_notification': [
- 'notification_templates_error',
- 'notification_template'
- ],
+ 'start_notification': ['notification_templates_started', 'notification_template'],
+ 'success_notification': ['notification_templates_success', 'notification_template'],
+ 'failure_notification': ['notification_templates_error', 'notification_template'],
}
@@ -306,12 +249,16 @@ class JobTemplateNotificationDisAssociation(NotificationAssociateMixin, CustomAc
targets = NotificationAssociateMixin.targets.copy()
-JobTemplateNotificationAssociation.targets.update({
- 'credential': ['credentials', None],
-})
-JobTemplateNotificationDisAssociation.targets.update({
- 'credential': ['credentials', None],
-})
+JobTemplateNotificationAssociation.targets.update(
+ {
+ 'credential': ['credentials', None],
+ }
+)
+JobTemplateNotificationDisAssociation.targets.update(
+ {
+ 'credential': ['credentials', None],
+ }
+)
class WorkflowJobTemplateNotificationAssociation(NotificationAssociateMixin, CustomAction):
@@ -326,12 +273,16 @@ class WorkflowJobTemplateNotificationDisAssociation(NotificationAssociateMixin,
targets = NotificationAssociateMixin.targets.copy()
-WorkflowJobTemplateNotificationAssociation.targets.update({
- 'approval_notification': ['notification_templates_approvals', 'notification_template'],
-})
-WorkflowJobTemplateNotificationDisAssociation.targets.update({
- 'approval_notification': ['notification_templates_approvals', 'notification_template'],
-})
+WorkflowJobTemplateNotificationAssociation.targets.update(
+ {
+ 'approval_notification': ['notification_templates_approvals', 'notification_template'],
+ }
+)
+WorkflowJobTemplateNotificationDisAssociation.targets.update(
+ {
+ 'approval_notification': ['notification_templates_approvals', 'notification_template'],
+ }
+)
class ProjectNotificationAssociation(NotificationAssociateMixin, CustomAction):
@@ -366,14 +317,18 @@ class OrganizationNotificationDisAssociation(NotificationAssociateMixin, CustomA
targets = NotificationAssociateMixin.targets.copy()
-OrganizationNotificationAssociation.targets.update({
- 'approval_notification': ['notification_templates_approvals', 'notification_template'],
- 'galaxy_credential': ['galaxy_credentials', 'credential'],
-})
-OrganizationNotificationDisAssociation.targets.update({
- 'approval_notification': ['notification_templates_approvals', 'notification_template'],
- 'galaxy_credential': ['galaxy_credentials', 'credential'],
-})
+OrganizationNotificationAssociation.targets.update(
+ {
+ 'approval_notification': ['notification_templates_approvals', 'notification_template'],
+ 'galaxy_credential': ['galaxy_credentials', 'credential'],
+ }
+)
+OrganizationNotificationDisAssociation.targets.update(
+ {
+ 'approval_notification': ['notification_templates_approvals', 'notification_template'],
+ 'galaxy_credential': ['galaxy_credentials', 'credential'],
+ }
+)
class SettingsList(CustomAction):
@@ -381,9 +336,7 @@ class SettingsList(CustomAction):
resource = 'settings'
def add_arguments(self, parser, resource_options_parser):
- parser.choices['list'].add_argument(
- '--slug', help='optional setting category/slug', default='all'
- )
+ parser.choices['list'].add_argument('--slug', help='optional setting category/slug', default='all')
def perform(self, slug):
self.page.endpoint = self.page.endpoint + '{}/'.format(slug)
@@ -409,30 +362,18 @@ class RoleMixin(object):
if not RoleMixin.roles:
for resource, flag in self.has_roles:
- options = self.page.__class__(
- self.page.endpoint.replace(self.resource, resource),
- self.page.connection
- ).options()
- RoleMixin.roles[flag] = [
- role.replace('_role', '')
- for role in options.json.get('object_roles', [])
- ]
+ options = self.page.__class__(self.page.endpoint.replace(self.resource, resource), self.page.connection).options()
+ RoleMixin.roles[flag] = [role.replace('_role', '') for role in options.json.get('object_roles', [])]
possible_roles = set()
for v in RoleMixin.roles.values():
possible_roles.update(v)
- resource_group = parser.choices[self.action].add_mutually_exclusive_group(
- required=True
- )
+ resource_group = parser.choices[self.action].add_mutually_exclusive_group(required=True)
parser.choices[self.action].add_argument(
'id',
- type=functools.partial(
- pk_or_name, None, self.resource, page=self.page
- ),
- help='The ID (or name) of the {} to {} access to/from'.format(
- self.resource, self.action
- )
+ type=functools.partial(pk_or_name, None, self.resource, page=self.page),
+ help='The ID (or name) of the {} to {} access to/from'.format(self.resource, self.action),
)
for _type in RoleMixin.roles.keys():
if _type == 'team' and self.resource == 'team':
@@ -440,7 +381,6 @@ class RoleMixin(object):
continue
class related_page(object):
-
def __init__(self, connection, resource):
self.conn = connection
if resource == 'inventories':
@@ -453,19 +393,12 @@ class RoleMixin(object):
resource_group.add_argument(
'--{}'.format(_type),
- type=functools.partial(
- pk_or_name, None, _type,
- page=related_page(
- self.page.connection,
- dict((v, k) for k, v in self.has_roles)[_type]
- )
- ),
+ type=functools.partial(pk_or_name, None, _type, page=related_page(self.page.connection, dict((v, k) for k, v in self.has_roles)[_type])),
metavar='ID',
help='The ID (or name) of the target {}'.format(_type),
)
parser.choices[self.action].add_argument(
- '--role', type=str, choices=possible_roles, required=True,
- help='The name of the role to {}'.format(self.action)
+ '--role', type=str, choices=possible_roles, required=True, help='The name of the role to {}'.format(self.action)
)
def perform(self, **kwargs):
@@ -474,17 +407,10 @@ class RoleMixin(object):
role = kwargs['role']
if role not in RoleMixin.roles[flag]:
options = ', '.join(RoleMixin.roles[flag])
- raise ValueError(
- "invalid choice: '{}' must be one of {}".format(
- role, options
- )
- )
+ raise ValueError("invalid choice: '{}' must be one of {}".format(role, options))
value = kwargs[flag]
target = '/api/v2/{}/{}'.format(resource, value)
- detail = self.page.__class__(
- target,
- self.page.connection
- ).get()
+ detail = self.page.__class__(target, self.page.connection).get()
object_roles = detail['summary_fields']['object_roles']
actual_role = object_roles[role + '_role']
params = {'id': actual_role['id']}
@@ -530,15 +456,8 @@ class SettingsModify(CustomAction):
resource = 'settings'
def add_arguments(self, parser, resource_options_parser):
- options = self.page.__class__(
- self.page.endpoint + 'all/', self.page.connection
- ).options()
- parser.choices['modify'].add_argument(
- 'key',
- choices=sorted(options['actions']['PUT'].keys()),
- metavar='key',
- help=''
- )
+ options = self.page.__class__(self.page.endpoint + 'all/', self.page.connection).options()
+ parser.choices['modify'].add_argument('key', choices=sorted(options['actions']['PUT'].keys()), metavar='key', help='')
parser.choices['modify'].add_argument('value', help='')
def perform(self, key, value):
@@ -563,13 +482,8 @@ class HasMonitor(object):
def add_arguments(self, parser, resource_options_parser):
from .options import pk_or_name
- parser.choices[self.action].add_argument(
- 'id',
- type=functools.partial(
- pk_or_name, None, self.resource, page=self.page
- ),
- help=''
- )
+
+ parser.choices[self.action].add_argument('id', type=functools.partial(pk_or_name, None, self.resource, page=self.page), help='')
def perform(self, **kwargs):
response = self.page.get()
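The classes reformatted throughout custom.py hang off a metaclass-driven registry: CustomRegistryMeta.registry (defined in utils.py, later in this diff) maps each subclass's computed name onto the class, and CustomActionRegistryMeta.name joins resource and action, which is how handle_custom_actions resolves a pair like 'projects create' to ProjectCreate. A condensed, runnable sketch of that pattern using the same names; the real classes carry more behavior:

    class CustomRegistryMeta(type):
        @property
        def registry(cls):
            # every direct subclass registers under its computed name
            return dict((command.name, command) for command in cls.__subclasses__())


    class CustomActionRegistryMeta(CustomRegistryMeta):
        @property
        def name(self):
            return ' '.join([self.resource, self.action])


    class CustomAction(metaclass=CustomActionRegistryMeta):
        resource = None
        action = None


    class ProjectCreate(CustomAction):
        resource = 'projects'
        action = 'create'


    print(CustomAction.registry)  # {'projects create': <class '__main__.ProjectCreate'>}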
diff --git a/awxkit/awxkit/cli/docs/source/conf.py b/awxkit/awxkit/cli/docs/source/conf.py
index 7f4cf47185..75eb627103 100644
--- a/awxkit/awxkit/cli/docs/source/conf.py
+++ b/awxkit/awxkit/cli/docs/source/conf.py
@@ -27,9 +27,7 @@ author = 'Ansible by Red Hat'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
-extensions = [
- 'awxkit.cli.sphinx'
-]
+extensions = ['awxkit.cli.sphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
diff --git a/awxkit/awxkit/cli/format.py b/awxkit/awxkit/cli/format.py
index d35c61efbb..adf61337a4 100644
--- a/awxkit/awxkit/cli/format.py
+++ b/awxkit/awxkit/cli/format.py
@@ -48,24 +48,21 @@ def add_output_formatting_arguments(parser, env):
dest='conf.format',
choices=FORMATTERS.keys(),
default=env.get('TOWER_FORMAT', 'json'),
- help=(
- 'specify a format for the input and output'
- ),
+ help=('specify a format for the input and output'),
)
formatting.add_argument(
'--filter',
dest='conf.filter',
default='.',
metavar='TEXT',
- help=(
- 'specify an output filter (only valid with jq or human format)'
- ),
+ help=('specify an output filter (only valid with jq or human format)'),
)
formatting.add_argument(
'--conf.color',
metavar='BOOLEAN',
help='Display colorized output. Defaults to True',
- default=env.get('TOWER_COLOR', 't'), type=strtobool,
+ default=env.get('TOWER_COLOR', 't'),
+ type=strtobool,
)
formatting.add_argument(
'-v',
@@ -73,7 +70,7 @@ def add_output_formatting_arguments(parser, env):
dest='conf.verbose',
help='print debug-level logs, including requests made',
default=strtobool(env.get('TOWER_VERBOSE', 'f')),
- action="store_true"
+ action="store_true",
)
@@ -105,11 +102,10 @@ def format_jq(output, fmt):
if fmt == '.':
return output
raise ImportError(
- 'To use `-f jq`, you must install the optional jq dependency.\n'
- '`pip install jq`\n',
+ 'To use `-f jq`, you must install the optional jq dependency.\n' '`pip install jq`\n',
'Note that some platforms may require additional programs to '
'build jq from source (like `libtool`).\n'
- 'See https://pypi.org/project/jq/ for instructions.'
+ 'See https://pypi.org/project/jq/ for instructions.',
)
results = []
for x in jq.jq(fmt).transform(output, multiple_output=True):
@@ -127,11 +123,7 @@ def format_json(output, fmt):
def format_yaml(output, fmt):
output = json.loads(json.dumps(output))
- return yaml.safe_dump(
- output,
- default_flow_style=False,
- allow_unicode=True
- )
+ return yaml.safe_dump(output, default_flow_style=False, allow_unicode=True)
def format_human(output, fmt):
@@ -151,10 +143,7 @@ def format_human(output, fmt):
column_names.remove(k)
table = [column_names]
- table.extend([
- [record.get(col, '') for col in column_names]
- for record in output
- ])
+ table.extend([[record.get(col, '') for col in column_names] for record in output])
col_paddings = []
def format_num(v):
@@ -184,9 +173,4 @@ def format_human(output, fmt):
return '\n'.join(lines)
-FORMATTERS = {
- 'json': format_json,
- 'yaml': format_yaml,
- 'jq': format_jq,
- 'human': format_human
-}
+FORMATTERS = {'json': format_json, 'yaml': format_yaml, 'jq': format_jq, 'human': format_human}
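The FORMATTERS table above is the CLI's output dispatch: format_response picks one of these callables based on the -f/--conf.format flag. A small runnable illustration of the yaml branch, with format_yaml copied as it appears in this hunk (PyYAML, which this module already imports, is assumed installed; the sample record is invented):

    import json

    import yaml


    def format_yaml(output, fmt):
        # round-trip through json to normalize types before dumping
        output = json.loads(json.dumps(output))
        return yaml.safe_dump(output, default_flow_style=False, allow_unicode=True)


    print(format_yaml({'id': 1, 'name': 'Demo Job Template'}, '.'))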
diff --git a/awxkit/awxkit/cli/options.py b/awxkit/awxkit/cli/options.py
index 4d292f3611..d5ad777feb 100644
--- a/awxkit/awxkit/cli/options.py
+++ b/awxkit/awxkit/cli/options.py
@@ -21,10 +21,7 @@ UNIQUENESS_RULES = {
def pk_or_name_list(v2, model_name, value, page=None):
- return [
- pk_or_name(v2, model_name, v.strip(), page=page)
- for v in value.split(',')
- ]
+ return [pk_or_name(v2, model_name, v.strip(), page=page) for v in value.split(',')]
def pk_or_name(v2, model_name, value, page=None):
@@ -58,17 +55,9 @@ def pk_or_name(v2, model_name, value, page=None):
return int(results.results[0].id)
if results.count > 1:
raise argparse.ArgumentTypeError(
- 'Multiple {0} exist with that {1}. '
- 'To look up an ID, run:\n'
- 'awx {0} list --{1} "{2}" -f human'.format(
- model_name, identity, value
- )
- )
- raise argparse.ArgumentTypeError(
- 'Could not find any {0} with that {1}.'.format(
- model_name, identity
+ 'Multiple {0} exist with that {1}. ' 'To look up an ID, run:\n' 'awx {0} list --{1} "{2}" -f human'.format(model_name, identity, value)
)
- )
+ raise argparse.ArgumentTypeError('Could not find any {0} with that {1}.'.format(model_name, identity))
return value
@@ -90,9 +79,7 @@ class ResourceOptionsParser(object):
self.page = page
self.resource = resource
self.parser = parser
- self.options = getattr(
- self.page.options().json, 'actions', {'GET': {}}
- )
+ self.options = getattr(self.page.options().json, 'actions', {'GET': {}})
self.get_allowed_options()
if self.resource != 'settings':
# /api/v2/settings is a special resource that doesn't have
@@ -103,9 +90,7 @@ class ResourceOptionsParser(object):
self.handle_custom_actions()
def get_allowed_options(self):
- options = self.page.connection.options(
- self.page.endpoint + '1/'
- )
+ options = self.page.connection.options(self.page.endpoint + '1/')
warning = options.headers.get('Warning', '')
if '299' in warning and 'deprecated' in warning:
self.deprecated = True
@@ -121,11 +106,10 @@ class ResourceOptionsParser(object):
parser = self.parser.add_parser(method, help='')
if method == 'list':
parser.add_argument(
- '--all', dest='all_pages', action='store_true',
- help=(
- 'fetch all pages of content from the API when '
- 'returning results (instead of just the first page)'
- )
+ '--all',
+ dest='all_pages',
+ action='store_true',
+ help=('fetch all pages of content from the API when ' 'returning results (instead of just the first page)'),
)
add_output_formatting_arguments(parser, {})
@@ -138,9 +122,7 @@ class ResourceOptionsParser(object):
for method in allowed:
parser = self.parser.add_parser(method, help='')
self.parser.choices[method].add_argument(
- 'id',
- type=functools.partial(pk_or_name, self.v2, self.resource),
- help='the ID (or unique name) of the resource'
+ 'id', type=functools.partial(pk_or_name, self.v2, self.resource), help='the ID (or unique name) of the resource'
)
if method == 'get':
add_output_formatting_arguments(parser, {})
@@ -148,10 +130,7 @@ class ResourceOptionsParser(object):
def build_query_arguments(self, method, http_method):
required_group = None
for k, param in self.options.get(http_method, {}).items():
- required = (
- method == 'create' and
- param.get('required', False) is True
- )
+ required = method == 'create' and param.get('required', False) is True
help_text = param.get('help_text', '')
if method == 'list':
@@ -159,10 +138,7 @@ class ResourceOptionsParser(object):
# don't allow `awx <resource> list` to filter on `--id`
# it's weird, and that's what awx <resource> get is for
continue
- help_text = 'only list {} with the specified {}'.format(
- self.resource,
- k
- )
+ help_text = 'only list {} with the specified {}'.format(self.resource, k)
if method == 'list' and param.get('filterable') is False:
continue
@@ -256,9 +232,8 @@ class ResourceOptionsParser(object):
# unlike *other* actual JSON fields in the API, inventory and JT
# variables *actually* want json.dumps() strings (ugh)
# see: https://github.com/ansible/awx/issues/2371
- if (
- (self.resource in ('job_templates', 'workflow_job_templates') and k == 'extra_vars') or
- (self.resource in ('inventory', 'groups', 'hosts') and k == 'variables')
+ if (self.resource in ('job_templates', 'workflow_job_templates') and k == 'extra_vars') or (
+ self.resource in ('inventory', 'groups', 'hosts') and k == 'variables'
):
kwargs['type'] = jsonstr
@@ -267,15 +242,9 @@ class ResourceOptionsParser(object):
required_group = self.parser.choices[method].add_argument_group('required arguments')
# put the required group first (before the optional args group)
self.parser.choices[method]._action_groups.reverse()
- required_group.add_argument(
- '--{}'.format(k),
- **kwargs
- )
+ required_group.add_argument('--{}'.format(k), **kwargs)
else:
- self.parser.choices[method].add_argument(
- '--{}'.format(k),
- **kwargs
- )
+ self.parser.choices[method].add_argument('--{}'.format(k), **kwargs)
def handle_custom_actions(self):
for _, action in CustomAction.registry.items():
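One behavioral wrinkle preserved through the reformatting above: job template and inventory 'extra_vars'/'variables' fields want JSON-encoded strings rather than JSON objects (the comment in the hunk links awx issue #2371), which is why those fields get type=jsonstr. An illustration with an invented payload:

    import json

    extra_vars = {'greeting': 'hello', 'retries': 3}
    # the API field expects a string of JSON, not a nested object
    payload = {'extra_vars': json.dumps(extra_vars)}
    print(payload)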
diff --git a/awxkit/awxkit/cli/resource.py b/awxkit/awxkit/cli/resource.py
index 13419357d2..7aa7c32e0a 100644
--- a/awxkit/awxkit/cli/resource.py
+++ b/awxkit/awxkit/cli/resource.py
@@ -40,11 +40,9 @@ DEPRECATED_RESOURCES = {
'teams': 'team',
'workflow_job_templates': 'workflow',
'workflow_jobs': 'workflow_job',
- 'users': 'user'
+ 'users': 'user',
}
-DEPRECATED_RESOURCES_REVERSE = dict(
- (v, k) for k, v in DEPRECATED_RESOURCES.items()
-)
+DEPRECATED_RESOURCES_REVERSE = dict((v, k) for k, v in DEPRECATED_RESOURCES.items())
class CustomCommand(metaclass=CustomRegistryMeta):
@@ -81,9 +79,7 @@ class Login(CustomCommand):
auth.add_argument('--description', help='description of the generated OAuth2.0 token', metavar='TEXT')
auth.add_argument('--conf.client_id', metavar='TEXT')
auth.add_argument('--conf.client_secret', metavar='TEXT')
- auth.add_argument(
- '--conf.scope', choices=['read', 'write'], default='write'
- )
+ auth.add_argument('--conf.scope', choices=['read', 'write'], default='write')
if client.help:
self.print_help(parser)
raise SystemExit()
@@ -99,10 +95,7 @@ class Login(CustomCommand):
token = api.Api().get_oauth2_token(**kwargs)
except Exception as e:
self.print_help(parser)
- cprint(
- 'Error retrieving an OAuth2.0 token ({}).'.format(e.__class__),
- 'red'
- )
+ cprint('Error retrieving an OAuth2.0 token ({}).'.format(e.__class__), 'red')
else:
fmt = client.get_config('format')
if fmt == 'human':
@@ -186,9 +179,7 @@ def parse_resource(client, skip_deprecated=False):
# check if the user is running a custom command
for command in CustomCommand.__subclasses__():
- client.subparsers[command.name] = subparsers.add_parser(
- command.name, help=command.help_text
- )
+ client.subparsers[command.name] = subparsers.add_parser(command.name, help=command.help_text)
if hasattr(client, 'v2'):
for k in client.v2.json.keys():
@@ -202,15 +193,11 @@ def parse_resource(client, skip_deprecated=False):
if k in DEPRECATED_RESOURCES:
kwargs['aliases'] = [DEPRECATED_RESOURCES[k]]
- client.subparsers[k] = subparsers.add_parser(
- k, help='', **kwargs
- )
+ client.subparsers[k] = subparsers.add_parser(k, help='', **kwargs)
resource = client.parser.parse_known_args()[0].resource
if resource in DEPRECATED_RESOURCES.values():
- client.argv[
- client.argv.index(resource)
- ] = DEPRECATED_RESOURCES_REVERSE[resource]
+ client.argv[client.argv.index(resource)] = DEPRECATED_RESOURCES_REVERSE[resource]
resource = DEPRECATED_RESOURCES_REVERSE[resource]
if resource in CustomCommand.registry:
@@ -219,27 +206,14 @@ def parse_resource(client, skip_deprecated=False):
response = command.handle(client, parser)
if response:
_filter = client.get_config('filter')
- if (
- resource == 'config' and
- client.get_config('format') == 'human'
- ):
- response = {
- 'count': len(response),
- 'results': [
- {'key': k, 'value': v}
- for k, v in response.items()
- ]
- }
+ if resource == 'config' and client.get_config('format') == 'human':
+ response = {'count': len(response), 'results': [{'key': k, 'value': v} for k, v in response.items()]}
_filter = 'key, value'
try:
connection = client.root.connection
except AttributeError:
connection = None
- formatted = format_response(
- Page.from_json(response, connection=connection),
- fmt=client.get_config('format'),
- filter=_filter
- )
+ formatted = format_response(Page.from_json(response, connection=connection), fmt=client.get_config('format'), filter=_filter)
print(formatted)
raise SystemExit()
else:
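The DEPRECATED_RESOURCES mapping reformatted at the top of resource.py drives a small argv rewrite in parse_resource: a deprecated singular alias is swapped in place for its canonical plural before parsing continues. A self-contained sketch with a trimmed copy of the mapping:

    DEPRECATED_RESOURCES = {'job_templates': 'job_template', 'users': 'user'}
    DEPRECATED_RESOURCES_REVERSE = dict((v, k) for k, v in DEPRECATED_RESOURCES.items())

    argv = ['awx', 'job_template', 'list']
    resource = argv[1]
    if resource in DEPRECATED_RESOURCES.values():
        # rewrite the alias in place so downstream parsing sees the canonical name
        argv[argv.index(resource)] = DEPRECATED_RESOURCES_REVERSE[resource]
    print(argv)  # ['awx', 'job_templates', 'list']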
diff --git a/awxkit/awxkit/cli/sphinx.py b/awxkit/awxkit/cli/sphinx.py
index 850d99e014..970f93700a 100644
--- a/awxkit/awxkit/cli/sphinx.py
+++ b/awxkit/awxkit/cli/sphinx.py
@@ -8,7 +8,6 @@ from .resource import is_control_resource, CustomCommand
class CustomAutoprogramDirective(AutoprogramDirective):
-
def run(self):
nodes = super(CustomAutoprogramDirective, self).run()
@@ -23,12 +22,7 @@ class CustomAutoprogramDirective(AutoprogramDirective):
nodes[0][0].children = [heading]
# add a descriptive top synopsis of the reference guide
- nodes[0].children.insert(1, paragraph(
- text=(
- 'This is an exhaustive guide of every available command in '
- 'the awx CLI tool.'
- )
- ))
+ nodes[0].children.insert(1, paragraph(text=('This is an exhaustive guide of every available command in ' 'the awx CLI tool.')))
disclaimer = (
'The commands and parameters documented here can (and will) '
'vary based on a variety of factors, such as the AWX API '
@@ -51,9 +45,7 @@ def render():
# Sphinx document from.
for e in ('TOWER_HOST', 'TOWER_USERNAME', 'TOWER_PASSWORD'):
if not os.environ.get(e):
- raise SystemExit(
- 'Please specify a valid {} for a real (running) Tower install.'.format(e) # noqa
- )
+ raise SystemExit('Please specify a valid {} for a real (running) Tower install.'.format(e)) # noqa
cli = CLI()
cli.parse_args(['awx', '--help'])
cli.connect()
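For context on the sphinx.py hunk above: render() introspects a live server to generate the CLI reference, so it refuses to run without connection settings. The precondition check, lifted into a standalone runnable form:

    import os

    for e in ('TOWER_HOST', 'TOWER_USERNAME', 'TOWER_PASSWORD'):
        if not os.environ.get(e):
            raise SystemExit('Please specify a valid {} for a real (running) Tower install.'.format(e))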
diff --git a/awxkit/awxkit/cli/stdout.py b/awxkit/awxkit/cli/stdout.py
index 1cf18168e1..5de134ccc4 100644
--- a/awxkit/awxkit/cli/stdout.py
+++ b/awxkit/awxkit/cli/stdout.py
@@ -9,8 +9,7 @@ from .utils import cprint, color_enabled, STATUS_COLORS
from awxkit.utils import to_str
-def monitor_workflow(response, session, print_stdout=True, timeout=None,
- interval=.25):
+def monitor_workflow(response, session, print_stdout=True, timeout=None, interval=0.25):
get = response.url.get
payload = {
'order_by': 'finished',
@@ -18,9 +17,7 @@ def monitor_workflow(response, session, print_stdout=True, timeout=None,
}
def fetch(seen):
- results = response.connection.get(
- '/api/v2/unified_jobs', payload
- ).json()['results']
+ results = response.connection.get('/api/v2/unified_jobs', payload).json()['results']
# erase lines we've previously printed
if print_stdout and sys.stdout.isatty():
@@ -61,7 +58,7 @@ def monitor_workflow(response, session, print_stdout=True, timeout=None,
# all at the end
fetch(seen)
- time.sleep(.25)
+ time.sleep(0.25)
json = get().json
if json.finished:
fetch(seen)
@@ -71,7 +68,7 @@ def monitor_workflow(response, session, print_stdout=True, timeout=None,
return get().json.status
-def monitor(response, session, print_stdout=True, timeout=None, interval=.25):
+def monitor(response, session, print_stdout=True, timeout=None, interval=0.25):
get = response.url.get
payload = {'order_by': 'start_line', 'no_truncate': True}
if response.type == 'job':
@@ -108,12 +105,9 @@ def monitor(response, session, print_stdout=True, timeout=None, interval=.25):
if next_line:
payload['start_line__gte'] = next_line
- time.sleep(.25)
+ time.sleep(0.25)
json = get().json
- if (
- json.event_processing_finished is True or
- json.status in ('error', 'canceled')
- ):
+ if json.event_processing_finished is True or json.status in ('error', 'canceled'):
fetch(next_line)
break
if print_stdout:
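Beyond the .25 -> 0.25 literal rewrites, monitor() and monitor_workflow() share a fetch/sleep/check polling shape. A hedged, dependency-free sketch of that shape; get_status below is a stand-in for awxkit's page objects, not the real API:

    import time


    def poll(get_status, interval=0.25, timeout=2.0):
        """Poll get_status() until a terminal state or the timeout is hit."""
        started = time.time()
        while time.time() - started < timeout:
            status = get_status()
            if status in ('successful', 'error', 'canceled'):
                return status
            time.sleep(interval)
        return 'timed out'


    states = iter(['pending', 'running', 'successful'])
    print(poll(lambda: next(states)))  # 'successful'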
diff --git a/awxkit/awxkit/cli/utils.py b/awxkit/awxkit/cli/utils.py
index d064158859..61c2fe8f5e 100644
--- a/awxkit/awxkit/cli/utils.py
+++ b/awxkit/awxkit/cli/utils.py
@@ -9,8 +9,7 @@ _color = threading.local()
_color.enabled = True
-__all__ = ['CustomRegistryMeta', 'HelpfulArgumentParser', 'disable_color',
- 'color_enabled', 'colored', 'cprint', 'STATUS_COLORS']
+__all__ = ['CustomRegistryMeta', 'HelpfulArgumentParser', 'disable_color', 'color_enabled', 'colored', 'cprint', 'STATUS_COLORS']
STATUS_COLORS = {
@@ -25,17 +24,12 @@ STATUS_COLORS = {
class CustomRegistryMeta(type):
-
@property
def registry(cls):
- return dict(
- (command.name, command)
- for command in cls.__subclasses__()
- )
+ return dict((command.name, command) for command in cls.__subclasses__())
class HelpfulArgumentParser(ArgumentParser):
-
def error(self, message): # pragma: nocover
"""Prints a usage message incorporating the message to stderr and
exits.
@@ -67,10 +61,16 @@ COLORS = dict(
list(
zip(
[
- 'grey', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan',
+ 'grey',
+ 'red',
+ 'green',
+ 'yellow',
+ 'blue',
+ 'magenta',
+ 'cyan',
'white',
],
- list(range(30, 38))
+ list(range(30, 38)),
)
)
)
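The COLORS construction just reformatted zips the eight color names onto ANSI SGR foreground codes 30-37; colored()/cprint() wrap text in the corresponding escape sequence. The same table, built flat and exercised:

    COLORS = dict(zip(['grey', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'], range(30, 38)))

    # wrap text in an SGR escape sequence, as colored()/cprint() do
    print('\033[{}m{}\033[0m'.format(COLORS['yellow'], 'warning: unrecognized arguments'))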
diff --git a/awxkit/awxkit/exceptions.py b/awxkit/awxkit/exceptions.py
index 596720b59d..84c37d615e 100644
--- a/awxkit/awxkit/exceptions.py
+++ b/awxkit/awxkit/exceptions.py
@@ -1,6 +1,4 @@
-
class Common(Exception):
-
def __init__(self, status_string='', message=''):
if isinstance(status_string, Exception):
self.status_string = ''
diff --git a/awxkit/awxkit/scripts/basic_session.py b/awxkit/awxkit/scripts/basic_session.py
index 4ede325214..5c58d70504 100755
--- a/awxkit/awxkit/scripts/basic_session.py
+++ b/awxkit/awxkit/scripts/basic_session.py
@@ -17,35 +17,26 @@ def parse_args():
parser.add_argument(
'--base-url',
dest='base_url',
- default=os.getenv(
- 'AWXKIT_BASE_URL',
- 'http://127.0.0.1:8013'),
- help='URL for AWX. Defaults to env var AWXKIT_BASE_URL or http://127.0.0.1:8013')
+ default=os.getenv('AWXKIT_BASE_URL', 'http://127.0.0.1:8013'),
+ help='URL for AWX. Defaults to env var AWXKIT_BASE_URL or http://127.0.0.1:8013',
+ )
parser.add_argument(
'-c',
'--credential-file',
dest='credential_file',
- default=os.getenv(
- 'AWXKIT_CREDENTIAL_FILE',
- utils.not_provided),
+ default=os.getenv('AWXKIT_CREDENTIAL_FILE', utils.not_provided),
help='Path for yml credential file. If not provided or set by AWXKIT_CREDENTIAL_FILE, set '
- 'AWXKIT_USER and AWXKIT_USER_PASSWORD env vars for awx user credentials.')
+ 'AWXKIT_USER and AWXKIT_USER_PASSWORD env vars for awx user credentials.',
+ )
parser.add_argument(
'-p',
'--project-file',
dest='project_file',
- default=os.getenv(
- 'AWXKIT_PROJECT_FILE'),
- help='Path for yml project config file.'
- 'If not provided or set by AWXKIT_PROJECT_FILE, projects will not have default SCM_URL')
- parser.add_argument('-f', '--file', dest='akit_script', default=False,
- help='akit script file to run in interactive session.')
- parser.add_argument(
- '-x',
- '--non-interactive',
- action='store_true',
- dest='non_interactive',
- help='Do not run in interactive mode.')
+ default=os.getenv('AWXKIT_PROJECT_FILE'),
+ help='Path for yml project config file.' 'If not provided or set by AWXKIT_PROJECT_FILE, projects will not have default SCM_URL',
+ )
+ parser.add_argument('-f', '--file', dest='akit_script', default=False, help='akit script file to run in interactive session.')
+ parser.add_argument('-x', '--non-interactive', action='store_true', dest='non_interactive', help='Do not run in interactive mode.')
return parser.parse_known_args()[0]
@@ -57,19 +48,14 @@ def main():
config.base_url = akit_args.base_url
if akit_args.credential_file != utils.not_provided:
- config.credentials = utils.load_credentials(
- akit_args.credential_file)
+ config.credentials = utils.load_credentials(akit_args.credential_file)
else:
- config.credentials = utils.PseudoNamespace({
- 'default': {
- 'username': os.getenv('AWXKIT_USER', 'admin'),
- 'password': os.getenv('AWXKIT_USER_PASSWORD', 'password')
- }
- })
+ config.credentials = utils.PseudoNamespace(
+ {'default': {'username': os.getenv('AWXKIT_USER', 'admin'), 'password': os.getenv('AWXKIT_USER_PASSWORD', 'password')}}
+ )
if akit_args.project_file != utils.not_provided:
- config.project_urls = utils.load_projects(
- akit_args.project_file)
+ config.project_urls = utils.load_projects(akit_args.project_file)
global root
root = api.Api()
@@ -106,6 +92,7 @@ def load_interactive():
try:
from IPython import start_ipython
+
basic_session_path = os.path.abspath(__file__)
if basic_session_path[-1] == 'c': # start_ipython doesn't work w/ .pyc
basic_session_path = basic_session_path[:-1]
@@ -115,6 +102,7 @@ def load_interactive():
return start_ipython(argv=sargs)
except ImportError:
from code import interact
+
main()
interact('', local=dict(globals(), **locals()))
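The credential fallback in main() above, isolated for clarity: when no credential file is given (and AWXKIT_CREDENTIAL_FILE is unset), AWXKIT_USER and AWXKIT_USER_PASSWORD populate the default credential, falling back to admin/password:

    import os

    credentials = {
        'default': {
            'username': os.getenv('AWXKIT_USER', 'admin'),
            'password': os.getenv('AWXKIT_USER_PASSWORD', 'password'),
        }
    }
    print(credentials['default'])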
diff --git a/awxkit/awxkit/utils/__init__.py b/awxkit/awxkit/utils/__init__.py
index d5f4055717..67442bee25 100644
--- a/awxkit/awxkit/utils/__init__.py
+++ b/awxkit/awxkit/utils/__init__.py
@@ -34,7 +34,8 @@ cloud_types = (
'rhv',
'satellite6',
'tower',
- 'vmware')
+ 'vmware',
+)
credential_type_kinds = ('cloud', 'net')
not_provided = 'xx__NOT_PROVIDED__xx'
@@ -52,7 +53,6 @@ class NoReloadError(Exception):
class PseudoNamespace(dict):
-
def __init__(self, _d=None, **loaded):
if not isinstance(_d, dict):
_d = {}
@@ -79,9 +79,7 @@ class PseudoNamespace(dict):
try:
return self.__getitem__(attr)
except KeyError:
- raise AttributeError(
- "{!r} has no attribute {!r}".format(
- self.__class__.__name__, attr))
+ raise AttributeError("{!r} has no attribute {!r}".format(self.__class__.__name__, attr))
def __setattr__(self, attr, value):
self.__setitem__(attr, value)
@@ -116,11 +114,7 @@ class PseudoNamespace(dict):
# PseudoNamespaces if applicable
def update(self, iterable=None, **kw):
if iterable:
- if (hasattr(iterable,
- 'keys') and isinstance(iterable.keys,
- (types.FunctionType,
- types.BuiltinFunctionType,
- types.MethodType))):
+ if hasattr(iterable, 'keys') and isinstance(iterable.keys, (types.FunctionType, types.BuiltinFunctionType, types.MethodType)):
for key in iterable:
self[key] = iterable[key]
else:
@@ -161,11 +155,7 @@ def filter_by_class(*item_class_tuples):
examined_item = item[0]
else:
examined_item = item
- if is_class_or_instance(
- examined_item,
- cls) or is_proper_subclass(
- examined_item,
- cls):
+ if is_class_or_instance(examined_item, cls) or is_proper_subclass(examined_item, cls):
results.append(item)
else:
updated = (cls, item[1]) if was_tuple else cls
@@ -249,7 +239,7 @@ def gen_utf_char():
is_char = False
b = 'b'
while not is_char:
- b = random.randint(32, 0x10ffff)
+ b = random.randint(32, 0x10FFFF)
is_char = chr(b).isprintable()
return chr(b)
@@ -266,20 +256,12 @@ def random_ipv4():
def random_ipv6():
"""Generates a random ipv6 address;; useful for testing."""
- return ':'.join(
- '{0:x}'.format(
- random.randint(
- 0,
- 2 ** 16 -
- 1)) for i in range(8))
+ return ':'.join('{0:x}'.format(random.randint(0, 2 ** 16 - 1)) for i in range(8))
def random_loopback_ip():
"""Generates a random loopback ipv4 address;; useful for testing."""
- return "127.{}.{}.{}".format(
- random_int(255),
- random_int(255),
- random_int(255))
+ return "127.{}.{}.{}".format(random_int(255), random_int(255), random_int(255))
def random_utf8(*args, **kwargs):
@@ -289,8 +271,7 @@ def random_utf8(*args, **kwargs):
"""
pattern = re.compile('[^\u0000-\uD7FF\uE000-\uFFFF]', re.UNICODE)
length = args[0] if len(args) else kwargs.get('length', 10)
- scrubbed = pattern.sub('\uFFFD', ''.join(
- [gen_utf_char() for _ in range(length)]))
+ scrubbed = pattern.sub('\uFFFD', ''.join([gen_utf_char() for _ in range(length)]))
return scrubbed
@@ -374,8 +355,10 @@ def is_proper_subclass(obj, cls):
def are_same_endpoint(first, second):
"""Equivalence check of two urls, stripped of query parameters"""
+
def strip(url):
return url.replace('www.', '').split('?')[0]
+
return strip(first) == strip(second)
@@ -421,10 +404,7 @@ class UTC(tzinfo):
return timedelta(0)
-def seconds_since_date_string(
- date_str,
- fmt='%Y-%m-%dT%H:%M:%S.%fZ',
- default_tz=UTC()):
+def seconds_since_date_string(date_str, fmt='%Y-%m-%dT%H:%M:%S.%fZ', default_tz=UTC()):
"""Return the number of seconds since the date and time indicated by a date
string and its corresponding format string.
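random_ipv6 as reformatted above is already self-contained and runnable (output varies per run): eight random 16-bit groups rendered as hex.

    import random


    def random_ipv6():
        """Generates a random ipv6 address; useful for testing."""
        return ':'.join('{0:x}'.format(random.randint(0, 2 ** 16 - 1)) for i in range(8))


    print(random_ipv6())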
diff --git a/awxkit/awxkit/utils/toposort.py b/awxkit/awxkit/utils/toposort.py
index 4874cd7ec9..58eec59646 100644
--- a/awxkit/awxkit/utils/toposort.py
+++ b/awxkit/awxkit/utils/toposort.py
@@ -42,18 +42,19 @@ class CircularDependencyError(ValueError):
def __init__(self, data):
# Sort the data just to make the output consistent, for use in
# error messages. That's convenient for doctests.
- s = 'Circular dependencies exist among these items: {{{}}}'.format(', '.join('{!r}:{!r}'.format(key, value) for key, value in sorted(data.items()))) # noqa
+ s = 'Circular dependencies exist among these items: {{{}}}'.format(
+ ', '.join('{!r}:{!r}'.format(key, value) for key, value in sorted(data.items()))
+ ) # noqa
super(CircularDependencyError, self).__init__(s)
self.data = data
def toposort(data):
"""Dependencies are expressed as a dictionary whose keys are items
-and whose values are a set of dependent items. Output is a list of
-sets in topological order. The first set consists of items with no
-dependences, each subsequent set consists of items that depend upon
-items in the preceeding sets.
-"""
+ and whose values are a set of dependent items. Output is a list of
+ sets in topological order. The first set consists of items with no
+ dependences, each subsequent set consists of items that depend upon
+ items in the preceeding sets."""
# Special case empty input.
if len(data) == 0:
@@ -74,9 +75,6 @@ items in the preceeding sets.
if not ordered:
break
yield ordered
- data = {
- item: (dep - ordered)
- for item, dep in data.items() if item not in ordered
- }
+ data = {item: (dep - ordered) for item, dep in data.items() if item not in ordered}
if len(data) != 0:
raise CircularDependencyError(data)
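To ground the toposort docstring above with a worked example: a condensed, self-contained rendering of the documented behavior (an approximation, not the module verbatim), applied to an invented dependency graph; each yielded set is safe to process in parallel, and print order within a set may vary:

    def toposort(data):
        data = {item: set(deps) for item, deps in data.items()}
        # items that appear only as dependencies have no dependencies themselves
        extra = {dep for deps in data.values() for dep in deps} - set(data)
        data.update({item: set() for item in extra})
        while data:
            ordered = {item for item, deps in data.items() if not deps}
            if not ordered:
                raise ValueError('Circular dependencies exist among these items: {}'.format(data))
            yield ordered
            data = {item: (dep - ordered) for item, dep in data.items() if item not in ordered}


    graph = {'job': {'inventory', 'project'}, 'project': {'organization'}, 'inventory': {'organization'}}
    print(list(toposort(graph)))  # [{'organization'}, {'inventory', 'project'}, {'job'}]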
diff --git a/awxkit/awxkit/words.py b/awxkit/awxkit/words.py
index 940d71cd0f..eb3822db02 100644
--- a/awxkit/awxkit/words.py
+++ b/awxkit/awxkit/words.py
@@ -1,193 +1,1529 @@
# list of random English nouns used for resource name utilities
-words = ['People', 'History', 'Way', 'Art', 'World', 'Information', 'Map', 'Two',
- 'Family', 'Government', 'Health', 'System', 'Computer', 'Meat', 'Year', 'Thanks',
- 'Music', 'Person', 'Reading', 'Method', 'Data', 'Food', 'Understanding', 'Theory',
- 'Law', 'Bird', 'Literature', 'Problem', 'Software', 'Control', 'Knowledge', 'Power',
- 'Ability', 'Economics', 'Love', 'Internet', 'Television', 'Science', 'Library', 'Nature',
- 'Fact', 'Product', 'Idea', 'Temperature', 'Investment', 'Area', 'Society', 'Activity',
- 'Story', 'Industry', 'Media', 'Thing', 'Oven', 'Community', 'Definition', 'Safety',
- 'Quality', 'Development', 'Language', 'Management', 'Player', 'Variety', 'Video', 'Week',
- 'Security', 'Country', 'Exam', 'Movie', 'Organization', 'Equipment', 'Physics', 'Analysis',
- 'Policy', 'Series', 'Thought', 'Basis', 'Boyfriend', 'Direction', 'Strategy', 'Technology',
- 'Army', 'Camera', 'Freedom', 'Paper', 'Environment', 'Child', 'Instance', 'Month',
- 'Truth', 'Marketing', 'University', 'Writing', 'Article', 'Department', 'Difference', 'Goal',
- 'News', 'Audience', 'Fishing', 'Growth', 'Income', 'Marriage', 'User', 'Combination',
- 'Failure', 'Meaning', 'Medicine', 'Philosophy', 'Teacher', 'Communication', 'Night', 'Chemistry',
- 'Disease', 'Disk', 'Energy', 'Nation', 'Road', 'Role', 'Soup', 'Advertising',
- 'Location', 'Success', 'Addition', 'Apartment', 'Education', 'Math', 'Moment', 'Painting',
- 'Politics', 'Attention', 'Decision', 'Event', 'Property', 'Shopping', 'Student', 'Wood',
- 'Competition', 'Distribution', 'Entertainment', 'Office', 'Population', 'President', 'Unit', 'Category',
- 'Cigarette', 'Context', 'Introduction', 'Opportunity', 'Performance', 'Driver', 'Flight', 'Length',
- 'Magazine', 'Newspaper', 'Relationship', 'Teaching', 'Cell', 'Dealer', 'Debate', 'Finding',
- 'Lake', 'Member', 'Message', 'Phone', 'Scene', 'Appearance', 'Association', 'Concept',
- 'Customer', 'Death', 'Discussion', 'Housing', 'Inflation', 'Insurance', 'Mood', 'Woman',
- 'Advice', 'Blood', 'Effort', 'Expression', 'Importance', 'Opinion', 'Payment', 'Reality',
- 'Responsibility', 'Situation', 'Skill', 'Statement', 'Wealth', 'Application', 'City', 'County',
- 'Depth', 'Estate', 'Foundation', 'Grandmother', 'Heart', 'Perspective', 'Photo', 'Recipe',
- 'Studio', 'Topic', 'Collection', 'Depression', 'Imagination', 'Passion', 'Percentage', 'Resource',
- 'Setting', 'Ad', 'Agency', 'College', 'Connection', 'Criticism', 'Debt', 'Description',
- 'Memory', 'Patience', 'Secretary', 'Solution', 'Administration', 'Aspect', 'Attitude', 'Director',
- 'Personality', 'Psychology', 'Recommendation', 'Response', 'Selection', 'Storage', 'Version', 'Alcohol',
- 'Argument', 'Complaint', 'Contract', 'Emphasis', 'Highway', 'Loss', 'Membership', 'Possession',
- 'Preparation', 'Steak', 'Union', 'Agreement', 'Cancer', 'Currency', 'Employment', 'Engineering',
- 'Entry', 'Interaction', 'Limit', 'Mixture', 'Preference', 'Region', 'Republic', 'Seat',
- 'Tradition', 'Virus', 'Actor', 'Classroom', 'Delivery', 'Device', 'Difficulty', 'Drama',
- 'Election', 'Engine', 'Football', 'Guidance', 'Hotel', 'Match', 'Owner', 'Priority',
- 'Protection', 'Suggestion', 'Tension', 'Variation', 'Anxiety', 'Atmosphere', 'Awareness', 'Bread',
- 'Climate', 'Comparison', 'Confusion', 'Construction', 'Elevator', 'Emotion', 'Employee', 'Employer',
- 'Guest', 'Height', 'Leadership', 'Mall', 'Manager', 'Operation', 'Recording', 'Respect',
- 'Sample', 'Transportation', 'Boring', 'Charity', 'Cousin', 'Disaster', 'Editor', 'Efficiency',
- 'Excitement', 'Extent', 'Feedback', 'Guitar', 'Homework', 'Leader', 'Mom', 'Outcome',
- 'Permission', 'Presentation', 'Promotion', 'Reflection', 'Refrigerator', 'Resolution', 'Revenue', 'Session',
- 'Singer', 'Tennis', 'Basket', 'Bonus', 'Cabinet', 'Childhood', 'Church', 'Clothes',
- 'Coffee', 'Dinner', 'Drawing', 'Hair', 'Hearing', 'Initiative', 'Judgment', 'Lab',
- 'Measurement', 'Mode', 'Mud', 'Orange', 'Poetry', 'Police', 'Possibility', 'Procedure',
- 'Queen', 'Ratio', 'Relation', 'Restaurant', 'Satisfaction', 'Sector', 'Signature', 'Significance',
- 'Song', 'Tooth', 'Town', 'Vehicle', 'Volume', 'Wife', 'Accident', 'Airport',
- 'Appointment', 'Arrival', 'Assumption', 'Baseball', 'Chapter', 'Committee', 'Conversation', 'Database',
- 'Enthusiasm', 'Error', 'Explanation', 'Farmer', 'Gate', 'Girl', 'Hall', 'Historian',
- 'Hospital', 'Injury', 'Instruction', 'Maintenance', 'Manufacturer', 'Meal', 'Perception', 'Pie',
- 'Poem', 'Presence', 'Proposal', 'Reception', 'Replacement', 'Revolution', 'River', 'Son',
- 'Speech', 'Tea', 'Village', 'Warning', 'Winner', 'Worker', 'Writer', 'Assistance',
- 'Breath', 'Buyer', 'Chest', 'Chocolate', 'Conclusion', 'Contribution', 'Cookie', 'Courage',
- 'Dad', 'Desk', 'Drawer', 'Establishment', 'Examination', 'Garbage', 'Grocery', 'Honey',
- 'Impression', 'Improvement', 'Independence', 'Insect', 'Inspection', 'Inspector', 'King', 'Ladder',
- 'Menu', 'Penalty', 'Piano', 'Potato', 'Profession', 'Professor', 'Quantity', 'Reaction',
- 'Requirement', 'Salad', 'Sister', 'Supermarket', 'Tongue', 'Weakness', 'Wedding', 'Affair',
- 'Ambition', 'Analyst', 'Apple', 'Assignment', 'Assistant', 'Bathroom', 'Bedroom', 'Beer',
- 'Birthday', 'Celebration', 'Championship', 'Cheek', 'Client', 'Consequence', 'Departure', 'Diamond',
- 'Dirt', 'Ear', 'Fortune', 'Friendship', 'Snapewife', 'Funeral', 'Gene', 'Girlfriend', 'Hat',
- 'Indication', 'Intention', 'Lady', 'Midnight', 'Negotiation', 'Obligation', 'Passenger', 'Pizza',
- 'Platform', 'Poet', 'Pollution', 'Recognition', 'Reputation', 'Shirt', 'Sir', 'Speaker',
- 'Stranger', 'Surgery', 'Sympathy', 'Tale', 'Throat', 'Trainer', 'Uncle', 'Youth',
- 'Time', 'Work', 'Film', 'Water', 'Money', 'Example', 'While', 'Business',
- 'Study', 'Game', 'Life', 'Form', 'Air', 'Day', 'Place', 'Number',
- 'Part', 'Field', 'Fish', 'Back', 'Process', 'Heat', 'Hand', 'Experience',
- 'Job', 'Book', 'End', 'Point', 'Type', 'Home', 'Economy', 'Value',
- 'Body', 'Market', 'Guide', 'Interest', 'State', 'Radio', 'Course', 'Company',
- 'Price', 'Size', 'Card', 'List', 'Mind', 'Trade', 'Line', 'Care',
- 'Group', 'Risk', 'Word', 'Fat', 'Force', 'Key', 'Light', 'Training',
- 'Name', 'School', 'Top', 'Amount', 'Level', 'Order', 'Practice', 'Research',
- 'Sense', 'Service', 'Piece', 'Web', 'Boss', 'Sport', 'Fun', 'House',
- 'Page', 'Term', 'Test', 'Answer', 'Sound', 'Focus', 'Matter', 'Kind',
- 'Soil', 'Board', 'Oil', 'Picture', 'Access', 'Garden', 'Range', 'Rate',
- 'Reason', 'Future', 'Site', 'Demand', 'Exercise', 'Image', 'Case', 'Cause',
- 'Coast', 'Action', 'Age', 'Bad', 'Boat', 'Record', 'Result', 'Section',
- 'Building', 'Mouse', 'Cash', 'Class', 'Nothing', 'Period', 'Plan', 'Store',
- 'Tax', 'Side', 'Subject', 'Space', 'Rule', 'Stock', 'Weather', 'Chance',
- 'Figure', 'Man', 'Model', 'Source', 'Beginning', 'Earth', 'Program', 'Chicken',
- 'Design', 'Feature', 'Head', 'Material', 'Purpose', 'Question', 'Rock', 'Salt',
- 'Act', 'Birth', 'Car', 'Dog', 'Object', 'Scale', 'Sun', 'Note',
- 'Profit', 'Rent', 'Speed', 'Style', 'War', 'Bank', 'Craft', 'Half',
- 'Inside', 'Outside', 'Standard', 'Bus', 'Exchange', 'Eye', 'Fire', 'Position',
- 'Pressure', 'Stress', 'Advantage', 'Benefit', 'Box', 'Frame', 'Issue', 'Step',
- 'Cycle', 'Face', 'Item', 'Metal', 'Paint', 'Review', 'Room', 'Screen',
- 'Structure', 'View', 'Account', 'Ball', 'Discipline', 'Medium', 'Share', 'Balance',
- 'Bit', 'Black', 'Bottom', 'Choice', 'Gift', 'Impact', 'Machine', 'Shape',
- 'Tool', 'Wind', 'Address', 'Average', 'Career', 'Culture', 'Morning', 'Pot',
- 'Sign', 'Table', 'Task', 'Condition', 'Contact', 'Credit', 'Egg', 'Hope',
- 'Ice', 'Network', 'North', 'Square', 'Attempt', 'Date', 'Effect', 'Link',
- 'Post', 'Star', 'Voice', 'Capital', 'Challenge', 'Friend', 'Self', 'Shot',
- 'Brush', 'Couple', 'Exit', 'Front', 'Function', 'Lack', 'Living', 'Plant',
- 'Plastic', 'Spot', 'Summer', 'Taste', 'Theme', 'Track', 'Wing', 'Brain',
- 'Button', 'Click', 'Desire', 'Foot', 'Gas', 'Influence', 'Notice', 'Rain',
- 'Wall', 'Base', 'Damage', 'Distance', 'Feeling', 'Pair', 'Savings', 'Staff',
- 'Sugar', 'Target', 'Text', 'Animal', 'Author', 'Budget', 'Discount', 'File',
- 'Ground', 'Lesson', 'Minute', 'Officer', 'Phase', 'Reference', 'Register', 'Sky',
- 'Stage', 'Stick', 'Title', 'Trouble', 'Bowl', 'Bridge', 'Campaign', 'Character',
- 'Club', 'Edge', 'Evidence', 'Fan', 'Letter', 'Lock', 'Maximum', 'Novel',
- 'Option', 'Pack', 'Park', 'Plenty', 'Quarter', 'Skin', 'Sort', 'Weight',
- 'Baby', 'Background', 'Carry', 'Dish', 'Factor', 'Fruit', 'Glass', 'Joint',
- 'Master', 'Muscle', 'Red', 'Strength', 'Traffic', 'Trip', 'Vegetable', 'Appeal',
- 'Chart', 'Gear', 'Ideal', 'Kitchen', 'Land', 'Log', 'Mother', 'Net',
- 'Party', 'Principle', 'Relative', 'Sale', 'Season', 'Signal', 'Spirit', 'Street',
- 'Tree', 'Wave', 'Belt', 'Bench', 'Commission', 'Copy', 'Drop', 'Minimum',
- 'Path', 'Progress', 'Project', 'Sea', 'South', 'Status', 'Stuff', 'Ticket',
- 'Tour', 'Angle', 'Blue', 'Breakfast', 'Confidence', 'Daughter', 'Degree', 'Doctor',
- 'Dot', 'Dream', 'Duty', 'Essay', 'Father', 'Fee', 'Finance', 'Hour',
- 'Juice', 'Luck', 'Milk', 'Mouth', 'Peace', 'Pipe', 'Stable', 'Storm',
- 'Substance', 'Team', 'Trick', 'Afternoon', 'Bat', 'Beach', 'Blank', 'Catch',
- 'Chain', 'Consideration', 'Cream', 'Crew', 'Detail', 'Gold', 'Interview', 'Kid',
- 'Mark', 'Mission', 'Pain', 'Pleasure', 'Score', 'Screw', 'Gratitude', 'Shop',
- 'Shower', 'Suit', 'Tone', 'Window', 'Agent', 'Band', 'Bath', 'Block',
- 'Bone', 'Calendar', 'Candidate', 'Cap', 'Coat', 'Contest', 'Corner', 'Court',
- 'Cup', 'District', 'Door', 'East', 'Finger', 'Garage', 'Guarantee', 'Hole',
- 'Hook', 'Implement', 'Layer', 'Lecture', 'Lie', 'Manner', 'Meeting', 'Nose',
- 'Parking', 'Partner', 'Profile', 'Rice', 'Routine', 'Schedule', 'Swimming', 'Telephone',
- 'Tip', 'Winter', 'Airline', 'Bag', 'Battle', 'Bed', 'Bill', 'Bother',
- 'Cake', 'Code', 'Curve', 'Designer', 'Dimension', 'Dress', 'Ease', 'Emergency',
- 'Evening', 'Extension', 'Farm', 'Fight', 'Gap', 'Grade', 'Holiday', 'Horror',
- 'Horse', 'Host', 'Husband', 'Loan', 'Mistake', 'Mountain', 'Nail', 'Noise',
- 'Occasion', 'Package', 'Patient', 'Pause', 'Phrase', 'Proof', 'Race', 'Relief',
- 'Sand', 'Sentence', 'Shoulder', 'Smoke', 'Stomach', 'String', 'Tourist', 'Towel',
- 'Vacation', 'West', 'Wheel', 'Wine', 'Arm', 'Aside', 'Associate', 'Bet',
- 'Blow', 'Border', 'Branch', 'Breast', 'Brother', 'Buddy', 'Bunch', 'Chip',
- 'Coach', 'Cross', 'Document', 'Draft', 'Dust', 'Expert', 'Floor', 'God',
- 'Golf', 'Habit', 'Iron', 'Judge', 'Knife', 'Landscape', 'League', 'Mail',
- 'Mess', 'Native', 'Opening', 'Parent', 'Pattern', 'Pin', 'Pool', 'Pound',
- 'Request', 'Salary', 'Shame', 'Shelter', 'Shoe', 'Silver', 'Tackle', 'Tank',
- 'Trust', 'Assist', 'Bake', 'Bar', 'Bell', 'Bike', 'Blame', 'Boy',
- 'Brick', 'Chair', 'Closet', 'Clue', 'Collar', 'Comment', 'Conference', 'Devil',
- 'Diet', 'Fear', 'Fuel', 'Glove', 'Jacket', 'Lunch', 'Monitor', 'Mortgage',
- 'Nurse', 'Pace', 'Panic', 'Peak', 'Plane', 'Reward', 'Row', 'Sandwich',
- 'Shock', 'Spite', 'Spray', 'Surprise', 'Till', 'Transition', 'Weekend', 'Welcome',
- 'Yard', 'Alarm', 'Bend', 'Bicycle', 'Bite', 'Blind', 'Bottle', 'Cable',
- 'Candle', 'Clerk', 'Cloud', 'Concert', 'Counter', 'Flower', 'Grandfather', 'Harm',
- 'Knee', 'Lawyer', 'Leather', 'Load', 'Mirror', 'Neck', 'Pension', 'Plate',
- 'Purple', 'Ruin', 'Ship', 'Skirt', 'Slice', 'Snow', 'Specialist', 'Stroke',
- 'Switch', 'Trash', 'Tune', 'Zone', 'Anger', 'Award', 'Bid', 'Bitter',
- 'Boot', 'Bug', 'Camp', 'Candy', 'Carpet', 'Cat', 'Champion', 'Channel',
- 'Clock', 'Comfort', 'Cow', 'Crack', 'Engineer', 'Entrance', 'Fault', 'Grass',
- 'Guy', 'Hell', 'Highlight', 'Incident', 'Island', 'Joke', 'Jury', 'Leg',
- 'Lip', 'Mate', 'Motor', 'Nerve', 'Passage', 'Pen', 'Pride', 'Priest',
- 'Prize', 'Promise', 'Resident', 'Resort', 'Ring', 'Roof', 'Rope', 'Sail',
- 'Scheme', 'Script', 'Sock', 'Station', 'Toe', 'Tower', 'Truck', 'Witness',
- 'Asparagus', 'You', 'It', 'Can', 'Will', 'If', 'One', 'Many',
- 'Most', 'Other', 'Use', 'Make', 'Good', 'Look', 'Help', 'Go',
- 'Great', 'Being', 'Few', 'Might', 'Still', 'Public', 'Read', 'Keep',
- 'Start', 'Give', 'Human', 'Local', 'General', 'She', 'Specific', 'Long',
- 'Play', 'Feel', 'High', 'Tonight', 'Put', 'Common', 'Set', 'Change',
- 'Simple', 'Past', 'Big', 'Possible', 'Particular', 'Today', 'Major', 'Personal',
- 'Current', 'National', 'Cut', 'Natural', 'Physical', 'Show', 'Try', 'Check',
- 'Second', 'Call', 'Move', 'Pay', 'Let', 'Increase', 'Single', 'Individual',
- 'Turn', 'Ask', 'Buy', 'Guard', 'Hold', 'Main', 'Offer', 'Potential',
- 'Professional', 'International', 'Travel', 'Cook', 'Alternative', 'Following', 'Special', 'Working',
- 'Whole', 'Dance', 'Excuse', 'Cold', 'Commercial', 'Low', 'Purchase', 'Deal',
- 'Primary', 'Worth', 'Fall', 'Necessary', 'Positive', 'Produce', 'Search', 'Present',
- 'Spend', 'Talk', 'Creative', 'Tell', 'Cost', 'Drive', 'Green', 'Support',
- 'Glad', 'Remove', 'Return', 'Run', 'Complex', 'Due', 'Effective', 'Middle',
- 'Regular', 'Reserve', 'Independent', 'Leave', 'Original', 'Reach', 'Rest', 'Serve',
- 'Watch', 'Beautiful', 'Charge', 'Active', 'Break', 'Negative', 'Safe', 'Stay',
- 'Visit', 'Visual', 'Affect', 'Cover', 'Report', 'Rise', 'Walk', 'White',
- 'Beyond', 'Junior', 'Pick', 'Unique', 'Anything', 'Classic', 'Final', 'Lift',
- 'Mix', 'Private', 'Stop', 'Teach', 'Western', 'Concern', 'Familiar', 'Fly',
- 'Official', 'Broad', 'Comfortable', 'Gain', 'Maybe', 'Rich', 'Save', 'Stand',
- 'Young', 'Heavy', 'Hello', 'Lead', 'Listen', 'Valuable', 'Worry', 'Handle',
- 'Leading', 'Meet', 'Release', 'Sell', 'Finish', 'Normal', 'Press', 'Ride',
- 'Secret', 'Spread', 'Spring', 'Tough', 'Wait', 'Brown', 'Deep', 'Display',
- 'Flow', 'Hit', 'Objective', 'Shoot', 'Touch', 'Cancel', 'Chemical', 'Cry',
- 'Dump', 'Extreme', 'Push', 'Conflict', 'Eat', 'Fill', 'Formal', 'Jump',
- 'Kick', 'Opposite', 'Pass', 'Pitch', 'Remote', 'Total', 'Treat', 'Vast',
- 'Abuse', 'Beat', 'Burn', 'Deposit', 'Print', 'Raise', 'Sleep', 'Somewhere',
- 'Advance', 'Anywhere', 'Consist', 'Dark', 'Double', 'Draw', 'Equal', 'Fix',
- 'Hire', 'Internal', 'Join', 'Kill', 'Sensitive', 'Tap', 'Win', 'Attack',
- 'Claim', 'Constant', 'Drag', 'Drink', 'Guess', 'Minor', 'Pull', 'Raw',
- 'Soft', 'Solid', 'Wear', 'Weird', 'Wonder', 'Annual', 'Count', 'Dead',
- 'Doubt', 'Feed', 'Forever', 'Impress', 'Nobody', 'Repeat', 'Round', 'Sing',
- 'Slide', 'Strip', 'Whereas', 'Wish', 'Combine', 'Command', 'Dig', 'Divide',
- 'Equivalent', 'Hang', 'Hunt', 'Initial', 'March', 'Mention', 'Spiritual', 'Survey',
- 'Tie', 'Adult', 'Brief', 'Crazy', 'Escape', 'Gather', 'Hate', 'Prior',
- 'Repair', 'Rough', 'Sad', 'Scratch', 'Sick', 'Strike', 'Employ', 'External',
- 'Hurt', 'Illegal', 'Laugh', 'Lay', 'Mobile', 'Nasty', 'Ordinary', 'Respond',
- 'Royal', 'Senior', 'Split', 'Strain', 'Struggle', 'Swim', 'Train', 'Upper',
- 'Wash', 'Yellow', 'Convert', 'Crash', 'Dependent', 'Fold', 'Funny', 'Grab',
- 'Hide', 'Miss', 'Permit', 'Quote', 'Recover', 'Resolve', 'Roll', 'Sink',
- 'Slip', 'Spare', 'Suspect', 'Sweet', 'Swing', 'Twist', 'Upstairs', 'Usual',
- 'Abroad', 'Brave', 'Calm', 'Concentrate', 'Estimate', 'Grand', 'Male', 'Mine',
- 'Prompt', 'Quiet', 'Refuse', 'Regret', 'Reveal', 'Rush', 'Shake', 'Shift',
- 'Shine', 'Steal', 'Suck', 'Surround', 'Anybody', 'Bear', 'Brilliant', 'Dare',
- 'Dear', 'Delay', 'Drunk', 'Female', 'Hurry', 'Inevitable', 'Invite', 'Kiss',
- 'Neat', 'Pop', 'Punch', 'Quit', 'Reply', 'Representative', 'Resist', 'Rip',
- 'Rub', 'Silly', 'Smile', 'Spell', 'Stretch', 'Stupid', 'Tear', 'Temporary',
- 'Tomorrow', 'Wake', 'Wrap', 'Yesterday']
+words = [
+ 'People',
+ 'History',
+ 'Way',
+ 'Art',
+ 'World',
+ 'Information',
+ 'Map',
+ 'Two',
+ 'Family',
+ 'Government',
+ 'Health',
+ 'System',
+ 'Computer',
+ 'Meat',
+ 'Year',
+ 'Thanks',
+ 'Music',
+ 'Person',
+ 'Reading',
+ 'Method',
+ 'Data',
+ 'Food',
+ 'Understanding',
+ 'Theory',
+ 'Law',
+ 'Bird',
+ 'Literature',
+ 'Problem',
+ 'Software',
+ 'Control',
+ 'Knowledge',
+ 'Power',
+ 'Ability',
+ 'Economics',
+ 'Love',
+ 'Internet',
+ 'Television',
+ 'Science',
+ 'Library',
+ 'Nature',
+ 'Fact',
+ 'Product',
+ 'Idea',
+ 'Temperature',
+ 'Investment',
+ 'Area',
+ 'Society',
+ 'Activity',
+ 'Story',
+ 'Industry',
+ 'Media',
+ 'Thing',
+ 'Oven',
+ 'Community',
+ 'Definition',
+ 'Safety',
+ 'Quality',
+ 'Development',
+ 'Language',
+ 'Management',
+ 'Player',
+ 'Variety',
+ 'Video',
+ 'Week',
+ 'Security',
+ 'Country',
+ 'Exam',
+ 'Movie',
+ 'Organization',
+ 'Equipment',
+ 'Physics',
+ 'Analysis',
+ 'Policy',
+ 'Series',
+ 'Thought',
+ 'Basis',
+ 'Boyfriend',
+ 'Direction',
+ 'Strategy',
+ 'Technology',
+ 'Army',
+ 'Camera',
+ 'Freedom',
+ 'Paper',
+ 'Environment',
+ 'Child',
+ 'Instance',
+ 'Month',
+ 'Truth',
+ 'Marketing',
+ 'University',
+ 'Writing',
+ 'Article',
+ 'Department',
+ 'Difference',
+ 'Goal',
+ 'News',
+ 'Audience',
+ 'Fishing',
+ 'Growth',
+ 'Income',
+ 'Marriage',
+ 'User',
+ 'Combination',
+ 'Failure',
+ 'Meaning',
+ 'Medicine',
+ 'Philosophy',
+ 'Teacher',
+ 'Communication',
+ 'Night',
+ 'Chemistry',
+ 'Disease',
+ 'Disk',
+ 'Energy',
+ 'Nation',
+ 'Road',
+ 'Role',
+ 'Soup',
+ 'Advertising',
+ 'Location',
+ 'Success',
+ 'Addition',
+ 'Apartment',
+ 'Education',
+ 'Math',
+ 'Moment',
+ 'Painting',
+ 'Politics',
+ 'Attention',
+ 'Decision',
+ 'Event',
+ 'Property',
+ 'Shopping',
+ 'Student',
+ 'Wood',
+ 'Competition',
+ 'Distribution',
+ 'Entertainment',
+ 'Office',
+ 'Population',
+ 'President',
+ 'Unit',
+ 'Category',
+ 'Cigarette',
+ 'Context',
+ 'Introduction',
+ 'Opportunity',
+ 'Performance',
+ 'Driver',
+ 'Flight',
+ 'Length',
+ 'Magazine',
+ 'Newspaper',
+ 'Relationship',
+ 'Teaching',
+ 'Cell',
+ 'Dealer',
+ 'Debate',
+ 'Finding',
+ 'Lake',
+ 'Member',
+ 'Message',
+ 'Phone',
+ 'Scene',
+ 'Appearance',
+ 'Association',
+ 'Concept',
+ 'Customer',
+ 'Death',
+ 'Discussion',
+ 'Housing',
+ 'Inflation',
+ 'Insurance',
+ 'Mood',
+ 'Woman',
+ 'Advice',
+ 'Blood',
+ 'Effort',
+ 'Expression',
+ 'Importance',
+ 'Opinion',
+ 'Payment',
+ 'Reality',
+ 'Responsibility',
+ 'Situation',
+ 'Skill',
+ 'Statement',
+ 'Wealth',
+ 'Application',
+ 'City',
+ 'County',
+ 'Depth',
+ 'Estate',
+ 'Foundation',
+ 'Grandmother',
+ 'Heart',
+ 'Perspective',
+ 'Photo',
+ 'Recipe',
+ 'Studio',
+ 'Topic',
+ 'Collection',
+ 'Depression',
+ 'Imagination',
+ 'Passion',
+ 'Percentage',
+ 'Resource',
+ 'Setting',
+ 'Ad',
+ 'Agency',
+ 'College',
+ 'Connection',
+ 'Criticism',
+ 'Debt',
+ 'Description',
+ 'Memory',
+ 'Patience',
+ 'Secretary',
+ 'Solution',
+ 'Administration',
+ 'Aspect',
+ 'Attitude',
+ 'Director',
+ 'Personality',
+ 'Psychology',
+ 'Recommendation',
+ 'Response',
+ 'Selection',
+ 'Storage',
+ 'Version',
+ 'Alcohol',
+ 'Argument',
+ 'Complaint',
+ 'Contract',
+ 'Emphasis',
+ 'Highway',
+ 'Loss',
+ 'Membership',
+ 'Possession',
+ 'Preparation',
+ 'Steak',
+ 'Union',
+ 'Agreement',
+ 'Cancer',
+ 'Currency',
+ 'Employment',
+ 'Engineering',
+ 'Entry',
+ 'Interaction',
+ 'Limit',
+ 'Mixture',
+ 'Preference',
+ 'Region',
+ 'Republic',
+ 'Seat',
+ 'Tradition',
+ 'Virus',
+ 'Actor',
+ 'Classroom',
+ 'Delivery',
+ 'Device',
+ 'Difficulty',
+ 'Drama',
+ 'Election',
+ 'Engine',
+ 'Football',
+ 'Guidance',
+ 'Hotel',
+ 'Match',
+ 'Owner',
+ 'Priority',
+ 'Protection',
+ 'Suggestion',
+ 'Tension',
+ 'Variation',
+ 'Anxiety',
+ 'Atmosphere',
+ 'Awareness',
+ 'Bread',
+ 'Climate',
+ 'Comparison',
+ 'Confusion',
+ 'Construction',
+ 'Elevator',
+ 'Emotion',
+ 'Employee',
+ 'Employer',
+ 'Guest',
+ 'Height',
+ 'Leadership',
+ 'Mall',
+ 'Manager',
+ 'Operation',
+ 'Recording',
+ 'Respect',
+ 'Sample',
+ 'Transportation',
+ 'Boring',
+ 'Charity',
+ 'Cousin',
+ 'Disaster',
+ 'Editor',
+ 'Efficiency',
+ 'Excitement',
+ 'Extent',
+ 'Feedback',
+ 'Guitar',
+ 'Homework',
+ 'Leader',
+ 'Mom',
+ 'Outcome',
+ 'Permission',
+ 'Presentation',
+ 'Promotion',
+ 'Reflection',
+ 'Refrigerator',
+ 'Resolution',
+ 'Revenue',
+ 'Session',
+ 'Singer',
+ 'Tennis',
+ 'Basket',
+ 'Bonus',
+ 'Cabinet',
+ 'Childhood',
+ 'Church',
+ 'Clothes',
+ 'Coffee',
+ 'Dinner',
+ 'Drawing',
+ 'Hair',
+ 'Hearing',
+ 'Initiative',
+ 'Judgment',
+ 'Lab',
+ 'Measurement',
+ 'Mode',
+ 'Mud',
+ 'Orange',
+ 'Poetry',
+ 'Police',
+ 'Possibility',
+ 'Procedure',
+ 'Queen',
+ 'Ratio',
+ 'Relation',
+ 'Restaurant',
+ 'Satisfaction',
+ 'Sector',
+ 'Signature',
+ 'Significance',
+ 'Song',
+ 'Tooth',
+ 'Town',
+ 'Vehicle',
+ 'Volume',
+ 'Wife',
+ 'Accident',
+ 'Airport',
+ 'Appointment',
+ 'Arrival',
+ 'Assumption',
+ 'Baseball',
+ 'Chapter',
+ 'Committee',
+ 'Conversation',
+ 'Database',
+ 'Enthusiasm',
+ 'Error',
+ 'Explanation',
+ 'Farmer',
+ 'Gate',
+ 'Girl',
+ 'Hall',
+ 'Historian',
+ 'Hospital',
+ 'Injury',
+ 'Instruction',
+ 'Maintenance',
+ 'Manufacturer',
+ 'Meal',
+ 'Perception',
+ 'Pie',
+ 'Poem',
+ 'Presence',
+ 'Proposal',
+ 'Reception',
+ 'Replacement',
+ 'Revolution',
+ 'River',
+ 'Son',
+ 'Speech',
+ 'Tea',
+ 'Village',
+ 'Warning',
+ 'Winner',
+ 'Worker',
+ 'Writer',
+ 'Assistance',
+ 'Breath',
+ 'Buyer',
+ 'Chest',
+ 'Chocolate',
+ 'Conclusion',
+ 'Contribution',
+ 'Cookie',
+ 'Courage',
+ 'Dad',
+ 'Desk',
+ 'Drawer',
+ 'Establishment',
+ 'Examination',
+ 'Garbage',
+ 'Grocery',
+ 'Honey',
+ 'Impression',
+ 'Improvement',
+ 'Independence',
+ 'Insect',
+ 'Inspection',
+ 'Inspector',
+ 'King',
+ 'Ladder',
+ 'Menu',
+ 'Penalty',
+ 'Piano',
+ 'Potato',
+ 'Profession',
+ 'Professor',
+ 'Quantity',
+ 'Reaction',
+ 'Requirement',
+ 'Salad',
+ 'Sister',
+ 'Supermarket',
+ 'Tongue',
+ 'Weakness',
+ 'Wedding',
+ 'Affair',
+ 'Ambition',
+ 'Analyst',
+ 'Apple',
+ 'Assignment',
+ 'Assistant',
+ 'Bathroom',
+ 'Bedroom',
+ 'Beer',
+ 'Birthday',
+ 'Celebration',
+ 'Championship',
+ 'Cheek',
+ 'Client',
+ 'Consequence',
+ 'Departure',
+ 'Diamond',
+ 'Dirt',
+ 'Ear',
+ 'Fortune',
+ 'Friendship',
+ 'Funeral',
+ 'Gene',
+ 'Girlfriend',
+ 'Hat',
+ 'Indication',
+ 'Intention',
+ 'Lady',
+ 'Midnight',
+ 'Negotiation',
+ 'Obligation',
+ 'Passenger',
+ 'Pizza',
+ 'Platform',
+ 'Poet',
+ 'Pollution',
+ 'Recognition',
+ 'Reputation',
+ 'Shirt',
+ 'Sir',
+ 'Speaker',
+ 'Stranger',
+ 'Surgery',
+ 'Sympathy',
+ 'Tale',
+ 'Throat',
+ 'Trainer',
+ 'Uncle',
+ 'Youth',
+ 'Time',
+ 'Work',
+ 'Film',
+ 'Water',
+ 'Money',
+ 'Example',
+ 'While',
+ 'Business',
+ 'Study',
+ 'Game',
+ 'Life',
+ 'Form',
+ 'Air',
+ 'Day',
+ 'Place',
+ 'Number',
+ 'Part',
+ 'Field',
+ 'Fish',
+ 'Back',
+ 'Process',
+ 'Heat',
+ 'Hand',
+ 'Experience',
+ 'Job',
+ 'Book',
+ 'End',
+ 'Point',
+ 'Type',
+ 'Home',
+ 'Economy',
+ 'Value',
+ 'Body',
+ 'Market',
+ 'Guide',
+ 'Interest',
+ 'State',
+ 'Radio',
+ 'Course',
+ 'Company',
+ 'Price',
+ 'Size',
+ 'Card',
+ 'List',
+ 'Mind',
+ 'Trade',
+ 'Line',
+ 'Care',
+ 'Group',
+ 'Risk',
+ 'Word',
+ 'Fat',
+ 'Force',
+ 'Key',
+ 'Light',
+ 'Training',
+ 'Name',
+ 'School',
+ 'Top',
+ 'Amount',
+ 'Level',
+ 'Order',
+ 'Practice',
+ 'Research',
+ 'Sense',
+ 'Service',
+ 'Piece',
+ 'Web',
+ 'Boss',
+ 'Sport',
+ 'Fun',
+ 'House',
+ 'Page',
+ 'Term',
+ 'Test',
+ 'Answer',
+ 'Sound',
+ 'Focus',
+ 'Matter',
+ 'Kind',
+ 'Soil',
+ 'Board',
+ 'Oil',
+ 'Picture',
+ 'Access',
+ 'Garden',
+ 'Range',
+ 'Rate',
+ 'Reason',
+ 'Future',
+ 'Site',
+ 'Demand',
+ 'Exercise',
+ 'Image',
+ 'Case',
+ 'Cause',
+ 'Coast',
+ 'Action',
+ 'Age',
+ 'Bad',
+ 'Boat',
+ 'Record',
+ 'Result',
+ 'Section',
+ 'Building',
+ 'Mouse',
+ 'Cash',
+ 'Class',
+ 'Nothing',
+ 'Period',
+ 'Plan',
+ 'Store',
+ 'Tax',
+ 'Side',
+ 'Subject',
+ 'Space',
+ 'Rule',
+ 'Stock',
+ 'Weather',
+ 'Chance',
+ 'Figure',
+ 'Man',
+ 'Model',
+ 'Source',
+ 'Beginning',
+ 'Earth',
+ 'Program',
+ 'Chicken',
+ 'Design',
+ 'Feature',
+ 'Head',
+ 'Material',
+ 'Purpose',
+ 'Question',
+ 'Rock',
+ 'Salt',
+ 'Act',
+ 'Birth',
+ 'Car',
+ 'Dog',
+ 'Object',
+ 'Scale',
+ 'Sun',
+ 'Note',
+ 'Profit',
+ 'Rent',
+ 'Speed',
+ 'Style',
+ 'War',
+ 'Bank',
+ 'Craft',
+ 'Half',
+ 'Inside',
+ 'Outside',
+ 'Standard',
+ 'Bus',
+ 'Exchange',
+ 'Eye',
+ 'Fire',
+ 'Position',
+ 'Pressure',
+ 'Stress',
+ 'Advantage',
+ 'Benefit',
+ 'Box',
+ 'Frame',
+ 'Issue',
+ 'Step',
+ 'Cycle',
+ 'Face',
+ 'Item',
+ 'Metal',
+ 'Paint',
+ 'Review',
+ 'Room',
+ 'Screen',
+ 'Structure',
+ 'View',
+ 'Account',
+ 'Ball',
+ 'Discipline',
+ 'Medium',
+ 'Share',
+ 'Balance',
+ 'Bit',
+ 'Black',
+ 'Bottom',
+ 'Choice',
+ 'Gift',
+ 'Impact',
+ 'Machine',
+ 'Shape',
+ 'Tool',
+ 'Wind',
+ 'Address',
+ 'Average',
+ 'Career',
+ 'Culture',
+ 'Morning',
+ 'Pot',
+ 'Sign',
+ 'Table',
+ 'Task',
+ 'Condition',
+ 'Contact',
+ 'Credit',
+ 'Egg',
+ 'Hope',
+ 'Ice',
+ 'Network',
+ 'North',
+ 'Square',
+ 'Attempt',
+ 'Date',
+ 'Effect',
+ 'Link',
+ 'Post',
+ 'Star',
+ 'Voice',
+ 'Capital',
+ 'Challenge',
+ 'Friend',
+ 'Self',
+ 'Shot',
+ 'Brush',
+ 'Couple',
+ 'Exit',
+ 'Front',
+ 'Function',
+ 'Lack',
+ 'Living',
+ 'Plant',
+ 'Plastic',
+ 'Spot',
+ 'Summer',
+ 'Taste',
+ 'Theme',
+ 'Track',
+ 'Wing',
+ 'Brain',
+ 'Button',
+ 'Click',
+ 'Desire',
+ 'Foot',
+ 'Gas',
+ 'Influence',
+ 'Notice',
+ 'Rain',
+ 'Wall',
+ 'Base',
+ 'Damage',
+ 'Distance',
+ 'Feeling',
+ 'Pair',
+ 'Savings',
+ 'Staff',
+ 'Sugar',
+ 'Target',
+ 'Text',
+ 'Animal',
+ 'Author',
+ 'Budget',
+ 'Discount',
+ 'File',
+ 'Ground',
+ 'Lesson',
+ 'Minute',
+ 'Officer',
+ 'Phase',
+ 'Reference',
+ 'Register',
+ 'Sky',
+ 'Stage',
+ 'Stick',
+ 'Title',
+ 'Trouble',
+ 'Bowl',
+ 'Bridge',
+ 'Campaign',
+ 'Character',
+ 'Club',
+ 'Edge',
+ 'Evidence',
+ 'Fan',
+ 'Letter',
+ 'Lock',
+ 'Maximum',
+ 'Novel',
+ 'Option',
+ 'Pack',
+ 'Park',
+ 'Plenty',
+ 'Quarter',
+ 'Skin',
+ 'Sort',
+ 'Weight',
+ 'Baby',
+ 'Background',
+ 'Carry',
+ 'Dish',
+ 'Factor',
+ 'Fruit',
+ 'Glass',
+ 'Joint',
+ 'Master',
+ 'Muscle',
+ 'Red',
+ 'Strength',
+ 'Traffic',
+ 'Trip',
+ 'Vegetable',
+ 'Appeal',
+ 'Chart',
+ 'Gear',
+ 'Ideal',
+ 'Kitchen',
+ 'Land',
+ 'Log',
+ 'Mother',
+ 'Net',
+ 'Party',
+ 'Principle',
+ 'Relative',
+ 'Sale',
+ 'Season',
+ 'Signal',
+ 'Spirit',
+ 'Street',
+ 'Tree',
+ 'Wave',
+ 'Belt',
+ 'Bench',
+ 'Commission',
+ 'Copy',
+ 'Drop',
+ 'Minimum',
+ 'Path',
+ 'Progress',
+ 'Project',
+ 'Sea',
+ 'South',
+ 'Status',
+ 'Stuff',
+ 'Ticket',
+ 'Tour',
+ 'Angle',
+ 'Blue',
+ 'Breakfast',
+ 'Confidence',
+ 'Daughter',
+ 'Degree',
+ 'Doctor',
+ 'Dot',
+ 'Dream',
+ 'Duty',
+ 'Essay',
+ 'Father',
+ 'Fee',
+ 'Finance',
+ 'Hour',
+ 'Juice',
+ 'Luck',
+ 'Milk',
+ 'Mouth',
+ 'Peace',
+ 'Pipe',
+ 'Stable',
+ 'Storm',
+ 'Substance',
+ 'Team',
+ 'Trick',
+ 'Afternoon',
+ 'Bat',
+ 'Beach',
+ 'Blank',
+ 'Catch',
+ 'Chain',
+ 'Consideration',
+ 'Cream',
+ 'Crew',
+ 'Detail',
+ 'Gold',
+ 'Interview',
+ 'Kid',
+ 'Mark',
+ 'Mission',
+ 'Pain',
+ 'Pleasure',
+ 'Score',
+ 'Screw',
+ 'Shop',
+ 'Shower',
+ 'Suit',
+ 'Tone',
+ 'Window',
+ 'Agent',
+ 'Band',
+ 'Bath',
+ 'Block',
+ 'Bone',
+ 'Calendar',
+ 'Candidate',
+ 'Cap',
+ 'Coat',
+ 'Contest',
+ 'Corner',
+ 'Court',
+ 'Cup',
+ 'District',
+ 'Door',
+ 'East',
+ 'Finger',
+ 'Garage',
+ 'Guarantee',
+ 'Hole',
+ 'Hook',
+ 'Implement',
+ 'Layer',
+ 'Lecture',
+ 'Lie',
+ 'Manner',
+ 'Meeting',
+ 'Nose',
+ 'Parking',
+ 'Partner',
+ 'Profile',
+ 'Rice',
+ 'Routine',
+ 'Schedule',
+ 'Swimming',
+ 'Telephone',
+ 'Tip',
+ 'Winter',
+ 'Airline',
+ 'Bag',
+ 'Battle',
+ 'Bed',
+ 'Bill',
+ 'Bother',
+ 'Cake',
+ 'Code',
+ 'Curve',
+ 'Designer',
+ 'Dimension',
+ 'Dress',
+ 'Ease',
+ 'Emergency',
+ 'Evening',
+ 'Extension',
+ 'Farm',
+ 'Fight',
+ 'Gap',
+ 'Grade',
+ 'Holiday',
+ 'Horror',
+ 'Horse',
+ 'Host',
+ 'Husband',
+ 'Loan',
+ 'Mistake',
+ 'Mountain',
+ 'Nail',
+ 'Noise',
+ 'Occasion',
+ 'Package',
+ 'Patient',
+ 'Pause',
+ 'Phrase',
+ 'Proof',
+ 'Race',
+ 'Relief',
+ 'Sand',
+ 'Sentence',
+ 'Shoulder',
+ 'Smoke',
+ 'Stomach',
+ 'String',
+ 'Tourist',
+ 'Towel',
+ 'Vacation',
+ 'West',
+ 'Wheel',
+ 'Wine',
+ 'Arm',
+ 'Aside',
+ 'Associate',
+ 'Bet',
+ 'Blow',
+ 'Border',
+ 'Branch',
+ 'Breast',
+ 'Brother',
+ 'Buddy',
+ 'Bunch',
+ 'Chip',
+ 'Coach',
+ 'Cross',
+ 'Document',
+ 'Draft',
+ 'Dust',
+ 'Expert',
+ 'Floor',
+ 'God',
+ 'Golf',
+ 'Habit',
+ 'Iron',
+ 'Judge',
+ 'Knife',
+ 'Landscape',
+ 'League',
+ 'Mail',
+ 'Mess',
+ 'Native',
+ 'Opening',
+ 'Parent',
+ 'Pattern',
+ 'Pin',
+ 'Pool',
+ 'Pound',
+ 'Request',
+ 'Salary',
+ 'Shame',
+ 'Shelter',
+ 'Shoe',
+ 'Silver',
+ 'Tackle',
+ 'Tank',
+ 'Trust',
+ 'Assist',
+ 'Bake',
+ 'Bar',
+ 'Bell',
+ 'Bike',
+ 'Blame',
+ 'Boy',
+ 'Brick',
+ 'Chair',
+ 'Closet',
+ 'Clue',
+ 'Collar',
+ 'Comment',
+ 'Conference',
+ 'Devil',
+ 'Diet',
+ 'Fear',
+ 'Fuel',
+ 'Glove',
+ 'Jacket',
+ 'Lunch',
+ 'Monitor',
+ 'Mortgage',
+ 'Nurse',
+ 'Pace',
+ 'Panic',
+ 'Peak',
+ 'Plane',
+ 'Reward',
+ 'Row',
+ 'Sandwich',
+ 'Shock',
+ 'Spite',
+ 'Spray',
+ 'Surprise',
+ 'Till',
+ 'Transition',
+ 'Weekend',
+ 'Welcome',
+ 'Yard',
+ 'Alarm',
+ 'Bend',
+ 'Bicycle',
+ 'Bite',
+ 'Blind',
+ 'Bottle',
+ 'Cable',
+ 'Candle',
+ 'Clerk',
+ 'Cloud',
+ 'Concert',
+ 'Counter',
+ 'Flower',
+ 'Grandfather',
+ 'Harm',
+ 'Knee',
+ 'Lawyer',
+ 'Leather',
+ 'Load',
+ 'Mirror',
+ 'Neck',
+ 'Pension',
+ 'Plate',
+ 'Purple',
+ 'Ruin',
+ 'Ship',
+ 'Skirt',
+ 'Slice',
+ 'Snow',
+ 'Specialist',
+ 'Stroke',
+ 'Switch',
+ 'Trash',
+ 'Tune',
+ 'Zone',
+ 'Anger',
+ 'Award',
+ 'Bid',
+ 'Bitter',
+ 'Boot',
+ 'Bug',
+ 'Camp',
+ 'Candy',
+ 'Carpet',
+ 'Cat',
+ 'Champion',
+ 'Channel',
+ 'Clock',
+ 'Comfort',
+ 'Cow',
+ 'Crack',
+ 'Engineer',
+ 'Entrance',
+ 'Fault',
+ 'Grass',
+ 'Guy',
+ 'Hell',
+ 'Highlight',
+ 'Incident',
+ 'Island',
+ 'Joke',
+ 'Jury',
+ 'Leg',
+ 'Lip',
+ 'Mate',
+ 'Motor',
+ 'Nerve',
+ 'Passage',
+ 'Pen',
+ 'Pride',
+ 'Priest',
+ 'Prize',
+ 'Promise',
+ 'Resident',
+ 'Resort',
+ 'Ring',
+ 'Roof',
+ 'Rope',
+ 'Sail',
+ 'Scheme',
+ 'Script',
+ 'Sock',
+ 'Station',
+ 'Toe',
+ 'Tower',
+ 'Truck',
+ 'Witness',
+ 'Asparagus',
+ 'You',
+ 'It',
+ 'Can',
+ 'Will',
+ 'If',
+ 'One',
+ 'Many',
+ 'Most',
+ 'Other',
+ 'Use',
+ 'Make',
+ 'Good',
+ 'Look',
+ 'Help',
+ 'Go',
+ 'Great',
+ 'Being',
+ 'Few',
+ 'Might',
+ 'Still',
+ 'Public',
+ 'Read',
+ 'Keep',
+ 'Start',
+ 'Give',
+ 'Human',
+ 'Local',
+ 'General',
+ 'She',
+ 'Specific',
+ 'Long',
+ 'Play',
+ 'Feel',
+ 'High',
+ 'Tonight',
+ 'Put',
+ 'Common',
+ 'Set',
+ 'Change',
+ 'Simple',
+ 'Past',
+ 'Big',
+ 'Possible',
+ 'Particular',
+ 'Today',
+ 'Major',
+ 'Personal',
+ 'Current',
+ 'National',
+ 'Cut',
+ 'Natural',
+ 'Physical',
+ 'Show',
+ 'Try',
+ 'Check',
+ 'Second',
+ 'Call',
+ 'Move',
+ 'Pay',
+ 'Let',
+ 'Increase',
+ 'Single',
+ 'Individual',
+ 'Turn',
+ 'Ask',
+ 'Buy',
+ 'Guard',
+ 'Hold',
+ 'Main',
+ 'Offer',
+ 'Potential',
+ 'Professional',
+ 'International',
+ 'Travel',
+ 'Cook',
+ 'Alternative',
+ 'Following',
+ 'Special',
+ 'Working',
+ 'Whole',
+ 'Dance',
+ 'Excuse',
+ 'Cold',
+ 'Commercial',
+ 'Low',
+ 'Purchase',
+ 'Deal',
+ 'Primary',
+ 'Worth',
+ 'Fall',
+ 'Necessary',
+ 'Positive',
+ 'Produce',
+ 'Search',
+ 'Present',
+ 'Spend',
+ 'Talk',
+ 'Creative',
+ 'Tell',
+ 'Cost',
+ 'Drive',
+ 'Green',
+ 'Support',
+ 'Glad',
+ 'Remove',
+ 'Return',
+ 'Run',
+ 'Complex',
+ 'Due',
+ 'Effective',
+ 'Middle',
+ 'Regular',
+ 'Reserve',
+ 'Independent',
+ 'Leave',
+ 'Original',
+ 'Reach',
+ 'Rest',
+ 'Serve',
+ 'Watch',
+ 'Beautiful',
+ 'Charge',
+ 'Active',
+ 'Break',
+ 'Negative',
+ 'Safe',
+ 'Stay',
+ 'Visit',
+ 'Visual',
+ 'Affect',
+ 'Cover',
+ 'Report',
+ 'Rise',
+ 'Walk',
+ 'White',
+ 'Beyond',
+ 'Junior',
+ 'Pick',
+ 'Unique',
+ 'Anything',
+ 'Classic',
+ 'Final',
+ 'Lift',
+ 'Mix',
+ 'Private',
+ 'Stop',
+ 'Teach',
+ 'Western',
+ 'Concern',
+ 'Familiar',
+ 'Fly',
+ 'Official',
+ 'Broad',
+ 'Comfortable',
+ 'Gain',
+ 'Maybe',
+ 'Rich',
+ 'Save',
+ 'Stand',
+ 'Young',
+ 'Heavy',
+ 'Hello',
+ 'Lead',
+ 'Listen',
+ 'Valuable',
+ 'Worry',
+ 'Handle',
+ 'Leading',
+ 'Meet',
+ 'Release',
+ 'Sell',
+ 'Finish',
+ 'Normal',
+ 'Press',
+ 'Ride',
+ 'Secret',
+ 'Spread',
+ 'Spring',
+ 'Tough',
+ 'Wait',
+ 'Brown',
+ 'Deep',
+ 'Display',
+ 'Flow',
+ 'Hit',
+ 'Objective',
+ 'Shoot',
+ 'Touch',
+ 'Cancel',
+ 'Chemical',
+ 'Cry',
+ 'Dump',
+ 'Extreme',
+ 'Push',
+ 'Conflict',
+ 'Eat',
+ 'Fill',
+ 'Formal',
+ 'Jump',
+ 'Kick',
+ 'Opposite',
+ 'Pass',
+ 'Pitch',
+ 'Remote',
+ 'Total',
+ 'Treat',
+ 'Vast',
+ 'Abuse',
+ 'Beat',
+ 'Burn',
+ 'Deposit',
+ 'Print',
+ 'Raise',
+ 'Sleep',
+ 'Somewhere',
+ 'Advance',
+ 'Anywhere',
+ 'Consist',
+ 'Dark',
+ 'Double',
+ 'Draw',
+ 'Equal',
+ 'Fix',
+ 'Hire',
+ 'Internal',
+ 'Join',
+ 'Kill',
+ 'Sensitive',
+ 'Tap',
+ 'Win',
+ 'Attack',
+ 'Claim',
+ 'Constant',
+ 'Drag',
+ 'Drink',
+ 'Guess',
+ 'Minor',
+ 'Pull',
+ 'Raw',
+ 'Soft',
+ 'Solid',
+ 'Wear',
+ 'Weird',
+ 'Wonder',
+ 'Annual',
+ 'Count',
+ 'Dead',
+ 'Doubt',
+ 'Feed',
+ 'Forever',
+ 'Impress',
+ 'Nobody',
+ 'Repeat',
+ 'Round',
+ 'Sing',
+ 'Slide',
+ 'Strip',
+ 'Whereas',
+ 'Wish',
+ 'Combine',
+ 'Command',
+ 'Dig',
+ 'Divide',
+ 'Equivalent',
+ 'Hang',
+ 'Hunt',
+ 'Initial',
+ 'March',
+ 'Mention',
+ 'Spiritual',
+ 'Survey',
+ 'Tie',
+ 'Adult',
+ 'Brief',
+ 'Crazy',
+ 'Escape',
+ 'Gather',
+ 'Hate',
+ 'Prior',
+ 'Repair',
+ 'Rough',
+ 'Sad',
+ 'Scratch',
+ 'Sick',
+ 'Strike',
+ 'Employ',
+ 'External',
+ 'Hurt',
+ 'Illegal',
+ 'Laugh',
+ 'Lay',
+ 'Mobile',
+ 'Nasty',
+ 'Ordinary',
+ 'Respond',
+ 'Royal',
+ 'Senior',
+ 'Split',
+ 'Strain',
+ 'Struggle',
+ 'Swim',
+ 'Train',
+ 'Upper',
+ 'Wash',
+ 'Yellow',
+ 'Convert',
+ 'Crash',
+ 'Dependent',
+ 'Fold',
+ 'Funny',
+ 'Grab',
+ 'Hide',
+ 'Miss',
+ 'Permit',
+ 'Quote',
+ 'Recover',
+ 'Resolve',
+ 'Roll',
+ 'Sink',
+ 'Slip',
+ 'Spare',
+ 'Suspect',
+ 'Sweet',
+ 'Swing',
+ 'Twist',
+ 'Upstairs',
+ 'Usual',
+ 'Abroad',
+ 'Brave',
+ 'Calm',
+ 'Concentrate',
+ 'Estimate',
+ 'Grand',
+ 'Male',
+ 'Mine',
+ 'Prompt',
+ 'Quiet',
+ 'Refuse',
+ 'Regret',
+ 'Reveal',
+ 'Rush',
+ 'Shake',
+ 'Shift',
+ 'Shine',
+ 'Steal',
+ 'Suck',
+ 'Surround',
+ 'Anybody',
+ 'Bear',
+ 'Brilliant',
+ 'Dare',
+ 'Dear',
+ 'Delay',
+ 'Drunk',
+ 'Female',
+ 'Hurry',
+ 'Inevitable',
+ 'Invite',
+ 'Kiss',
+ 'Neat',
+ 'Pop',
+ 'Punch',
+ 'Quit',
+ 'Reply',
+ 'Representative',
+ 'Resist',
+ 'Rip',
+ 'Rub',
+ 'Silly',
+ 'Smile',
+ 'Spell',
+ 'Stretch',
+ 'Stupid',
+ 'Tear',
+ 'Temporary',
+ 'Tomorrow',
+ 'Wake',
+ 'Wrap',
+ 'Yesterday',
+]
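Note: the hunk above is layout-only; black rewrites awxkit's word list to one element per line without changing its contents. A vocabulary like this is typically consumed to build readable random resource names. A minimal sketch of such a consumer, assuming the list is in scope as `words` (the helper itself is hypothetical, not awxkit's actual API):

    import random

    # `words` is the list defined above; the helper name is illustrative only.
    def random_name(prefix='awx'):
        # Join two randomly chosen vocabulary words, e.g. 'awx-Quiet-Mountain'.
        return '-'.join([prefix] + random.sample(words, 2))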
diff --git a/awxkit/awxkit/ws.py b/awxkit/awxkit/ws.py
index ee39a5e990..41380d406e 100644
--- a/awxkit/awxkit/ws.py
+++ b/awxkit/awxkit/ws.py
@@ -84,12 +84,9 @@ class WSClient(object):
auth_cookie = ''
pref = 'wss://' if self._use_ssl else 'ws://'
url = '{0}{1.hostname}:{1.port}/websocket/'.format(pref, self)
- self.ws = websocket.WebSocketApp(url,
- on_open=self._on_open,
- on_message=self._on_message,
- on_error=self._on_error,
- on_close=self._on_close,
- cookie=auth_cookie)
+ self.ws = websocket.WebSocketApp(
+ url, on_open=self._on_open, on_message=self._on_message, on_error=self._on_error, on_close=self._on_close, cookie=auth_cookie
+ )
self._message_cache = []
self._should_subscribe_to_pending_job = False
self._pending_unsubscribe = threading.Event()
@@ -199,12 +196,8 @@ class WSClient(object):
message = json.loads(message)
log.debug('received message: {}'.format(message))
- if all([message.get('group_name') == 'jobs',
- message.get('status') == 'pending',
- message.get('unified_job_id'),
- self._should_subscribe_to_pending_job]):
- if bool(message.get('project_id')) == (
- self._should_subscribe_to_pending_job['events'] == 'project_update_events'):
+ if all([message.get('group_name') == 'jobs', message.get('status') == 'pending', message.get('unified_job_id'), self._should_subscribe_to_pending_job]):
+ if bool(message.get('project_id')) == (self._should_subscribe_to_pending_job['events'] == 'project_update_events'):
self._update_subscription(message['unified_job_id'])
ret = self._recv_queue.put(message)
diff --git a/awxkit/awxkit/yaml_file.py b/awxkit/awxkit/yaml_file.py
index 750ee3fbb1..49924cef93 100644
--- a/awxkit/awxkit/yaml_file.py
+++ b/awxkit/awxkit/yaml_file.py
@@ -12,7 +12,6 @@ file_path_cache = {}
class Loader(yaml.SafeLoader):
-
def __init__(self, stream):
self._root = os.path.split(stream.name)[0]
super(Loader, self).__init__(stream)
@@ -82,6 +81,7 @@ def load_file(filename):
random_thing: "{random_string:24}"
"""
from py.path import local
+
if filename is None:
this_file = os.path.abspath(__file__)
path = local(this_file).new(basename='../data.yaml')
diff --git a/awxkit/setup.py b/awxkit/setup.py
index b446ba0a90..23f4e161cb 100644
--- a/awxkit/setup.py
+++ b/awxkit/setup.py
@@ -68,11 +68,7 @@ setup(
'requests',
],
python_requires=">=3.6",
- extras_require={
- 'formatting': ['jq'],
- 'websockets': ['websocket-client==0.57.0'],
- 'crypto': ['cryptography']
- },
+ extras_require={'formatting': ['jq'], 'websockets': ['websocket-client==0.57.0'], 'crypto': ['cryptography']},
license='Apache 2.0',
classifiers=[
'Development Status :: 5 - Production/Stable',
@@ -87,10 +83,5 @@ setup(
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
- entry_points={
- 'console_scripts': [
- 'akit=awxkit.scripts.basic_session:load_interactive',
- 'awx=awxkit.cli:run'
- ]
- }
+ entry_points={'console_scripts': ['akit=awxkit.scripts.basic_session:load_interactive', 'awx=awxkit.cli:run']},
)
diff --git a/awxkit/test/cli/test_client.py b/awxkit/test/cli/test_client.py
index e792b6c267..9a63e37c55 100644
--- a/awxkit/test/cli/test_client.py
+++ b/awxkit/test/cli/test_client.py
@@ -7,7 +7,6 @@ from awxkit.cli import run, CLI
class MockedCLI(CLI):
-
def fetch_version_root(self):
pass
@@ -17,9 +16,7 @@ class MockedCLI(CLI):
@property
def json(self):
- return {
- 'users': None
- }
+ return {'users': None}
@pytest.mark.parametrize('help_param', ['-h', '--help'])
@@ -29,10 +26,7 @@ def test_help(capfd, help_param):
out, err = capfd.readouterr()
assert "usage:" in out
- for snippet in (
- '--conf.host https://example.awx.org]',
- '-v, --verbose'
- ):
+ for snippet in ('--conf.host https://example.awx.org]', '-v, --verbose'):
assert snippet in out
@@ -59,8 +53,5 @@ def test_list_resources(capfd, resource):
_, out = capfd.readouterr()
assert "usage:" in out
- for snippet in (
- '--conf.host https://example.awx.org]',
- '-v, --verbose'
- ):
+ for snippet in ('--conf.host https://example.awx.org]', '-v, --verbose'):
assert snippet in out
diff --git a/awxkit/test/cli/test_config.py b/awxkit/test/cli/test_config.py
index 3154fdb081..61b6b4c54d 100644
--- a/awxkit/test/cli/test_config.py
+++ b/awxkit/test/cli/test_config.py
@@ -4,16 +4,15 @@ from requests.exceptions import ConnectionError
from awxkit.cli import CLI
from awxkit import config
+
def test_host_from_environment():
cli = CLI()
- cli.parse_args(
- ['awx'],
- env={'TOWER_HOST': 'https://xyz.local'}
- )
+ cli.parse_args(['awx'], env={'TOWER_HOST': 'https://xyz.local'})
with pytest.raises(ConnectionError):
cli.connect()
assert config.base_url == 'https://xyz.local'
+
def test_host_from_argv():
cli = CLI()
cli.parse_args(['awx', '--conf.host', 'https://xyz.local'])
@@ -21,43 +20,30 @@ def test_host_from_argv():
cli.connect()
assert config.base_url == 'https://xyz.local'
+
def test_username_and_password_from_environment():
cli = CLI()
- cli.parse_args(
- ['awx'],
- env={
- 'TOWER_USERNAME': 'mary',
- 'TOWER_PASSWORD': 'secret'
- }
- )
+ cli.parse_args(['awx'], env={'TOWER_USERNAME': 'mary', 'TOWER_PASSWORD': 'secret'})
with pytest.raises(ConnectionError):
cli.connect()
assert config.credentials.default.username == 'mary'
assert config.credentials.default.password == 'secret'
+
def test_username_and_password_argv():
cli = CLI()
- cli.parse_args([
- 'awx', '--conf.username', 'mary', '--conf.password', 'secret'
- ])
+ cli.parse_args(['awx', '--conf.username', 'mary', '--conf.password', 'secret'])
with pytest.raises(ConnectionError):
cli.connect()
assert config.credentials.default.username == 'mary'
assert config.credentials.default.password == 'secret'
+
def test_config_precedence():
cli = CLI()
- cli.parse_args(
- [
- 'awx', '--conf.username', 'mary', '--conf.password', 'secret'
- ],
- env={
- 'TOWER_USERNAME': 'IGNORE',
- 'TOWER_PASSWORD': 'IGNORE'
- }
- )
+ cli.parse_args(['awx', '--conf.username', 'mary', '--conf.password', 'secret'], env={'TOWER_USERNAME': 'IGNORE', 'TOWER_PASSWORD': 'IGNORE'})
with pytest.raises(ConnectionError):
cli.connect()
diff --git a/awxkit/test/cli/test_format.py b/awxkit/test/cli/test_format.py
index 7327f91518..adbe0ef463 100644
--- a/awxkit/test/cli/test_format.py
+++ b/awxkit/test/cli/test_format.py
@@ -11,19 +11,17 @@ from awxkit.cli.resource import Import
def test_json_empty_list():
- page = Page.from_json({
- 'results': []
- })
+ page = Page.from_json({'results': []})
formatted = format_response(page)
assert json.loads(formatted) == {'results': []}
+
def test_yaml_empty_list():
- page = Page.from_json({
- 'results': []
- })
+ page = Page.from_json({'results': []})
formatted = format_response(page, fmt='yaml')
assert yaml.safe_load(formatted) == {'results': []}
+
def test_json_list():
users = {
'results': [
@@ -36,6 +34,7 @@ def test_json_list():
formatted = format_response(page)
assert json.loads(formatted) == users
+
def test_yaml_list():
users = {
'results': [
diff --git a/awxkit/test/cli/test_options.py b/awxkit/test/cli/test_options.py
index 83bb4ac36a..fc2e53f957 100644
--- a/awxkit/test/cli/test_options.py
+++ b/awxkit/test/cli/test_options.py
@@ -11,13 +11,11 @@ from awxkit.cli.options import ResourceOptionsParser
class ResourceOptionsParser(ResourceOptionsParser):
-
def get_allowed_options(self):
self.allowed_options = ['GET', 'POST', 'PUT', 'PATCH', 'DELETE']
class OptionsPage(Page):
-
def options(self):
return self
@@ -33,30 +31,31 @@ class OptionsPage(Page):
class TestOptions(unittest.TestCase):
-
def setUp(self):
_parser = argparse.ArgumentParser()
self.parser = _parser.add_subparsers(help='action')
def test_list(self):
- page = OptionsPage.from_json({
- 'actions': {
- 'GET': {},
- 'POST': {},
+ page = OptionsPage.from_json(
+ {
+ 'actions': {
+ 'GET': {},
+ 'POST': {},
+ }
}
- })
+ )
ResourceOptionsParser(None, page, 'users', self.parser)
assert 'list' in self.parser.choices
def test_list_filtering(self):
- page = OptionsPage.from_json({
- 'actions': {
- 'GET': {},
- 'POST': {
- 'first_name': {'type': 'string'}
- },
+ page = OptionsPage.from_json(
+ {
+ 'actions': {
+ 'GET': {},
+ 'POST': {'first_name': {'type': 'string'}},
+ }
}
- })
+ )
options = ResourceOptionsParser(None, page, 'users', self.parser)
options.build_query_arguments('list', 'POST')
assert 'list' in self.parser.choices
@@ -66,14 +65,14 @@ class TestOptions(unittest.TestCase):
assert '--first_name TEXT' in out.getvalue()
def test_list_not_filterable(self):
- page = OptionsPage.from_json({
- 'actions': {
- 'GET': {},
- 'POST': {
- 'middle_name': {'type': 'string', 'filterable': False}
- },
+ page = OptionsPage.from_json(
+ {
+ 'actions': {
+ 'GET': {},
+ 'POST': {'middle_name': {'type': 'string', 'filterable': False}},
+ }
}
- })
+ )
options = ResourceOptionsParser(None, page, 'users', self.parser)
options.build_query_arguments('list', 'POST')
assert 'list' in self.parser.choices
@@ -83,16 +82,18 @@ class TestOptions(unittest.TestCase):
assert '--middle_name' not in out.getvalue()
def test_creation_optional_argument(self):
- page = OptionsPage.from_json({
- 'actions': {
- 'POST': {
- 'first_name': {
- 'type': 'string',
- 'help_text': 'Please specify your first name',
- }
- },
+ page = OptionsPage.from_json(
+ {
+ 'actions': {
+ 'POST': {
+ 'first_name': {
+ 'type': 'string',
+ 'help_text': 'Please specify your first name',
+ }
+ },
+ }
}
- })
+ )
options = ResourceOptionsParser(None, page, 'users', self.parser)
options.build_query_arguments('create', 'POST')
assert 'create' in self.parser.choices
@@ -102,17 +103,13 @@ class TestOptions(unittest.TestCase):
assert '--first_name TEXT Please specify your first name' in out.getvalue()
def test_creation_required_argument(self):
- page = OptionsPage.from_json({
- 'actions': {
- 'POST': {
- 'username': {
- 'type': 'string',
- 'help_text': 'Please specify a username',
- 'required': True
- }
- },
+ page = OptionsPage.from_json(
+ {
+ 'actions': {
+ 'POST': {'username': {'type': 'string', 'help_text': 'Please specify a username', 'required': True}},
+ }
}
- })
+ )
options = ResourceOptionsParser(None, page, 'users', self.parser)
options.build_query_arguments('create', 'POST')
assert 'create' in self.parser.choices
@@ -122,13 +119,13 @@ class TestOptions(unittest.TestCase):
assert '--username TEXT Please specify a username'
def test_integer_argument(self):
- page = OptionsPage.from_json({
- 'actions': {
- 'POST': {
- 'max_hosts': {'type': 'integer'}
- },
+ page = OptionsPage.from_json(
+ {
+ 'actions': {
+ 'POST': {'max_hosts': {'type': 'integer'}},
+ }
}
- })
+ )
options = ResourceOptionsParser(None, page, 'organizations', self.parser)
options.build_query_arguments('create', 'POST')
assert 'create' in self.parser.choices
@@ -138,13 +135,13 @@ class TestOptions(unittest.TestCase):
assert '--max_hosts INTEGER' in out.getvalue()
def test_boolean_argument(self):
- page = OptionsPage.from_json({
- 'actions': {
- 'POST': {
- 'diff_mode': {'type': 'boolean'}
- },
+ page = OptionsPage.from_json(
+ {
+ 'actions': {
+ 'POST': {'diff_mode': {'type': 'boolean'}},
+ }
}
- })
+ )
options = ResourceOptionsParser(None, page, 'users', self.parser)
options.build_query_arguments('create', 'POST')
assert 'create' in self.parser.choices
@@ -154,23 +151,25 @@ class TestOptions(unittest.TestCase):
assert '--diff_mode BOOLEAN' in out.getvalue()
def test_choices(self):
- page = OptionsPage.from_json({
- 'actions': {
- 'POST': {
- 'verbosity': {
- 'type': 'integer',
- 'choices': [
- (0, '0 (Normal)'),
- (1, '1 (Verbose)'),
- (2, '2 (More Verbose)'),
- (3, '3 (Debug)'),
- (4, '4 (Connection Debug)'),
- (5, '5 (WinRM Debug)'),
- ]
- }
- },
+ page = OptionsPage.from_json(
+ {
+ 'actions': {
+ 'POST': {
+ 'verbosity': {
+ 'type': 'integer',
+ 'choices': [
+ (0, '0 (Normal)'),
+ (1, '1 (Verbose)'),
+ (2, '2 (More Verbose)'),
+ (3, '3 (Debug)'),
+ (4, '4 (Connection Debug)'),
+ (5, '5 (WinRM Debug)'),
+ ],
+ }
+ },
+ }
}
- })
+ )
options = ResourceOptionsParser(None, page, 'users', self.parser)
options.build_query_arguments('create', 'POST')
assert 'create' in self.parser.choices
@@ -181,9 +180,7 @@ class TestOptions(unittest.TestCase):
def test_actions_with_primary_key(self):
for method in ('get', 'modify', 'delete'):
- page = OptionsPage.from_json({
- 'actions': {'GET': {}, 'POST': {}}
- })
+ page = OptionsPage.from_json({'actions': {'GET': {}, 'POST': {}}})
ResourceOptionsParser(None, page, 'jobs', self.parser)
assert method in self.parser.choices
@@ -193,19 +190,20 @@ class TestOptions(unittest.TestCase):
class TestSettingsOptions(unittest.TestCase):
-
def setUp(self):
_parser = argparse.ArgumentParser()
self.parser = _parser.add_subparsers(help='action')
def test_list(self):
- page = OptionsPage.from_json({
- 'actions': {
- 'GET': {},
- 'POST': {},
- 'PUT': {},
+ page = OptionsPage.from_json(
+ {
+ 'actions': {
+ 'GET': {},
+ 'POST': {},
+ 'PUT': {},
+ }
}
- })
+ )
page.endpoint = '/settings/all/'
ResourceOptionsParser(None, page, 'settings', self.parser)
assert 'list' in self.parser.choices
diff --git a/awxkit/test/test_credentials.py b/awxkit/test/test_credentials.py
index 714550119e..6adc5e9332 100644
--- a/awxkit/test/test_credentials.py
+++ b/awxkit/test/test_credentials.py
@@ -14,32 +14,39 @@ def set_config_cred_to_desired(config, location):
config_ref = config_ref[_location]
setattr(config_ref, split[-1], 'desired')
-class MockCredentialType(object):
+class MockCredentialType(object):
def __init__(self, name, kind, managed_by_tower=True):
self.name = name
self.kind = kind
self.managed_by_tower = managed_by_tower
-@pytest.mark.parametrize('field, kind, config_cred, desired_field, desired_value',
- [('field', 'ssh', PseudoNamespace(field=123), 'field', 123),
- ('subscription', 'azure', PseudoNamespace(subscription_id=123), 'subscription', 123),
- ('project_id', 'gce', PseudoNamespace(project=123), 'project', 123),
- ('authorize_password', 'net', PseudoNamespace(authorize=123), 'authorize_password', 123)])
+
+@pytest.mark.parametrize(
+ 'field, kind, config_cred, desired_field, desired_value',
+ [
+ ('field', 'ssh', PseudoNamespace(field=123), 'field', 123),
+ ('subscription', 'azure', PseudoNamespace(subscription_id=123), 'subscription', 123),
+ ('project_id', 'gce', PseudoNamespace(project=123), 'project', 123),
+ ('authorize_password', 'net', PseudoNamespace(authorize=123), 'authorize_password', 123),
+ ],
+)
def test_get_payload_field_and_value_from_config_cred(field, kind, config_cred, desired_field, desired_value):
- ret_field, ret_val = credentials.get_payload_field_and_value_from_kwargs_or_config_cred(field, kind, {},
- config_cred)
+ ret_field, ret_val = credentials.get_payload_field_and_value_from_kwargs_or_config_cred(field, kind, {}, config_cred)
assert ret_field == desired_field
assert ret_val == desired_value
-@pytest.mark.parametrize('field, kind, kwargs, desired_field, desired_value',
- [('field', 'ssh', dict(field=123), 'field', 123),
- ('subscription', 'azure', dict(subscription=123), 'subscription', 123),
- ('project_id', 'gce', dict(project_id=123), 'project', 123),
- ('authorize_password', 'net', dict(authorize_password=123), 'authorize_password', 123)])
+@pytest.mark.parametrize(
+ 'field, kind, kwargs, desired_field, desired_value',
+ [
+ ('field', 'ssh', dict(field=123), 'field', 123),
+ ('subscription', 'azure', dict(subscription=123), 'subscription', 123),
+ ('project_id', 'gce', dict(project_id=123), 'project', 123),
+ ('authorize_password', 'net', dict(authorize_password=123), 'authorize_password', 123),
+ ],
+)
def test_get_payload_field_and_value_from_kwarg(field, kind, kwargs, desired_field, desired_value):
- ret_field, ret_val = credentials.get_payload_field_and_value_from_kwargs_or_config_cred(field, kind, kwargs,
- PseudoNamespace())
+ ret_field, ret_val = credentials.get_payload_field_and_value_from_kwargs_or_config_cred(field, kind, kwargs, PseudoNamespace())
assert ret_field == desired_field
assert ret_val == desired_value
diff --git a/awxkit/test/test_dependency_resolver.py b/awxkit/test/test_dependency_resolver.py
index 4a7e85e3c4..b713a73adc 100644
--- a/awxkit/test/test_dependency_resolver.py
+++ b/awxkit/test/test_dependency_resolver.py
@@ -21,7 +21,6 @@ class MockHasCreate(has_create.HasCreate):
class A(MockHasCreate):
-
def create(self, **kw):
return self
@@ -87,13 +86,12 @@ class H(MockHasCreate):
optional_dependencies = [E, A]
- def create(self, a=None, e=None, **kw):
+ def create(self, a=None, e=None, **kw):
self.create_and_update_dependencies(*filter_by_class((a, A), (e, E)))
return self
class MultipleWordClassName(MockHasCreate):
-
def create(self, **kw):
return self
@@ -102,7 +100,7 @@ class AnotherMultipleWordClassName(MockHasCreate):
optional_dependencies = [MultipleWordClassName]
- def create(self, multiple_word_class_name=None, **kw):
+ def create(self, multiple_word_class_name=None, **kw):
self.create_and_update_dependencies(*filter_by_class((multiple_word_class_name, MultipleWordClassName)))
return self
@@ -183,19 +181,17 @@ def test_optional_dependency_graph_with_additional():
def test_creation_order():
"""confirms that `has_create.creation_order()` returns a valid creation order in the desired list of sets format"""
- dependency_graph = dict(eight=set(['seven', 'six']),
- seven=set(['five']),
- six=set(),
- five=set(['two', 'one']),
- four=set(['one']),
- three=set(['two']),
- two=set(['one']),
- one=set())
- desired = [set(['one', 'six']),
- set(['two', 'four']),
- set(['three', 'five']),
- set(['seven']),
- set(['eight'])]
+ dependency_graph = dict(
+ eight=set(['seven', 'six']),
+ seven=set(['five']),
+ six=set(),
+ five=set(['two', 'one']),
+ four=set(['one']),
+ three=set(['two']),
+ two=set(['one']),
+ one=set(),
+ )
+ desired = [set(['one', 'six']), set(['two', 'four']), set(['three', 'five']), set(['seven']), set(['eight'])]
assert has_create.creation_order(dependency_graph) == desired
@@ -203,14 +199,16 @@ def test_creation_order_with_loop():
"""confirms that `has_create.creation_order()` raises toposort.CircularDependencyError when evaluating
a cyclic dependency graph
"""
- dependency_graph = dict(eight=set(['seven', 'six']),
- seven=set(['five']),
- six=set(),
- five=set(['two', 'one']),
- four=set(['one']),
- three=set(['two']),
- two=set(['one']),
- one=set(['eight']))
+ dependency_graph = dict(
+ eight=set(['seven', 'six']),
+ seven=set(['five']),
+ six=set(),
+ five=set(['two', 'one']),
+ four=set(['one']),
+ three=set(['two']),
+ two=set(['one']),
+ one=set(['eight']),
+ )
with pytest.raises(CircularDependencyError):
assert has_create.creation_order(dependency_graph)
@@ -239,9 +237,11 @@ class Five(MockHasCreate):
class IsntAHasCreate(object):
pass
+
class Six(MockHasCreate, IsntAHasCreate):
dependencies = [Two]
+
class Seven(MockHasCreate):
dependencies = [IsntAHasCreate]
@@ -265,8 +265,7 @@ def test_separate_async_optionals_three_exist():
the class that has shared item as a dependency occurs first in a separate creation group
"""
order = has_create.creation_order(has_create.optional_dependency_graph(Five, Four, Three))
- assert has_create.separate_async_optionals(order) == [set([One]), set([Two]), set([Three]),
- set([Five]), set([Four])]
+ assert has_create.separate_async_optionals(order) == [set([One]), set([Two]), set([Three]), set([Five]), set([Four])]
def test_separate_async_optionals_not_has_create():
@@ -345,8 +344,7 @@ def test_dependency_resolution_complete():
for item in (h, a, e, d, c, b):
if item._dependency_store:
- assert all(item._dependency_store.values()
- ), "{0} missing dependency: {0._dependency_store}".format(item)
+ assert all(item._dependency_store.values()), "{0} missing dependency: {0._dependency_store}".format(item)
assert a == b._dependency_store[A], "Duplicate dependency detected"
assert a == c._dependency_store[A], "Duplicate dependency detected"
@@ -468,7 +466,6 @@ def test_teardown_ds_cleared():
class OneWithArgs(MockHasCreate):
-
def create(self, **kw):
self.kw = kw
return self
@@ -492,18 +489,17 @@ class ThreeWithArgs(MockHasCreate):
optional_dependencies = [TwoWithArgs]
def create(self, one_with_args=OneWithArgs, two_with_args=None, **kw):
- self.create_and_update_dependencies(*filter_by_class((one_with_args, OneWithArgs),
- (two_with_args, TwoWithArgs)))
+ self.create_and_update_dependencies(*filter_by_class((one_with_args, OneWithArgs), (two_with_args, TwoWithArgs)))
self.kw = kw
return self
+
class FourWithArgs(MockHasCreate):
dependencies = [TwoWithArgs, ThreeWithArgs]
def create(self, two_with_args=TwoWithArgs, three_with_args=ThreeWithArgs, **kw):
- self.create_and_update_dependencies(*filter_by_class((two_with_args, TwoWithArgs),
- (three_with_args, ThreeWithArgs)))
+ self.create_and_update_dependencies(*filter_by_class((two_with_args, TwoWithArgs), (three_with_args, ThreeWithArgs)))
self.kw = kw
return self
@@ -536,10 +532,9 @@ def test_no_tuple_for_class_arg_causes_shared_dependencies_nested_staggering():
def test_tuple_for_class_arg_causes_unshared_dependencies_when_downstream():
"""Confirms that provided arg-tuple for dependency type is applied instead of chained dependency"""
- three_wa = ThreeWithArgs().create(two_with_args=(TwoWithArgs, dict(one_with_args=False,
- make_one_with_args=True,
- two_with_args_kw_arg=234)),
- three_with_args_kw_arg=345)
+ three_wa = ThreeWithArgs().create(
+ two_with_args=(TwoWithArgs, dict(one_with_args=False, make_one_with_args=True, two_with_args_kw_arg=234)), three_with_args_kw_arg=345
+ )
assert isinstance(three_wa.ds.one_with_args, OneWithArgs)
assert isinstance(three_wa.ds.two_with_args, TwoWithArgs)
assert isinstance(three_wa.ds.two_with_args.ds.one_with_args, OneWithArgs)
@@ -552,13 +547,12 @@ def test_tuple_for_class_arg_causes_unshared_dependencies_when_downstream():
def test_tuples_for_class_arg_cause_unshared_dependencies_when_downstream():
"""Confirms that provided arg-tuple for dependency type is applied instead of chained dependency"""
- four_wa = FourWithArgs().create(two_with_args=(TwoWithArgs, dict(one_with_args=False,
- make_one_with_args=True,
- two_with_args_kw_arg=456)),
- # No shared dependencies with four_wa.ds.two_with_args
- three_with_args=(ThreeWithArgs, dict(one_with_args=(OneWithArgs, {}),
- two_with_args=False)),
- four_with_args_kw=567)
+ four_wa = FourWithArgs().create(
+ two_with_args=(TwoWithArgs, dict(one_with_args=False, make_one_with_args=True, two_with_args_kw_arg=456)),
+ # No shared dependencies with four_wa.ds.two_with_args
+ three_with_args=(ThreeWithArgs, dict(one_with_args=(OneWithArgs, {}), two_with_args=False)),
+ four_with_args_kw=567,
+ )
assert isinstance(four_wa.ds.two_with_args, TwoWithArgs)
assert isinstance(four_wa.ds.three_with_args, ThreeWithArgs)
assert isinstance(four_wa.ds.two_with_args.ds.one_with_args, OneWithArgs)
@@ -575,25 +569,21 @@ class NotHasCreate(object):
class MixinUserA(MockHasCreate, NotHasCreate):
-
def create(self, **kw):
return self
class MixinUserB(MockHasCreate, NotHasCreate):
-
def create(self, **kw):
return self
class MixinUserC(MixinUserB):
-
def create(self, **kw):
return self
class MixinUserD(MixinUserC):
-
def create(self, **kw):
return self
@@ -646,17 +636,12 @@ class DynamicallyDeclaresNotHasCreateDependency(MockHasCreate):
dependencies = [NotHasCreate]
def create(self, not_has_create=MixinUserA):
- dynamic_dependency = dict(mixinusera=MixinUserA,
- mixinuserb=MixinUserB,
- mixinuserc=MixinUserC)
+ dynamic_dependency = dict(mixinusera=MixinUserA, mixinuserb=MixinUserB, mixinuserc=MixinUserC)
self.create_and_update_dependencies(dynamic_dependency[not_has_create])
return self
-@pytest.mark.parametrize('dependency,dependency_class',
- [('mixinusera', MixinUserA),
- ('mixinuserb', MixinUserB),
- ('mixinuserc', MixinUserC)])
+@pytest.mark.parametrize('dependency,dependency_class', [('mixinusera', MixinUserA), ('mixinuserb', MixinUserB), ('mixinuserc', MixinUserC)])
def test_subclass_or_parent_dynamic_not_has_create_dependency_declaration(dependency, dependency_class):
"""Confirms that dependencies that dynamically declare dependencies subclassed from not HasCreate
are properly linked
@@ -670,17 +655,12 @@ class DynamicallyDeclaresHasCreateDependency(MockHasCreate):
dependencies = [MixinUserB]
def create(self, mixin_user_b=MixinUserB):
- dynamic_dependency = dict(mixinuserb=MixinUserB,
- mixinuserc=MixinUserC,
- mixinuserd=MixinUserD)
+ dynamic_dependency = dict(mixinuserb=MixinUserB, mixinuserc=MixinUserC, mixinuserd=MixinUserD)
self.create_and_update_dependencies(dynamic_dependency[mixin_user_b])
return self
-@pytest.mark.parametrize('dependency,dependency_class',
- [('mixinuserb', MixinUserB),
- ('mixinuserc', MixinUserC),
- ('mixinuserd', MixinUserD)])
+@pytest.mark.parametrize('dependency,dependency_class', [('mixinuserb', MixinUserB), ('mixinuserc', MixinUserC), ('mixinuserd', MixinUserD)])
def test_subclass_or_parent_dynamic_has_create_dependency_declaration(dependency, dependency_class):
"""Confirms that dependencies that dynamically declare dependencies subclassed from not HasCreate
are properly linked
diff --git a/awxkit/test/test_registry.py b/awxkit/test/test_registry.py
index bfc3ca09ff..b0f0f2527f 100644
--- a/awxkit/test/test_registry.py
+++ b/awxkit/test/test_registry.py
@@ -169,8 +169,7 @@ def test_wildcard_and_specific_method_registration_acts_as_default(reg):
def test_multiple_method_registrations_disallowed_for_single_path_single_registration(reg, method):
with pytest.raises(TypeError) as e:
reg.register((('some_path', method), ('some_path', method)), One)
- assert str(e.value) == ('"{0.pattern}" already has registered method "{1}"'
- .format(reg.url_pattern('some_path'), method))
+ assert str(e.value) == ('"{0.pattern}" already has registered method "{1}"'.format(reg.url_pattern('some_path'), method))
@pytest.mark.parametrize('method', ('method', '.*'))
@@ -178,8 +177,7 @@ def test_multiple_method_registrations_disallowed_for_single_path_multiple_regis
reg.register('some_path', method, One)
with pytest.raises(TypeError) as e:
reg.register('some_path', method, One)
- assert str(e.value) == ('"{0.pattern}" already has registered method "{1}"'
- .format(reg.url_pattern('some_path'), method))
+ assert str(e.value) == ('"{0.pattern}" already has registered method "{1}"'.format(reg.url_pattern('some_path'), method))
def test_paths_can_be_patterns(reg):
@@ -188,10 +186,9 @@ def test_paths_can_be_patterns(reg):
def test_mixed_form_single_registration(reg):
- reg.register([('some_path_one', 'method_one'),
- 'some_path_two',
- ('some_path_three', ('method_two', 'method_three')),
- 'some_path_four', 'some_path_five'], One)
+ reg.register(
+ [('some_path_one', 'method_one'), 'some_path_two', ('some_path_three', ('method_two', 'method_three')), 'some_path_four', 'some_path_five'], One
+ )
assert reg.get('some_path_one', 'method_one') is One
assert reg.get('some_path_one') is None
assert reg.get('some_path_one', 'nonexistent') is None
@@ -209,10 +206,9 @@ def test_mixed_form_single_registration(reg):
def test_mixed_form_single_registration_with_methodless_default(reg):
reg.setdefault(One)
- reg.register([('some_path_one', 'method_one'),
- 'some_path_two',
- ('some_path_three', ('method_two', 'method_three')),
- 'some_path_four', 'some_path_five'], Two)
+ reg.register(
+ [('some_path_one', 'method_one'), 'some_path_two', ('some_path_three', ('method_two', 'method_three')), 'some_path_four', 'some_path_five'], Two
+ )
assert reg.get('some_path_one', 'method_one') is Two
assert reg.get('some_path_one') is One
assert reg.get('some_path_one', 'nonexistent') is One
@@ -230,10 +226,9 @@ def test_mixed_form_single_registration_with_methodless_default(reg):
def test_mixed_form_single_registration_with_method_default(reg):
reg.setdefault('existent', One)
- reg.register([('some_path_one', 'method_one'),
- 'some_path_two',
- ('some_path_three', ('method_two', 'method_three')),
- 'some_path_four', 'some_path_five'], Two)
+ reg.register(
+ [('some_path_one', 'method_one'), 'some_path_two', ('some_path_three', ('method_two', 'method_three')), 'some_path_four', 'some_path_five'], Two
+ )
assert reg.get('some_path_one', 'method_one') is Two
assert reg.get('some_path_one') is None
assert reg.get('some_path_one', 'existent') is One
diff --git a/awxkit/test/test_utils.py b/awxkit/test/test_utils.py
index 5f497d2e93..1a0a5412f2 100644
--- a/awxkit/test/test_utils.py
+++ b/awxkit/test/test_utils.py
@@ -9,60 +9,68 @@ from awxkit import utils
from awxkit import exceptions as exc
-@pytest.mark.parametrize('inp, out',
- [[True, True],
- [False, False],
- [1, True],
- [0, False],
- [1.0, True],
- [0.0, False],
- ['TrUe', True],
- ['FalSe', False],
- ['yEs', True],
- ['No', False],
- ['oN', True],
- ['oFf', False],
- ['asdf', True],
- ['0', False],
- ['', False],
- [{1: 1}, True],
- [{}, False],
- [(0,), True],
- [(), False],
- [[1], True],
- [[], False]])
+@pytest.mark.parametrize(
+ 'inp, out',
+ [
+ [True, True],
+ [False, False],
+ [1, True],
+ [0, False],
+ [1.0, True],
+ [0.0, False],
+ ['TrUe', True],
+ ['FalSe', False],
+ ['yEs', True],
+ ['No', False],
+ ['oN', True],
+ ['oFf', False],
+ ['asdf', True],
+ ['0', False],
+ ['', False],
+ [{1: 1}, True],
+ [{}, False],
+ [(0,), True],
+ [(), False],
+ [[1], True],
+ [[], False],
+ ],
+)
def test_to_bool(inp, out):
assert utils.to_bool(inp) == out
-@pytest.mark.parametrize('inp, out',
- [["{}", {}],
- ["{'null': null}", {"null": None}],
- ["{'bool': true}", {"bool": True}],
- ["{'bool': false}", {"bool": False}],
- ["{'int': 0}", {"int": 0}],
- ["{'float': 1.0}", {"float": 1.0}],
- ["{'str': 'abc'}", {"str": "abc"}],
- ["{'obj': {}}", {"obj": {}}],
- ["{'list': []}", {"list": []}],
- ["---", None],
- ["---\n'null': null", {'null': None}],
- ["---\n'bool': true", {'bool': True}],
- ["---\n'bool': false", {'bool': False}],
- ["---\n'int': 0", {'int': 0}],
- ["---\n'float': 1.0", {'float': 1.0}],
- ["---\n'string': 'abc'", {'string': 'abc'}],
- ["---\n'obj': {}", {'obj': {}}],
- ["---\n'list': []", {'list': []}],
- ["", None],
- ["'null': null", {'null': None}],
- ["'bool': true", {'bool': True}],
- ["'bool': false", {'bool': False}],
- ["'int': 0", {'int': 0}],
- ["'float': 1.0", {'float': 1.0}],
- ["'string': 'abc'", {'string': 'abc'}],
- ["'obj': {}", {'obj': {}}],
- ["'list': []", {'list': []}]])
+@pytest.mark.parametrize(
+ 'inp, out',
+ [
+ ["{}", {}],
+ ["{'null': null}", {"null": None}],
+ ["{'bool': true}", {"bool": True}],
+ ["{'bool': false}", {"bool": False}],
+ ["{'int': 0}", {"int": 0}],
+ ["{'float': 1.0}", {"float": 1.0}],
+ ["{'str': 'abc'}", {"str": "abc"}],
+ ["{'obj': {}}", {"obj": {}}],
+ ["{'list': []}", {"list": []}],
+ ["---", None],
+ ["---\n'null': null", {'null': None}],
+ ["---\n'bool': true", {'bool': True}],
+ ["---\n'bool': false", {'bool': False}],
+ ["---\n'int': 0", {'int': 0}],
+ ["---\n'float': 1.0", {'float': 1.0}],
+ ["---\n'string': 'abc'", {'string': 'abc'}],
+ ["---\n'obj': {}", {'obj': {}}],
+ ["---\n'list': []", {'list': []}],
+ ["", None],
+ ["'null': null", {'null': None}],
+ ["'bool': true", {'bool': True}],
+ ["'bool': false", {'bool': False}],
+ ["'int': 0", {'int': 0}],
+ ["'float': 1.0", {'float': 1.0}],
+ ["'string': 'abc'", {'string': 'abc'}],
+ ["'obj': {}", {'obj': {}}],
+ ["'list': []", {'list': []}],
+ ],
+)
def test_load_valid_json_or_yaml(inp, out):
assert utils.load_json_or_yaml(inp) == out
@@ -74,19 +82,13 @@ def test_load_invalid_json_or_yaml(inp):
@pytest.mark.parametrize('non_ascii', [True, False])
-@pytest.mark.skipif(
- sys.version_info < (3, 6),
- reason='this is only intended to be used in py3, not the CLI'
-)
+@pytest.mark.skipif(sys.version_info < (3, 6), reason='this is only intended to be used in py3, not the CLI')
def test_random_titles_are_unicode(non_ascii):
assert isinstance(utils.random_title(non_ascii=non_ascii), str)
@pytest.mark.parametrize('non_ascii', [True, False])
-@pytest.mark.skipif(
- sys.version_info < (3, 6),
- reason='this is only intended to be used in py3, not the CLI'
-)
+@pytest.mark.skipif(sys.version_info < (3, 6), reason='this is only intended to be used in py3, not the CLI')
def test_random_titles_generates_correct_characters(non_ascii):
title = utils.random_title(non_ascii=non_ascii)
if non_ascii:
@@ -98,34 +100,39 @@ def test_random_titles_generates_correct_characters(non_ascii):
title.encode('utf-8')
-@pytest.mark.parametrize('inp, out',
- [['ClassNameShouldChange', 'class_name_should_change'],
- ['classnameshouldntchange', 'classnameshouldntchange'],
- ['Classspacingshouldntchange', 'classspacingshouldntchange'],
- ['Class1Name2Should3Change', 'class_1_name_2_should_3_change'],
- ['Class123name234should345change456', 'class_123_name_234_should_345_change_456']])
+@pytest.mark.parametrize(
+ 'inp, out',
+ [
+ ['ClassNameShouldChange', 'class_name_should_change'],
+ ['classnameshouldntchange', 'classnameshouldntchange'],
+ ['Classspacingshouldntchange', 'classspacingshouldntchange'],
+ ['Class1Name2Should3Change', 'class_1_name_2_should_3_change'],
+ ['Class123name234should345change456', 'class_123_name_234_should_345_change_456'],
+ ],
+)
def test_class_name_to_kw_arg(inp, out):
assert utils.class_name_to_kw_arg(inp) == out
-@pytest.mark.parametrize('first, second, expected',
- [['/api/v2/resources/', '/api/v2/resources/', True],
- ['/api/v2/resources/', '/api/v2/resources/?test=ignored', True],
- ['/api/v2/resources/?one=ignored', '/api/v2/resources/?two=ignored', True],
- ['http://one.com', 'http://one.com', True],
- ['http://one.com', 'http://www.one.com', True],
- ['http://one.com', 'http://one.com?test=ignored', True],
- ['http://one.com', 'http://www.one.com?test=ignored', True],
- ['http://one.com', 'https://one.com', False],
- ['http://one.com', 'https://one.com?test=ignored', False]])
+@pytest.mark.parametrize(
+ 'first, second, expected',
+ [
+ ['/api/v2/resources/', '/api/v2/resources/', True],
+ ['/api/v2/resources/', '/api/v2/resources/?test=ignored', True],
+ ['/api/v2/resources/?one=ignored', '/api/v2/resources/?two=ignored', True],
+ ['http://one.com', 'http://one.com', True],
+ ['http://one.com', 'http://www.one.com', True],
+ ['http://one.com', 'http://one.com?test=ignored', True],
+ ['http://one.com', 'http://www.one.com?test=ignored', True],
+ ['http://one.com', 'https://one.com', False],
+ ['http://one.com', 'https://one.com?test=ignored', False],
+ ],
+)
def test_are_same_endpoint(first, second, expected):
assert utils.are_same_endpoint(first, second) == expected
-@pytest.mark.parametrize('endpoint, expected',
- [['/api/v2/resources/', 'v2'],
- ['/api/v2000/resources/', 'v2000'],
- ['/api/', 'common']])
+@pytest.mark.parametrize('endpoint, expected', [['/api/v2/resources/', 'v2'], ['/api/v2000/resources/', 'v2000'], ['/api/', 'common']])
def test_version_from_endpoint(endpoint, expected):
assert utils.version_from_endpoint(endpoint) == expected
@@ -133,42 +140,51 @@ def test_version_from_endpoint(endpoint, expected):
class OneClass:
pass
+
class TwoClass:
pass
+
class ThreeClass:
pass
+
class FourClass(ThreeClass):
pass
+
def test_filter_by_class_with_subclass_class():
filtered = utils.filter_by_class((OneClass, OneClass), (FourClass, ThreeClass))
assert filtered == [OneClass, FourClass]
+
def test_filter_by_class_with_subclass_instance():
one = OneClass()
four = FourClass()
filtered = utils.filter_by_class((one, OneClass), (four, ThreeClass))
assert filtered == [one, four]
+
def test_filter_by_class_no_arg_tuples():
three = ThreeClass()
filtered = utils.filter_by_class((True, OneClass), (False, TwoClass), (three, ThreeClass))
assert filtered == [OneClass, None, three]
+
def test_filter_by_class_with_arg_tuples_containing_class():
one = OneClass()
three = (ThreeClass, dict(one=1, two=2))
filtered = utils.filter_by_class((one, OneClass), (False, TwoClass), (three, ThreeClass))
assert filtered == [one, None, three]
+
def test_filter_by_class_with_arg_tuples_containing_subclass():
one = OneClass()
three = (FourClass, dict(one=1, two=2))
filtered = utils.filter_by_class((one, OneClass), (False, TwoClass), (three, ThreeClass))
assert filtered == [one, None, three]
+
@pytest.mark.parametrize('truthy', (True, 123, 'yes'))
def test_filter_by_class_with_arg_tuples_containing_truthy(truthy):
one = OneClass()
@@ -177,18 +193,20 @@ def test_filter_by_class_with_arg_tuples_containing_truthy(truthy):
assert filtered == [one, None, (ThreeClass, dict(one=1, two=2))]
-@pytest.mark.parametrize('date_string,now,expected', [
- ('2017-12-20T00:00:01.5Z', datetime(2017, 12, 20, 0, 0, 2, 750000), 1.25),
- ('2017-12-20T00:00:01.5Z', datetime(2017, 12, 20, 0, 0, 1, 500000), 0.00),
- ('2017-12-20T00:00:01.5Z', datetime(2017, 12, 20, 0, 0, 0, 500000), -1.00),
-])
+@pytest.mark.parametrize(
+ 'date_string,now,expected',
+ [
+ ('2017-12-20T00:00:01.5Z', datetime(2017, 12, 20, 0, 0, 2, 750000), 1.25),
+ ('2017-12-20T00:00:01.5Z', datetime(2017, 12, 20, 0, 0, 1, 500000), 0.00),
+ ('2017-12-20T00:00:01.5Z', datetime(2017, 12, 20, 0, 0, 0, 500000), -1.00),
+ ],
+)
def test_seconds_since_date_string(date_string, now, expected):
with mock.patch('awxkit.utils.utcnow', return_value=now):
assert utils.seconds_since_date_string(date_string) == expected
class RecordingCallback(object):
-
def __init__(self, value=True):
self.call_count = 0
self.value = value
@@ -225,7 +243,6 @@ def test_suppress():
class TestPollUntil(object):
-
@pytest.mark.parametrize('timeout', [0, 0.0, -0.5, -1, -9999999])
def test_callback_called_once_for_non_positive_timeout(self, timeout):
with mock.patch('awxkit.utils.logged_sleep') as sleep:
@@ -246,7 +263,6 @@ class TestPollUntil(object):
class TestPseudoNamespace(object):
-
def test_set_item_check_item(self):
pn = utils.PseudoNamespace()
pn['key'] = 'value'
@@ -319,10 +335,7 @@ class TestPseudoNamespace(object):
assert pn == dict(one=[dict(two=2), dict(three=3)])
def test_instantiation_via_nested_dict_with_lists(self):
- pn = utils.PseudoNamespace(dict(one=[dict(two=2),
- dict(three=dict(four=4,
- five=[dict(six=6),
- dict(seven=7)]))]))
+ pn = utils.PseudoNamespace(dict(one=[dict(two=2), dict(three=dict(four=4, five=[dict(six=6), dict(seven=7)]))]))
assert pn.one[1].three.five[1].seven == 7
def test_instantiation_via_nested_dict_with_tuple(self):
@@ -332,10 +345,7 @@ class TestPseudoNamespace(object):
assert pn == dict(one=(dict(two=2), dict(three=3)))
def test_instantiation_via_nested_dict_with_tuples(self):
- pn = utils.PseudoNamespace(dict(one=(dict(two=2),
- dict(three=dict(four=4,
- five=(dict(six=6),
- dict(seven=7)))))))
+ pn = utils.PseudoNamespace(dict(one=(dict(two=2), dict(three=dict(four=4, five=(dict(six=6), dict(seven=7)))))))
assert pn.one[1].three.five[1].seven == 7
def test_update_with_nested_dict(self):
@@ -348,23 +358,16 @@ class TestPseudoNamespace(object):
def test_update_with_nested_dict_with_lists(self):
pn = utils.PseudoNamespace()
- pn.update(dict(one=[dict(two=2),
- dict(three=dict(four=4,
- five=[dict(six=6),
- dict(seven=7)]))]))
+ pn.update(dict(one=[dict(two=2), dict(three=dict(four=4, five=[dict(six=6), dict(seven=7)]))]))
assert pn.one[1].three.five[1].seven == 7
def test_update_with_nested_dict_with_tuples(self):
pn = utils.PseudoNamespace()
- pn.update(dict(one=(dict(two=2),
- dict(three=dict(four=4,
- five=(dict(six=6),
- dict(seven=7)))))))
+ pn.update(dict(one=(dict(two=2), dict(three=dict(four=4, five=(dict(six=6), dict(seven=7)))))))
assert pn.one[1].three.five[1].seven == 7
class TestUpdatePayload(object):
-
def test_empty_payload(self):
fields = ('one', 'two', 'three', 'four')
kwargs = dict(two=2, four=4)
diff --git a/awxkit/test/test_ws.py b/awxkit/test/test_ws.py
index afc6b42fc5..8e89cdc6f2 100644
--- a/awxkit/test/test_ws.py
+++ b/awxkit/test/test_ws.py
@@ -8,6 +8,7 @@ from awxkit.ws import WSClient
ParseResult = namedtuple("ParseResult", ["port", "hostname", "secure"])
+
def test_explicit_hostname():
client = WSClient("token", "some-hostname", 556, False)
assert client.port == 556
@@ -16,12 +17,15 @@ def test_explicit_hostname():
assert client.token == "token"
-@pytest.mark.parametrize('url, result',
- [['https://somename:123', ParseResult(123, "somename", True)],
- ['http://othername:456', ParseResult(456, "othername", False)],
- ['http://othername', ParseResult(80, "othername", False)],
- ['https://othername', ParseResult(443, "othername", True)],
-])
+@pytest.mark.parametrize(
+ 'url, result',
+ [
+ ['https://somename:123', ParseResult(123, "somename", True)],
+ ['http://othername:456', ParseResult(456, "othername", False)],
+ ['http://othername', ParseResult(80, "othername", False)],
+ ['https://othername', ParseResult(443, "othername", True)],
+ ],
+)
def test_urlparsing(url, result):
with patch("awxkit.ws.config") as mock_config:
mock_config.base_url = url
diff --git a/requirements/requirements_dev.txt b/requirements/requirements_dev.txt
index 4788e153a2..df8cc1cb13 100644
--- a/requirements/requirements_dev.txt
+++ b/requirements/requirements_dev.txt
@@ -3,9 +3,7 @@ django-rest-swagger
pprofile
ipython==5.2.1
unittest2
-pep8
-flake8
-pyflakes
+black
pytest
pytest-cov
pytest-django
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100755
index c48afe266e..0000000000
--- a/setup.cfg
+++ /dev/null
@@ -1,21 +0,0 @@
-[pep8]
-# E201 - Whitespace after '('
-# E203 - Whitespace before ":"
-# E221 - Multiple spaces after operator
-# E225 - Missing whitespace around operator
-# E231 - Missing whitespace after ','
-# E241 - Multiple spaces after ','
-# E251 - Unexpected spaces around keyword / parameter equals
-# E261 - At least two spaces before inline comment
-# E302 - Expected 2 blank lines found 0
-# E303 - Too many blank lines
-# W291 - Trailing whitespace
-# W391 - Blank line at end of file
-# W293 - Blank line contains whitespace
-ignore=E201,E203,E221,E225,E231,E241,E251,E261,E265,E303,W291,W391,W293
-exclude=.tox,venv,awx/lib/site-packages,awx/plugins/inventory/ec2.py,awx/plugins/inventory/gce.py,awx/plugins/inventory/vmware.py,awx/plugins/inventory/openstack.py,awx/ui,awx/api/urls.py,awx/main/migrations,awx/main/tests/data,installer/openshift/settings.py,tools/docker-compose/ansible/roles/sources/files/local_settings.py,tools/ansible/roles/dockerfile/files/settings.py
-
-[flake8]
-max-line-length=160
-ignore=E201,E203,E221,E225,E231,E241,E251,E261,E265,E303,W291,W391,W293,E731,W504
-exclude=.tox,venv,awx/lib/site-packages,awx/plugins/inventory,awx/ui,awx/api/urls.py,awx/main/migrations,awx/main/tests/data,node_modules/,awx/projects/,tools/docker,awx/settings/local_*.py,installer/openshift/settings.py,build/,installer/,awxkit/test,awx_collection/,tools/docker-compose/ansible/roles/sources/files/local_settings.py,tools/ansible/roles/dockerfile/files/settings.py
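Note: setup.cfg is deleted outright, and the black configuration that replaces these [pep8]/[flake8] sections is not shown in this excerpt. Judging from the reformatted output, which keeps single-quoted strings and packs lines up to roughly the old max-line-length of 160, a plausible equivalent is a pyproject.toml stanza like the sketch below; the file location and both keys are assumptions, not taken from this commit:

    [tool.black]
    line-length = 160                 # assumed to mirror flake8's old max-line-length=160
    skip-string-normalization = true  # assumed, since single-quoted strings survive reformatting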
diff --git a/tools/ansible/roles/dockerfile/templates/Dockerfile.j2 b/tools/ansible/roles/dockerfile/templates/Dockerfile.j2
index 62964abafe..df076ec5c0 100644
--- a/tools/ansible/roles/dockerfile/templates/Dockerfile.j2
+++ b/tools/ansible/roles/dockerfile/templates/Dockerfile.j2
@@ -119,7 +119,7 @@ RUN curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/master
RUN curl -L -o /usr/bin/tini https://github.com/krallin/tini/releases/download/v0.19.0/tini-{{ tini_architecture | default('amd64') }} && \
chmod +x /usr/bin/tini
-RUN python3 -m ensurepip && pip3 install "virtualenv < 20" supervisor {% if build_dev|bool %}flake8{% endif %}
+RUN python3 -m ensurepip && pip3 install "virtualenv < 20" supervisor {% if build_dev|bool %}black{% endif %}
RUN rm -rf /root/.cache && rm -rf /tmp/*
diff --git a/tox.ini b/tox.ini
index 30a245409f..8f82e0bfc4 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,9 +1,11 @@
[testenv:linters]
deps =
make
- flake8
+ black
yamllint
allowlist_externals = make
+setenv =
+ BLACK_ARGS = --check
commands =
- make flake8
+ make black
yamllint -s .
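Note: with the setenv addition, the tox linters environment now runs black in verify-only mode. Assuming the Makefile's black target simply forwards BLACK_ARGS to black (which is how the stanza above reads; the target's internals are not shown in this excerpt), the two usual invocations would be:

    BLACK_ARGS=--check make black   # CI/tox: fail if any file would be reformatted
    make black                      # local: rewrite files in place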